code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def _assert_obj_type(pub, name="pub", obj_type=DBPublication):
    """Make sure that ``pub`` is an instance of ``obj_type``.

    Args:
        pub (obj): Instance which will be checked.
        name (str): Name of the instance, used in the exception message.
            Default ``"pub"``.
        obj_type (class): Class which ``pub`` should be an instance of.
            Default :class:`.DBPublication`.

    Raises:
        InvalidType: If ``pub`` is not an instance of ``obj_type``.
    """
    if isinstance(pub, obj_type):
        return
    raise InvalidType(
        "`%s` have to be instance of %s, not %s!" % (
            name,
            obj_type.__name__,
            pub.__class__.__name__,
        )
    )
def save(self, *args, **kwargs):
    """Persist the vote, replacing any previous vote by this user.

    Existing votes are deleted only for unsaved instances (no pk yet),
    so a user cannot vote on the same node twice but can still change
    an existing vote by re-saving it.
    """
    is_new = not self.pk
    if is_new:
        # Drop any earlier vote by this user on the same node.
        for stale_vote in Vote.objects.filter(user=self.user, node=self.node):
            stale_vote.delete()
    super(Vote, self).save(*args, **kwargs)
def from_session(cls, session):
session.error_wrapper = lambda e: NvimError(e[1])
channel_id, metadata = session.request(b'vim_get_api_info')
if IS_PYTHON3:
metadata = walk(decode_if_bytes, metadata)
types = {
metadata['types']['Buffer']['id']: Buffer,
... | Create a new Nvim instance for a Session instance.
This method must be called to create the first Nvim instance, since it
queries Nvim metadata for type information and sets a SessionHook for
creating specialized objects from Nvim remote handles. |
def authenticate(self, username, password):
    """Authenticate the user on the server.

    Stores the credentials, then reconnects so the new connection is
    opened with them.

    :param username: Username used to be authenticated.
    :type username: six.string_types
    :param password: Password used to be authenticated.
    :type password: six.string_types
    :return: True if successful.
    """
    self._password = password
    self._username = username
    # Reconnect so the stored credentials take effect.
    self.disconnect()
    self._open_connection()
    return self.authenticated
def order_percent(self,
asset,
percent,
limit_price=None,
stop_price=None,
style=None):
if not self._can_order_asset(asset):
return None
amount = self._calculate_order_percent_amount... | Place an order in the specified asset corresponding to the given
percent of the current portfolio value.
Parameters
----------
asset : Asset
The asset that this order is for.
percent : float
The percentage of the portfolio value to allocate to ``asset``.
... |
def fetch(self):
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return AddOnResultInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
ref... | Fetch a AddOnResultInstance
:returns: Fetched AddOnResultInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.AddOnResultInstance |
def DbGetAttributeAliasList(self, argin):
    """Return attribute aliases matching a filter.

    :param argin: attribute alias filter string (eg: att*); an empty
        filter matches everything.
    :type: tango.DevString
    :return: attribute aliases
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetAttributeAliasList()")
    # An empty filter means "match everything"; otherwise translate the
    # user-style wildcards for the database query.
    pattern = replace_wildcard(argin) if argin else "%"
    return self.db.get_attribute_alias_list(pattern)
def get_axis(self, undefined=None):
    """Return the unit axis about which the quaternion rotation occurs.

    For a null rotation (a purely real quaternion) the rotation axis is
    undefined and ``undefined`` is returned instead (a fresh zero vector
    by default).

    Params:
        undefined: [optional] value returned for a null rotation.
            Defaults to ``[0, 0, 0]``.
    """
    # NOTE: the previous signature used a module-load-time ``np.zeros(3)``
    # default shared across all calls; a fresh array is now created per
    # call so a caller mutating the returned default cannot corrupt
    # later calls. Passing None keeps the same observable default.
    if undefined is None:
        undefined = np.zeros(3)
    tolerance = 1e-17
    self._normalise()
    norm = np.linalg.norm(self.vector)
    if norm < tolerance:
        # Degenerate: effectively no rotation, axis is undefined.
        return undefined
    else:
        return self.vector / norm
def spliceext(filepath, s):
    """Insert ``s`` into ``filepath`` just before its extension.

    Args:
        filepath (str, path): file path
        s (str): string to splice in
    Returns:
        str: the modified path
    """
    root, extension = os.path.splitext(safepath(filepath))
    return "".join((root, s, extension))
def create_package_node(self, team, user, package, dry_run=False):
contents = RootNode(dict())
if dry_run:
return contents
self.check_name(team, user, package)
assert contents is not None
self.create_dirs()
path = self.package_path(team, user, package)
... | Creates a new package and initializes its contents. See `install_package`. |
def p_flatten(self, obj, **kwargs):
    """Flatten arbitrarily nested lists of strings into one string.

    Usually used as the action for sequence expressions::

        my_rule <- 'a' . 'c' {p_flatten}

    With the input "abc" and no action this rule returns
    ['a', 'b', 'c']; ``{p_flatten}`` produces "abc".

    :param obj: a string, or a (possibly nested) list of strings
    :return: the concatenation of all contained strings
    """
    if isinstance(obj, six.string_types):
        return obj
    # str.join is linear; the previous repeated ``+=`` was quadratic
    # in the total output length.
    return "".join(self.p_flatten(item) for item in obj)
def CreateJarBuilder(env):
try:
java_jar = env['BUILDERS']['JarFile']
except KeyError:
fs = SCons.Node.FS.get_default_fs()
jar_com = SCons.Action.Action('$JARCOM', '$JARCOMSTR')
java_jar = SCons.Builder.Builder(action = jar_com,
suffix = '... | The Jar builder expects a list of class files
which it can package into a jar file.
The jar tool provides an interface for passing other types
of java files such as .java, directories or swig interfaces
and will build them to class files in which it can package
into the jar. |
def save_xml(self, doc, element):
for cond in self._targets:
new_element = doc.createElementNS(RTS_NS, RTS_NS_S + 'targets')
new_element.setAttributeNS(XSI_NS, XSI_NS_S + 'type', 'rtsExt:condition_ext')
cond.save_xml(doc, new_element)
element.appendChild(new_eleme... | Save this message_sending object into an xml.dom.Element object. |
def find_path_with_profiles(self, conversion_profiles, in_, out):
    """Like ``find_path``, but temporarily forcing the given conversion
    profiles and restoring the previous ones afterwards.

    Useful for "temporarily overriding" the global conversion profiles
    with your own.

    :param conversion_profiles: profile setting to use for this lookup
    :param in_: source format, passed through to ``find_path``
    :param out: target format, passed through to ``find_path``
    :return: whatever ``find_path`` returns
    """
    original_profiles = dict(self.conversion_profiles)
    self._setup_profiles(conversion_profiles)
    try:
        return self.find_path(in_, out)
    finally:
        # Restore even if find_path raises, so a failed lookup cannot
        # leave the temporary override permanently installed.
        self.conversion_profiles = original_profiles
def credit_card_owner(self, gender: Optional[Gender] = None) -> dict:
    """Generate a credit-card owner record.

    :param gender: Gender of credit card owner (Gender enum object).
    :return: dict with the card number, expiration date and the
        owner's full name in upper case.
    """
    return {
        'credit_card': self.credit_card_number(),
        'expiration_date': self.credit_card_expiration_date(),
        'owner': self.__person.full_name(gender=gender).upper(),
    }
def bulk_activate(workers, lbn, profile='default'):
ret = {}
if isinstance(workers, six.string_types):
workers = workers.split(',')
for worker in workers:
try:
ret[worker] = worker_activate(worker, lbn, profile)
except Exception:
ret[worker] = False
return... | Activate all the given workers in the specific load balancer
CLI Examples:
.. code-block:: bash
salt '*' modjk.bulk_activate node1,node2,node3 loadbalancer1
salt '*' modjk.bulk_activate node1,node2,node3 loadbalancer1 other-profile
salt '*' modjk.bulk_activate ["node1","node2","node3... |
def send(self, request, socket, context, *args):
for handler, pattern in self.handlers:
no_channel = not pattern and not socket.channels
if self.name.endswith("subscribe") and pattern:
matches = [pattern.match(args[0])]
else:
matches = [pattern... | When an event is sent, run all relevant handlers. Relevant
handlers are those without a channel pattern when the given
socket is not subscribed to any particular channel, or the
handlers with a channel pattern that matches any of the
channels that the given socket is subscribed to.
... |
def commentless(data):
it = iter(data)
while True:
line = next(it)
while ":" in line or not line.lstrip().startswith(".."):
yield line
line = next(it)
indent = indent_size(line)
it = itertools.dropwhile(lambda el: indent_size(el) > indent
... | Generator that removes from a list of strings the double dot
reStructuredText comments and its contents based on indentation,
removing trailing empty lines after each comment as well. |
def update_db(self, giver, receiverkarma):
    """Record the giver, receiver and amount of each karma grant.

    The amount is typically 1, but may be any positive or negative
    integer. Grants from a user to themselves are ignored.
    """
    session = self.db.session
    for receiver, amount in receiverkarma.items():
        if receiver == giver:
            # Users cannot give karma to themselves.
            continue
        session.add(KarmaStatsTable(ude(giver), ude(receiver), amount))
    session.commit()
def send_output(self, value, stdout):
    """Write the expression's value and captured stdout back to the user.

    >>> 5
    5
    >>> print('cash rules everything around me')
    cash rules everything around me
    """
    out = self.writer
    if value is not None:
        # repr() the value, like the interactive interpreter does.
        out.write('{!r}\n'.format(value).encode('utf8'))
    if stdout:
        out.write(stdout.encode('utf8'))
    yield from out.drain()
def wait_for(self, pattern, timeout=None):
should_continue = True
if self.block:
raise TypeError(NON_BLOCKING_ERROR_MESSAGE)
def stop(signum, frame):
nonlocal should_continue
if should_continue:
raise TimeoutError()
if timeout:
... | Block until a pattern have been found in stdout and stderr
Args:
pattern(:class:`~re.Pattern`): The pattern to search
timeout(int): Maximum number of second to wait. If None, wait infinitely
Raises:
TimeoutError: When timeout is reach |
def stacked_node_layout(self,EdgeAttribute=None,network=None,NodeAttribute=None,\
nodeList=None,x_position=None,y_start_position=None,verbose=None):
network=check_network(self,network,verbose=verbose)
PARAMS=set_param(['EdgeAttribute','network','NodeAttribute','nodeList',\
'x_position','y_start_position'],[Edge... | Execute the Stacked Node Layout on a network.
:param EdgeAttribute (string, optional): The name of the edge column contai
ning numeric values that will be used as weights in the layout algor
ithm. Only columns containing numeric values are shown
:param network (string, optional): Specifies a network by name,... |
def _is_skippable(filename_full):
if not Settings.follow_symlinks and os.path.islink(filename_full):
return True
if os.path.basename(filename_full) == timestamp.RECORD_FILENAME:
return True
if not os.path.exists(filename_full):
if Settings.verbose:
print(filename_full, 'w... | Handle things that are not optimizable files. |
def handle_comment(self, comment):
match = _COND_COMMENT_PATTERN.match(comment)
if match is not None:
cond = match.group(1)
content = match.group(2)
self._buffer.append(_COND_COMMENT_START_FORMAT % cond)
self._push_status()
self.feed(content)
... | Remove comment except IE conditional comment.
.. seealso::
`About conditional comments
<http://msdn.microsoft.com/en-us/library/ms537512.ASPX>`_ |
def installed(name, default=False, user=None):
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
if name.startswith('python-'):
name = re.sub(r'^python-', '', name)
if __opts__['test']:
ret['comment'] = 'python {0} is set to be installed'.format(name)
return ret
... | Verify that the specified python is installed with pyenv. pyenv is
installed if necessary.
name
The version of python to install
default : False
Whether to make this python the default.
user: None
The user to run pyenv as.
.. versionadded:: 0.17.0
.. versionadded... |
def download_from_search(query_str, folder, do_extract_text=True,
max_results=None):
piis = get_piis(query_str)
for pii in piis[:max_results]:
if os.path.exists(os.path.join(folder, '%s.txt' % pii)):
continue
logger.info('Downloading %s' % pii)
xml = ... | Save raw text files based on a search for papers on ScienceDirect.
This performs a search to get PIIs, downloads the XML corresponding to
the PII, extracts the raw text and then saves the text into a file
in the designated folder.
Parameters
----------
query_str : str
The query string ... |
def get_aa_letter(aa_code):
    """Get the one-letter version of ``aa_code`` if possible, else 'X'.

    Parameters
    ----------
    aa_code : str
        Three-letter amino acid code.

    Returns
    -------
    aa_letter : str
        One-letter aa code; default value is 'X' when no match exists.
    """
    letter_found = 'X'
    for one_letter, three_letter in standard_amino_acids.items():
        if three_letter == aa_code:
            letter_found = one_letter
    return letter_found
def center(self, X):
X = X.copy()
inan = numpy.isnan(X)
if self.mu is None:
X_ = numpy.ma.masked_array(X, inan)
self.mu = X_.mean(0).base
self.sigma = X_.std(0).base
reduce(lambda y,x: setitem(x[0], x[1], x[2]), zip(X.T, inan.T, self.mu), None)
... | Center `X` in PCA space. |
def resolutions(self):
    """Get a list of resolution Resources from the server.

    :rtype: List[Resolution]
    """
    raw_items = self._get_json('resolution')
    return [Resolution(self._options, self._session, item)
            for item in raw_items]
def write_info(self, w):
    """Write the TVP_TYPENAME structure.

    spec: https://msdn.microsoft.com/en-us/library/dd302994.aspx

    @param w: TdsWriter
    @return: None
    """
    table_type = self._table_type
    # The leading B_VARCHAR field is always sent empty here.
    w.write_b_varchar("")
    w.write_b_varchar(table_type.typ_schema)
    w.write_b_varchar(table_type.typ_name)
def display(self):
pygame.init()
self.display = pygame.display.set_mode((self.width,self.height))
self.display.blit(self.cloud,(0,0))
pygame.display.update()
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
... | Displays the word cloud to the screen. |
def mode(self, target, mode_string=None, tags=None):
    """Send new modes to, or request existing modes from, the target.

    When ``mode_string`` is omitted the current modes are requested.
    """
    params = [target]
    if mode_string:
        # NOTE(review): extends element-wise, so a plain str is split
        # into single-character parameters — confirm callers expect this.
        params.extend(mode_string)
    self.send('MODE', params=params, source=self.nick, tags=tags)
def is_namespace_valid(namespace_id):
    """Is a namespace ID valid?

    Valid IDs are non-empty base-40 strings with no '+' or '.'
    characters, at most the protocol's namespace-ID length limit.

    >>> is_namespace_valid('abcd')
    True
    >>> is_namespace_valid('+abcd')
    False
    >>> is_namespace_valid('abc.def')
    False
    >>> is_namespace_valid('.abcd')
    False
    >>> is_namespace_valid('abcdabcdabcdabcdabcd')
    False
    """
    if not namespace_id:
        return False
    if len(namespace_id) > LENGTHS['blockchain_id_namespace_id']:
        return False
    if not is_b40(namespace_id):
        return False
    return "+" not in namespace_id and "." not in namespace_id
def deg2fmt(ra_deg, dec_deg, format):
rhr, rmn, rsec = degToHms(ra_deg)
dsgn, ddeg, dmn, dsec = degToDms(dec_deg)
if format == 'hms':
return rhr, rmn, rsec, dsgn, ddeg, dmn, dsec
elif format == 'str':
ra_txt = '%d:%02d:%06.3f' % (rhr, rmn, rsec)
if dsgn < 0:
dsgn = '-... | Format coordinates. |
def register_api_doc_endpoints(config, endpoints, base_path='/api-docs'):
for endpoint in endpoints:
path = base_path.rstrip('/') + endpoint.path
config.add_route(endpoint.route_name, path)
config.add_view(
endpoint.view,
route_name=endpoint.route_name,
re... | Create and register pyramid endpoints to service swagger api docs.
Routes and views will be registered on the `config` at `path`.
:param config: a pyramid configuration to register the new views and routes
:type config: :class:`pyramid.config.Configurator`
:param endpoints: a list of endpoints to regi... |
def get(self, recipe=None, plugin=None):
if plugin is not None:
if recipe is None:
recipes_list = {}
for key in self.recipes.keys():
if self.recipes[key].plugin == plugin:
recipes_list[key] = self.recipes[key]
... | Get one or more recipes.
:param recipe: Name of the recipe
:type recipe: str
:param plugin: Plugin object, under which the recipe was registered
:type plugin: GwBasePattern |
def drawBezier(page, p1, p2, p3, p4, color=None, fill=None,
dashes=None, width=1, morph=None,
closePath=False, roundCap=False, overlay=True):
img = page.newShape()
Q = img.drawBezier(Point(p1), Point(p2), Point(p3), Point(p4))
img.finish(color=color, fill=fill, dashes=dashes, w... | Draw a general cubic Bezier curve from p1 to p4 using control points p2 and p3. |
def read_git_commit_timestamp(repo_path=None):
    """Obtain the timestamp of a Git repository's current head commit.

    Parameters
    ----------
    repo_path : `str`, optional
        Path to the Git repository. Leave as `None` to use the current
        working directory.

    Returns
    -------
    commit_timestamp : `datetime.datetime`
        The datetime of the head commit.
    """
    repository = git.repo.base.Repo(path=repo_path,
                                    search_parent_directories=True)
    return repository.head.commit.committed_datetime
def _get_meta(self, row, col):
if self.meta is None:
logging.error("unable to get meta: empty section")
return {}
if not row in self._get_row_hdrs() or\
not col in self._get_col_hdrs():
logging.error("unable to get meta: cell [%s,%s] does not exist"
... | Get metadata for a particular cell |
def extract_lzma(path):
    """Extract an LZMA-compressed file to a temporary ``.tar`` file.

    The archive is decompressed in fixed-size chunks, so arbitrarily
    large files can be handled without loading the whole compressed
    and decompressed payloads into memory at once (the previous
    implementation read and decompressed everything in one shot).

    Args:
        path (str or pathlib.Path): path of the compressed file.

    Returns:
        str: name of the temporary ``.tar`` file; the caller is
        responsible for deleting it.
    """
    fd, tmpname = tempfile.mkstemp(prefix="odt_ex_", suffix=".tar")
    # lzma.open auto-detects both the .xz and legacy .lzma containers.
    with lzma.open(str(path), "rb") as src, open(fd, "wb") as dst:
        for chunk in iter(lambda: src.read(1 << 20), b""):
            dst.write(chunk)
    return tmpname
def in_same_box(self, a, b):
    """Return ``True`` if micro-nodes ``a`` and ``b`` are in the same box."""
    assert a in self.micro_indices
    assert b in self.micro_indices
    return any(a in box and b in box for box in self.partition)
def connect_mysql(host, port, user, password, database):
    """Open a pymysql connection to the given MySQL database.

    NOTE(review): the original docstring claimed "with retries", but no
    retry logic is implemented here — confirm against callers.
    """
    return pymysql.connect(host=host,
                           port=port,
                           user=user,
                           passwd=password,
                           db=database)
def as_categorical(self):
if len(self.shape) > 1:
raise ValueError("Can't convert a 2D array to a categorical.")
with ignore_pandas_nan_categorical_warning():
return pd.Categorical.from_codes(
self.as_int_array(),
self.categories.copy(),
... | Coerce self into a pandas categorical.
This is only defined on 1D arrays, since that's all pandas supports. |
def _identify_os(self, msg):
ret = []
for dev_os, data in self.compiled_prefixes.items():
log.debug('Matching under %s', dev_os)
msg_dict = self._identify_prefix(msg, data)
if msg_dict:
log.debug('Adding %s to list of matched OS', dev_os)
... | Using the prefix of the syslog message,
we are able to identify the operating system and then continue parsing. |
def as_dict(self):
data = super(BaseEmail, self).as_dict()
data["Headers"] = [{"Name": name, "Value": value} for name, value in data["Headers"].items()]
for field in ("To", "Cc", "Bcc"):
if field in data:
data[field] = list_to_csv(data[field])
data["Attachment... | Additionally encodes headers.
:return: |
def _to_linear(M, N, L, q):
"Converts a qubit in chimera coordinates to its linear index."
(x, y, u, k) = q
return 2 * L * N * x + 2 * L * y + L * u + k | Converts a qubit in chimera coordinates to its linear index. |
def calculateRange(self):
    """Calculate the (min, max) range depending on the config settings.

    Returns the manually configured bounds unless auto-ranging is
    enabled, in which case the selected auto-range method is invoked.
    """
    auto_cti = self.autoRangeCti
    if auto_cti and auto_cti.configValue:
        # Auto-range: delegate to the configured range function.
        range_function = self._rangeFunctions[self.autoRangeMethod]
        return range_function()
    return (self.rangeMinCti.data, self.rangeMaxCti.data)
def maybe_coroutine(decide):
def _maybe_coroutine(f):
@functools.wraps(f)
def __maybe_coroutine(*args, **kwargs):
if decide(*args, **kwargs):
return coroutine(f)(*args, **kwargs)
else:
return no_coroutine(f)(*args, **kwargs)
return __ma... | Either be a coroutine or not.
Use as a decorator:
@maybe_coroutine(lambda maybeAPromise: return isinstance(maybeAPromise, Promise))
def foo(maybeAPromise):
result = yield maybeAPromise
print("hello")
return result
The function passed should be a generator yielding either only P... |
def reset(self):
    """Reset the stream pointer to the beginning of the file.

    If rows beyond the in-memory sample were consumed, the underlying
    parser is rewound and the sample and headers are re-extracted.
    """
    past_sample = self.__row_number > self.__sample_size
    if past_sample:
        # Past the cached sample: rewind the real parser and rebuild.
        self.__parser.reset()
        self.__extract_sample()
        self.__extract_headers()
    self.__row_number = 0
def choose(msg, items, attr):
if len(items) == 1:
return items[0]
print()
for index, i in enumerate(items):
name = attr(i) if callable(attr) else getattr(i, attr)
print(' %s: %s' % (index, name))
print()
while True:
try:
inp = input('%s: ' % msg)
... | Command line helper to display a list of choices, asking the
user to choose one of the options. |
def ds_geom(ds, t_srs=None):
gt = ds.GetGeoTransform()
ds_srs = get_ds_srs(ds)
if t_srs is None:
t_srs = ds_srs
ns = ds.RasterXSize
nl = ds.RasterYSize
x = np.array([0, ns, ns, 0, 0], dtype=float)
y = np.array([0, 0, nl, nl, 0], dtype=float)
x -= 0.5
y -= 0.5
mx, my = pix... | Return dataset bbox envelope as geom |
def get_result(self, *, block=False, timeout=None):
    """Get the result of this pipeline.

    A pipeline's result is the result of the last message in the chain.

    Parameters:
        block(bool): Whether or not to block until a result is set.
        timeout(int): Maximum time, in ms, to wait when blocking.
    """
    last_message = self.messages[-1]
    return last_message.get_result(block=block, timeout=timeout)
def _endReq(self, key, result=None, success=True):
future = self._futures.pop(key, None)
self._reqId2Contract.pop(key, None)
if future:
if result is None:
result = self._results.pop(key, [])
if not future.done():
if success:
... | Finish the future of corresponding key with the given result.
If no result is given then it will be popped of the general results. |
def reorder(args):
import csv
p = OptionParser(reorder.__doc__)
p.set_sep()
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
tabfile, order = args
sep = opts.sep
order = [int(x) - 1 for x in order.split(",")]
reader = csv.reader(must_open(tabfil... | %prog reorder tabfile 1,2,4,3 > newtabfile
Reorder columns in tab-delimited files. The above syntax will print out a
new file with col-1,2,4,3 from the old file. |
def delete(self, obj):
    """Delete an object in CDSTAR and remove it from the catalog.

    :param obj: An object ID or an Object instance.
    """
    # Accept either an Object (use its .id) or a raw ID value.
    obj_id = getattr(obj, 'id', obj)
    record = self.api.get_object(obj_id)
    record.delete()
    self.remove(record.id)
def call(self, callname, arguments=None):
action = getattr(self.api, callname, None)
if action is None:
try:
action = self.api.ENDPOINT_OVERRIDES.get(callname, None)
except AttributeError:
action = callname
if not callable(action):
... | Executed on each scheduled iteration |
def metrics(self, raw=False):
if raw:
return self._metrics.metrics.copy()
metrics = {}
for k, v in six.iteritems(self._metrics.metrics.copy()):
if k.group not in metrics:
metrics[k.group] = {}
if k.name not in metrics[k.group]:
... | Get metrics on producer performance.
This is ported from the Java Producer, for details see:
https://kafka.apache.org/documentation/#producer_monitoring
Warning:
This is an unstable interface. It may change in future
releases without warning. |
def map_size(self, key):
    """Get the number of items in the map.

    :param str key: The document ID of the map
    :return int: The number of items in the map
    :raise: :cb_exc:`NotFoundError` if the document does not exist.

    .. seealso:: :meth:`map_add`
    """
    document = self.get(key)
    return len(document.value)
def serialize(data):
    """Serialize a dict into a canonical JSON formatted string.

    Keys are sorted and non-ASCII characters are kept as-is, so
    everyone serializes identical data the same way — which matters
    when the output is hashed.
    """
    return rapidjson.dumps(data,
                           skipkeys=False,
                           ensure_ascii=False,
                           sort_keys=True)
def trim_wav_pydub(in_path: Path, out_path: Path,
start_time: int, end_time: int) -> None:
logger.info(
"Using pydub/ffmpeg to create {} from {}".format(out_path, in_path) +
" using a start_time of {} and an end_time of {}".format(start_time,
... | Crops the wav file. |
def analyzer_api(url):
response.content_type = JSON_MIME
ri = get_cached_or_new(url)
try:
if ri.is_old():
logger.info("Running the analysis.")
ri = get_cached_or_new(url, new=True)
ri.paralel_processing()
except (requests.exceptions.Timeout, requests.Connectio... | Analyze given `url` and return output as JSON. |
def shape_offset_y(self):
    """Return y distance of shape origin from local coordinate origin.

    The returned integer represents the topmost extent of the freeform
    shape, in local coordinates. Note that the bounding box of the
    shape need not start at the local origin.
    """
    candidates = [self._start_y]
    candidates.extend(op.y for op in self if hasattr(op, 'y'))
    return min(candidates)
def list(self, id=None):
    """List all running jobs.

    :param id: optional ID of a single job to list
    """
    query = {'id': id}
    # Validate the arguments before hitting the client.
    self._job_chk.check(query)
    return self._client.json('job.list', query)
def get_text(self):
self._load_raw_content()
if self._text is None:
assert self._raw_content is not None
ret_cont = self._raw_content
if self.compressed:
ret_cont = zlib.decompress(ret_cont, zlib.MAX_WBITS+16)
if self.encoded:
... | Get the loaded, decompressed, and decoded text of this content. |
def _fmtos(self):
plotters = self.plotters
if len(plotters) == 0:
return {}
p0 = plotters[0]
if len(plotters) == 1:
return p0._fmtos
return (getattr(p0, key) for key in set(p0).intersection(
*map(set, plotters[1:]))) | An iterator over formatoption objects
Contains only the formatoption whose keys are in all plotters in this
list |
def take_bug_reports(ads, test_name, begin_time, destination=None):
begin_time = mobly_logger.normalize_log_line_timestamp(str(begin_time))
def take_br(test_name, begin_time, ad, destination):
ad.take_bug_report(test_name, begin_time, destination=destination)
args = [(test_name, begin_time, ad, dest... | Takes bug reports on a list of android devices.
If you want to take a bug report, call this function with a list of
android_device objects in on_fail. But reports will be taken on all the
devices in the list concurrently. Bug report takes a relative long
time to take, so use this cautiously.
Args:... |
def _dlog(self, msg, indent_increase=0):
self._log.debug("interp", msg, indent_increase, filename=self._orig_filename, coord=self._coord) | log the message to the log |
def scoped_format(txt, **objects):
pretty = objects.pop("pretty", RecursiveAttribute.format_pretty)
expand = objects.pop("expand", RecursiveAttribute.format_expand)
attr = RecursiveAttribute(objects, read_only=True)
formatter = scoped_formatter(**objects)
return formatter.format(txt, pretty=pretty, ... | Format a string with respect to a set of objects' attributes.
Example:
>>> Class Foo(object):
>>> def __init__(self):
>>> self.name = "Dave"
>>> print scoped_format("hello {foo.name}", foo=Foo())
hello Dave
Args:
objects (dict): Dict of objects to f... |
def _update_callsafety(self, response):
if self.ratelimit is not None:
self.callsafety['lastcalltime'] = time()
self.callsafety['lastlimitremaining'] = int(response.headers.get('X-Rate-Limit-Remaining', 0)) | Update the callsafety data structure |
def append_logs_to_result_object(result_obj, result):
logs = result.has_logs()
result_obj["exec"]["logs"] = []
if logs and result.logfiles:
for log in logs:
typ = None
parts = log.split(os.sep)
if "bench" in parts[len(parts) - 1]:
typ = "framework"... | Append log files to cloud result object from Result.
:param result_obj: Target result object
:param result: Result
:return: Nothing, modifies result_obj in place. |
def load_manifests(self):
    """Load every plugin manifest found on the plugin path.

    Each directory directly under a plugin path is treated as a
    plugin; non-directory entries are ignored.
    """
    for base in self.plugin_paths:
        for entry in os.listdir(base):
            full_path = os.path.join(base, entry)
            if os.path.isdir(full_path):
                self.load_manifest(full_path)
def _get_YYTfactor(self, Y):
N, D = Y.shape
if (N>=D):
return Y.view(np.ndarray)
else:
return jitchol(tdot(Y)) | find a matrix L which satisfies LLT = YYT.
Note that L may have fewer columns than Y. |
def setup(options):
    """Prepend ``options.gae_lib_path`` to ``sys.path``, then let GAE's
    ``fix_sys_path`` include all the proper GAE paths.

    :param options: object carrying a ``gae_lib_path`` attribute
    """
    sys.path.insert(0, options.gae_lib_path)
    # Import only after the path is set up: dev_appserver lives inside
    # gae_lib_path.
    from dev_appserver import fix_sys_path
    fix_sys_path()
def build_attachment2():
    """Build an inline PNG attachment mock."""
    attachment = Attachment()
    for attr, value in (("content", "BwdW"),
                        ("type", "image/png"),
                        ("filename", "banner.png"),
                        ("disposition", "inline"),
                        ("content_id", "Banner")):
        setattr(attachment, attr, value)
    return attachment
def update(self):
    """Update the remote DNS record by requesting its endpoint URL.

    :raises ApiError: if no IP address can be parsed from the response.
    """
    response = requests.get(self.update_url, timeout=timeout)
    match = ip_pattern.search(response.content)
    if match is None:
        raise ApiError("Couldn't parse the server's response",
                       response.content)
    self.ip = match.group(0)
def fib(n):
    """Return the n-th Fibonacci number (fib(1) == fib(2) == 1).

    Args:
        n (int): index of the Fibonacci number; must be non-negative.

    Returns:
        int: the n-th Fibonacci number; fib(0) is 0.

    Raises:
        ValueError: if ``n`` is negative.
    """
    # A raise survives ``python -O``, unlike the assert it replaces;
    # n == 0 is now handled (returning 0) instead of being rejected.
    if n < 0:
        raise ValueError("n must be non-negative, got %r" % (n,))
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
def _download_wrapper(self, url, *args, **kwargs):
try:
return url, self._file_downloader.download(url, *args, **kwargs)
except Exception as e:
logging.error("AbstractDownloader: %s", traceback.format_exc())
return url, e | Actual download call. Calls the underlying file downloader,
catches all exceptions and returns the result. |
def username(self):
token = self.session.params.get('access_token')
if not token:
raise errors.TokenError(
"session does not have a valid access_token param")
data = token.split('.')[1]
data = data.replace('-', '+').replace('_', '/') + "==="
try:
... | The username in the service's access token
Returns
-------
str |
def _freebayes_custom(in_file, ref_file, data):
if vcfutils.get_paired_phenotype(data):
return None
config = data["config"]
bv_ver = programs.get_version("bcbio_variation", config=config)
if LooseVersion(bv_ver) < LooseVersion("0.1.1"):
return None
out_file = "%s-filter%s" % os.path.... | Custom FreeBayes filtering using bcbio.variation, tuned to human NA12878 results.
Experimental: for testing new methods. |
def find_next(self):
    """Find the next occurrence of the current search text.

    Returns the state reported by ``find`` and refocuses the editor.
    """
    found = self.find(
        changed=False,
        forward=True,
        rehighlight=False,
        multiline_replace_check=False,
    )
    self.editor.setFocus()
    # Remember the query in the search-text history.
    self.search_text.add_current_text()
    return found
def upload(self, file_path, timeout=-1):
    """Upload an SPP ISO image file or a hotfix file to the appliance.

    Only one hotfix can be uploaded at a time; do not alter the file's
    original name or extension or the upload will fail.

    Args:
        file_path: Full path to firmware.
        timeout: Operation timeout (default -1).
    """
    client = self._client
    return client.upload(file_path, timeout=timeout)
def readBatchTupleQuotes(self, symbols, start, end):
if end is None:
end=sys.maxint
ret={}
session=self.getReadSession()()
try:
symbolChunks=splitListEqually(symbols, 100)
for chunk in symbolChunks:
rows=session.query(Quote.symb... | read batch quotes as tuple to save memory |
def update(self, old, new):
    """Replace element ``old`` with ``new`` and restore the heap order."""
    pos = self.rank.pop(old)
    self.heap[pos] = new
    self.rank[new] = pos
    # A larger replacement can only violate the order below its
    # position; a smaller (or equal) one only above.
    if old < new:
        self.down(pos)
    else:
        self.up(pos)
def asset_create(self, name, items, tag='', description='', atype='static'):
data = {
'name': name,
'description': description,
'type': atype,
'tags': tag
}
if atype == 'static':
data['definedIPs'] = ','.join(items)
if atype == ... | asset_create_static name, ips, tags, description
Create a new asset list with the defined information.
UN-DOCUMENTED CALL: This function is not considered stable.
:param name: asset list name (must be unique)
:type name: string
:param items: list of IP Addresses, CIDR, and Netw... |
def listen_until_return(self, *temporary_handlers, timeout=0):
    """Call ``listen`` repeatedly until it returns a non-None value,
    then return that value.

    Args:
        temporary_handlers: handlers forwarded to each ``listen`` call.
        timeout: seconds after which None is returned; 0 waits forever.
    """
    deadline = None if timeout == 0 else time.time() + timeout
    while deadline is None or time.time() < deadline:
        result = self.listen(*temporary_handlers)
        if result is not None:
            return result
def _getsolution(self, config, section, **kwargs):
if section not in config:
raise ValueError('Section [{}] not found in [{}]'.format(section, ', '.join(config.sections())))
s = VSGSolution(**kwargs)
s.Name = config.get(section, 'name', fallback=s.Name)
s.FileName = os.path.n... | Creates a VSG solution from a configparser instance.
:param object config: The instance of the configparser class
:param str section: The section name to read.
:param kwargs: List of additional keyworded arguments to be passed into the VSGSolution.
:return: A valid VSGSolution instance... |
async def connect(self):
self.tls_context = None
if self.tls:
self.tls_context = self.create_tls_context()
(self.reader, self.writer) = await asyncio.open_connection(
host=self.hostname,
port=self.port,
local_addr=self.source_address,
s... | Connect to target. |
def pkgdb(opts):
    """Return modules for SPM's package database.

    .. versionadded:: 2015.8.0
    """
    module_dirs = _module_dirs(
        opts,
        'pkgdb',
        base_path=os.path.join(SALT_BASE_PATH, 'spm'),
    )
    return LazyLoader(module_dirs, opts, tag='pkgdb')
async def close(self, code: int = 1006, reason: str = "Connection closed"):
if self._closed:
return
self._closed = True
if self._scope is not None:
await self._scope.cancel()
data = self._connection.send(CloseConnection(code=code, reason=reason))
await sel... | Closes the websocket. |
def create_info(name, info_type, url=None, parent=None, id=None,
context=ctx_default, store=False):
id = str(uuid4()) if id is None else id
pubsub = _pubsub_key(id)
info = {'id': id,
'type': info_type,
'pubsub': pubsub,
'url': url,
'parent': pa... | Return a group object |
def preview(pid, record, template=None, **kwargs):
fileobj = current_previewer.record_file_factory(
pid, record, request.view_args.get(
'filename', request.args.get('filename', type=str))
)
if not fileobj:
abort(404)
try:
file_previewer = fileobj['previewer']
exce... | Preview file for given record.
Plug this method into your ``RECORDS_UI_ENDPOINTS`` configuration:
.. code-block:: python
RECORDS_UI_ENDPOINTS = dict(
recid=dict(
# ...
route='/records/<pid_value/preview/<path:filename>',
view_imp='invenio_pr... |
def _explode_raster(raster, band_names=None):
    """Split a (possibly multiband) raster into single-band rasters.

    Args:
        raster: Source raster exposing ``band_names`` and ``bands_data``.
        band_names (list, optional): Subset of band names to extract. When
            None or empty, every band of ``raster`` is used. Names not
            present in the raster are silently dropped (set intersection).

    Returns:
        list: One single-band ``_Raster`` per selected band.
    """
    # Use None instead of a mutable default argument ([]); an empty/None
    # selection still means "all bands", so callers are unaffected.
    if not band_names:
        selected = raster.band_names
    else:
        # IndexedSet intersection keeps only bands that actually exist in
        # the raster; ordering presumably follows raster.band_names — TODO confirm.
        selected = list(IndexedSet(raster.band_names).intersection(band_names))
    return [_Raster(image=raster.bands_data([name]), band_names=[name])
            for name in selected]
def _GetMessage(self, event_object):
formatter_mediator = formatters_mediator.FormatterMediator()
result = ''
try:
result, _ = formatters_manager.FormattersManager.GetMessageStrings(
formatter_mediator, event_object)
except KeyError as exception:
logging.warning(
'Unable ... | Returns a properly formatted message string.
Args:
event_object: the event object (instance od EventObject).
Returns:
A formatted message string. |
def break_bond(self, ind1, ind2, tol=0.2):
sites = self._sites
clusters = [[sites[ind1]], [sites[ind2]]]
sites = [site for i, site in enumerate(sites) if i not in (ind1, ind2)]
def belongs_to_cluster(site, cluster):
for test_site in cluster:
if CovalentBond.is... | Returns two molecules based on breaking the bond between atoms at index
ind1 and ind2.
Args:
ind1 (int): Index of first site.
ind2 (int): Index of second site.
tol (float): Relative tolerance to test. Basically, the code
checks if the distance between... |
def data_transforms_mnist(args, mnist_mean=None, mnist_std=None):
if mnist_mean is None:
mnist_mean = [0.5]
if mnist_std is None:
mnist_std = [0.5]
train_transform = transforms.Compose(
[
transforms.RandomCrop(28, padding=4),
transforms.RandomHorizontalFlip(),... | data_transforms for mnist dataset |
def lf(self):
    """Move the cursor down one row, scrolling when at the bottom edge,
    then erase the current line."""
    row_before_move = self.cur_r
    self.cursor_down()
    # cursor_down() leaves cur_r unchanged when the cursor is already on
    # the last row; in that case scroll the screen up instead.
    if self.cur_r == row_before_move:
        self.scroll_up()
    self.erase_line()
def value(self, raw_value):
    """Decode param as decimal value.

    Args:
        raw_value: String (or other Decimal-compatible) representation.

    Returns:
        decimal.Decimal: The parsed value.

    Raises:
        ValueError: If ``raw_value`` cannot be parsed as a decimal.
    """
    try:
        return decimal.Decimal(raw_value)
    except decimal.InvalidOperation as exc:
        # Chain the original InvalidOperation so the root cause stays
        # visible in tracebacks (raise ... from ...).
        raise ValueError(
            "Could not parse '{}' value as decimal".format(raw_value)
        ) from exc
def total_scores_in(self, leaderboard_name):
    """Sum of scores for all members in the named leaderboard.

    @param leaderboard_name Name of the leaderboard.
    @return Sum of scores for all members in the named leaderboard.
    """
    # Bug fix: the original queried self.leaderboard_name, silently ignoring
    # the leaderboard_name argument; honor the parameter as documented.
    # A generator expression avoids building an intermediate list.
    return sum(leader[self.SCORE_KEY]
               for leader in self.all_leaders_from(leaderboard_name))
def getParentElementCustomFilter(self, filterFunc):
    '''
        getParentElementCustomFilter - Walk from the immediate parent up to
          the document root, returning the first ancestor for which
          filterFunc(tag) returns True.

        @param filterFunc <function/lambda> - A function or lambda expression
          that should return "True" if the passed node matches.

        @return - The first matching ancestor node, or None if none matches.
    '''
    ancestor = self.parentNode
    while ancestor:
        # Strict identity check: the filter must return the True singleton,
        # not merely a truthy value.
        if filterFunc(ancestor) is True:
            return ancestor
        ancestor = ancestor.parentNode
    return None
def _GenApiConfigCallback(args, api_func=GenApiConfig):
service_configs = api_func(args.service,
hostname=args.hostname,
application_path=args.application)
for api_name_version, config in service_configs.iteritems():
_WriteFile(args.output, api_name_vers... | Generate an api file.
Args:
args: An argparse.Namespace object to extract parameters from.
api_func: A function that generates and returns an API configuration
for a list of services. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.