code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def match(self, route):
_resource = trim_resource(self.resource)
self.method = self.method.lower()
resource_match = route.resource_regex.search(_resource)
if resource_match is None:
return None
params = resource_match.groupdict()
querystring = params.pop("quer... | Match input route and return new Message instance
with parsed content |
def ExpandRecursiveGlobs(cls, path, path_separator):
glob_regex = r'(.*)?{0:s}\*\*(\d{{1,2}})?({0:s})?$'.format(
re.escape(path_separator))
match = re.search(glob_regex, path)
if not match:
return [path]
skip_first = False
if match.group(3):
skip_first = True
if match.group(2... | Expands recursive like globs present in an artifact path.
If a path ends in '**', with up to two optional digits such as '**10',
the '**' will recursively match all files and zero or more directories
from the specified path. The optional digits indicate the recursion depth.
By default recursion depth i... |
def _npiter(arr):
for a in np.nditer(arr, flags=["refs_ok"]):
c = a.item()
if c is not None:
yield c | Wrapper for iterating numpy array |
def set_hflip(self, val):
    """Set horizontal flip on the animation and on every image in it."""
    self.__horizontal_flip = val
    for frame in self.images:
        frame.h_flip = val
def fader(self, value: int):
self._fader = int(value) if 0 < value < 1024 else 0
self.outport.send(mido.Message('control_change', control=0,
value=self._fader >> 7))
self.outport.send(mido.Message('control_change', control=32,
... | Move the fader to a new position in the range 0 to 1023. |
def get_merge_rules(schema=None):
schema = schema or get_release_schema_url(get_tags()[-1])
if isinstance(schema, dict):
deref_schema = jsonref.JsonRef.replace_refs(schema)
else:
deref_schema = _get_merge_rules_from_url_or_path(schema)
return dict(_get_merge_rules(deref_schema['propertie... | Returns merge rules as key-value pairs, in which the key is a JSON path as a tuple, and the value is a list of
merge properties whose values are `true`. |
def download_models(self, uniprot_acc, outdir='', force_rerun=False):
downloaded = []
subset = self.get_models(uniprot_acc)
for entry in subset:
ident = '{}_{}_{}_{}'.format(uniprot_acc, entry['template'], entry['from'], entry['to'])
outfile = op.join(outdir, ident + '.pd... | Download all models available for a UniProt accession number.
Args:
uniprot_acc (str): UniProt ACC/ID
outdir (str): Path to output directory, uses working directory if not set
force_rerun (bool): Force a redownload the models if they already exist
Returns:
... |
def get_files(self, commit, paths, recursive=False):
filtered_file_infos = []
for path in paths:
fi = self.inspect_file(commit, path)
if fi.file_type == proto.FILE:
filtered_file_infos.append(fi)
else:
filtered_file_infos += self.list_f... | Returns the contents of a list of files at a specific Commit as a
dictionary of file paths to data.
Params:
* commit: A tuple, string, or Commit object representing the commit.
* paths: A list of paths to retrieve.
* recursive: If True, will go into each directory in the list
... |
def truncate_money(money: Money) -> Money:
    """Return a new Money whose amount is truncated to the number of
    decimals appropriate for its currency."""
    truncated_amount = truncate_to(money.amount, money.currency)
    return Money(truncated_amount, money.currency)
def remove_selected(self, *args):
    """Remove the selected catalog, including any nested catalogs.

    Accepts (and ignores) arbitrary positional args so this can be wired
    directly to UI button callbacks.
    """
    # Collapse nested catalogs first, then remove the selection itself.
    self.collapse_nested(self.selected)
    self.remove(self.selected)
def remove_external_references(self):
    """Strip every 'externalReferences' child element from this role's node."""
    # findall materializes a list, so removing while looping is safe.
    stale = self.node.findall('externalReferences')
    for ref in stale:
        self.node.remove(ref)
def add_partition(self, spec, location=None):
    """Add a new table partition, creating any new directories in HDFS
    if necessary.

    Partition parameters can be set in a single DDL statement, or set
    afterwards with ``alter_partition``.

    Returns
    -------
    None (for now)
    """
    schema = self.partition_schema()
    statement = ddl.AddPartition(
        self._qualified_name, spec, schema, location=location
    )
    return self._execute(statement)
def write_command_line(self):
cmd = [" ".join(sys.argv)]
try:
previous = self.attrs["cmd"]
if isinstance(previous, str):
previous = [previous]
elif isinstance(previous, numpy.ndarray):
previous = previous.tolist()
except KeyErro... | Writes command line to attributes.
The command line is written to the file's ``attrs['cmd']``. If this
attribute already exists in the file (this can happen when resuming
from a checkpoint), ``attrs['cmd']`` will be a list storing the current
command line and all previous command lines. |
def encode_dataset(dataset, vocabulary):
    """Encode a string-valued tf.data.Dataset into token-id vectors.

    Args:
        dataset: a tf.data.Dataset with string values.
        vocabulary: an object exposing ``encode_tf`` for tensor encoding.

    Returns:
        a tf.data.Dataset with integer-vector values.
    """
    def _to_token_ids(features):
        encoded = {}
        for name, value in features.items():
            encoded[name] = vocabulary.encode_tf(value)
        return encoded
    return dataset.map(_to_token_ids,
                       num_parallel_calls=tf.data.experimental.AUTOTUNE)
def prune_indices(self, transforms=None):
if self.ndim >= 3:
return self._prune_3d_indices(transforms)
def prune_non_3d_indices(transforms):
row_margin = self._pruning_base(
hs_dims=transforms, axis=self.row_direction_axis
)
row_indices = s... | Return indices of pruned rows and columns as list.
The return value has one of three possible forms:
* a 1-element list of row indices (in case of 1D cube)
* 2-element list of row and col indices (in case of 2D cube)
* n-element list of tuples of 2 elements (if it's 3D cube).
... |
def comment_stream(reddit_session, subreddit, limit=None, verbosity=1):
    """Indefinitely yield new comments from `subreddit`, oldest to newest.

    :param reddit_session: the session used to make requests.
    :param subreddit: a subreddit object or subreddit name.
    :param limit: passed through to the underlying stream generator.
    :param verbosity: passed through to the underlying stream generator.
    """
    subreddit_name = six.text_type(subreddit)
    fetch_comments = partial(reddit_session.get_comments, subreddit_name)
    return _stream_generator(fetch_comments, limit, verbosity)
def unlock(self, key):
    """Release the lock for `key` without blocking.

    If the current thread holds the lock, the hold count is decremented;
    at zero the lock is released.

    :param key: (object), the key to unlock.
    """
    check_not_none(key, "key can't be None")
    key_data = self._to_data(key)
    return self._encode_invoke_on_key(
        map_unlock_codec, key_data, key=key_data,
        thread_id=thread_id(),
        reference_id=self.reference_id_generator.get_and_increment())
def zharkov_pel(v, temp, v0, e0, g, n, z, t_ref=300.,
                three_r=3. * constants.R):
    """Electronic pressure contribution (GPa) for the Zharkov equation,
    as given in Sokolova and Dorogokupets 2013.

    :param v: unit-cell volume in A^3
    :param temp: temperature in K
    :param v0: unit-cell volume in A^3 at 1 bar
    :param e0: parameter in K-1 for the Zharkov equation
    :param g: electronic-term exponent parameter
    :param n: number of atoms per formula unit
    :param z: number of formula units per unit cell
    :param t_ref: reference temperature in K (default 300)
    :param three_r: 3R, gas-constant prefactor
    """
    v_mol = vol_uc2mol(v, z)
    x = v / v0

    def pel_at(t):
        # Keep the published expression (and its evaluation order) intact;
        # 1.e-9 converts to GPa.
        return three_r * n / 2. * e0 * np.power(x, g) * np.power(t, 2.) * \
            g / v_mol * 1.e-9

    # Electronic pressure relative to the reference temperature.
    return pel_at(temp) - pel_at(t_ref)
def get_plugin_by_model(self, model_class):
self._import_plugins()
assert issubclass(model_class, ContentItem)
try:
name = self._name_for_model[model_class]
except KeyError:
raise PluginNotFound("No plugin found for model '{0}'.".format(model_class.__name__))
... | Return the corresponding plugin for a given model.
You can also use the :attr:`ContentItem.plugin <fluent_contents.models.ContentItem.plugin>` property directly.
This is the low-level function that supports that feature. |
def set_logger_level(logger_name, log_level='error'):
    """Tweak a specific logger's logging level.

    Unknown level names fall back to ERROR.
    """
    level = LOG_LEVELS.get(log_level.lower(), logging.ERROR)
    logging.getLogger(logger_name).setLevel(level)
def set_legend_position(self, legend_position):
    """Set the chart legend position (e.g. 'r', 'b', 'bv', 't', 'tv'),
    URL-quoted; a falsy value clears it."""
    self.legend_position = quote(legend_position) if legend_position else None
def elapsed(self):
    """Return the elapsed time since start, scaled by `factor`.

    While no end time has been recorded, the current time (``self()``)
    is used; after the end is recorded, the stored end time is used.
    """
    end_point = self() if self.end is None else self.end
    return (end_point - self.start) * self.factor
def get(self, field):
    """Return the value of a user field.

    Core fields ('username', 'uuid', 'app_data') are read directly;
    any other name is looked up inside 'app_data' (raising KeyError
    when absent there).

    :param str field: the name of the user field.
    :returns: the field value.
    """
    if field not in ('username', 'uuid', 'app_data'):
        return self.data.get('app_data', {})[field]
    return self.data[field]
def get_status(self):
if self.status is not None:
return self.status
if self.subsection == "dmdSec":
if self.older is None:
return "original"
else:
return "updated"
if self.subsection in ("techMD", "rightsMD"):
if se... | Returns the STATUS when serializing.
Calculates based on the subsection type and if it's replacing anything.
:returns: None or the STATUS string. |
def dequeue(self) -> Tuple[int, TItem]:
if self._len == 0:
raise ValueError('BucketPriorityQueue is empty.')
while self._buckets and not self._buckets[0]:
self._buckets.pop(0)
self._offset += 1
item = self._buckets[0].pop(0)
priority = self._offset
... | Removes and returns an item from the priority queue.
Returns:
A tuple whose first element is the priority of the dequeued item
and whose second element is the dequeued item.
Raises:
ValueError:
The queue is empty. |
def print_num(num):
    """Write a numeric result in hex, decimal, octal, and binary forms."""
    for template in ('hex: 0x{0:08x}', 'dec: {0:d}',
                     'oct: 0o{0:011o}', 'bin: 0b{0:032b}'):
        out(template.format(num))
def publish_scene_add(self, scene_id, animation_id, name, color, velocity, config):
    """Publish a 'scene add' message and return its sequence number."""
    self.sequence_number += 1
    message = msgs.MessageBuilder.scene_add(
        self.sequence_number, scene_id, animation_id, name, color, velocity,
        config)
    self.publisher.send_multipart(message)
    return self.sequence_number
def launch(self, callback_function=None):
    """Launch the app associated with this controller, if registered."""
    self._check_registered()
    receiver = self._socket_client.receiver_controller
    receiver.launch_app(self.supporting_app_id,
                        callback_function=callback_function)
def diff(name, **kwargs):
    """Return the diff between the candidate and the current Junos
    configuration as a salt state return dict.

    Optional kwargs (e.g. ``id``, the rollback id 0-49, default 0) are
    passed through to the ``junos.diff`` execution module.
    """
    changes = __salt__['junos.diff'](**kwargs)
    return {'name': name, 'changes': changes, 'result': True, 'comment': ''}
def _names_to_bytes(names):
names = sorted(names)
names_bytes = json.dumps(names).encode('utf8')
return names_bytes | Reproducibly converts an iterable of strings to bytes
:param iter[str] names: An iterable of strings
:rtype: bytes |
def set_user_password(name, passwd, **client_args):
    """Change the password of an InfluxDB user.

    name
        Name of the user for whom to set the password.
    passwd
        New password of the user.

    Returns False (and logs) when the user does not exist, True on success.
    """
    if not user_exists(name, **client_args):
        log.info("User '%s' does not exist", name)
        return False
    _client(**client_args).set_user_password(name, passwd)
    return True
def create_knowledge_base(project_id, display_name):
import dialogflow_v2beta1 as dialogflow
client = dialogflow.KnowledgeBasesClient()
project_path = client.project_path(project_id)
knowledge_base = dialogflow.types.KnowledgeBase(
display_name=display_name)
response = client.create_knowledg... | Creates a Knowledge base.
Args:
project_id: The GCP project linked with the agent.
display_name: The display name of the Knowledge base. |
def get_axis_value_discrete(self, axis):
    """Return the axis value in discrete steps for a given axis event.

    Only valid for POINTER_AXIS events; any other event type raises
    AttributeError.
    """
    if self.type != EventType.POINTER_AXIS:
        raise AttributeError(_wrong_meth.format(self.type))
    value = self._libinput.libinput_event_pointer_get_axis_value_discrete(
        self._handle, axis)
    return value
def set_session(self, headers=None):
if headers is None:
headers = {
'User-Agent':
('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3)'
' AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/48.0.2564.116 Safari/537.36')
}
... | Init session with default or custom headers
Args:
headers: A dict of headers (default None, thus using the default
header to init the session) |
def event_return(events):
options = _get_options()
index = options['master_event_index']
doc_type = options['master_event_doc_type']
if options['index_date']:
index = '{0}-{1}'.format(index,
datetime.date.today().strftime('%Y.%m.%d'))
_ensure_index(index)
for event in events:... | Return events to Elasticsearch
Requires that the `event_return` configuration be set in master config. |
def lemmatize(text, lowercase=True, remove_stopwords=True):
doc = nlp(text)
if lowercase and remove_stopwords:
lemmas = [t.lemma_.lower() for t in doc if not (t.is_stop or t.orth_.lower() in STOPWORDS)]
elif lowercase:
lemmas = [t.lemma_.lower() for t in doc]
elif remove_stopwords:
... | Return the lemmas of the tokens in a text. |
def stop(self):
    """Restore the terminal attributes saved before raw mode was set.

    Does nothing when raw mode was never started (no attributes saved).
    """
    if self.original_attributes is None:
        return
    termios.tcsetattr(self.fd, termios.TCSADRAIN, self.original_attributes)
def _compile_signature(self, iexec, call_name):
if iexec is not None:
summary = iexec.summary
if isinstance(iexec, Function):
summary = iexec.returns + "| " + iexec.summary
elif isinstance(iexec, Subroutine) and len(iexec.modifiers) > 0:
summar... | Compiles the signature for the specified executable and returns
as a dictionary. |
def get(self, key, bucket):
    """Return the cached item stored under (bucket, key).

    Returns None when the bucket or key is missing, or when the cache
    itself is unset (TypeError path).
    """
    try:
        bucket_items = self._cache[bucket]
        return bucket_items[key]
    except (KeyError, TypeError):
        return None
def rmse(params1, params2):
    r"""Compute the root-mean-squared error between two models.

    Both parameter vectors are shifted to zero mean before comparison,
    so the result is invariant to a constant offset between the models.

    Parameters
    ----------
    params1 : array_like
        Parameters of the first model.
    params2 : array_like
        Parameters of the second model.

    Returns
    -------
    error : float
        Root-mean-squared error.
    """
    assert len(params1) == len(params2)
    params1 = np.asarray(params1) - np.mean(params1)
    params2 = np.asarray(params2) - np.mean(params2)
    sqrt_n = math.sqrt(len(params1))
    return np.linalg.norm(params1 - params2, ord=2) / sqrt_n
def by_readings(self, role_names=['', 'Author']):
if not spectator_apps.is_enabled('reading'):
raise ImproperlyConfigured("To use the CreatorManager.by_readings() method, 'spectator.reading' must by in INSTALLED_APPS.")
qs = self.get_queryset()
qs = qs.filter(publication_roles__role_... | The Creators who have been most-read, ordered by number of readings.
By default it will only include Creators whose role was left empty,
or is 'Author'.
Each Creator will have a `num_readings` attribute. |
def get_property_by_hash(self, property_hash: str) -> Optional[Property]:
    """Return the Property whose sha512 matches `property_hash`, or None."""
    query = self.session.query(Property)
    return query.filter(Property.sha512 == property_hash).one_or_none()
def activate(self):
    """Activate this bounce and refresh it from the service response.

    :return: activation status message.
    :rtype: `str`
    """
    payload = self._manager.activate(self.ID)
    self._update(payload["Bounce"])
    return payload["Message"]
def _create_date_slug(self):
if not self.pk:
d = utc_now()
elif self.published and self.published_on:
d = self.published_on
elif self.updated_on:
d = self.updated_on
self.date_slug = u"{0}/{1}".format(d.strftime("%Y/%m/%d"), self.slug) | Prefixes the slug with the ``published_on`` date. |
def ExecuteRaw(self, position, command):
    """Send a command string to gdb and return its textual output."""
    self.EnsureGdbPosition(position[0], None, None)
    output = gdb.execute(command, to_string=True)
    return output
def add_bases(cls, *bases):
    """Prepend extra base classes to `cls` in place.

    >>> class Base(object): pass
    >>> class A(Base): pass
    >>> class B(Base): pass
    >>> issubclass(A, B)
    False
    >>> add_bases(A, B)
    >>> issubclass(A, B)
    True
    """
    assert inspect.isclass(cls), "Expected class object"
    for mixin in bases:
        assert inspect.isclass(mixin), "Expected class object for bases"
    # New bases go first, then the existing MRO bases.
    cls.__bases__ = bases + cls.__bases__
def __handle_events(self):
    """Drain pygame's event queue, exiting the app on a QUIT event."""
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            self.exit()
def transform_deprecated_concepts(rdf, cs):
deprecated_concepts = []
for conc in rdf.subjects(RDF.type, SKOSEXT.DeprecatedConcept):
rdf.add((conc, RDF.type, SKOS.Concept))
rdf.add((conc, OWL.deprecated, Literal("true", datatype=XSD.boolean)))
deprecated_concepts.append(conc)
if len(d... | Transform deprecated concepts so they are in their own concept
scheme. |
def register(self, mimetype):
    """Return a decorator that registers a handler for `mimetype`."""
    def decorator(handler):
        self._reg[mimetype] = handler
        return handler
    return decorator
def hook_outputs(modules:Collection[nn.Module], detach:bool=True, grad:bool=False)->Hooks:
    "Return `Hooks` that store activations of all `modules` in `self.stored`"
    is_forward = not grad
    return Hooks(modules, _hook_inner, detach=detach, is_forward=is_forward)
def suspend(self):
vm_state = yield from self._get_vm_state()
if vm_state == "running":
yield from self._control_vm("pause")
self.status = "suspended"
log.info("VirtualBox VM '{name}' [{id}] suspended".format(name=self.name, id=self.id))
else:
log.... | Suspends this VirtualBox VM. |
def update(self, name=None, email=None, blog=None, company=None,
location=None, hireable=False, bio=None):
user = {'name': name, 'email': email, 'blog': blog,
'company': company, 'location': location,
'hireable': hireable, 'bio': bio}
self._remove_none(user... | If authenticated as this user, update the information with
the information provided in the parameters.
:param str name: e.g., 'John Smith', not login name
:param str email: e.g., 'john.smith@example.com'
:param str blog: e.g., 'http://www.example.com/jsmith/blog'
:param str comp... |
def decode_example(self, serialized_example):
data_fields, data_items_to_decoders = self.example_reading_spec()
data_fields["batch_prediction_key"] = tf.FixedLenFeature([1], tf.int64, 0)
if data_items_to_decoders is None:
data_items_to_decoders = {
field: tf.contrib.slim.tfexample_decoder.Te... | Return a dict of Tensors from a serialized tensorflow.Example. |
def run_command(cmd, *args):
    """Run `cmd` with the given ``args`` through the shell.

    Fixes two defects in the original: ``' '.join((cmd, args))`` tried to
    join a str with a tuple (TypeError for any args), and ``p.retcode``
    is not a Popen attribute (it is ``returncode``).

    Returns a ``(returncode, stdout, stderr)`` tuple; stdout/stderr are
    bytes.
    """
    command = ' '.join((cmd,) + args)
    p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = p.communicate()
    return p.returncode, stdout, stderr
def update_statistics(self, activityVectors):
Y = activityVectors
n = self.output_size
A = np.zeros((n, n))
batchSize = len(Y)
for y in Y:
active_units = np.where( y == 1 )[0]
for i in active_units:
for j in active_units:
... | Updates the variable that maintains exponential moving averages of
individual and pairwise unit activiy |
def t_escaped_CARRIAGE_RETURN_CHAR(self, t):
    r'\x72'
    # PLY lexer rule: the raw-string docstring above IS the token regex
    # and must not be changed. \x72 is the letter 'r', i.e. this matches
    # the 'r' of a "\r" escape while the lexer is in the escaped state.
    t.lexer.pop_state()
    # Replace the matched escape letter with an actual carriage return
    # (U+000D). NOTE(review): `unichr` implies Python 2-era code.
    t.value = unichr(0x000d)
    return t
def _split_url_string(query_string):
    """Turn a `query_string` into a dict of unquoted single values.

    Only the first value of each parameter is kept; blank values are
    preserved. NOTE(review): uses Python 2-only APIs (`dict.iteritems`,
    `urllib.unquote`) — not Python 3 compatible as written.
    """
    parameters = parse_qs(to_utf8(query_string), keep_blank_values=True)
    for k, v in parameters.iteritems():
        # parse_qs returns lists; collapse each to its first, unquoted value.
        parameters[k] = urllib.unquote(v[0])
    return parameters
def eeg_create_mne_events(onsets, conditions=None):
event_id = {}
if conditions is None:
conditions = ["Event"] * len(onsets)
if len(conditions) != len(onsets):
print("NeuroKit Warning: eeg_create_events(): conditions parameter of different length than onsets. Aborting.")
return()
... | Create MNE compatible events.
Parameters
----------
onsets : list or array
Events onsets.
conditions : list
A list of equal length containing the stimuli types/conditions.
Returns
----------
(events, event_id) : tuple
MNE-formated events and a dictionary with event... |
def delete(self, table_id):
from google.api_core.exceptions import NotFound
if not self.exists(table_id):
raise NotFoundException("Table does not exist")
table_ref = self.client.dataset(self.dataset_id).table(table_id)
try:
self.client.delete_table(table_ref)
... | Delete a table in Google BigQuery
Parameters
----------
table : str
Name of table to be deleted |
def _list_queues():
    """Return the names of the sqlite databases (*.db) in the queue_dir."""
    queue_dir = __opts__['sqlite_queue_dir']
    pattern = os.path.join(queue_dir, '*.db')
    return [os.path.splitext(os.path.basename(path))[0]
            for path in glob.glob(pattern)]
def disassemble(self, data, start_address=0):
    """Disassemble a binary buffer.

    :param data: raw machine-code bytes to disassemble.
    :param start_address: virtual address assigned to the first byte.
    :returns: list of (virtual memory address, instruction length,
        disassembly text) entries, as produced by the underlying
        `_opcodes` extension.
    """
    return _opcodes.disassemble(self._ptr, data, start_address)
def get(self):
    """Pop the first available task's data, or None when none are pending.

    The task's key is deleted from the backing KV store before the data
    is returned, so the task is consumed.
    """
    available = self._get_avaliable_tasks()
    if not available:
        return None
    name, data = available[0]
    self._client.kv.delete(name)
    return data
def open(self):
if self._is_open:
raise HIDException("Failed to open device: HIDDevice already open")
path = self.path.encode('utf-8')
dev = hidapi.hid_open_path(path)
if dev:
self._is_open = True
self._device = dev
else:
raise HIDE... | Open the HID device for reading and writing. |
def _normalize_compare_config(self, diff):
ignore_strings = [
"Contextual Config Diffs",
"No changes were found",
"ntp clock-period",
]
if self.auto_file_prompt:
ignore_strings.append("file prompt quiet")
new_list = []
for line in d... | Filter out strings that should not show up in the diff. |
def find_peakset(dataset, basecolumn=-1, method='', where=None):
peakset = []
where_i = None
for data in dataset:
base = data[basecolumn]
base = maidenhair.statistics.average(base)
if where:
adata = [maidenhair.statistics.average(x) for x in data]
where_i = np... | Find peakset from the dataset
Parameters
-----------
dataset : list
A list of data
basecolumn : int
An index of column for finding peaks
method : str
A method name of numpy for finding peaks
where : function
A function which recieve ``data`` and return numpy inde... |
def get_text_contents(self):
contents = self.get_contents()
if contents[:len(codecs.BOM_UTF8)] == codecs.BOM_UTF8:
return contents[len(codecs.BOM_UTF8):].decode('utf-8')
if contents[:len(codecs.BOM_UTF16_LE)] == codecs.BOM_UTF16_LE:
return contents[len(codecs.BOM_UTF16_LE... | This attempts to figure out what the encoding of the text is
based upon the BOM bytes, and then decodes the contents so that
it's a valid python string. |
def get_xml(html, content_tag='ekb', fail_if_empty=False):
cont = re.findall(r'<%(tag)s(.*?)>(.*?)</%(tag)s>' % {'tag': content_tag},
html, re.MULTILINE | re.DOTALL)
if cont:
events_terms = ''.join([l.strip() for l in cont[0][1].splitlines()])
if 'xmlns' in cont[0][0]:
... | Extract the content XML from the HTML output of the TRIPS web service.
Parameters
----------
html : str
The HTML output from the TRIPS web service.
content_tag : str
The xml tag used to label the content. Default is 'ekb'.
fail_if_empty : bool
If True, and if the xml content... |
def validate(self):
    """Validate the URL object.

    Per the documented contract, the URL is invalid unless it represents
    an absolute URL, i.e. has both a non-empty scheme and a non-empty
    host. (The original condition used ``self.scheme != ''`` where
    ``== ''`` was clearly intended, so a URL with an empty scheme always
    validated as True.)

    Returns True or False.
    """
    if not self.scheme or not self.host:
        return False
    return True
def startElement(self, name, attrs):
    """Push the current node state onto the stack and start a fresh one."""
    self.stack.append((self.current, self.chardata))
    self.current, self.chardata = {}, []
def get_account_history(self, account_id, **kwargs):
    """List ledger activity for an account (paginated).

    Each entry's type indicates the reason for the balance change, e.g.
    transfer, match, or fee. Extra kwargs are forwarded as query params.
    """
    return self._send_paginated_message(
        '/accounts/{}/ledger'.format(account_id), params=kwargs)
def reorient_z(structure):
    """Return a copy of `structure` rotated so that the z axis is normal
    to the A-B plane."""
    rotated = structure.copy()
    rotation = get_rot(rotated)
    rotated.apply_operation(rotation)
    return rotated
def compute_node_positions(self):
xs = []
ys = []
self.locs = dict()
for node in self.nodes:
x = self.graph.node[node][self.node_lon]
y = self.graph.node[node][self.node_lat]
xs.append(x)
ys.append(y)
self.locs[node] = (x, y)
... | Extracts the node positions based on the specified longitude and
latitude keyword arguments. |
def list(self, argv):
def read(index):
print(index.name)
for key in sorted(index.content.keys()):
value = index.content[key]
print(" %s: %s" % (key, value))
if len(argv) == 0:
for index in self.service.indexes:
count... | List available indexes if no names provided, otherwise list the
properties of the named indexes. |
def invoked(self, ctx):
if not ctx.ansi.is_enabled:
print("You need color support to use this demo")
else:
print(ctx.ansi.cmd('erase_display'))
self._demo_fg_color(ctx)
self._demo_bg_color(ctx)
self._demo_bg_indexed(ctx)
self._demo_... | Method called when the command is invoked. |
def _regexSearchKeyValueCombo(policy_data, policy_regpath, policy_regkey):
if policy_data:
specialValueRegex = salt.utils.stringutils.to_bytes(r'(\*\*Del\.|\*\*DelVals\.){0,1}')
_thisSearch = b''.join([salt.utils.stringutils.to_bytes(r'\['),
re.escape(policy_regpath),
... | helper function to do a search of Policy data from a registry.pol file
for a policy_regpath and policy_regkey combo |
def change_node_subscriptions(self, jid, node, subscriptions_to_set):
iq = aioxmpp.stanza.IQ(
type_=aioxmpp.structs.IQType.SET,
to=jid,
payload=pubsub_xso.OwnerRequest(
pubsub_xso.OwnerSubscriptions(
node,
subscriptions=... | Update the subscriptions at a node.
:param jid: Address of the PubSub service.
:type jid: :class:`aioxmpp.JID`
:param node: Name of the node to modify
:type node: :class:`str`
:param subscriptions_to_set: The subscriptions to set at the node.
:type subscriptions_to_set: ... |
def _get_geocoding(self, key, location):
url = self._location_query_base % quote_plus(key)
if self.api_key:
url += "&key=%s" % self.api_key
data = self._read_from_url(url)
response = json.loads(data)
if response["status"] == "OK":
formatted_address = respo... | Lookup the Google geocoding API information for `key` |
def _get_external_workers(worker):
worker_that_blocked_task = collections.defaultdict(set)
get_work_response_history = worker._get_work_response_history
for get_work_response in get_work_response_history:
if get_work_response['task_id'] is None:
for running_task in get_work_response['run... | This returns a dict with a set of tasks for all of the other workers |
def process_request_thread(self, request, client_address):
from ..blockstackd import get_gc_thread
try:
self.finish_request(request, client_address)
except Exception:
self.handle_error(request, client_address)
finally:
self.shutdown_request(request)
... | Same as in BaseServer but as a thread.
In addition, exception handling is done here. |
def get_events_with_error_code(event_number, event_status, select_mask=0b1111111111111111, condition=0b0000000000000000):
    """Return the unique event numbers whose masked status equals `condition`.

    Parameters
    ----------
    event_number : numpy.array
    event_status : numpy.array
    select_mask : int
        Mask selecting which status bits to check.
    condition : int
        Value the masked status bits must equal.

    Returns
    -------
    numpy.array of unique selected event numbers.
    """
    logging.debug("Calculate events with certain error code")
    selection = (event_status & select_mask) == condition
    return np.unique(event_number[selection])
def submit_statsd_measurements(self):
for key, value in self.measurement.counters.items():
self.statsd.incr(key, value)
for key, values in self.measurement.durations.items():
for value in values:
self.statsd.add_timing(key, value)
for key, value in self.me... | Submit a measurement for a message to statsd as individual items. |
def _synchronized(meth):
@functools.wraps(meth)
def wrapper(self, *args, **kwargs):
with self._lock:
return meth(self, *args, **kwargs)
return wrapper | Call method while holding a lock. |
def create_kernel_spec(self, is_cython=False,
                       is_pylab=False, is_sympy=False):
    """Create a kernel spec for Spyder's own kernels, syncing the current
    Spyder PYTHONPATH into the config first."""
    CONF.set('main', 'spyder_pythonpath',
             self.main.get_spyder_pythonpath())
    return SpyderKernelSpec(is_cython=is_cython,
                            is_pylab=is_pylab,
                            is_sympy=is_sympy)
def send_data(self, **kwargs):
put_url = None
if 'put_url' in kwargs:
put_url = kwargs['put_url']
else:
put_url = self.put_upload_url
if 'data' not in kwargs:
raise AttributeError("'data' parameter is required")
if not put_url:
rais... | This method transmits data to the Gett service.
Input:
* ``put_url`` A PUT url to use when transmitting the data (required)
* ``data`` A byte stream (required)
Output:
* ``True``
Example::
if file.send_data(put_url=file.upload_url, data=open("e... |
def pydeps(**args):
_args = args if args else cli.parse_args(sys.argv[1:])
inp = target.Target(_args['fname'])
log.debug("Target: %r", inp)
if _args.get('output'):
_args['output'] = os.path.abspath(_args['output'])
else:
_args['output'] = os.path.join(
inp.calling_dir,
... | Entry point for the ``pydeps`` command.
This function should do all the initial parameter and environment
munging before calling ``_pydeps`` (so that function has a clean
execution path). |
def parse_objective_coefficient(entry):
    """Return the objective coefficient for a reaction entry, or None.

    Detects objectives specified through the non-standardized
    OBJECTIVE_COEFFICIENT kinetic-law parameter used by many pre-FBC
    SBML models, matching on either the parameter id or name.
    """
    for pid, name, value, units in entry.kinetic_law_reaction_parameters:
        if 'OBJECTIVE_COEFFICIENT' in (pid, name):
            return value
    return None
def collect_filepaths(self, directories):
plugin_filepaths = set()
directories = util.to_absolute_paths(directories)
for directory in directories:
filepaths = util.get_filepaths_from_dir(directory)
filepaths = self._filter_filepaths(filepaths)
plugin_filepaths... | Collects and returns every filepath from each directory in
`directories` that is filtered through the `file_filters`.
If no `file_filters` are present, passes every file in directory
as a result.
Always returns a `set` object
`directories` can be a object or an iterable. Recomme... |
def write_gif(dataset, filename, fps=10):
try:
check_dataset(dataset)
except ValueError as e:
dataset = try_fix_dataset(dataset)
check_dataset(dataset)
delay_time = 100 // int(fps)
def encode(d):
four_d = isinstance(dataset, numpy.ndarray) and len(dataset.shape) == 4
... | Write a NumPy array to GIF 89a format.
Or write a list of NumPy arrays to an animation (GIF 89a format).
- Positional arguments::
:param dataset: A NumPy arrayor list of arrays with shape
rgb x rows x cols and integer values in [0, 255].
:param filename: The output fil... |
def iterkeys(self, key_type=None, return_all_keys=False):
if(key_type is not None):
the_key = str(key_type)
if the_key in self.__dict__:
for key in self.__dict__[the_key].keys():
if return_all_keys:
yield self.__dict__[the... | Returns an iterator over the dictionary's keys.
@param key_type if specified, iterator for a dictionary of this type will be used.
Otherwise (if not specified) tuples containing all (multiple) keys
for this dictionary will be generated.
@param return_al... |
def _validate_allowed_settings(self, application_id, application_config, allowed_settings):
for setting_key in application_config.keys():
if setting_key not in allowed_settings:
raise ImproperlyConfigured(
"Platform {}, app {} does not support the setting: {}.".format(
application_config["PLATFORM"]... | Confirm only allowed settings are present. |
def ifilter(self, recursive=True, matches=None, flags=FLAGS,
            forcetype=None):
    """Iterate over nodes in our list matching certain conditions.

    Delegates the actual filtering to ``self._indexed_ifilter`` (which
    yields ``(index, node)`` pairs) and strips off the index, returning a
    lazy generator of the matching nodes only.
    """
    # Call the indexed variant eagerly, but keep iteration itself lazy.
    indexed_pairs = self._indexed_ifilter(recursive, matches, flags, forcetype)
    return (pair[1] for pair in indexed_pairs)
If *forcetype* is given, only nodes that are instances of this type (or
tuple of types) are yielded. Setting *recursive* to ``True`` will
iterate over all children and their descendants. ``RECURSE_OTHERS``
will only iterate ove... |
def parse(binary, **params):
    """Turn a JSON document into a Python object.

    Args:
        binary: the JSON payload, as ``bytes``/``bytearray`` or ``str``.
        **params: optional keyword parameters; ``charset`` names the text
            encoding used to decode byte input (default ``'UTF-8'``).

    Returns:
        The deserialized Python object.
    """
    encoding = params.get('charset', 'UTF-8')
    # BUG FIX: json.loads() no longer accepts an `encoding` keyword (it was
    # deprecated in 3.1 and removed in Python 3.9, raising TypeError).
    # Decode byte input explicitly so the declared charset is honored.
    if isinstance(binary, (bytes, bytearray)):
        binary = binary.decode(encoding)
    return json.loads(binary)
def client_getter():
def wrapper(f):
@wraps(f)
def decorated(*args, **kwargs):
if 'client_id' not in kwargs:
abort(500)
client = Client.query.filter_by(
client_id=kwargs.pop('client_id'),
user_id=current_user.get_id(),
... | Decorator to retrieve Client object and check user permission. |
def get_preds(self, ds_type:DatasetType=DatasetType.Valid, with_loss:bool=False, n_batch:Optional[int]=None, pbar:Optional[PBar]=None,
ordered:bool=False) -> List[Tensor]:
"Return predictions and targets on the valid, train, or test set, depending on `ds_type`."
self.model.reset()
... | Return predictions and targets on the valid, train, or test set, depending on `ds_type`. |
def diff_dictionaries(old_dict, new_dict):
old_set = set(old_dict)
new_set = set(new_dict)
added_set = new_set - old_set
removed_set = old_set - new_set
common_set = old_set & new_set
changes = 0
output = []
for key in added_set:
changes += 1
output.append(DictValue(key, ... | Diffs two single dimension dictionaries
Returns the number of changes and an unordered list
expressing the common entries and changes.
Args:
old_dict(dict): old dictionary
new_dict(dict): new dictionary
Returns: list()
int: number of changed records
list: [DictValue] |
def create_branch(self, branch_name):
    """Create a new branch based on the working tree's revision.

    :param branch_name: The name of the branch to create (a string).

    This method automatically checks out the new branch, but note that the
    new branch may not actually exist until a commit has been made on the
    branch.
    """
    # NOTE(review): create() and ensure_working_tree() appear to prepare
    # the local repository/working tree before branching -- confirm in
    # those helpers; their exact call order is relied upon here.
    self.create()
    self.ensure_working_tree()
    logger.info("Creating branch '%s' in %s ..", branch_name, format_path(self.local))
    # Delegate to the backend-specific command line (built by
    # get_create_branch_command) via the repository's execution context.
    self.context.execute(*self.get_create_branch_command(branch_name))
:param branch_name: The name of the branch to create (a string).
This method automatically checks out the new branch, but note that the
new branch may not actually exist until a commit has been made on the
branch. |
def read_local_config(cfg):
try:
if os.path.exists(cfg):
config = import_file_object(cfg)
return config
else:
logger.warning(
'%s: local config file (%s) not found, cannot be read' %
(inspect.stack()[0][3], str(cfg)))
except IOE... | Parses local config file for override values
Args:
:local_file (str): filename of local config file
Returns:
dict object of values contained in local config file |
def get_user_details(self, response):
    """Return user details from OAuth Profile Google App Engine App.

    Extracts the e-mail address from *response* and derives the username
    from the ``nickname`` field (falling back to the e-mail), keeping only
    the local part before any ``'@'``.
    """
    email = response['email']
    nickname = response.get('nickname', email)
    # partition('@') yields the text before the first '@' unchanged when
    # no '@' is present -- same result as split('@', 1)[0].
    username, _, _ = nickname.partition('@')
    return {
        'username': username,
        'email': email,
        'fullname': '',
        'first_name': '',
        'last_name': '',
    }
def decrypt(self, ciphertext, n=''):
    """Decrypt some ciphertext.

    ciphertext = a string of binary data
    n = the 'tweak' value when the chaining mode is XTS

    Records the direction ('d') on the instance and delegates the actual
    work to the chaining-mode object; only XTS mode consumes the tweak.
    """
    self.ed = 'd'
    # XTS is the only mode whose update() takes the extra tweak argument.
    args = (ciphertext, 'd', n) if self.mode == MODE_XTS else (ciphertext, 'd')
    return self.chain.update(*args)
ciphertext = a string of binary data
n = the 'tweak' value when the chaining mode is XTS
The decrypt function will decrypt the supplied ciphertext.
The behavior varies slightly depending on the chaining mode.
ECB, CBC:
-------... |
def _session_key(self):
if not hasattr(self, "_cached_session_key"):
session_id_bytes = self.get_secure_cookie("session_id")
session_id = None
if session_id_bytes:
try:
session_id = session_id_bytes.decode('utf-8')
except:
... | Gets the redis key for a session |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.