positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def occipital_flatmap(cortex, radius=None):
'''
occipital_flatmap(cortex) yields a flattened mesh of the occipital cortex of the given cortex
object.
Note that if the cortex is not registrered to fsaverage, this will fail.
The option radius may be given to specify the fraction of the corti... | occipital_flatmap(cortex) yields a flattened mesh of the occipital cortex of the given cortex
object.
Note that if the cortex is not registrered to fsaverage, this will fail.
The option radius may be given to specify the fraction of the cortical sphere (in radians) to
include in the map. |
def find_plugin(value,
key=DEFAULT_LOOKUP_KEY,
conn=None):
"""
get's the plugin matching the key and value
example: find_plugin("plugin1", "ServiceName") => list of 0 or 1 item
example: find_plugin("plugin1", "Name") => list of 0-to-many items
:param value:
:par... | get's the plugin matching the key and value
example: find_plugin("plugin1", "ServiceName") => list of 0 or 1 item
example: find_plugin("plugin1", "Name") => list of 0-to-many items
:param value:
:param key: <str> (default "Name")
:param conn:
:return: |
def MySend(request_path, payload=None,
content_type="application/octet-stream",
timeout=None, force_auth=True,
**kwargs):
"""Run MySend1 maybe twice, because Rietveld is unreliable."""
try:
return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
except Exception, e:
if type(e) !=... | Run MySend1 maybe twice, because Rietveld is unreliable. |
def get_relationships(schema, model_field=False):
"""Return relationship fields of a schema
:param Schema schema: a marshmallow schema
:param list: list of relationship fields of a schema
"""
relationships = [key for (key, value) in schema._declared_fields.items() if isinstance(value, Relationship)... | Return relationship fields of a schema
:param Schema schema: a marshmallow schema
:param list: list of relationship fields of a schema |
def _start(self):
"""Start the long running operation.
On completion, runs any callbacks.
:param callable update_cmd: The API reuqest to check the status of
the operation.
"""
try:
self._polling_method.run()
except Exception as err:
self.... | Start the long running operation.
On completion, runs any callbacks.
:param callable update_cmd: The API reuqest to check the status of
the operation. |
def timezone(zone):
"""Try to get timezone using pytz or python-dateutil
:param zone: timezone str
:return: timezone tzinfo or None
"""
try:
import pytz
return pytz.timezone(zone)
except ImportError:
pass
try:
from dateutil.tz import gettz
return gett... | Try to get timezone using pytz or python-dateutil
:param zone: timezone str
:return: timezone tzinfo or None |
def send_using_soap(self, request, destination, headers=None, sign=False):
"""
Send a message using SOAP+POST
:param request:
:param destination:
:param headers:
:param sign:
:return:
"""
# _response = self.server.post(soap_message, headers, path... | Send a message using SOAP+POST
:param request:
:param destination:
:param headers:
:param sign:
:return: |
def status(self, job_ids):
''' Get the status of a list of jobs identified by their ids.
Args:
- job_ids (List of ids) : List of identifiers for the jobs
Returns:
- List of status codes.
'''
logging.debug("Checking status of : {0}".format(job_ids))
... | Get the status of a list of jobs identified by their ids.
Args:
- job_ids (List of ids) : List of identifiers for the jobs
Returns:
- List of status codes. |
def MI_referenceNames(self,
env,
objectName,
resultClassName,
role):
# pylint: disable=invalid-name
"""Return instance names of an association class.
Implements the WBEM operation ReferenceNa... | Return instance names of an association class.
Implements the WBEM operation ReferenceNames in terms
of the references method. A derived class will not normally
override this method. |
def get_siblings(self):
"""
:returns: A queryset of all the node's siblings, including the node
itself.
"""
qset = get_result_class(self.__class__).objects.filter(
depth=self.depth
).order_by(
'path'
)
if self.depth > 1:
... | :returns: A queryset of all the node's siblings, including the node
itself. |
def total_consumption(self):
"""Get the total power consumpuntion in the device lifetime."""
if self.use_legacy_protocol:
# TotalConsumption currently fails on the legacy protocol and
# creates a mess in the logs. Just return 'N/A' for now.
return 'N/A'
res =... | Get the total power consumpuntion in the device lifetime. |
def order_by_descending(self, key):
"""
Returns new Enumerable sorted in descending order by given key
:param key: key to sort by as lambda expression
:return: new Enumerable object
"""
if key is None:
raise NullArgumentError(u"No key for sorting given")
... | Returns new Enumerable sorted in descending order by given key
:param key: key to sort by as lambda expression
:return: new Enumerable object |
def __sweeten(self, dumper: 'Dumper', class_: Type, node: Node) -> None:
"""Applies the user's yatiml_sweeten() function(s), if any.
Sweetening is done for the base classes first, then for the \
derived classes, down the hierarchy to the class we're \
constructing.
Args:
... | Applies the user's yatiml_sweeten() function(s), if any.
Sweetening is done for the base classes first, then for the \
derived classes, down the hierarchy to the class we're \
constructing.
Args:
dumper: The dumper that is dumping this object.
class_: The type o... |
def is_probably_prime(self):
"""Tests with miller-rabin
:return: True iff prime
"""
if self.is_naive_prime():
return True
# check if multiple pf low primes
for prime in LOW_PRIMES:
if self.to_int % prime == 0:
return False
... | Tests with miller-rabin
:return: True iff prime |
def RecordEvent(self, metric_name, value, fields=None):
"""See base class."""
self._event_metrics[metric_name].Record(value, fields) | See base class. |
def compare_lists(old=None, new=None):
'''
Compare before and after results from various salt functions, returning a
dict describing the changes that were made
'''
ret = dict()
for item in new:
if item not in old:
ret['new'] = item
for item in old:
if item not in ... | Compare before and after results from various salt functions, returning a
dict describing the changes that were made |
def calculate_timeout(http_date):
"""Extract request timeout from e.g. ``Retry-After`` header.
Notes:
Per :rfc:`2616#section-14.37`, the ``Retry-After`` header can
be either an integer number of seconds or an HTTP date. This
function can handle either.
Arguments:
... | Extract request timeout from e.g. ``Retry-After`` header.
Notes:
Per :rfc:`2616#section-14.37`, the ``Retry-After`` header can
be either an integer number of seconds or an HTTP date. This
function can handle either.
Arguments:
http_date (:py:class:`str`): The da... |
def _sample_conditional(Xnew, feat, kern, f, *, full_cov=False, full_output_cov=False, q_sqrt=None, white=False, num_samples=None):
"""
`sample_conditional` will return a sample from the conditional distribution.
In most cases this means calculating the conditional mean m and variance v and then
returni... | `sample_conditional` will return a sample from the conditional distribution.
In most cases this means calculating the conditional mean m and variance v and then
returning m + sqrt(v) * eps, with eps ~ N(0, 1).
However, for some combinations of Mok and Mof more efficient sampling routines exists.
The dis... |
def encode(self, value):
'''
:param value: value to encode
'''
encoded = strToBytes(value) + b'\x00'
return Bits(bytes=encoded) | :param value: value to encode |
def set_dependencies(analysis, dependencies, path):
"""
Syncronize the Analysis result with the needed dependencies.
"""
for toc in (analysis.binaries, analysis.datas):
for i, tpl in enumerate(toc):
if not tpl[1] in dependencies.keys():
logger.info("Adding dependency... | Syncronize the Analysis result with the needed dependencies. |
def update_conf(self):
"""Update configuration values from database.
This method should be called when there is an update notification.
"""
parsed = self.parse_conf()
if not parsed:
return None
# Update app config
self.app.config.update(parsed) | Update configuration values from database.
This method should be called when there is an update notification. |
def coarsegrain(F, sets):
r"""Coarse-grains the flux to the given sets
$fc_{i,j} = \sum_{i \in I,j \in J} f_{i,j}$
Note that if you coarse-grain a net flux, it does not necessarily have a net
flux property anymore. If want to make sure you get a netflux,
use to_netflux(coarsegrain(F,sets)).
Pa... | r"""Coarse-grains the flux to the given sets
$fc_{i,j} = \sum_{i \in I,j \in J} f_{i,j}$
Note that if you coarse-grain a net flux, it does not necessarily have a net
flux property anymore. If want to make sure you get a netflux,
use to_netflux(coarsegrain(F,sets)).
Parameters
----------
F ... |
def rs_find_error_evaluator(synd, err_loc, nsym):
'''Compute the error (or erasures if you supply sigma=erasures locator polynomial, or errata) evaluator polynomial Omega from the syndrome and the error/erasures/errata locator Sigma. Omega is already computed at the same time as Sigma inside the Berlekamp-Massey im... | Compute the error (or erasures if you supply sigma=erasures locator polynomial, or errata) evaluator polynomial Omega from the syndrome and the error/erasures/errata locator Sigma. Omega is already computed at the same time as Sigma inside the Berlekamp-Massey implemented above, but in case you modify Sigma, you can re... |
def iter_predict_proba(self, X, include_init=False):
"""Returns the predicted probabilities for ``X`` at every stage of the boosting procedure.
Arguments:
X (array-like or sparse matrix of shape (n_samples, n_features)): The input samples.
Sparse matrices are accepted only i... | Returns the predicted probabilities for ``X`` at every stage of the boosting procedure.
Arguments:
X (array-like or sparse matrix of shape (n_samples, n_features)): The input samples.
Sparse matrices are accepted only if they are supported by the weak model.
include_init... |
def get_edge_init_data(self, fn, save_path=None):
"""
Creates the initialization data from the edge structure
"""
edge_init_data = {key: self.edges[fn][key].get('data') for key in
self.edges[fn].keys()}
edge_init_done = {key: self.edges[fn][key].get('do... | Creates the initialization data from the edge structure |
def deserialized_objects(self):
"""Returns a generator of deserialized objects.
"""
if not self._deserialized_objects:
json_text = self.read()
self._deserialized_objects = self.deserialize(json_text=json_text)
return self._deserialized_objects | Returns a generator of deserialized objects. |
def description(self):
""" Get the textual description of the category """
if self._meta and self._meta.get_payload():
return utils.TrueCallableProxy(self._description)
return utils.CallableProxy(None) | Get the textual description of the category |
def hex2termhex(hexval: str, allow_short: bool = False) -> str:
""" Convert a hex value into the nearest terminal color matched hex. """
return rgb2termhex(*hex2rgb(hexval, allow_short=allow_short)) | Convert a hex value into the nearest terminal color matched hex. |
def iterfields(klass):
"""Iterate over the input class members and yield its TypedFields.
Args:
klass: A class (usually an Entity subclass).
Yields:
(class attribute name, TypedField instance) tuples.
"""
is_field = lambda x: isinstance(x, TypedField)
for name, field in inspec... | Iterate over the input class members and yield its TypedFields.
Args:
klass: A class (usually an Entity subclass).
Yields:
(class attribute name, TypedField instance) tuples. |
def change_generated_target_suffix (type, properties, suffix):
""" Change the suffix previously registered for this type/properties
combination. If suffix is not yet specified, sets it.
"""
assert isinstance(type, basestring)
assert is_iterable_typed(properties, basestring)
assert isinstance... | Change the suffix previously registered for this type/properties
combination. If suffix is not yet specified, sets it. |
def merge_programs(prog_list):
"""
Merges a list of pyQuil programs into a single one by appending them in sequence.
If multiple programs in the list contain the same gate and/or noisy gate definition
with identical name, this definition will only be applied once. If different definitions
with the s... | Merges a list of pyQuil programs into a single one by appending them in sequence.
If multiple programs in the list contain the same gate and/or noisy gate definition
with identical name, this definition will only be applied once. If different definitions
with the same name appear multiple times in the progr... |
def hash_host(hostname, salt=None):
"""
Return a "hashed" form of the hostname, as used by OpenSSH when storing
hashed hostnames in the known_hosts file.
:param str hostname: the hostname to hash
:param str salt: optional salt to use when hashing (must be 20 bytes long)
... | Return a "hashed" form of the hostname, as used by OpenSSH when storing
hashed hostnames in the known_hosts file.
:param str hostname: the hostname to hash
:param str salt: optional salt to use when hashing (must be 20 bytes long)
:return: the hashed hostname as a `str` |
def skolemize(gin: Graph) -> Graph:
"""
Replace all of the blank nodes in graph gin with FHIR paths
:param gin: input graph
:return: output graph
"""
gout = Graph()
# Emit any unreferenced subject BNodes (boxes)
anon_subjs = [s for s in gin.subjects() if isinstance(s, BNode) and len([gi... | Replace all of the blank nodes in graph gin with FHIR paths
:param gin: input graph
:return: output graph |
def draw_lineage(self, recs, nodecolor="mediumseagreen",
edgecolor="lightslateblue", dpi=96,
lineage_img="GO_lineage.png", engine="pygraphviz",
gml=False, draw_parents=True, draw_children=True):
"""Draw GO DAG subplot."""
assert engine in Gr... | Draw GO DAG subplot. |
def add_binding(self, node, value, report_redef=True):
"""Called when a binding is altered.
- `node` is the statement responsible for the change
- `value` is the optional new value, a Binding instance, associated
with the binding; if None, the binding is deleted if it exists.
- ... | Called when a binding is altered.
- `node` is the statement responsible for the change
- `value` is the optional new value, a Binding instance, associated
with the binding; if None, the binding is deleted if it exists.
- if `report_redef` is True (default), rebinding while unused will b... |
def keys_values(data, *keys):
"""Get an entry as a list from a dict. Provide a fallback key."""
values = []
if is_mapping(data):
for key in keys:
if key in data:
values.extend(ensure_list(data[key]))
return values | Get an entry as a list from a dict. Provide a fallback key. |
def evaluate_barycentric_multi(self, param_vals, _verify=True):
r"""Compute multiple points on the surface.
Assumes ``param_vals`` has three columns of barycentric coordinates.
See :meth:`evaluate_barycentric` for more details on how each row of
parameter values is evaluated.
.... | r"""Compute multiple points on the surface.
Assumes ``param_vals`` has three columns of barycentric coordinates.
See :meth:`evaluate_barycentric` for more details on how each row of
parameter values is evaluated.
.. image:: ../../images/surface_evaluate_barycentric_multi.png
... |
def _process_abundance_vector(self, a, z, isomers, yps):
'''
This private method takes as input one vector definition and
processes it, including sorting by charge number and
mass number. It returns the processed input variables
plus an element and isotope vector and a list of
... | This private method takes as input one vector definition and
processes it, including sorting by charge number and
mass number. It returns the processed input variables
plus an element and isotope vector and a list of
isomers. |
def pave_event_space(fn=pair):
"""
:return:
a pair producer that ensures the seeder and delegator share the same event space.
"""
global _event_space
event_space = next(_event_space)
@_ensure_seeders_list
def p(seeders, delegator_factory, *args, **kwargs):
return fn(seeders ... | :return:
a pair producer that ensures the seeder and delegator share the same event space. |
def generate_seasonal_averages(qout_file, seasonal_average_file,
num_cpus=multiprocessing.cpu_count()):
"""
This function loops through a CF compliant rapid streamflow
file to produce a netCDF file with a seasonal average for
365 days a year
"""
with RAPIDDataset(q... | This function loops through a CF compliant rapid streamflow
file to produce a netCDF file with a seasonal average for
365 days a year |
def _build_option_description(k):
"""Builds a formatted description of a registered option and prints it."""
o = _get_registered_option(k)
d = _get_deprecated_option(k)
buf = ['{} '.format(k)]
if o.doc:
doc = '\n'.join(o.doc.strip().splitlines())
else:
doc = 'No description ava... | Builds a formatted description of a registered option and prints it. |
def copy_models(module_from, module_to):
"""Copy models from one module to another
:param module_from:
:param module_to:
:return:
"""
module_from = get_module(module_from)
module_to = get_module(module_to)
models = get_models(module_from)
if models:
models = models.copy()
... | Copy models from one module to another
:param module_from:
:param module_to:
:return: |
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determ... | Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
N... |
def enforce_bounds(self):
""" enforce bounds violation resulting from the
parameter pertubation calculations
"""
too_big = self.parameter_data.loc[:,"parval1"] > \
self.parameter_data.loc[:,"parubnd"]
self.parameter_data.loc[too_big,"parval1"] = \
self.pa... | enforce bounds violation resulting from the
parameter pertubation calculations |
def from_payload(self, payload):
"""Init frame from binary data."""
self.session_id = payload[0]*256 + payload[1]
self.originator = Originator(payload[2])
self.priority = Priority(payload[3])
self.scene_id = payload[4]
self.velocity = Velocity(payload[5]) | Init frame from binary data. |
def _transform_list_of_states_to_state(self, state: List[int]) -> State:
"""
Private method which transform a list which contains the state of the gene
in the models to a State object.
Examples
--------
The model contains 2 genes: operon = {0, 1, 2}
... | Private method which transform a list which contains the state of the gene
in the models to a State object.
Examples
--------
The model contains 2 genes: operon = {0, 1, 2}
mucuB = {0, 1}
>>> graph._transform_list_of_states_to_dict_of_states(... |
def rtgen_family(self, value):
"""Family setter."""
self.bytearray[self._get_slicers(0)] = bytearray(c_ubyte(value or 0)) | Family setter. |
def make_remote_view(data, settings, more_excluded_names=None):
"""
Make a remote view of dictionary *data*
-> globals explorer
"""
data = get_remote_data(data, settings, mode='editable',
more_excluded_names=more_excluded_names)
remote = {}
for key, value in list(d... | Make a remote view of dictionary *data*
-> globals explorer |
def atmost(cls, lits, weights=None, bound=1, top_id=None,
encoding=EncType.best):
"""
A synonim for :meth:`PBEnc.leq`.
"""
return cls.leq(lits, weights, bound, top_id, encoding) | A synonim for :meth:`PBEnc.leq`. |
def register_new_suffix_tree(case_insensitive=False):
"""Factory method, returns new suffix tree object.
"""
assert isinstance(case_insensitive, bool)
root_node = register_new_node()
suffix_tree_id = uuid4()
event = SuffixTree.Created(
originator_id=suffix_tree_id,
root_node_id=... | Factory method, returns new suffix tree object. |
def required(field):
"""Decorator that checks if return value is set, if not, raises exception.
"""
def wrap(f):
def wrappedf(*args):
result = f(*args)
if result is None or result == "":
raise Exception(
"Config option '%s' is required." %... | Decorator that checks if return value is set, if not, raises exception. |
def addContext(self, layer, hiddenLayerName = 'hidden', verbosity = 0):
"""
Adds a context layer. Necessary to keep self.contextLayers dictionary up to date.
"""
# better not add context layer first if using sweep() without mapInput
SRN.add(self, layer, verbosity)
if hid... | Adds a context layer. Necessary to keep self.contextLayers dictionary up to date. |
def reorient(image, orientation):
"""Return reoriented view of image array.
Parameters
----------
image : numpy array
Non-squeezed output of asarray() functions.
Axes -3 and -2 must be image length and width respectively.
orientation : int or str
One of TIFF_ORIENTATIONS key... | Return reoriented view of image array.
Parameters
----------
image : numpy array
Non-squeezed output of asarray() functions.
Axes -3 and -2 must be image length and width respectively.
orientation : int or str
One of TIFF_ORIENTATIONS keys or values. |
def _is_child_wikicode(self, obj, recursive=True):
"""Return whether the given :class:`.Wikicode` is a descendant."""
def deref(nodes):
if isinstance(nodes, _ListProxy):
return nodes._parent # pylint: disable=protected-access
return nodes
target = deref(... | Return whether the given :class:`.Wikicode` is a descendant. |
def solve_chol(A,B):
"""
Solve cholesky decomposition::
return A\(A'\B)
"""
# X = linalg.solve(A,linalg.solve(A.transpose(),B))
# much faster version
X = linalg.cho_solve((A, True), B)
return X | Solve cholesky decomposition::
return A\(A'\B) |
def chhome(name, home, **kwargs):
'''
Change the home directory of the user
CLI Example:
.. code-block:: bash
salt '*' user.chhome foo /Users/foo
'''
kwargs = salt.utils.args.clean_kwargs(**kwargs)
persist = kwargs.pop('persist', False)
if kwargs:
salt.utils.args.inval... | Change the home directory of the user
CLI Example:
.. code-block:: bash
salt '*' user.chhome foo /Users/foo |
def extrema(self, x0, y0, w, h):
"""
Returns the minimum and maximum values contained in a given area.
:param x0: Starting x index.
:param y0: Starting y index.
:param w: Width of the area to scan.
:param h: Height of the area to scan.
:return: Tuple containi... | Returns the minimum and maximum values contained in a given area.
:param x0: Starting x index.
:param y0: Starting y index.
:param w: Width of the area to scan.
:param h: Height of the area to scan.
:return: Tuple containing the minimum and maximum values of the given area. |
def rooms_favorite(self, room_id=None, room_name=None, favorite=True):
"""Favorite or unfavorite room."""
if room_id is not None:
return self.__call_api_post('rooms.favorite', roomId=room_id, favorite=favorite)
elif room_name is not None:
return self.__call_api_post('room... | Favorite or unfavorite room. |
def reference_contexts_for_variants(
variants,
context_size,
transcript_id_whitelist=None):
"""
Extract a set of reference contexts for each variant in the collection.
Parameters
----------
variants : varcode.VariantCollection
context_size : int
Max of nucleotid... | Extract a set of reference contexts for each variant in the collection.
Parameters
----------
variants : varcode.VariantCollection
context_size : int
Max of nucleotides to include to the left and right of the variant
in the context sequence.
transcript_id_whitelist : set, optional... |
def get_mesh_assets_by_site(self):
"""
:returns: (Mesh instance, assets_by_site list)
"""
assets_by_loc = general.groupby(self, key=lambda a: a.location)
mesh = geo.Mesh.from_coords(list(assets_by_loc))
assets_by_site = [
assets_by_loc[lonlat] for lonlat in zi... | :returns: (Mesh instance, assets_by_site list) |
def extract_archive(client, archive_path, extract_path=None):
"""
Extract the archive in current path using the provided client.
If extract_path is provided extract the archive there.
"""
command = 'tar -xf {path}'.format(path=archive_path)
if extract_path:
command += ' -C {extract_pat... | Extract the archive in current path using the provided client.
If extract_path is provided extract the archive there. |
def get_group_metadata(self):
"""Gets the metadata for a group.
return: (osid.Metadata) - metadata for the group
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template
metada... | Gets the metadata for a group.
return: (osid.Metadata) - metadata for the group
*compliance: mandatory -- This method must be implemented.* |
def dec(data, **kwargs):
'''
Alias to `{box_type}_decrypt`
box_type: secretbox, sealedbox(default)
'''
kwargs['opts'] = __opts__
return salt.utils.nacl.dec(data, **kwargs) | Alias to `{box_type}_decrypt`
box_type: secretbox, sealedbox(default) |
def dispatch(self, request, *args, **kwargs):
"""
Overrides Django's default dispatch to provide caching.
If the should_cache method returns True, this will call
two functions get_cache_version and get_cache_prefix
the results of those two functions are combined and passed to
... | Overrides Django's default dispatch to provide caching.
If the should_cache method returns True, this will call
two functions get_cache_version and get_cache_prefix
the results of those two functions are combined and passed to
the standard django caching middleware. |
def shewhart(self, data: ['SASdata', str] = None,
boxchart: str = None,
cchart: str = None,
irchart: str = None,
mchart: str = None,
mrchart: str = None,
npchart: str = None,
pchart: str = None,
... | Python method to call the SHEWHART procedure
Documentation link:
https://go.documentation.sas.com/?cdcId=pgmsascdc&cdcVersion=9.4_3.4&docsetId=qcug&docsetTarget=qcug_shewhart_toc.htm&locale=en
:param data: SASdata object or string. This parameter is required.
:parm boxchart: The boxcha... |
def diff_medians(array_one, array_two):
"""
Computes the difference in medians between two arrays of values.
Given arrays will be flattened (to 1D array) regardless of dimension,
and any non-finite/NaN values will be ignored.
Parameters
----------
array_one, array_two : iterable
... | Computes the difference in medians between two arrays of values.
Given arrays will be flattened (to 1D array) regardless of dimension,
and any non-finite/NaN values will be ignored.
Parameters
----------
array_one, array_two : iterable
Two arrays of values, possibly of different length... |
def multi_future(
children: Union[List[_Yieldable], Dict[Any, _Yieldable]],
quiet_exceptions: "Union[Type[Exception], Tuple[Type[Exception], ...]]" = (),
) -> "Union[Future[List], Future[Dict]]":
"""Wait for multiple asynchronous futures in parallel.
Since Tornado 6.0, this function is exactly the same... | Wait for multiple asynchronous futures in parallel.
Since Tornado 6.0, this function is exactly the same as `multi`.
.. versionadded:: 4.0
.. versionchanged:: 4.2
If multiple ``Futures`` fail, any exceptions after the first (which is
raised) will be logged. Added the ``quiet_exceptions``
... |
def _get_description(self):
"""
Tries to get WF description from 'collabration' or 'process' or 'pariticipant'
Returns:
"""
ns = {'ns': '{%s}' % BPMN_MODEL_NS}
desc = (
self.doc_xpath('.//{ns}collaboration/{ns}documentation'.format(**ns)) or
self.... | Tries to get WF description from 'collabration' or 'process' or 'pariticipant'
Returns: |
def serialize_seeds(seeds, block):
"""
Serialize the seeds in peer instruction XBlock to xml
Args:
seeds (lxml.etree.Element): The <seeds> XML element.
block (PeerInstructionXBlock): The XBlock with configuration to serialize.
Returns:
None
"""
for seed_dict in block.se... | Serialize the seeds in peer instruction XBlock to xml
Args:
seeds (lxml.etree.Element): The <seeds> XML element.
block (PeerInstructionXBlock): The XBlock with configuration to serialize.
Returns:
None |
def _interfaces_removed(self, object_path, interfaces):
"""Internal method."""
old_state = copy(self._objects[object_path])
for interface in interfaces:
del self._objects[object_path][interface]
new_state = self._objects[object_path]
if Interface['Drive'] in interfac... | Internal method. |
def makediagram(edges):
"""make the diagram with the edges"""
graph = pydot.Dot(graph_type='digraph')
nodes = edges2nodes(edges)
epnodes = [(node,
makeanode(node[0])) for node in nodes if nodetype(node)=="epnode"]
endnodes = [(node,
makeendnode(node[0])) for node in nodes if nodety... | make the diagram with the edges |
def _evaluate(self):
"""Lazily retrieve and paginate report results and build Record instances from returned data"""
if self._elements:
for element in self._elements:
yield element
else:
for page in itertools.count():
raw_elements = self._r... | Lazily retrieve and paginate report results and build Record instances from returned data |
def sendRemoteVoiceClips(
self, clip_urls, message=None, thread_id=None, thread_type=ThreadType.USER
):
"""
Sends voice clips from URLs to a thread
:param clip_urls: URLs of clips to upload and send
:param message: Additional message
:param thread_id: User/Group ID t... | Sends voice clips from URLs to a thread
:param clip_urls: URLs of clips to upload and send
:param message: Additional message
:param thread_id: User/Group ID to send to. See :ref:`intro_threads`
:param thread_type: See :ref:`intro_threads`
:type thread_type: models.ThreadType
... |
def duel_command(f):
""" indicate it's a command need to be called on both SP
:param f: function that returns the command in list
:return: command execution result on both sps (tuple of 2)
"""
@functools.wraps(f)
def func_wrapper(self, *argv, **kwargs):
commands = _get_commands(f, self... | indicate it's a command need to be called on both SP
:param f: function that returns the command in list
:return: command execution result on both sps (tuple of 2) |
def create(filename: str, layers: Union[np.ndarray, Dict[str, np.ndarray], loompy.LayerManager], row_attrs: Union[loompy.AttributeManager, Dict[str, np.ndarray]], col_attrs: Union[loompy.AttributeManager, Dict[str, np.ndarray]], *, file_attrs: Dict[str, str] = None) -> None:
"""
Create a new Loom file from the given ... | Create a new Loom file from the given data.
Args:
filename (str): The filename (typically using a ``.loom`` file extension)
layers: One of the following:
* Two-dimensional (N-by-M) numpy ndarray of float values
* Sparse matrix (e.g. :class:`scipy.sparse.csr_matrix`)
* Dictiona... |
def confirmation_pdf(self, confirmation_id):
"""
Opens a pdf of a confirmation
:param confirmation_id: the confirmation id
:return: dict
"""
return self._create_get_request(resource=CONFIRMATIONS, billomat_id=confirmation_id, command=PDF) | Opens a pdf of a confirmation
:param confirmation_id: the confirmation id
:return: dict |
def _get_externalcmd_stats(self, app_stats):
"""
Process:
* high_external_command_buffer_slots
* total_external_command_buffer_slots
* used_external_command_buffer_slots
* external_command_stats=
"""
khigh = "high_external_command_buffer_slots"
... | Process:
* high_external_command_buffer_slots
* total_external_command_buffer_slots
* used_external_command_buffer_slots
* external_command_stats= |
def server():
"""Runs the server"""
tornado.log.enable_pretty_logging()
# Get and validate the server_type
server_type = oz.settings["server_type"]
if server_type not in [None, "wsgi", "asyncio", "twisted"]:
raise Exception("Unknown server type: %s" % server_type)
# Install the correc... | Runs the server |
def p_portfolio(I,sigma,r,alpha,beta):
"""p_portfolio -- modified markowitz model for portfolio optimization.
Parameters:
- I: set of items
- sigma[i]: standard deviation of item i
- r[i]: revenue of item i
- alpha: acceptance threshold
- beta: desired confidence level
... | p_portfolio -- modified markowitz model for portfolio optimization.
Parameters:
- I: set of items
- sigma[i]: standard deviation of item i
- r[i]: revenue of item i
- alpha: acceptance threshold
- beta: desired confidence level
Returns a model, ready to be solved. |
def generate_project(self):
"""
Generate the whole project. Returns True if at least one
file has been generated, False otherwise."""
# checks needed properties
if not self.name or not self.destdir or \
not os.path.isdir(self.destdir):
raise ValueError(... | Generate the whole project. Returns True if at least one
file has been generated, False otherwise. |
def _get_asym_comb(self,d):
"""
Find the combined asymmetry for slr runs. Elegant 4-counter method.
"""
# get data
d0 = d[0]; d1 = d[2]; d2 = d[1]; d3 = d[3]
# pre-calcs
r_denom = d0*d3
r_denom[r_denom==0] = np.nan
r = np.sqrt((d1... | Find the combined asymmetry for slr runs. Elegant 4-counter method. |
def recognize(self,
audio,
model=None,
language_customization_id=None,
acoustic_customization_id=None,
base_model_version=None,
customization_weight=None,
inactivity_timeout=None,
... | Recognize audio.
Sends audio and returns transcription results for a recognition request. You can
pass a maximum of 100 MB and a minimum of 100 bytes of audio with a request. The
service automatically detects the endianness of the incoming audio and, for audio
that includes multiple cha... |
def _return_pub_syndic(self, values, master_id=None):
'''
Wrapper to call the '_return_pub_multi' a syndic, best effort to get the one you asked for
'''
func = '_return_pub_multi'
for master, syndic_future in self.iter_master_options(master_id):
if not syndic_future.d... | Wrapper to call the '_return_pub_multi' a syndic, best effort to get the one you asked for |
def get_client_ip(request):
"""
Get the client IP from the request
"""
# set the default value of the ip to be the REMOTE_ADDR if available
# else None
ip = request.META.get('REMOTE_ADDR')
# try to get the first non-proxy ip (not a private ip) from the
# HTTP_X_FORWARDED_FOR
x_forwar... | Get the client IP from the request |
def update_sensors(self):
"""
Check path for each sensor and record wall proximity
"""
assert isinstance(self.player.cshape.center, eu.Vector2)
pos = self.player.cshape.center
a = math.radians(self.player.rotation)
for sensor in self.player.sensors:
s... | Check path for each sensor and record wall proximity |
def columns_equal(a: Column, b: Column) -> bool:
"""
Are two SQLAlchemy columns are equal? Checks based on:
- column ``name``
- column ``type`` (see :func:`column_types_equal`)
- ``nullable``
"""
return (
a.name == b.name and
column_types_equal(a.type, b.type) and
a.... | Are two SQLAlchemy columns are equal? Checks based on:
- column ``name``
- column ``type`` (see :func:`column_types_equal`)
- ``nullable`` |
def get_callable_from_line(self, module_file, lineno):
"""Get the callable that the line number belongs to."""
module_name = _get_module_name_from_fname(module_file)
if module_name not in self._modules_dict:
self.trace([module_file])
ret = None
# Sort callables by sta... | Get the callable that the line number belongs to. |
def resolve_selector(self):
"""Resolve the selector variable in place
"""
effective_selector_list = []
for current_selector in self._selector_list:
# INLINE SELECTOR
if self.get_type(current_selector) != 'selector_variable':
effective_selector_li... | Resolve the selector variable in place |
def print_tree(self, *, verbose=True):
"""Print a ascii-formatted tree representation of the data contents."""
print("{0} ({1})".format(self.natural_name, self.filepath))
self._print_branch("", depth=0, verbose=verbose) | Print a ascii-formatted tree representation of the data contents. |
def _bsecurate_cli_compare_basis_sets(args):
'''Handles compare-basis-sets subcommand'''
ret = curate.compare_basis_sets(args.basis1, args.basis2, args.version1, args.version2, args.uncontract_general,
args.data_dir, args.data_dir)
if ret:
return "No difference found"
else:
ret... | Handles compare-basis-sets subcommand |
def checkForDeadlocks(self):
"""
Checks if the system is deadlocked running service jobs.
"""
totalRunningJobs = len(self.batchSystem.getRunningBatchJobIDs())
totalServicesIssued = self.serviceJobsIssued + self.preemptableServiceJobsIssued
# If there are no updated jobs a... | Checks if the system is deadlocked running service jobs. |
def __get_query_filters(cls, filters={}, inverse=False):
"""
Convert a dict with the filters to be applied ({"name1":"value1", "name2":"value2"})
to a list of query objects which can be used together in a query using boolean
combination logic.
:param filters: dict with the filte... | Convert a dict with the filters to be applied ({"name1":"value1", "name2":"value2"})
to a list of query objects which can be used together in a query using boolean
combination logic.
:param filters: dict with the filters to be applied
:param inverse: if True include all the inverse filt... |
def indent_selection(self, cursor):
"""
Indent selected text
:param cursor: QTextCursor
"""
doc = self.editor.document()
tab_len = self.editor.tab_length
cursor.beginEditBlock()
nb_lines = len(cursor.selection().toPlainText().splitlines())
c = sel... | Indent selected text
:param cursor: QTextCursor |
def get_token_network(
self,
token_address: TokenAddress,
block_identifier: BlockSpecification = 'latest',
) -> Optional[Address]:
""" Return the token network address for the given token or None if
there is no correspoding address.
"""
if not isin... | Return the token network address for the given token or None if
there is no correspoding address. |
def build_cfg(cls, node):
"""Build a CFG for a function.
Args:
node: A function definition the body of which to analyze.
Returns:
A CFG object.
Raises:
TypeError: If the input is not a function definition.
"""
if not isinstance(node, gast.FunctionDef):
raise TypeError(... | Build a CFG for a function.
Args:
node: A function definition the body of which to analyze.
Returns:
A CFG object.
Raises:
TypeError: If the input is not a function definition. |
def fit(self, dataset):
"""
Computes the inverse document frequency.
:param dataset: an RDD of term frequency vectors
"""
if not isinstance(dataset, RDD):
raise TypeError("dataset should be an RDD of term frequency vectors")
jmodel = callMLlibFunc("fitIDF", s... | Computes the inverse document frequency.
:param dataset: an RDD of term frequency vectors |
def _update(self):
"""Rebuilds the shaders, and repositions the objects
that are used internally by the ColorBarVisual
"""
x, y = self._pos
halfw, halfh = self._halfdim
# test that width and height are non-zero
if halfw <= 0:
raise ValueError("hal... | Rebuilds the shaders, and repositions the objects
that are used internally by the ColorBarVisual |
def make_single_array(ds, batch_size=8*1024):
"""Create a single numpy array from a dataset.
The dataset must have only one dimension, that is,
the length of its `output_shapes` and `output_types`
is 1, and its output shape must be `[]`, that is,
every tensor in the dataset must be a scalar.
A... | Create a single numpy array from a dataset.
The dataset must have only one dimension, that is,
the length of its `output_shapes` and `output_types`
is 1, and its output shape must be `[]`, that is,
every tensor in the dataset must be a scalar.
Args:
ds: a TF Dataset.
batch_size: how ... |
def printOptions(options, tm, outFile):
  """
  Pretty print the set of options

  NOTE(review): Python 2 only — uses the ``print >>file`` statement and
  ``dict.iteritems``; will not parse under Python 3.

  :param options: options/namespace object; every attribute in its
      ``__dict__`` is printed as an "  key : value" line
  :param tm: temporal memory instance, handed to ``printTemporalMemory``
      (defined elsewhere — presumably prints the TM's parameter summary)
  :param outFile: writable file-like object; flushed before returning
  """
  # TM parameter summary first, then the experiment options.
  print >>outFile, "TM parameters:"
  printTemporalMemory(tm, outFile)
  print >>outFile, "Experiment parameters:"
  # One "  key : value" line per option attribute.
  for k,v in options.__dict__.iteritems():
    print >>outFile, "  %s : %s" % (k,str(v))
  # Flush so output appears immediately (useful when redirected to a log).
  outFile.flush()
def get_next_value(
sequence_name='default', initial_value=1, reset_value=None,
*, nowait=False, using=None):
"""
Return the next value for a given sequence.
"""
# Inner import because models cannot be imported before their application.
from .models import Sequence
if reset_val... | Return the next value for a given sequence. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.