positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def get_or_create_environment(self, repo: str, branch: str, git_repo: Repo, repo_path: Path) -> str:
""" Returns the path to the current Python executable.
"""
return sys.executable | Returns the path to the current Python executable. |
def commit(
self,
confirm=False,
confirm_delay=None,
check=False,
comment="",
and_quit=False,
delay_factor=1,
):
"""
Commit the candidate configuration.
Commit the entered configuration. Raise an error and return the failure
if... | Commit the candidate configuration.
Commit the entered configuration. Raise an error and return the failure
if the commit fails.
Automatically enters configuration mode
default:
command_string = commit
check and (confirm or confirm_dely or comment):
Exc... |
def _do_load(
self, data, many=None, partial=None, unknown=None,
postprocess=True,
):
"""Deserialize `data`, returning the deserialized result.
:param data: The data to deserialize.
:param bool many: Whether to deserialize `data` as a collection. If `None`, the
v... | Deserialize `data`, returning the deserialized result.
:param data: The data to deserialize.
:param bool many: Whether to deserialize `data` as a collection. If `None`, the
value for `self.many` is used.
:param bool|tuple partial: Whether to validate required fields. If its value is... |
def _where(self, filter_fn):
''' use this to filter VLists, simply provide a filter function to filter the current found objects '''
assert callable(filter_fn), 'filter_fn needs to be callable'
return VList(i for i in self if filter_fn(i())) | use this to filter VLists, simply provide a filter function to filter the current found objects |
def dump_model(self, num_iteration=None, start_iteration=0):
"""Dump Booster to JSON format.
Parameters
----------
num_iteration : int or None, optional (default=None)
Index of the iteration that should be dumped.
If None, if the best iteration exists, it is dump... | Dump Booster to JSON format.
Parameters
----------
num_iteration : int or None, optional (default=None)
Index of the iteration that should be dumped.
If None, if the best iteration exists, it is dumped; otherwise, all iterations are dumped.
If <= 0, all itera... |
def replace(oldEl, newEl):
# type: (Union[Rule, _RuleConnectable], Union[Rule, _RuleConnectable]) -> Union[Rule, _RuleConnectable]
"""
Replace element in the parsed tree. Can be nonterminal, terminal or rule.
:param oldEl: Element already in the tree.
:param newEl: Element to rep... | Replace element in the parsed tree. Can be nonterminal, terminal or rule.
:param oldEl: Element already in the tree.
:param newEl: Element to replace with.
:return: New element attached to the tree. |
def _set_flag(self, flag):
"""Turns the specified flag on"""
self.folder._invalidate_cache()
# TODO::: turn the flag off when it's already on
def replacer(m):
return "%s/%s.%s%s" % (
joinpath(self.folder.base, self.folder.folder, "cur"),
m.grou... | Turns the specified flag on |
def nucleotide_linkage(residues):
"""Support for DNA/RNA ligands by finding missing covalent linkages to stitch DNA/RNA together."""
nuc_covalent = []
#######################################
# Basic support for RNA/DNA as ligand #
#######################################
nucleotides = ['A', 'C',... | Support for DNA/RNA ligands by finding missing covalent linkages to stitch DNA/RNA together. |
def _xread(self, streams, timeout=0, count=None, latest_ids=None):
"""Wraps up common functionality between ``xread()``
and ``xread_group()``
You should probably be using ``xread()`` or ``xread_group()`` directly.
"""
if latest_ids is None:
latest_ids = ['$'] * len(s... | Wraps up common functionality between ``xread()``
and ``xread_group()``
You should probably be using ``xread()`` or ``xread_group()`` directly. |
def decompress_from_curve(self, x, flag):
"""
calculate the y coordinate given only the x value.
there are 2 possible solutions, use 'flag' to select.
"""
cq = self.field.p
x = self.field.value(x)
ysquare = x ** 3 + self.a * x + self.b
ysquare_root = sq... | calculate the y coordinate given only the x value.
there are 2 possible solutions, use 'flag' to select. |
def int2str(num, radix=10, alphabet=BASE85):
"""helper function for quick base conversions from integers to strings"""
return NumConv(radix, alphabet).int2str(num) | helper function for quick base conversions from integers to strings |
def im_json_to_graph(im_json):
"""Return networkx graph from Kappy's influence map JSON.
Parameters
----------
im_json : dict
A JSON dict which contains an influence map generated by Kappy.
Returns
-------
graph : networkx.MultiDiGraph
A graph representing the influence map... | Return networkx graph from Kappy's influence map JSON.
Parameters
----------
im_json : dict
A JSON dict which contains an influence map generated by Kappy.
Returns
-------
graph : networkx.MultiDiGraph
A graph representing the influence map. |
def install(cls):
"""Create the required directories in the home directory"""
[os.makedirs('{}/{}'.format(cls.home, cls.dirs[d])) for d in cls.dirs] | Create the required directories in the home directory |
def toggle_wrap_mode(self, checked):
"""Toggle wrap mode"""
self.plain_text.editor.toggle_wrap_mode(checked)
self.set_option('wrap', checked) | Toggle wrap mode |
def join_json_files(prefix):
"""Join different REACH output JSON files into a single JSON object.
The output of REACH is broken into three files that need to be joined
before processing. Specifically, there will be three files of the form:
`<prefix>.uaz.<subcategory>.json`.
Parameters
--------... | Join different REACH output JSON files into a single JSON object.
The output of REACH is broken into three files that need to be joined
before processing. Specifically, there will be three files of the form:
`<prefix>.uaz.<subcategory>.json`.
Parameters
----------
prefix : str
The abso... |
def _readsie(self, pos):
"""Return interpretation of next bits as a signed interleaved exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
codenum, pos = self._readui... | Return interpretation of next bits as a signed interleaved exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code. |
def onUserError(self, fail, message):
"""
Handle user errors
"""
self.log.error(fail)
self.log.error(message) | Handle user errors |
def load_obj(fn):
"""Load 3d mesh form .obj' file.
Args:
fn: Input file name or file-like object.
Returns:
dictionary with the following keys (some of which may be missing):
position: np.float32, (n, 3) array, vertex positions
uv: np.float32, (n, 2) array, vertex uv coordinates
n... | Load 3d mesh form .obj' file.
Args:
fn: Input file name or file-like object.
Returns:
dictionary with the following keys (some of which may be missing):
position: np.float32, (n, 3) array, vertex positions
uv: np.float32, (n, 2) array, vertex uv coordinates
normal: np.float32, (n, ... |
def get_model(sender, model_name, model_inst, model_info, model_config):
"""
#todo Add objcache support
"""
MC = get_mc()
if MC:
model = MC.get((MC.c.model_name==model_name) & (MC.c.uuid!=''))
if model:
cached_inst = __cache__.get(model_name)
if not cached_ins... | #todo Add objcache support |
def bind_key(pymux, variables):
"""
Bind a key sequence.
-n: Not necessary to use the prefix.
"""
key = variables['<key>']
command = variables['<command>']
arguments = variables['<arguments>']
needs_prefix = not variables['-n']
try:
pymux.key_bindings_manager.add_custom_bind... | Bind a key sequence.
-n: Not necessary to use the prefix. |
def create_es(self):
"""Create an ES (intermediate) file for this BAM file.
This is the function which asses if an alignment is correct
"""
with (gzip.open(self._es_fn, "tw+") if self.compress_intermediate_files else open(self._es_fn, "w+")) as es_fo:
self.bam2es(
bam_fn... | Create an ES (intermediate) file for this BAM file.
This is the function which asses if an alignment is correct |
def get_artist(self, id_):
"""Data for a specific artist."""
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint) | Data for a specific artist. |
def objects_to_record(self, preference=None):
"""Create file records from objects. """
from ambry.orm.file import File
raise NotImplementedError("Still uses obsolete file_info_map")
for file_const, (file_name, clz) in iteritems(file_info_map):
f = self.file(file_const)
... | Create file records from objects. |
def restore_defaults_ratio(self):
"""Restore InaSAFE default ratio."""
# Set the flag to true because user ask to.
self.is_restore_default = True
# remove current default ratio
for i in reversed(list(range(self.container_layout.count()))):
widget = self.container_layo... | Restore InaSAFE default ratio. |
def _get_suffix(path):
"""
Return suffix from `path`.
``/home/xex/somefile.txt`` --> ``txt``.
Args:
path (str): Full file path.
Returns:
str: Suffix.
Raises:
UserWarning: When ``/`` is detected in suffix.
"""
suffix = os.path.basename(path).split(".")[-1]
... | Return suffix from `path`.
``/home/xex/somefile.txt`` --> ``txt``.
Args:
path (str): Full file path.
Returns:
str: Suffix.
Raises:
UserWarning: When ``/`` is detected in suffix. |
def log_action(self, instance, action, action_date=None, url="",
update_parent=True):
"""
Store an action in the database using the CMSLog model.
The following attributes are calculated and set on the log entry:
* **model_repr** - A unicode representation of the inst... | Store an action in the database using the CMSLog model.
The following attributes are calculated and set on the log entry:
* **model_repr** - A unicode representation of the instance.
* **object_repr** - The verbose_name of the instance model class.
* **section** - The name of ancesto... |
def _xml_escape_attr(attr, skip_single_quote=True):
"""Escape the given string for use in an HTML/XML tag attribute.
By default this doesn't bother with escaping `'` to `'`, presuming that
the tag attribute is surrounded by double quotes.
"""
escaped = (attr
.replace('&', '&')
... | Escape the given string for use in an HTML/XML tag attribute.
By default this doesn't bother with escaping `'` to `'`, presuming that
the tag attribute is surrounded by double quotes. |
def focusOutEvent(self, event):
"""Reimplement Qt method to close the widget when loosing focus."""
event.ignore()
# Inspired from CompletionWidget.focusOutEvent() in file
# widgets/sourcecode/base.py line 212
if sys.platform == "darwin":
if event.reason() != Qt... | Reimplement Qt method to close the widget when loosing focus. |
def sieve(cache, segment=None):
"""Filter the cache to find those entries that overlap ``segment``
Parameters
----------
cache : `list`
Input list of file paths
segment : `~gwpy.segments.Segment`
The ``[start, stop)`` interval to match against.
"""
return type(cache)(e for ... | Filter the cache to find those entries that overlap ``segment``
Parameters
----------
cache : `list`
Input list of file paths
segment : `~gwpy.segments.Segment`
The ``[start, stop)`` interval to match against. |
def write_quick(self):
"""
Send only the read / write bit
"""
self.bus.write_quick(self.address)
self.log.debug("write_quick: Sent the read / write bit") | Send only the read / write bit |
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure that a pagerduty schedule exists.
This method accepts as args everything defined in
https://developer.pagerduty.com/documentation/rest/schedules/create.
This means that most arguments are in a dict called "schedule.... | Ensure that a pagerduty schedule exists.
This method accepts as args everything defined in
https://developer.pagerduty.com/documentation/rest/schedules/create.
This means that most arguments are in a dict called "schedule."
User id's can be pagerduty id, or name, or email address. |
def bovy_ars(domain,isDomainFinite,abcissae,hx,hpx,nsamples=1,
hxparams=(),maxn=100):
"""bovy_ars: Implementation of the Adaptive-Rejection Sampling
algorithm by Gilks & Wild (1992): Adaptive Rejection Sampling
for Gibbs Sampling, Applied Statistics, 41, 337
Based on Wild & Gilks (1993), Al... | bovy_ars: Implementation of the Adaptive-Rejection Sampling
algorithm by Gilks & Wild (1992): Adaptive Rejection Sampling
for Gibbs Sampling, Applied Statistics, 41, 337
Based on Wild & Gilks (1993), Algorithm AS 287: Adaptive Rejection
Sampling from Log-concave Density Functions, Applied Statistics, 42... |
def meta_group(self, meta, meta_aggregates=None):
"""
*Wrapper of* ``GROUP``
Group operation only for metadata. For further information check :meth:`~.group`
"""
return self.group(meta=meta, meta_aggregates=meta_aggregates) | *Wrapper of* ``GROUP``
Group operation only for metadata. For further information check :meth:`~.group` |
def GetAttachmentCollection(self, _id):
"""Get Attachments for given List Item ID"""
# Build Request
soap_request = soap('GetAttachmentCollection')
soap_request.add_parameter('listName', self.listName)
soap_request.add_parameter('listItemID', _id)
self.last_request = str... | Get Attachments for given List Item ID |
def log_likelihood(self, y, _const=math.log(2.0*math.pi), quiet=False):
"""
Compute the marginalized likelihood of the GP model
The factorized matrix from the previous call to :func:`GP.compute` is
used so ``compute`` must be called first.
Args:
y (array[n]): The ob... | Compute the marginalized likelihood of the GP model
The factorized matrix from the previous call to :func:`GP.compute` is
used so ``compute`` must be called first.
Args:
y (array[n]): The observations at coordinates ``x`` from
:func:`GP.compute`.
quiet (... |
def set_mode(self, anchor_id, mode):
"""
Send a packet to set the anchor mode. If the anchor receive the packet,
it will change mode and resets.
"""
data = struct.pack('<BB', LoPoAnchor.LPP_TYPE_MODE, mode)
self.crazyflie.loc.send_short_lpp_packet(anchor_id, data) | Send a packet to set the anchor mode. If the anchor receive the packet,
it will change mode and resets. |
def sendUssd(self, ussdString, responseTimeout=15):
""" Starts a USSD session by dialing the the specified USSD string, or \
sends the specified string in the existing USSD session (if any)
:param ussdString: The USSD access number to dial
:param responseTimeout: Maximum... | Starts a USSD session by dialing the the specified USSD string, or \
sends the specified string in the existing USSD session (if any)
:param ussdString: The USSD access number to dial
:param responseTimeout: Maximum time to wait a response, in seconds
:raise Tim... |
def screensaver():
'''
Launch the screensaver.
CLI Example:
.. code-block:: bash
salt '*' desktop.screensaver
'''
cmd = 'open /System/Library/Frameworks/ScreenSaver.framework/Versions/A/Resources/ScreenSaverEngine.app'
call = __salt__['cmd.run_all'](
cmd,
output_lo... | Launch the screensaver.
CLI Example:
.. code-block:: bash
salt '*' desktop.screensaver |
def _margtimephase_loglr(self, mf_snr, opt_snr):
"""Returns the log likelihood ratio marginalized over time and phase.
"""
return special.logsumexp(numpy.log(special.i0(mf_snr)),
b=self._deltat) - 0.5*opt_snr | Returns the log likelihood ratio marginalized over time and phase. |
def _prepare_discharge_hook(req, client):
''' Return the hook function (called when the response is received.)
This allows us to intercept the response and do any necessary
macaroon discharge before returning.
'''
class Retry:
# Define a local class so that we can use its class variable as
... | Return the hook function (called when the response is received.)
This allows us to intercept the response and do any necessary
macaroon discharge before returning. |
def _safe_output(line):
'''
Looks for rabbitmqctl warning, or general formatting, strings that aren't
intended to be parsed as output.
Returns a boolean whether the line can be parsed as rabbitmqctl output.
'''
return not any([
line.startswith('Listing') and line.endswith('...'),
... | Looks for rabbitmqctl warning, or general formatting, strings that aren't
intended to be parsed as output.
Returns a boolean whether the line can be parsed as rabbitmqctl output. |
def set_coupl_old(self):
""" Using the adjacency matrix, sample a coupling matrix.
"""
if self.model == 'krumsiek11' or self.model == 'var':
# we already built the coupling matrix in set_coupl20()
return
self.Coupl = np.zeros((self.dim,self.dim))
for i in ... | Using the adjacency matrix, sample a coupling matrix. |
def table_ensure(cls, rr):
'''
Creates the table if it doesn't exist.
'''
dbs = rr.db_list().run()
if not rr.dbname in dbs:
logging.info('creating rethinkdb database %s', repr(rr.dbname))
rr.db_create(rr.dbname).run()
tables = rr.table_list().run()... | Creates the table if it doesn't exist. |
def get_project(id=None, name=None):
"""
Get a specific Project by ID or name
"""
content = get_project_raw(id, name)
if content:
return utils.format_json(content) | Get a specific Project by ID or name |
def ReadGRRUser(self, username, cursor=None):
"""Reads a user object corresponding to a given name."""
cursor.execute(
"SELECT username, password, ui_mode, canary_mode, user_type "
"FROM grr_users WHERE username_hash = %s", [mysql_utils.Hash(username)])
row = cursor.fetchone()
if row is... | Reads a user object corresponding to a given name. |
def refresh(self)->None:
"Apply any logit, flow, or affine transfers that have been sent to the `Image`."
if self._logit_px is not None:
self._px = self._logit_px.sigmoid_()
self._logit_px = None
if self._affine_mat is not None or self._flow is not None:
self.... | Apply any logit, flow, or affine transfers that have been sent to the `Image`. |
def enable_host_svc_notifications(self, host):
"""Enable services notifications for a host
Format of the line that triggers function call::
ENABLE_HOST_SVC_NOTIFICATIONS;<host_name>
:param host: host to edit
:type host: alignak.objects.host.Host
:return: None
""... | Enable services notifications for a host
Format of the line that triggers function call::
ENABLE_HOST_SVC_NOTIFICATIONS;<host_name>
:param host: host to edit
:type host: alignak.objects.host.Host
:return: None |
def compile_action_bound_constraints(self,
state: Sequence[tf.Tensor]) -> Dict[str, Bounds]:
'''Compiles all actions bounds for the given `state`.
Args:
state (Sequence[tf.Tensor]): The current state fluents.
Returns:
A mapping from action names to a pair of... | Compiles all actions bounds for the given `state`.
Args:
state (Sequence[tf.Tensor]): The current state fluents.
Returns:
A mapping from action names to a pair of
:obj:`rddl2tf.fluent.TensorFluent` representing
its lower and upper bounds. |
def iscontainer(*items):
"""
Checks whether all the provided items are containers (i.e of class list,
dict, tuple, etc...)
"""
return all(isinstance(i, Iterable) and not isinstance(i, basestring)
for i in items) | Checks whether all the provided items are containers (i.e of class list,
dict, tuple, etc...) |
def minimum_geometries(self, n=None, symmetry_measure_type=None, max_csm=None):
"""
Returns a list of geometries with increasing continuous symmetry measure in this ChemicalEnvironments object
:param n: Number of geometries to be included in the list
:return: list of geometries with incr... | Returns a list of geometries with increasing continuous symmetry measure in this ChemicalEnvironments object
:param n: Number of geometries to be included in the list
:return: list of geometries with increasing continuous symmetry measure in this ChemicalEnvironments object
:raise: ValueError if... |
def close_window(self, window_name=None, title=None, url=None):
"""
WebDriver implements only closing current window. If you want to close
some window without having to switch to it, use this method.
"""
main_window_handle = self.current_window_handle
self.switch_to_windo... | WebDriver implements only closing current window. If you want to close
some window without having to switch to it, use this method. |
def backdoor_handler(clientsock, namespace=None):
"""start an interactive python interpreter on an existing connection
.. note::
this function will block for as long as the connection remains alive.
:param sock: the socket on which to serve the interpreter
:type sock: :class:`Socket<greenhouse... | start an interactive python interpreter on an existing connection
.. note::
this function will block for as long as the connection remains alive.
:param sock: the socket on which to serve the interpreter
:type sock: :class:`Socket<greenhouse.io.sockets.Socket>`
:param namespace:
the lo... |
def _removeContentPanels(cls, remove):
"""
Remove the panels and so hide the fields named.
"""
if type(remove) is str:
remove = [remove]
cls.content_panels = [panel for panel in cls.content_panels
if getattr(panel, "field_name", None) not... | Remove the panels and so hide the fields named. |
def disassociate_eip_address(public_ip=None, association_id=None, region=None,
key=None, keyid=None, profile=None):
'''
Disassociate an Elastic IP address from a currently running instance. This
requires exactly one of either 'association_id' or 'public_ip', depending
on whe... | Disassociate an Elastic IP address from a currently running instance. This
requires exactly one of either 'association_id' or 'public_ip', depending
on whether you’re dealing with a VPC or EC2 Classic address.
public_ip
(string) – Public IP address, for EC2 Classic allocations.
association_id
... |
def add_copy_spec_scl(self, scl, copyspecs):
"""Same as add_copy_spec, except that it prepends path to SCL root
to "copyspecs".
"""
if isinstance(copyspecs, six.string_types):
copyspecs = [copyspecs]
scl_copyspecs = []
for copyspec in copyspecs:
sc... | Same as add_copy_spec, except that it prepends path to SCL root
to "copyspecs". |
def confidence_interval_hazard_(self):
"""
The confidence interval of the hazard.
"""
return self._compute_confidence_bounds_of_transform(self._hazard, self.alpha, self._ci_labels) | The confidence interval of the hazard. |
def remove_edge_fun(graph):
"""
Returns a function that removes an edge from the `graph`.
..note:: The out node is removed if this is isolate.
:param graph:
A directed graph.
:type graph: networkx.classes.digraph.DiGraph
:return:
A function that remove an edge from the `graph`... | Returns a function that removes an edge from the `graph`.
..note:: The out node is removed if this is isolate.
:param graph:
A directed graph.
:type graph: networkx.classes.digraph.DiGraph
:return:
A function that remove an edge from the `graph`.
:rtype: callable |
def package_hidden(self):
"""
Flattens the hidden state from all LSTM layers into one tensor (for
the sequence generator).
"""
if self.inference:
hidden = torch.cat(tuple(itertools.chain(*self.next_hidden)))
else:
hidden = None
return hidde... | Flattens the hidden state from all LSTM layers into one tensor (for
the sequence generator). |
def map_to_matype(self, matype):
""" Convert to the alpha vantage math type integer. It returns an
integer correspondent to the type of math to apply to a function. It
raises ValueError if an integer greater than the supported math types
is given.
Keyword Arguments:
... | Convert to the alpha vantage math type integer. It returns an
integer correspondent to the type of math to apply to a function. It
raises ValueError if an integer greater than the supported math types
is given.
Keyword Arguments:
matype: The math type of the alpha vantage a... |
def open_like(a, path, **kwargs):
"""Open a persistent array like `a`."""
_like_args(a, kwargs)
if isinstance(a, Array):
kwargs.setdefault('fill_value', a.fill_value)
return open_array(path, **kwargs) | Open a persistent array like `a`. |
def toLily(self):
'''
Method which converts the object instance, its attributes and children to a string of lilypond code
:return: str of lilypond code
'''
self.CheckDivisions()
self.CheckTotals()
staves = self.GetChildrenIndexes()
name = ""
short... | Method which converts the object instance, its attributes and children to a string of lilypond code
:return: str of lilypond code |
def _load_plugin(self, plugin_script, args=None, config=None):
"""Load the plugin (script), init it and add to the _plugin dict."""
# The key is the plugin name
# for example, the file glances_xxx.py
# generate self._plugins_list["xxx"] = ...
name = plugin_script[len(self.header)... | Load the plugin (script), init it and add to the _plugin dict. |
def resolve(self, token):
"""Attempts to resolve the :class:`SymbolToken` against the current table.
If the ``text`` is not None, the token is returned, otherwise, a token
in the table is attempted to be retrieved. If not token is found, then
this method will raise.
"""
... | Attempts to resolve the :class:`SymbolToken` against the current table.
If the ``text`` is not None, the token is returned, otherwise, a token
in the table is attempted to be retrieved. If not token is found, then
this method will raise. |
def map_constructor(self, loader, node, deep=False):
""" Walk the mapping, recording any duplicate keys.
"""
mapping = {}
for key_node, value_node in node.value:
key = loader.construct_object(key_node, deep=deep)
value = loader.construct_object(value_node, deep=... | Walk the mapping, recording any duplicate keys. |
def group_citation_edges(edges: Iterable[EdgeTuple]) -> Iterable[Tuple[str, Iterable[EdgeTuple]]]:
"""Return an iterator over pairs of citation values and their corresponding edge iterators."""
return itt.groupby(edges, key=_citation_sort_key) | Return an iterator over pairs of citation values and their corresponding edge iterators. |
def set_webhook(self, *args, **kwargs):
"""See :func:`set_webhook`"""
return set_webhook(*args, **self._merge_overrides(**kwargs)).run() | See :func:`set_webhook` |
def html(self):
"""
Returns ``innerHTML`` of whole page. On page have to be tag ``body``.
.. versionadded:: 2.2
"""
try:
body = self.get_elm(tag_name='body')
except selenium_exc.NoSuchElementException:
return None
else:
return ... | Returns ``innerHTML`` of whole page. On page have to be tag ``body``.
.. versionadded:: 2.2 |
def no_new_errors(new_data, old_data, strict=False):
"""
Pylint Validator that will fail any review if there are
new Pylint errors in it (Pylint message starts with 'E:')
:param new_data:
:param old_data:
:return:
"""
success = True
score = 0
message = ''
if new_data['errors... | Pylint Validator that will fail any review if there are
new Pylint errors in it (Pylint message starts with 'E:')
:param new_data:
:param old_data:
:return: |
def _set_anycast_gateway_mac(self, v, load=False):
"""
Setter method for anycast_gateway_mac, mapped from YANG variable /rbridge_id/ipv6/static_ag_ipv6_config/anycast_gateway_mac (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_anycast_gateway_mac is considere... | Setter method for anycast_gateway_mac, mapped from YANG variable /rbridge_id/ipv6/static_ag_ipv6_config/anycast_gateway_mac (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_anycast_gateway_mac is considered as a private
method. Backends looking to populate this va... |
def update_params_for_auth(self, headers, querys, auth_settings):
"""
Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentica... | Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list. |
def _compute_counts_from_intensity(intensity, bexpcube):
""" Make the counts map from the intensity
"""
data = intensity.data * np.sqrt(bexpcube.data[1:] * bexpcube.data[0:-1])
return HpxMap(data, intensity.hpx) | Make the counts map from the intensity |
def get_pidfile(pidfile):
'''
Return the pid from a pidfile as an integer
'''
try:
with salt.utils.files.fopen(pidfile) as pdf:
pid = pdf.read().strip()
return int(pid)
except (OSError, IOError, TypeError, ValueError):
return -1 | Return the pid from a pidfile as an integer |
def obj(self):
"""Returns the value of :meth:`ObjectMixin.get_object` and sets a private
property called _obj. This property ensures the logic around allow_none
is enforced across Endpoints using the Object interface.
:raises: :class:`werkzeug.exceptions.BadRequest`
:returns: T... | Returns the value of :meth:`ObjectMixin.get_object` and sets a private
property called _obj. This property ensures the logic around allow_none
is enforced across Endpoints using the Object interface.
:raises: :class:`werkzeug.exceptions.BadRequest`
:returns: The result of :meth:ObjectM... |
def try_sort_fmt_opts(rdf_format_opts_list, uri):
"""reorder fmt options based on uri file type suffix - if available - so to test most likely serialization first when parsing some RDF
NOTE this is not very nice as it is hardcoded and assumes the origin serializations to be this: ['turtle', 'xml', 'n3', 'nt',... | reorder fmt options based on uri file type suffix - if available - so to test most likely serialization first when parsing some RDF
NOTE this is not very nice as it is hardcoded and assumes the origin serializations to be this: ['turtle', 'xml', 'n3', 'nt', 'json-ld', 'rdfa'] |
def applySiiRanking(siiContainer, specfile):
"""Iterates over all Sii entries of a specfile in siiContainer and sorts Sii
elements of the same spectrum according to the score attribute specified in
``siiContainer.info[specfile]['rankAttr']``. Sorted Sii elements are then
ranked according to their sorte... | Iterates over all Sii entries of a specfile in siiContainer and sorts Sii
elements of the same spectrum according to the score attribute specified in
``siiContainer.info[specfile]['rankAttr']``. Sorted Sii elements are then
ranked according to their sorted position, if multiple Sii have the same
score,... |
def _unpack_bin(self, packed):
"""
Internal. Decodes 16 bit RGB565 into python list [R,G,B]
"""
output = struct.unpack('H', packed)
bits16 = output[0]
r = (bits16 & 0xF800) >> 11
g = (bits16 & 0x7E0) >> 5
b = (bits16 & 0x1F)
return [int(r << 3), i... | Internal. Decodes 16 bit RGB565 into python list [R,G,B] |
def database(
state, host, name,
present=True, owner=None,
template=None, encoding=None,
lc_collate=None, lc_ctype=None, tablespace=None,
connection_limit=None,
# Details for speaking to PostgreSQL via `psql` CLI
postgresql_user=None, postgresql_password=None,
postgresql_host=None, postg... | Add/remove PostgreSQL databases.
+ name: name of the database
+ present: whether the database should exist or not
+ owner: the PostgreSQL role that owns the database
+ template: name of the PostgreSQL template to use
+ encoding: encoding of the database
+ lc_collate: lc_collate of the database
... |
def to(self, unit):
"""Convert this distance to the given AstroPy unit."""
from astropy.units import au
return (self.au * au).to(unit) | Convert this distance to the given AstroPy unit. |
def project_branches(self):
"""
List all branches associated with a repository.
:return:
"""
request_url = "{}git/branches".format(self.create_basic_url())
return_value = self._call_api(request_url)
return return_value['branches'] | List all branches associated with a repository.
:return: |
def ob_is_tty(ob):
""" checks if an object (like a file-like object) is a tty. """
fileno = get_fileno(ob)
is_tty = False
if fileno:
is_tty = os.isatty(fileno)
return is_tty | checks if an object (like a file-like object) is a tty. |
def add_argument(self, parser, permissive=False, **override_kwargs):
"""Add an option to a an argparse parser.
:keyword permissive: when true, build a parser that does not validate
required arguments.
"""
kwargs = {}
required = None
if self.kwargs:
... | Add an option to a an argparse parser.
:keyword permissive: when true, build a parser that does not validate
required arguments. |
def get_entry_type(hosts_entry=None):
"""
Return the type of entry for the line of hosts file passed
:param hosts_entry: A line from the hosts file
:return: 'comment' | 'blank' | 'ipv4' | 'ipv6'
"""
if hosts_entry and isinstance(hosts_entry, str):
entry = host... | Return the type of entry for the line of hosts file passed
:param hosts_entry: A line from the hosts file
:return: 'comment' | 'blank' | 'ipv4' | 'ipv6' |
def level_at_index(self, index):
"""
Return the list of nodes at level ``index``,
in DFS order.
:param int index: the index
:rtype: list of :class:`~aeneas.tree.Tree`
:raises: ValueError if the given ``index`` is not valid
"""
if not isinstance(index, in... | Return the list of nodes at level ``index``,
in DFS order.
:param int index: the index
:rtype: list of :class:`~aeneas.tree.Tree`
:raises: ValueError if the given ``index`` is not valid |
def _parse_timeframe_line(self, line):
    """Extract the (start, end) timestamp pair from a timeframe line.

    :param line: raw timeframe line from the caption file.
    :raises MalformedCaptionError: if the line does not match the
        expected time format.
    """
    match = self._validate_timeframe_line(line)
    if match:
        return match.group(1), match.group(2)
    raise MalformedCaptionError('Invalid time format')
def status(self):
"""
The current status of the event (started, finished or pending).
"""
myNow = timezone.localtime(timezone=self.tz)
daysDelta = dt.timedelta(days=self.num_days - 1)
# NB: postponements can be created after the until date
# so ignore that
... | The current status of the event (started, finished or pending). |
def in_date(objet, pattern):
    """abstractSearch within a ``datetime.date`` value.

    Strips spaces from *pattern*, renders *objet* as text via
    ``abstractRender.date`` and reports whether the pattern matches.
    Falsy *objet* values never match.
    """
    if not objet:
        return False
    cleaned = re.sub(" ", '', pattern)
    rendered = abstractRender.date(objet)
    return re.search(cleaned, rendered) is not None
def get_mol_filename(chebi_id):
'''Returns mol file'''
mol = get_mol(chebi_id)
if mol is None:
return None
file_descriptor, mol_filename = tempfile.mkstemp(str(chebi_id) +
'_', '.mol')
mol_file = open(mol_filename, 'w')
mol_file.writ... | Returns mol file |
def __validate_definitions(self, definitions, field):
""" Validate a field's value against its defined rules. """
def validate_rule(rule):
validator = self.__get_rule_handler('validate', rule)
return validator(definitions.get(rule, None), field, value)
definitions = sel... | Validate a field's value against its defined rules. |
def autobuild_shiparchive(src_file):
"""Create a ship file archive containing a yaml_file and its dependencies.
If yaml_file depends on any build products as external files, it must
be a jinja2 template that references the file using the find_product
filter so that we can figure out where those build p... | Create a ship file archive containing a yaml_file and its dependencies.
If yaml_file depends on any build products as external files, it must
be a jinja2 template that references the file using the find_product
filter so that we can figure out where those build products are going
and create the right d... |
def ascend_bip32(bip32_pub_node, secret_exponent, child):
"""
Given a BIP32Node with public derivation child "child" with a known private key,
return the secret exponent for the bip32_pub_node.
"""
i_as_bytes = struct.pack(">l", child)
sec = public_pair_to_sec(bip32_pub_node.public_pair(), compr... | Given a BIP32Node with public derivation child "child" with a known private key,
return the secret exponent for the bip32_pub_node. |
def run_pyxbgen(self, args):
    """Run the ``pyxbgen`` binding generator.

    Args:
        args: iterable of command-line argument strings for pyxbgen.

    The assembled command line is echoed to stdout before execution.
    """
    joined = ' '.join(args)
    cmd = 'pyxbgen %s' % joined
    print(cmd)
    # NOTE(review): shell execution via os.system; args are not escaped.
    os.system(cmd)
args: |
def link_bus(self, bus_idx):
"""
Return the indices of elements linking the given buses
:param bus_idx:
:return:
"""
ret = []
if not self._config['is_series']:
self.log(
'link_bus function is not valid for non-series model <{}>'.
... | Return the indices of elements linking the given buses
:param bus_idx:
:return: |
def availability_set_present(name, resource_group, tags=None, platform_update_domain_count=None,
platform_fault_domain_count=None, virtual_machines=None, sku=None, connection_auth=None,
**kwargs):
'''
.. versionadded:: 2019.2.0
Ensure an availabilit... | .. versionadded:: 2019.2.0
Ensure an availability set exists.
:param name:
Name of the availability set.
:param resource_group:
The resource group assigned to the availability set.
:param tags:
A dictionary of strings can be passed as tag metadata to the availability set obje... |
def add_f90_to_env(env):
"""Add Builders and construction variables for f90 to an Environment."""
try:
F90Suffixes = env['F90FILESUFFIXES']
except KeyError:
F90Suffixes = ['.f90']
#print("Adding %s to f90 suffixes" % F90Suffixes)
try:
F90PPSuffixes = env['F90PPFILESUFFIXES']... | Add Builders and construction variables for f90 to an Environment. |
def query_flag(ifo, name, start_time, end_time,
source='any', server="segments.ligo.org",
veto_definer=None, cache=False):
"""Return the times where the flag is active
Parameters
----------
ifo: string
The interferometer to query (H1, L1).
name: string
... | Return the times where the flag is active
Parameters
----------
ifo: string
The interferometer to query (H1, L1).
name: string
The status flag to query from LOSC.
start_time: int
The starting gps time to begin querying from LOSC
end_time: int
The end gps time of ... |
def build_src(ctx, dest=None):
"""
build source archive
"""
if dest:
if not dest.startswith('/'):
# Relative
dest = os.path.join(os.getcwd(), dest)
os.chdir(PROJECT_DIR)
ctx.run('python setup.py sdist --dist-dir {0}'.format(dest))
else:
os.chd... | build source archive |
def polynomial(img, mask, inplace=False, replace_all=False,
max_dev=1e-5, max_iter=20, order=2):
'''
replace all masked values
calculate flatField from 2d-polynomal fit filling
all high gradient areas within averaged fit-image
returns flatField, average background level, fitte... | replace all masked values
calculate flatField from 2d-polynomal fit filling
all high gradient areas within averaged fit-image
returns flatField, average background level, fitted image, valid indices mask |
def _k_value_tapered_reduction(ent_pipe_id, exit_pipe_id, fitting_angle, re, f):
"""Returns the minor loss coefficient for a tapered reducer.
Parameters:
ent_pipe_id: Entrance pipe's inner diameter.
exit_pipe_id: Exit pipe's inner diameter.
fitting_angle: Fitting angle between entrance ... | Returns the minor loss coefficient for a tapered reducer.
Parameters:
ent_pipe_id: Entrance pipe's inner diameter.
exit_pipe_id: Exit pipe's inner diameter.
fitting_angle: Fitting angle between entrance and exit pipes.
re: Reynold's number.
f: Darcy friction factor. |
def _kalman_prediction_step_SVD(k, p_m , p_P, p_dyn_model_callable, calc_grad_log_likelihood=False,
p_dm = None, p_dP = None):
"""
Desctrete prediction function
Input:
k:int
Iteration No. Starts at 0. Total number of iterations equal t... | Desctrete prediction function
Input:
k:int
Iteration No. Starts at 0. Total number of iterations equal to the
number of measurements.
p_m: matrix of size (state_dim, time_series_no)
Mean value from the previous step. For "multiple time se... |
def evaluate_trace_request(data, tracer=tracer):
"""
Evaluate given string trace request.
Usage::
Umbra -t "{'umbra.engine' : ('.*', 0), 'umbra.preferences' : (r'.*', 0)}"
Umbra -t "['umbra.engine', 'umbra.preferences']"
Umbra -t "'umbra.engine, umbra.preferences"
:param data:... | Evaluate given string trace request.
Usage::
Umbra -t "{'umbra.engine' : ('.*', 0), 'umbra.preferences' : (r'.*', 0)}"
Umbra -t "['umbra.engine', 'umbra.preferences']"
Umbra -t "'umbra.engine, umbra.preferences"
:param data: Trace request.
:type data: unicode
:param tracer: Tr... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.