positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def rotate_around_vector_v3(v, angle_rad, norm_vec):
""" rotate v around norm_vec by angle_rad."""
cos_val = math.cos(angle_rad)
sin_val = math.sin(angle_rad)
## (v * cosVal) +
## ((normVec * v) * (1.0 - cosVal)) * normVec +
## (v ^ normVec) * sinVal)
#line1: scaleV3(v,cosVal)
#line2: do... | rotate v around norm_vec by angle_rad. |
def set_defaults(self):
"""Fill the dictionary with all defaults
"""
self['mswitch'] = 1
self['elem'] = '../grid/elem.dat'
self['elec'] = '../grid/elec.dat'
self['volt'] = '../mod/volt.dat'
self['inv_dir'] = '../inv'
self['diff_inv'] = 'F ! difference inve... | Fill the dictionary with all defaults |
def create_as(self, klass, name, **attributes):
"""
Create an instance of the given model and type and persist it to the database.
:param klass: The class
:type klass: class
:param name: The type
:type name: str
:param attributes: The instance attributes
... | Create an instance of the given model and type and persist it to the database.
:param klass: The class
:type klass: class
:param name: The type
:type name: str
:param attributes: The instance attributes
:type attributes: dict
:return: mixed |
def unpack(self, to_unpack):
"""
Unpack is a recursive function that will unpack anything that inherits
from abstract base class Container provided it is not also inheriting from Python basestring.
Raise Exception if resulting object is neither a container or a string
Code work... | Unpack is a recursive function that will unpack anything that inherits
from abstract base class Container provided it is not also inheriting from Python basestring.
Raise Exception if resulting object is neither a container or a string
Code working in both Python 2 and Python 3 |
def getSpecificAssociatedDeviceInfo(self, macAddress, wifiInterfaceId=1, timeout=1):
"""Execute GetSpecificAssociatedDeviceInfo action to get detailed information about a Wifi client.
:param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might
b... | Execute GetSpecificAssociatedDeviceInfo action to get detailed information about a Wifi client.
:param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might
be case sensitive, depending on the router
:param int wifiInterfaceId: the id of the Wifi... |
def dump_links(self, o):
"""Dump links."""
params = {'versionId': o.version_id}
data = {
'self': url_for(
'.object_api',
bucket_id=o.bucket_id,
key=o.key,
_external=True,
**(params if not o.is_head or o.d... | Dump links. |
def close(self):
"""
Closes this VirtualBox VM.
"""
if self._closed:
# VM is already closed
return
if not (yield from super().close()):
return False
log.debug("VirtualBox VM '{name}' [{id}] is closing".format(name=self.name, id=self.... | Closes this VirtualBox VM. |
def copy(self):
"""Return a "clean" copy of this instance.
Return:
(instance): A clean copy of this instance.
"""
resource = copy.copy(self)
# workaround for bytes/str issue in Py3 with copy of instance
# TypeError: a bytes-like object is required, not 'str'... | Return a "clean" copy of this instance.
Return:
(instance): A clean copy of this instance. |
def _get_args(self):
"""
Lazily evaluate the args.
"""
if not hasattr(self, '_args_evaled'):
# cache the args in case handler is re-invoked due to flags change
self._args_evaled = list(chain.from_iterable(self._args))
return self._args_evaled | Lazily evaluate the args. |
def on_ok(self, sender):
"""
This callback is called when one task reaches status `S_OK`.
It removes the WFKQ file if all its children have reached `S_OK`.
"""
if self.remove_wfkq:
for task in self.wfkq_tasks:
if task.status != task.S_OK: continue
... | This callback is called when one task reaches status `S_OK`.
It removes the WFKQ file if all its children have reached `S_OK`. |
def add_section(self, section, section_params):
"""
Adds parameters into this ConfigParams under specified section.
Keys for the new parameters are appended with section dot prefix.
:param section: name of the section where add new parameters
:param section_params: new paramete... | Adds parameters into this ConfigParams under specified section.
Keys for the new parameters are appended with section dot prefix.
:param section: name of the section where add new parameters
:param section_params: new parameters to be added. |
def info(self, fetch=False):
"""
Print a table of info about the current filter
"""
# Get the info from the class
tp = (int, bytes, bool, str, float, tuple, list, np.ndarray)
info = [[k, str(v)] for k, v in vars(self).items() if isinstance(v, tp)
and k not... | Print a table of info about the current filter |
def set(self, key, value):
"""
Updates the value of the given key in the loaded content.
Args:
key (str): Key of the property to update.
value (str): New value of the property.
Return:
bool: Indicates whether or not a change was made.
"""
... | Updates the value of the given key in the loaded content.
Args:
key (str): Key of the property to update.
value (str): New value of the property.
Return:
bool: Indicates whether or not a change was made. |
def create_time_series(self, label_values, func):
"""Create a derived measurement to trac `func`.
:type label_values: list(:class:`LabelValue`)
:param label_values: The measurement's label values.
:type func: function
:param func: The function to track.
:rtype: :class:... | Create a derived measurement to trac `func`.
:type label_values: list(:class:`LabelValue`)
:param label_values: The measurement's label values.
:type func: function
:param func: The function to track.
:rtype: :class:`DerivedGaugePoint`
:return: A read-only measurement ... |
def targets_by_artifact_set(self, targets):
"""Partitions the input targets by the sets of pinned artifacts they are managed by.
:param collections.Iterable targets: the input targets (typically just JarLibrary targets).
:return: a mapping of PinnedJarArtifactSet -> list of targets.
:rtype: dict
""... | Partitions the input targets by the sets of pinned artifacts they are managed by.
:param collections.Iterable targets: the input targets (typically just JarLibrary targets).
:return: a mapping of PinnedJarArtifactSet -> list of targets.
:rtype: dict |
def encompassed_by(self, span):
"""
Returns true if the given span encompasses this span.
"""
if isinstance(span, list):
return [sp for sp in span if sp.encompasses(self)]
return span.encompasses(self) | Returns true if the given span encompasses this span. |
def is_scalar_nan(x):
"""Tests if x is NaN
This function is meant to overcome the issue that np.isnan does not allow
non-numerical types as input, and that np.nan is not np.float('nan').
Parameters
----------
x : any type
Returns
-------
boolean
Examples
--------
>>> is_s... | Tests if x is NaN
This function is meant to overcome the issue that np.isnan does not allow
non-numerical types as input, and that np.nan is not np.float('nan').
Parameters
----------
x : any type
Returns
-------
boolean
Examples
--------
>>> is_scalar_nan(np.nan)
True
... |
def dataframe_select(df, *cols, **filters):
'''
dataframe_select(df, k1=v1, k2=v2...) yields df after selecting all the columns in which the
given keys (k1, k2, etc.) have been selected such that the associated columns in the dataframe
contain only the rows whose cells match the given values.
da... | dataframe_select(df, k1=v1, k2=v2...) yields df after selecting all the columns in which the
given keys (k1, k2, etc.) have been selected such that the associated columns in the dataframe
contain only the rows whose cells match the given values.
dataframe_select(df, col1, col2...) selects the given colu... |
def javascript_tag(self, *args):
"""
Convenience tag to output 1 or more javascript tags.
:param args: 1 or more javascript file names
:return: Script tag(s) containing the asset
"""
tags = []
for arg in args:
asset_path = self.asset_url_for('{0}.js'... | Convenience tag to output 1 or more javascript tags.
:param args: 1 or more javascript file names
:return: Script tag(s) containing the asset |
def write(self):
"""write the current settings to the config file"""
with open(storage.config_file, 'w') as cfg:
yaml.dump(self.as_dict(), cfg, default_flow_style=False)
storage.refresh() | write the current settings to the config file |
def get_source(self):
"""Return this context’s source.
:returns:
An instance of :class:`Pattern` or one of its sub-classes,
a new Python object referencing the existing cairo pattern.
"""
return Pattern._from_pointer(
cairo.cairo_get_source(self._poi... | Return this context’s source.
:returns:
An instance of :class:`Pattern` or one of its sub-classes,
a new Python object referencing the existing cairo pattern. |
def select_columns(self, column_names):
"""
Selects all columns where the name of the column or the type of column
is included in the column_names. An exception is raised if duplicate columns
are selected i.e. sf.select_columns(['a','a']), or non-existent columns
are selected.
... | Selects all columns where the name of the column or the type of column
is included in the column_names. An exception is raised if duplicate columns
are selected i.e. sf.select_columns(['a','a']), or non-existent columns
are selected.
Throws an exception for all other input types.
... |
def set_guess_to_fit_result(self):
"""
If you have a fit result, set the guess parameters to the
fit parameters.
"""
if self.results is None:
print("No fit results to use! Run fit() first.")
return
# loop over the results and set the guess values
... | If you have a fit result, set the guess parameters to the
fit parameters. |
def _get_token(self, req):
""" Get the token from the Authorization header
If the header is actually malformed where Bearer Auth was
indicated by the request then an InvalidAuthSyntax exception
is raised. Otherwise an AuthRequired exception since it's
unclear in this scenario if... | Get the token from the Authorization header
If the header is actually malformed where Bearer Auth was
indicated by the request then an InvalidAuthSyntax exception
is raised. Otherwise an AuthRequired exception since it's
unclear in this scenario if the requestor was even aware
A... |
def find_following_duplicates(array):
"""
Find the duplicates that are following themselves.
Parameters
----------
array : list or ndarray
A list containing duplicates.
Returns
----------
uniques : list
A list containing True for each unique and False for following dup... | Find the duplicates that are following themselves.
Parameters
----------
array : list or ndarray
A list containing duplicates.
Returns
----------
uniques : list
A list containing True for each unique and False for following duplicates.
Example
----------
>>> impor... |
def musixmatch(song):
"""
Returns the lyrics found in musixmatch for the specified mp3 file or an
empty string if not found.
"""
escape = re.sub("'-¡¿", '', URLESCAPE)
translate = {
escape: '',
' ': '-'
}
artist = song.artist.title()
artist = re.sub(r"( '|' )", '', ar... | Returns the lyrics found in musixmatch for the specified mp3 file or an
empty string if not found. |
def _filter_invalid_routes(routes, board, railroad):
"""
Given a collection of routes, returns a new set containing only valid routes. Invalid routes removed:
- contain less than 2 cities, or
- go through Chicago using an impassable exit
- only contain Chicago as a station, but don't use the correct... | Given a collection of routes, returns a new set containing only valid routes. Invalid routes removed:
- contain less than 2 cities, or
- go through Chicago using an impassable exit
- only contain Chicago as a station, but don't use the correct exit path
This fltering after the fact keeps the path findi... |
def convert_elementwise_mul_scalar(net, node, module, builder):
"""Convert a scalar multiplication from mxnet to coreml.
Parameters
----------
net: network
A mxnet network object.
node: layer
Node to convert.
module: module
An module for MXNet
builder: NeuralNetwo... | Convert a scalar multiplication from mxnet to coreml.
Parameters
----------
net: network
A mxnet network object.
node: layer
Node to convert.
module: module
An module for MXNet
builder: NeuralNetworkBuilder
A neural network builder object. |
def _parsed_string_to_bounds(date_type, resolution, parsed):
"""Generalization of
pandas.tseries.index.DatetimeIndex._parsed_string_to_bounds
for use with non-standard calendars and cftime.datetime
objects.
"""
if resolution == 'year':
return (date_type(parsed.year, 1, 1),
... | Generalization of
pandas.tseries.index.DatetimeIndex._parsed_string_to_bounds
for use with non-standard calendars and cftime.datetime
objects. |
def update_branch(profile, name, sha):
"""Move a branch's HEAD to a new SHA.
Args:
profile
A profile generated from ``simplygithub.authentication.profile``.
Such profiles tell this module (i) the ``repo`` to connect to,
and (ii) the ``token`` to connect with.
... | Move a branch's HEAD to a new SHA.
Args:
profile
A profile generated from ``simplygithub.authentication.profile``.
Such profiles tell this module (i) the ``repo`` to connect to,
and (ii) the ``token`` to connect with.
name
The name of the branch to ... |
def Xor(bytestr, key):
"""Returns a `bytes` object where each byte has been xored with key."""
# TODO(hanuszczak): Remove this import when string migration is done.
# pytype: disable=import-error
from builtins import bytes # pylint: disable=redefined-builtin, g-import-not-at-top
# pytype: enable=import-error... | Returns a `bytes` object where each byte has been xored with key. |
def is_stopword_record(self, record):
"""
Determine whether a single MeCab record represents a stopword.
This mostly determines words to strip based on their parts of speech.
If common_words is set to True (default), it will also strip common
verbs and nouns such as くる and よう. I... | Determine whether a single MeCab record represents a stopword.
This mostly determines words to strip based on their parts of speech.
If common_words is set to True (default), it will also strip common
verbs and nouns such as くる and よう. If more_stopwords is True, it
will look at the sub-... |
async def get_box_ids_issued(self) -> str:
"""
Return json object on lists of all unique box identifiers (schema identifiers,
credential definition identifiers, and revocation registry identifiers) for
all credential definitions and credentials issued; e.g.,
::
{
... | Return json object on lists of all unique box identifiers (schema identifiers,
credential definition identifiers, and revocation registry identifiers) for
all credential definitions and credentials issued; e.g.,
::
{
"schema_id": [
"R17v42T4pk...... |
def _validate_non_abstract_edge_has_defined_endpoint_types(class_name, properties):
"""Validate that the non-abstract edge properties dict has defined in/out link properties."""
edge_source = properties.get(EDGE_SOURCE_PROPERTY_NAME, None)
edge_destination = properties.get(EDGE_DESTINATION_PROPERTY_NAME, No... | Validate that the non-abstract edge properties dict has defined in/out link properties. |
def demean(self, mask=NotSpecified, groupby=NotSpecified):
"""
Construct a Factor that computes ``self`` and subtracts the mean from
row of the result.
If ``mask`` is supplied, ignore values where ``mask`` returns False
when computing row means, and output NaN anywhere the mask ... | Construct a Factor that computes ``self`` and subtracts the mean from
row of the result.
If ``mask`` is supplied, ignore values where ``mask`` returns False
when computing row means, and output NaN anywhere the mask is False.
If ``groupby`` is supplied, compute by partitioning each row... |
def restore_breakpoints_state(cls, breakpoints_state_list):
"""Restore the state of breakpoints given a list provided by
backup_breakpoints_state(). If list of breakpoint has changed
since backup missing or added breakpoints are ignored.
breakpoints_state_list is a list of tup... | Restore the state of breakpoints given a list provided by
backup_breakpoints_state(). If list of breakpoint has changed
since backup missing or added breakpoints are ignored.
breakpoints_state_list is a list of tuple. Each tuple is of form:
(breakpoint_number, enabled, conditi... |
def name(self, key, value):
"""Populate the ``name`` key.
Also populates the ``status``, ``birth_date`` and ``death_date`` keys through side effects.
"""
def _get_title(value):
c_value = force_single_element(value.get('c', ''))
if c_value != 'title (e.g. Sir)':
return c_valu... | Populate the ``name`` key.
Also populates the ``status``, ``birth_date`` and ``death_date`` keys through side effects. |
def convert_values(self, matchdict: Dict[str, str]) -> Dict[str, Any]:
""" convert values of ``matchdict``
with converter this object has."""
converted = {}
for varname, value in matchdict.items():
converter = self.converters[varname]
converted[varname] = convert... | convert values of ``matchdict``
with converter this object has. |
def source_bash(args, stdin=None):
"""Simply bash-specific wrapper around source-foreign
Returns a dict to be used as a new environment"""
args = list(args)
new_args = ['bash', '--sourcer=source']
new_args.extend(args)
return source_foreign(new_args, stdin=stdin) | Simply bash-specific wrapper around source-foreign
Returns a dict to be used as a new environment |
def _get_filesystem_types(args, sample_file):
"""Retrieve the types of inputs and staging based on sample JSON and arguments.
"""
out = set([])
ext = "" if args.no_container else "_container"
with open(sample_file) as in_handle:
for f in _get_file_paths(json.load(in_handle)):
if ... | Retrieve the types of inputs and staging based on sample JSON and arguments. |
def prepare_policy_template(self, scaling_type, period_sec, server_group):
"""Renders scaling policy templates based on configs and variables.
After rendering, POSTs the json to Spinnaker for creation.
Args:
scaling_type (str): ``scale_up`` or ``scaling_down``. Type of policy
... | Renders scaling policy templates based on configs and variables.
After rendering, POSTs the json to Spinnaker for creation.
Args:
scaling_type (str): ``scale_up`` or ``scaling_down``. Type of policy
period_sec (int): Period of time to look at metrics for determining scale
... |
def refresh_grant(request, token_uri, refresh_token, client_id, client_secret):
"""Implements the OAuth 2.0 refresh token grant.
For more details, see `rfc678 section 6`_.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
token_uri (str): The... | Implements the OAuth 2.0 refresh token grant.
For more details, see `rfc678 section 6`_.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
token_uri (str): The OAuth 2.0 authorizations server's token endpoint
URI.
refresh_toke... |
def _config_params(base_config, assoc_files, region, out_file, items):
"""Add parameters based on configuration variables, associated files and genomic regions.
"""
params = []
dbsnp = assoc_files.get("dbsnp")
if dbsnp:
params += ["--dbsnp", dbsnp]
cosmic = assoc_files.get("cosmic")
... | Add parameters based on configuration variables, associated files and genomic regions. |
def exists(self, share_name, directory_name=None, file_name=None, timeout=None, snapshot=None):
'''
Returns a boolean indicating whether the share exists if only share name is
given. If directory_name is specificed a boolean will be returned indicating
if the directory exists. If file_na... | Returns a boolean indicating whether the share exists if only share name is
given. If directory_name is specificed a boolean will be returned indicating
if the directory exists. If file_name is specified as well, a boolean will be
returned indicating if the file exists.
:param str share... |
def _DropCommonSuffixes(filename):
"""Drops common suffixes like _test.cc or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cc')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCom... | Drops common suffixes like _test.cc or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cc')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/foo_unusualinternal.h')... |
def substitute_environ(self):
"""
Substitute environment variables into settings.
"""
for attr_name in dir(self):
if attr_name.startswith('_') or attr_name.upper() != attr_name:
continue
orig_value = getattr(self, attr_name)
is_require... | Substitute environment variables into settings. |
def _get_possible_circular_ref_contigs(self, nucmer_hits, log_fh=None, log_outprefix=None):
'''Returns a dict ref name => tuple(hit at start, hit at end) for each ref sequence in the hash nucmer_hits (each value is a list of nucmer hits)'''
writing_log_file = None not in [log_fh, log_outprefix]
... | Returns a dict ref name => tuple(hit at start, hit at end) for each ref sequence in the hash nucmer_hits (each value is a list of nucmer hits) |
def find_user_file(self, option_name, filename_list):
"""! @brief Search the project directory for a file."""
if option_name is not None:
filePath = self._options.get(option_name, None)
else:
filePath = None
# Look for default filenames if a path wasn't p... | ! @brief Search the project directory for a file. |
def attention_bias_local(length, max_backward, max_forward):
"""Create an bias tensor to be added to attention logits.
A position may attend to positions at most max_distance from it,
forward and backwards.
This does not actually save any computation.
Args:
length: int
max_backward: int, maximum di... | Create an bias tensor to be added to attention logits.
A position may attend to positions at most max_distance from it,
forward and backwards.
This does not actually save any computation.
Args:
length: int
max_backward: int, maximum distance backward to attend. Negative values
indicate unlimite... |
def escape(identifier, ansi_quotes, should_quote):
"""
Escape identifiers.
ANSI uses single quotes, but many databases use back quotes.
"""
if not should_quote(identifier):
return identifier
quote = '"' if ansi_quotes else '`'
identifier = identifier.replace(quote, 2*quote)
re... | Escape identifiers.
ANSI uses single quotes, but many databases use back quotes. |
def _create_jobs(self, target, jumpkind, current_function_addr, soot_block, addr, cfg_node, stmt_addr, stmt_idx): # pylint:disable=arguments-differ
"""
Given a node and details of a successor, makes a list of CFGJobs
and if it is a call or exit marks it appropriately so in the CFG
:pa... | Given a node and details of a successor, makes a list of CFGJobs
and if it is a call or exit marks it appropriately so in the CFG
:param int target: Destination of the resultant job
:param str jumpkind: The jumpkind of the edge going to this node
:param int current_funct... |
def weld_str_get(array, i):
"""Retrieve character at index i.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
i : int
Index of character to retrieve. If greater than length of string, returns None.
Returns
-------
WeldObject
Representation ... | Retrieve character at index i.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
i : int
Index of character to retrieve. If greater than length of string, returns None.
Returns
-------
WeldObject
Representation of this computation. |
def _build_metric_list_to_collect(self, additional_metrics):
"""
Build the metric list to collect based on the instance preferences.
"""
metrics_to_collect = {}
# Defaut metrics
for default_metrics in itervalues(self.DEFAULT_METRICS):
metrics_to_collect.updat... | Build the metric list to collect based on the instance preferences. |
def _file_name(self, dtype_out_time, extension='nc'):
"""Create the name of the aospy file."""
if dtype_out_time is None:
dtype_out_time = ''
out_lbl = utils.io.data_out_label(self.intvl_out, dtype_out_time,
dtype_vert=self.dtype_out_vert)
... | Create the name of the aospy file. |
def languages(self, key, value):
"""Populate the ``languages`` key."""
languages = self.get('languages', [])
values = force_list(value.get('a'))
for value in values:
for language in RE_LANGUAGE.split(value):
try:
name = language.strip().capitalize()
l... | Populate the ``languages`` key. |
def merge_commit(commit):
"Fetches the latest code and merges up the specified commit."
with cd(env.path):
run('git fetch')
if '@' in commit:
branch, commit = commit.split('@')
run('git checkout {0}'.format(branch))
run('git merge {0}'.format(commit)) | Fetches the latest code and merges up the specified commit. |
def create_session(self, **params):
"""
Create the session
date format: YYYY-mm-dd
location: ISO code
"""
return self.make_request(self.PRICING_SESSION_URL,
method='post',
headers=self._session_headers(),
... | Create the session
date format: YYYY-mm-dd
location: ISO code |
def _create_color_buttons(self):
"""Create color choice buttons"""
button_size = (30, 30)
button_style = wx.NO_BORDER
try:
self.linecolor_choice = \
csel.ColourSelect(self, -1, unichr(0x2500), (0, 0, 0),
size=button_size, st... | Create color choice buttons |
def empirical_SVD(stream_list, linear=True):
"""
Depreciated. Use empirical_svd.
"""
warnings.warn('Depreciated, use empirical_svd instead.')
return empirical_svd(stream_list=stream_list, linear=linear) | Depreciated. Use empirical_svd. |
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
... | Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned |
def get_channel_access(self, channel=None, read_mode='volatile'):
"""Get channel access
:param channel: number [1:7]
:param read_mode:
non_volatile = get non-volatile Channel Access
volatile = get present volatile (active) setting of Channel Access
:return: A Pyth... | Get channel access
:param channel: number [1:7]
:param read_mode:
non_volatile = get non-volatile Channel Access
volatile = get present volatile (active) setting of Channel Access
:return: A Python dict with the following keys/values:
{
- alerting:
... |
def AtMaximumDepth(self, search_depth):
"""Determines if the find specification is at maximum depth.
Args:
search_depth (int): number of location path segments to compare.
Returns:
bool: True if at maximum depth, False if not.
"""
if self._location_segments is not None:
if search... | Determines if the find specification is at maximum depth.
Args:
search_depth (int): number of location path segments to compare.
Returns:
bool: True if at maximum depth, False if not. |
def get_values(self, *args):
"""
Gets environment variables values.
Usage::
>>> environment = Environment("HOME")
>>> environment.get_values()
{'HOME': u'/Users/JohnDoe'}
>>> environment.get_values("USER")
{'HOME': u'/Users/JohnDoe', ... | Gets environment variables values.
Usage::
>>> environment = Environment("HOME")
>>> environment.get_values()
{'HOME': u'/Users/JohnDoe'}
>>> environment.get_values("USER")
{'HOME': u'/Users/JohnDoe', 'USER': u'JohnDoe'}
:param \*args: Addit... |
def to_dict(self):
"""
Returns a dictionary that represents this object, to be used for JSONification.
:return: the object dictionary
:rtype: dict
"""
result = {}
result["type"] = "Configurable"
result["class"] = get_classname(self)
result["config... | Returns a dictionary that represents this object, to be used for JSONification.
:return: the object dictionary
:rtype: dict |
def apply_dict_depth_first(nodes, func, depth=0, as_dict=True, parents=None, pre=None, post=None):
'''
This function is similar to the `apply_depth_first` except that it operates
on the `OrderedDict`-based structure returned from `apply_depth_first` when
`as_dict=True`.
Note that if `as_dict` is `F... | This function is similar to the `apply_depth_first` except that it operates
on the `OrderedDict`-based structure returned from `apply_depth_first` when
`as_dict=True`.
Note that if `as_dict` is `False`, the result of this function is given in
the entry/tuple form. |
def not_downgrade(self, package):
"""Don't downgrade packages if repository version is lower than
installed"""
name = split_package(package)[0]
rep_ver = split_package(package)[1]
ins_ver = GetFromInstalled(name).version()[1:]
if not ins_ver:
ins_ver = "0"
... | Don't downgrade packages if repository version is lower than
installed |
def blame(self, rev='HEAD', committer=True, by='repository', ignore_globs=None, include_globs=None):
"""
Returns the blame from the current HEAD of the repository as a DataFrame. The DataFrame is grouped by committer
name, so it will be the sum of all contributions to the repository by each com... | Returns the blame from the current HEAD of the repository as a DataFrame. The DataFrame is grouped by committer
name, so it will be the sum of all contributions to the repository by each committer. As with the commit history
method, extensions and ignore_dirs parameters can be passed to exclude certain... |
def generic_send_mail(sender, dests, subject, message, key, origin='', html_message=False):
"""Generic mail sending function"""
# If no EBUIO Mail settings have been set, then no e-mail shall be sent
if settings.EBUIO_MAIL_SECRET_KEY and settings.EBUIO_MAIL_SECRET_HASH:
headers = {}
if key... | Generic mail sending function |
def list_datasets():
"""Get a string representation of all available datasets with descriptions."""
lines = []
for name, resource in itertools.chain(LOCAL_DATASETS.items(), REMOTE_DATASETS.items()):
if isinstance(resource, LocalFileMetadata):
location = "local: {}".format(resource.filen... | Get a string representation of all available datasets with descriptions. |
def down(p_queue, host=None):
if host is not None:
return _path(_c.FSQ_DOWN, root=_path(host, root=hosts(p_queue)))
'''Construct a path to the down file for a queue'''
return _path(p_queue, _c.FSQ_DOWN) | Construct a path to the down file for a queue |
def delay_svc_notification(self, service, notification_time):
"""Modify service first notification delay
Format of the line that triggers function call::
DELAY_SVC_NOTIFICATION;<host_name>;<service_description>;<notification_time>
:param service: service to edit
:type service: ... | Modify service first notification delay
Format of the line that triggers function call::
DELAY_SVC_NOTIFICATION;<host_name>;<service_description>;<notification_time>
:param service: service to edit
:type service: alignak.objects.service.Service
:param notification_time: new val... |
def format_vk(vk):
"""Format vk before using it"""
# Force extension require to be a list
for ext in get_extensions_filtered(vk):
req = ext['require']
if not isinstance(req, list):
ext['require'] = [req] | Format vk before using it |
def __set_values(self, values):
"""
Sets values in this cell range from an iterable.
This is much more effective than writing cell values one by one.
"""
array = tuple((self._clean_value(v),) for v in values)
self._get_target().setDataArray(array) | Sets values in this cell range from an iterable.
This is much more effective than writing cell values one by one. |
def namedtuple_storable(namedtuple, *args, **kwargs):
"""
Storable factory for named tuples.
"""
return default_storable(namedtuple, namedtuple._fields, *args, **kwargs) | Storable factory for named tuples. |
def report_altitude(self, altitude):
'''possibly report a new altitude'''
master = self.master
if getattr(self.console, 'ElevationMap', None) is not None and self.mpstate.settings.basealt != 0:
lat = master.field('GLOBAL_POSITION_INT', 'lat', 0)*1.0e-7
lon = master.field(... | possibly report a new altitude |
def _validate(self, val):
"""
val must be None or one of the objects in self.objects.
"""
if not self.check_on_set:
self._ensure_value_is_in_objects(val)
return
if not (val in self.objects or (self.allow_None and val is None)):
# CEBALERT: can... | val must be None or one of the objects in self.objects. |
def export(self, nidm_version, export_dir):
"""
Create prov graph.
"""
# Contrast Map entity
atts = (
(PROV['type'], NIDM_CONTRAST_MAP),
(NIDM_CONTRAST_NAME, self.name))
if not self.isderfrommap:
atts = atts + (
(NIDM_I... | Create prov graph. |
def gen_csv(sc, filename, field_list, source, filters):
'''csv SecurityCenterObj, AssetListName, CSVFields, EmailAddress
'''
# First thing we need to do is initialize the csvfile and build the header
# for the file.
datafile = open(filename, 'wb')
csvfile = csv.writer(datafile)
header = []
... | csv SecurityCenterObj, AssetListName, CSVFields, EmailAddress |
def do_searchfy(self, query, **kwargs):
"""
Verifying a searchfy query in this platform.
This might be redefined in any class inheriting from Platform.
Performing additional procesing may be possible by iterating the requested profiles
to extract more entities from the URI woul... | Verifying a searchfy query in this platform.
This might be redefined in any class inheriting from Platform.
Performing additional procesing may be possible by iterating the requested profiles
to extract more entities from the URI would be slow. Sample code may be:
if kwargs["proce... |
def appliance_node_information(self):
"""
Gets the ApplianceNodeInformation API client.
Returns:
ApplianceNodeInformation:
"""
if not self.__appliance_node_information:
self.__appliance_node_information = ApplianceNodeInformation(self.__connection)
... | Gets the ApplianceNodeInformation API client.
Returns:
ApplianceNodeInformation: |
def is_critical_flow(P1, P2, k):
r'''Determines if a flow of a fluid driven by pressure gradient
P1 - P2 is critical, for a fluid with the given isentropic coefficient.
This function calculates critical flow pressure, and checks if this is
larger than P2. If so, the flow is critical and choked.
Par... | r'''Determines if a flow of a fluid driven by pressure gradient
P1 - P2 is critical, for a fluid with the given isentropic coefficient.
This function calculates critical flow pressure, and checks if this is
larger than P2. If so, the flow is critical and choked.
Parameters
----------
P1 : float... |
def _format_operation_list(operation, parameters):
"""Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %s`` and the output
will be a query like ``SELECT ?``.
:type operation: str
:param operation: A Google BigQuery query string.
:t... | Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %s`` and the output
will be a query like ``SELECT ?``.
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Sequence[Any]
:param parameters: Seque... |
def load_repo_addons(_globals):
'''Load all fabsetup addons which are stored under ~/.fabsetup-addon-repos
as git repositories.
Args:
_globals(dict): the globals() namespace of the fabric script.
Return: None
'''
repos_dir = os.path.expanduser('~/.fabsetup-addon-repos')
if os.path.... | Load all fabsetup addons which are stored under ~/.fabsetup-addon-repos
as git repositories.
Args:
_globals(dict): the globals() namespace of the fabric script.
Return: None |
def item_afdeling_adapter(obj, request):
"""
Adapter for rendering an object of
:class: `crabpy.gateway.capakey.Afdeling` to json.
"""
return {
'id': obj.id,
'naam': obj.naam,
'gemeente': {
'id': obj.gemeente.id,
'naam': obj.gemeente.naam
},
... | Adapter for rendering an object of
:class: `crabpy.gateway.capakey.Afdeling` to json. |
def main():
"""
Example application that prints messages from the panel to the terminal.
"""
try:
# Retrieve the first USB device
device = AlarmDecoder(USBDevice.find())
# Set up an event handler and open the device
device.on_message += handle_message
with device... | Example application that prints messages from the panel to the terminal. |
def check_expected_infos(self, test_method):
"""
This method is called after each test. It will read decorated
informations and check if there are expected infos.
You can set expected infos by decorators :py:func:`.expected_info_messages`
and :py:func:`.allowed_info_messages`.
... | This method is called after each test. It will read decorated
informations and check if there are expected infos.
You can set expected infos by decorators :py:func:`.expected_info_messages`
and :py:func:`.allowed_info_messages`. |
def context(self):
"""
An execution context created using :mod:`executor.contexts`.
The value of :attr:`context` defaults to a
:class:`~executor.contexts.LocalContext` object with the following
characteristics:
- The working directory of the execution context is set to ... | An execution context created using :mod:`executor.contexts`.
The value of :attr:`context` defaults to a
:class:`~executor.contexts.LocalContext` object with the following
characteristics:
- The working directory of the execution context is set to the
value of :attr:`directory... |
def as_tree(self, visitor=None, children=None):
""" Recursively traverses each tree (starting from each root) in order
to generate a dictionary-based tree structure of the entire forest.
Each level of the forest/tree is a list of nodes, and each node
consists of a dictionary ... | Recursively traverses each tree (starting from each root) in order
to generate a dictionary-based tree structure of the entire forest.
Each level of the forest/tree is a list of nodes, and each node
consists of a dictionary representation, where the entry
``children`` (by... |
def list(self):
'''
View the list of the pages.
'''
kwd = {
'pager': '',
'title': '单页列表',
}
self.render('wiki_page/page_list.html',
kwd=kwd,
view=MWiki.query_recent(),
view_all=MWiki.query... | View the list of the pages. |
def is_equal(self, other):
"""
Computes whether two Partial Orderings contain the same information
"""
if not (hasattr(other, 'get_domain') or hasattr(other, 'upper') or hasattr(other, 'lower')):
other = self.coerce(other)
if self.is_domain_equal(other) \
... | Computes whether two Partial Orderings contain the same information |
def get_parts_of_url_path(url):
"""Given a url, take out the path part and split it by '/'.
Args:
url (str): the url slice
returns
list: parts after the domain name of the URL
"""
parsed = urlparse(url)
path = unquote(parsed.path).lstrip('/')
parts = path.split('/')
re... | Given a url, take out the path part and split it by '/'.
Args:
url (str): the url slice
returns
list: parts after the domain name of the URL |
def download_magic(infile, dir_path='.', input_dir_path='',
overwrite=False, print_progress=True,
data_model=3., separate_locs=False):
"""
takes the name of a text file downloaded from the MagIC database and
unpacks it into magic-formatted files. by default, download_ma... | takes the name of a text file downloaded from the MagIC database and
unpacks it into magic-formatted files. by default, download_magic assumes
that you are doing everything in your current directory. if not, you may
provide optional arguments dir_path (where you want the results to go) and
input_dir_pat... |
def adjust(self, amount, update=True, flow=True, fee=0.0):
"""
Adjust capital - used to inject capital to a Strategy. This injection
of capital will have no effect on the children.
Args:
* amount (float): Amount to adjust by.
* update (bool): Force update?
... | Adjust capital - used to inject capital to a Strategy. This injection
of capital will have no effect on the children.
Args:
* amount (float): Amount to adjust by.
* update (bool): Force update?
* flow (bool): Is this adjustment a flow? A flow will not have an
... |
def _quote_username(name):
    '''
    Usernames can only contain ascii chars, so make sure we return a str type.

    Existing strings go through salt's string utilities; any other value
    is stringified directly.
    '''
    if isinstance(name, six.string_types):
        return salt.utils.stringutils.to_str(name)
    return str(name)  # future lint: disable=blacklisted-function
def _localectl_set(locale=''):
'''
Use systemd's localectl command to set the LANG locale parameter, making
sure not to trample on other params that have been set.
'''
locale_params = _parse_dbus_locale() if dbus is not None else _localectl_status().get('system_locale', {})
locale_params['LANG']... | Use systemd's localectl command to set the LANG locale parameter, making
sure not to trample on other params that have been set. |
def get_pool(parallel, kwargs):
"""
Yields:
a ThreadPoolExecutor if parallel is True and `concurrent.futures` exists.
`None` otherwise.
"""
if parallel:
try:
from concurrent.futures import ThreadPoolExecutor
with ThreadPoolExecutor(thread_name_prefix="ins... | Yields:
a ThreadPoolExecutor if parallel is True and `concurrent.futures` exists.
`None` otherwise. |
def record(self):
# type: () -> bytes
'''
Generate a string representing the Rock Ridge Extensions Reference
record.
Parameters:
None.
Returns:
String containing the Rock Ridge record.
'''
if not self._initialized:
raise pycd... | Generate a string representing the Rock Ridge Extensions Reference
record.
Parameters:
None.
Returns:
String containing the Rock Ridge record. |
def get_learning_objectives(self):
""" This method also mirrors that in the Item."""
# This is pretty much identicial to the method in assessment.Item!
mgr = self._get_provider_manager('LEARNING')
lookup_session = mgr.get_objective_lookup_session(proxy=getattr(self, "_proxy", None))
... | This method also mirrors that in the Item. |
def clock_resized_cb(self, viewer, width, height):
"""This method is called when an individual clock is resized.
It deletes and reconstructs the placement of the text objects
in the canvas.
"""
self.logger.info("resized canvas to %dx%d" % (width, height))
# add text objec... | This method is called when an individual clock is resized.
It deletes and reconstructs the placement of the text objects
in the canvas. |
def validate_config(self, organization, config, actor=None):
"""
```
if config['foo'] and not config['bar']:
raise PluginError('You cannot configure foo with bar')
return config
```
"""
if config.get('name'):
client = self.get_client(actor)... | ```
if config['foo'] and not config['bar']:
raise PluginError('You cannot configure foo with bar')
return config
``` |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.