positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def set(self, key, value, *args, **kwargs):
"""Store the given value into Redis.
:returns: a coroutine
"""
if self.cfg.jsonpickle:
value = jsonpickle.encode(value)
return self.conn.set(key, value, *args, **kwargs) | Store the given value into Redis.
:returns: a coroutine |
def filter_(predicate, *structures, **kwargs):
# pylint: disable=differing-param-doc,missing-param-doc, too-many-branches
"""Select elements of a nested structure based on a predicate function.
If multiple structures are provided as input, their structure must match and
the function will be applied to correspo... | Select elements of a nested structure based on a predicate function.
If multiple structures are provided as input, their structure must match and
the function will be applied to corresponding groups of elements. The nested
structure can consist of any combination of lists, tuples, and dicts.
Args:
predica... |
def _description(self):
"""A concise html explanation of this Action."""
inst = self.timemachine.presently
if self.action_type == "dl":
return "Deleted %s" % inst.content_type.name
elif self.action_type == "cr":
return "Created %s" % inst._object_type_html()
... | A concise html explanation of this Action. |
def hex_xformat_decode(s: str) -> Optional[bytes]:
"""
Reverse :func:`hex_xformat_encode`.
The parameter is a hex-encoded BLOB like
.. code-block:: none
"X'CDE7A24B1A9DBA3148BCB7A0B9DA5BB6A424486C'"
Original purpose and notes:
- SPECIAL HANDLING for BLOBs: a string like ``X'01FF'`` ... | Reverse :func:`hex_xformat_encode`.
The parameter is a hex-encoded BLOB like
.. code-block:: none
"X'CDE7A24B1A9DBA3148BCB7A0B9DA5BB6A424486C'"
Original purpose and notes:
- SPECIAL HANDLING for BLOBs: a string like ``X'01FF'`` means a hex-encoded
BLOB. Titanium is rubbish at BLOBs, s... |
def example_splits(url_file, all_files):
"""Generate splits of the data."""
def generate_hash(inp):
"""Generate a sha1 hash to match the raw url to the filename extracted."""
h = hashlib.sha1()
h.update(inp)
return h.hexdigest()
all_files_map = {f.split("/")[-1]: f for f in all_files}
urls = ... | Generate splits of the data. |
def get_similar_users(self, users=None, k=10):
"""Get the k most similar users for each entry in `users`.
Each type of recommender has its own model for the similarity
between users. For example, the factorization_recommender will
return the nearest users based on the cosine similarity
... | Get the k most similar users for each entry in `users`.
Each type of recommender has its own model for the similarity
between users. For example, the factorization_recommender will
return the nearest users based on the cosine similarity
between latent user factors. (This method is not ... |
def fetch_path(self, name):
"""
Fetch contents from the path retrieved via lookup_path.
No caching will be done.
"""
with codecs.open(self.lookup_path(name), encoding='utf-8') as fd:
return fd.read() | Fetch contents from the path retrieved via lookup_path.
No caching will be done. |
def maybe_gzip_open(path, *args, **kwargs):
"""
Open file with either open or gzip.open, depending on file extension.
This function doesn't handle json lines format, just opens a file
in a way it is decoded transparently if needed.
"""
path = path_to_str(path)
if path.endswith('.gz') or pat... | Open file with either open or gzip.open, depending on file extension.
This function doesn't handle json lines format, just opens a file
in a way it is decoded transparently if needed. |
def _format_numer(number_format, prefix='', suffix=''):
"""Format a number to a string."""
@_surpress_formatting_errors
def inner(v):
if isinstance(v, Number):
return ("{{}}{{:{}}}{{}}"
.format(number_format)
.format(prefix, v, suffix))
els... | Format a number to a string. |
def to_grey(self, on: bool=False):
"""
Change the LED to grey.
:param on: Unused, here for API consistency with the other states
:return: None
"""
self._on = False
self._load_new(led_grey) | Change the LED to grey.
:param on: Unused, here for API consistency with the other states
:return: None |
def user_exists(alias, **kwargs):
'''
Checks if user with given alias exists.
.. versionadded:: 2016.3.0
:param alias: user alias
:param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
:param _connection_password: Optional - zabbix password ... | Checks if user with given alias exists.
.. versionadded:: 2016.3.0
:param alias: user alias
:param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
:param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module... |
def wallet(self):
"""Fetch and return this user's default (only) Wallet."""
if not hasattr(self, '_wallet'):
wallet_resource = self.resource.default_wallet.get()
self._wallet = Wallet(wallet_resource, self.client)
return self._wallet | Fetch and return this user's default (only) Wallet. |
def _start_beacon(port=None):
"""Start a beacon thread within this process if no beacon is currently
running on this machine.
In general this is called automatically when an attempt is made to
advertise or discover. It might be convenient, though, to call this
function directly if you want to h... | Start a beacon thread within this process if no beacon is currently
running on this machine.
In general this is called automatically when an attempt is made to
advertise or discover. It might be convenient, though, to call this
function directly if you want to have a process whose only job is
t... |
def extract_view(view, decorators=None):
"""
Extract a view object out of any wrapping decorators.
"""
# http://stackoverflow.com/questions/9222129/python-inspect-getmembers-does-not-return-the-actual-function-when-used-with-dec
if decorators is None:
decorators = []
if getattr(view, 'fu... | Extract a view object out of any wrapping decorators. |
def pkg_security(pkgs):
"""Check packages before install or upgrade for security
reasons. Configuration file in the /etc/slpkg/pkg_security"""
security_packages = Utils().read_file("/etc/slpkg/pkg_security")
packages = []
for read in security_packages.splitlines():
read = read.lstrip()
... | Check packages before install or upgrade for security
reasons. Configuration file in the /etc/slpkg/pkg_security |
def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info):
"""Return an Expression that is True if and only if each simple optional filter is True.
Construct filters for each simple optional, that are True if and only if `edge_field` does
not exist in the `simple_optional_root_l... | Return an Expression that is True if and only if each simple optional filter is True.
Construct filters for each simple optional, that are True if and only if `edge_field` does
not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
Return an Expression that evaluates to Tr... |
def commit(cwd,
message,
opts='',
git_opts='',
user=None,
password=None,
filename=None,
ignore_retcode=False,
output_encoding=None):
'''
Interface to `git-commit(1)`_
cwd
The path to the git checkout
messag... | Interface to `git-commit(1)`_
cwd
The path to the git checkout
message
Commit message
opts
Any additional options to add to the command line, in a single string.
These opts will be added to the end of the git command being run.
.. note::
On the Salt CL... |
def is_iban(potential_iban):
"""
Check if a string is a valid IBAN number.
IBAN is described in ISO 13616-1:2007 Part 1.
Spaces are ignored.
# CODE
0 = always zero
b = BIC or National Bank code
c = Account number
i = holder's kennitala (national identification number)
k = IBAN... | Check if a string is a valid IBAN number.
IBAN is described in ISO 13616-1:2007 Part 1.
Spaces are ignored.
# CODE
0 = always zero
b = BIC or National Bank code
c = Account number
i = holder's kennitala (national identification number)
k = IBAN check digits
n = Branch number
t... |
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]:
'''Returns the simulation output size.'''
return self._cell.output_size | Returns the simulation output size. |
def _imm_resolve_deps(cls):
'''
_imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class
and edits the immutable metadata appropriately.
'''
dat = cls._pimms_immutable_data_
params = dat['params']
values = dat['values']
consts = dat['consts']
chec... | _imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class
and edits the immutable metadata appropriately. |
def order_by_t(func):
"""
Transformation for Sequence.order_by
:param func: order_by function
:return: transformation
"""
return Transformation(
'order_by({0})'.format(name(func)),
lambda sequence: sorted(sequence, key=func),
None
) | Transformation for Sequence.order_by
:param func: order_by function
:return: transformation |
def ensure_row_dep_constraint(
self, M_c, T, X_L, X_D, row1, row2, dependent=True, wrt=None,
max_iter=100, force=False):
"""Ensures dependencey or indepdendency between rows with respect to
columns."""
X_L_list, X_D_list, was_multistate = su.ensure_multistate(X_L, X_D)
... | Ensures dependencey or indepdendency between rows with respect to
columns. |
def split_every(iterable, n): # TODO: Remove this, or make it return a generator.
"""
A generator of n-length chunks of an input iterable
"""
i = iter(iterable)
piece = list(islice(i, n))
while piece:
yield piece
piece = list(islice(i, n)) | A generator of n-length chunks of an input iterable |
def choose_location_ids(gtfs, stop_ids=None):
"""Chooses a set of location ids (stations and their children) for
rendering a pathway graph.
If stop_ids is None, then all stations that have pathways are chosen.
If stop_ids is not None, then the station with this stop_id (or
with a child with this s... | Chooses a set of location ids (stations and their children) for
rendering a pathway graph.
If stop_ids is None, then all stations that have pathways are chosen.
If stop_ids is not None, then the station with this stop_id (or
with a child with this stop_id) is chosen. |
def getPlannedFor(self, plannedfor_name, projectarea_id=None,
projectarea_name=None, archived=False,
returned_properties=None):
"""Get :class:`rtcclient.models.PlannedFor` object by its name
:param plannedfor_name: the plannedfor name
:param projectar... | Get :class:`rtcclient.models.PlannedFor` object by its name
:param plannedfor_name: the plannedfor name
:param projectarea_id: the :class:`rtcclient.project_area.ProjectArea`
id
:param projectarea_name: the project area name
:param archived: (default is False) whether the pl... |
def off(self, motors=None, brake=True):
"""
Stop motors immediately. Configure motors to brake if ``brake`` is set.
"""
motors = motors if motors is not None else self.motors.values()
for motor in motors:
motor._set_brake(brake)
for motor in motors:
... | Stop motors immediately. Configure motors to brake if ``brake`` is set. |
def _handle_create(self, response, ignore_tombstone, auto_refresh):
'''
Handles response from self.create()
Args:
response (requests.models.Response): response object from self.create()
ignore_tombstone (bool): If True, will attempt creation, if tombstone exists (409), will delete tombstone and retry
''... | Handles response from self.create()
Args:
response (requests.models.Response): response object from self.create()
ignore_tombstone (bool): If True, will attempt creation, if tombstone exists (409), will delete tombstone and retry |
def search_reddit_names(self, query):
"""Return subreddits whose display name contains the query."""
data = {'query': query}
results = self.request_json(self.config['search_reddit_names'],
data=data)
return [self.get_subreddit(name) for name in results... | Return subreddits whose display name contains the query. |
def execfile(fname, _globals, _locals):
"""
Usage: execfile('path/to/file.py', globals(), locals())
"""
if os.path.exists(fname):
with open(fname) as f:
code = compile(f.read(), os.path.basename(fname), 'exec')
exec(code, _globals, _locals)
return True
els... | Usage: execfile('path/to/file.py', globals(), locals()) |
def _checkpoint_out_filenames(self):
"""
RAxML generates a crapload of checkpoint files so need to
walk directory to collect names of all of them.
"""
out_filenames = []
if self.Parameters['-n'].isOn():
out_name = str(self.Parameters['-n'].Value)
w... | RAxML generates a crapload of checkpoint files so need to
walk directory to collect names of all of them. |
def hostgroup_exists(name=None, groupid=None, node=None, nodeids=None, **kwargs):
'''
Checks if at least one host group that matches the given filter criteria exists.
.. versionadded:: 2016.3.0
:param name: names of the host groups
:param groupid: host group IDs
:param node: name of the node t... | Checks if at least one host group that matches the given filter criteria exists.
.. versionadded:: 2016.3.0
:param name: names of the host groups
:param groupid: host group IDs
:param node: name of the node the host groups must belong to (zabbix API < 2.4)
:param nodeids: IDs of the nodes the host... |
def address(address=None, begin=None, end=None):
'''
HTTP REQUEST
GET https://api.nasa.gov/planetary/earth/temperature/address
QUERY PARAMETERS
Parameter Type Default Description
text string n/a Address string
begin int 1880 beginning year for date range, inclusive
end int 2014 end ye... | HTTP REQUEST
GET https://api.nasa.gov/planetary/earth/temperature/address
QUERY PARAMETERS
Parameter Type Default Description
text string n/a Address string
begin int 1880 beginning year for date range, inclusive
end int 2014 end year for date range, inclusive
api_key string DEMO_KEY api.... |
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield ... | Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key. |
def do_ams_sto_put(endpoint, body, content_length):
'''Do a PUT request to the Azure Storage API and return JSON.
Args:
endpoint (str): Azure Media Services Initial Endpoint.
body (str): Azure Media Services Content Body.
content_length (str): Content_length.
Returns:
HTTP ... | Do a PUT request to the Azure Storage API and return JSON.
Args:
endpoint (str): Azure Media Services Initial Endpoint.
body (str): Azure Media Services Content Body.
content_length (str): Content_length.
Returns:
HTTP response. JSON body. |
def syncItems(self, client=None, clientId=None):
""" Returns an instance of :class:`plexapi.sync.SyncList` for specified client.
Parameters:
client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
clientId (str): an identifier of a client to ... | Returns an instance of :class:`plexapi.sync.SyncList` for specified client.
Parameters:
client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
clientId (str): an identifier of a client to query SyncItems for.
If both `client` and `clien... |
def sent_tokenize(text, tokenizer=None):
"""Convenience function for tokenizing sentences (not iterable).
If tokenizer is not specified, the default tokenizer NLTKPunktTokenizer()
is used (same behaviour as in the main `TextBlob`_ library).
This function returns the sentences as a generator object.
... | Convenience function for tokenizing sentences (not iterable).
If tokenizer is not specified, the default tokenizer NLTKPunktTokenizer()
is used (same behaviour as in the main `TextBlob`_ library).
This function returns the sentences as a generator object.
.. _TextBlob: http://textblob.readthedocs.org... |
def call_local_plugin_method(self, chname, plugin_name, method_name,
args, kwargs):
"""
Parameters
----------
chname : str
The name of the channel containing the plugin.
plugin_name : str
The name of the local plugin conta... | Parameters
----------
chname : str
The name of the channel containing the plugin.
plugin_name : str
The name of the local plugin containing the method to call.
method_name : str
The name of the method to call.
args : list or tuple
... |
def p_args(p):
"""
args : arg1
| args arg1
"""
if len(p) == 2:
p[0] = node.expr_list([p[1]])
else:
p[0] = p[1]
p[0].append(p[2]) | args : arg1
| args arg1 |
def initial_edges(self) -> iter:
"""Yield edges in the initial (uncompressed) graphs. Possible doublons."""
nodes_in = lambda n: ([n] if self.is_node(n) else self.nodes_in(n))
for node, succs in self.edges.items():
twos = tuple(two for succ in succs for two in nodes_in(succ))
... | Yield edges in the initial (uncompressed) graphs. Possible doublons. |
def process_response(self, request_id=None):
"""
Process the SAML Response sent by the IdP.
:param request_id: Is an optional argument. Is the ID of the AuthNRequest sent by this SP to the IdP.
:type request_id: string
:raises: OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND, when... | Process the SAML Response sent by the IdP.
:param request_id: Is an optional argument. Is the ID of the AuthNRequest sent by this SP to the IdP.
:type request_id: string
:raises: OneLogin_Saml2_Error.SAML_RESPONSE_NOT_FOUND, when a POST with a SAMLResponse is not found |
def _querystring(self):
"""Additional keyword arguments"""
kw = {"studyoid": self.studyoid}
if self.location_oid is not None:
kw["locationoid"] = self.location_oid
return kw | Additional keyword arguments |
def missingkeys_nonstandard(block, commdct, dtls, objectlist, afield='afiled %s'):
"""This is an object list where thre is no first field name
to give a hint of what the first field name should be"""
afield = 'afield %s'
for key_txt in objectlist:
key_i = dtls.index(key_txt.upper())
comm... | This is an object list where thre is no first field name
to give a hint of what the first field name should be |
def trim_dense(M, n_std=3, s_min=None, s_max=None):
"""By default, return a matrix stripped of component
vectors whose sparsity (i.e. total contact count on a
single column or row) deviates more than specified number
of standard deviations from the mean. Boolean variables
s_min and s_max act as abso... | By default, return a matrix stripped of component
vectors whose sparsity (i.e. total contact count on a
single column or row) deviates more than specified number
of standard deviations from the mean. Boolean variables
s_min and s_max act as absolute fixed values which override
such behaviour when sp... |
def get_queues(*queue_names, **kwargs):
"""
Return queue instances from specified queue names.
All instances must use the same Redis connection.
"""
from .settings import QUEUES
if len(queue_names) <= 1:
# Return "default" queue if no queue name is specified
# or one queue with ... | Return queue instances from specified queue names.
All instances must use the same Redis connection. |
def _mysql_aes_key(key):
"""Format key."""
final_key = bytearray(16)
for i, c in enumerate(key):
final_key[i % 16] ^= key[i] if PY3 else ord(key[i])
return bytes(final_key) | Format key. |
def begin(self):
"""Called once before using the session to check global step."""
self._global_step_tensor = tf.train.get_global_step()
if self._global_step_tensor is None:
raise RuntimeError(
'Global step should be created to use StepCounterHook.') | Called once before using the session to check global step. |
def parse_PISCES_output(pisces_output, path=False):
""" Takes the output list of a PISCES cull and returns in a usable dictionary.
Notes
-----
Designed for outputs of protein sequence redundancy culls conducted using the PISCES server.
http://dunbrack.fccc.edu/PISCES.php
G. Wang and R. L. Dunbr... | Takes the output list of a PISCES cull and returns in a usable dictionary.
Notes
-----
Designed for outputs of protein sequence redundancy culls conducted using the PISCES server.
http://dunbrack.fccc.edu/PISCES.php
G. Wang and R. L. Dunbrack, Jr. PISCES: a protein sequence culling server. Bioinfor... |
def _check_html_response(self, response):
"""
Checks if the API Key is valid and if the request returned a 200 status (ok)
"""
error1 = "Access to this form requires a valid API key. For more info see: http://www.clublog.org/need_api.php"
error2 = "Invalid or missing API Key... | Checks if the API Key is valid and if the request returned a 200 status (ok) |
def count_nulls(self, field):
"""
Count the number of null values in a column
"""
try:
n = self.df[field].isnull().sum()
except KeyError:
self.warning("Can not find column", field)
return
except Exception as e:
self.err(e, "... | Count the number of null values in a column |
def create_app_from_template(self, image_name, name, template, name_in_template,
other_images=None, oc_new_app_args=None, project=None):
"""
Helper function to create app from template
:param image_name: image to be used as builder image
:param name: name... | Helper function to create app from template
:param image_name: image to be used as builder image
:param name: name of app from template
:param template: str, url or local path to a template to use
:param name_in_template: dict, {repository:tag} image name used in the template
:pa... |
def empty(self):
"""
Clear out the buffer and return all data that was in it.
:return:
any data that was in the buffer prior to clearing it out, as a
`str`
"""
self._lock.acquire()
try:
out = self._buffer_tobytes()
del self... | Clear out the buffer and return all data that was in it.
:return:
any data that was in the buffer prior to clearing it out, as a
`str` |
def run(self):
'''
Initialise the runner function with the passed args, kwargs
'''
# Retrieve args/kwargs here; and fire up the processing using them
try:
transcript = self.fn(*self.args, **self.kwargs)
except:
traceback.print_exc()
ex... | Initialise the runner function with the passed args, kwargs |
def run(self, run_priority: Optional[int] = None):
"""
Run a pyquil program on the QPU.
This formats the classified data from the QPU server by stacking measured bits into
an array of shape (trials, classical_addresses). The mapping of qubit to
classical address is backed out fr... | Run a pyquil program on the QPU.
This formats the classified data from the QPU server by stacking measured bits into
an array of shape (trials, classical_addresses). The mapping of qubit to
classical address is backed out from MEASURE instructions in the program, so
only do measurements... |
def _get_label_uuid(xapi, rectype, label):
'''
Internal, returns label's uuid
'''
try:
return getattr(xapi, rectype).get_by_name_label(label)[0]
except Exception:
return False | Internal, returns label's uuid |
def api_ebuio_forum_get_topics_by_tag_for_user(request, key=None, hproPk=None, tag=None, userPk=None):
"""Return the list of topics using the tag pk"""
# Check API key (in order to be sure that we have a valid one and that's correspond to the project
if not check_api_key(request, key, hproPk):
retu... | Return the list of topics using the tag pk |
def using_bzr(cwd):
"""Test whether the directory cwd is contained in a bazaar repository."""
try:
bzr_log = shell_out(["bzr", "log"], cwd=cwd)
return True
except (CalledProcessError, OSError):
return False | Test whether the directory cwd is contained in a bazaar repository. |
def timeticks(tdiff):
"""
NOTE do NOT use "interval" or ticks are misaligned! use "bysecond" only!
"""
if isinstance(tdiff, xarray.DataArray): # len==1
tdiff = timedelta(seconds=tdiff.values / np.timedelta64(1, 's'))
assert isinstance(tdiff, timedelta), 'expecting datetime.timedelta'
... | NOTE do NOT use "interval" or ticks are misaligned! use "bysecond" only! |
def _filter_results(self, result, anchor):
"""Filter search results by checking category titles and dates"""
valid = True
try:
cat_tag = result.find('a', {'rel': 'category tag'}).string
title = anchor.string.lower()
date_tag = result.find('time').string
... | Filter search results by checking category titles and dates |
def get_next_section_start_line(self, data):
"""Get the starting line number of next section.
It will return -1 if no section was found.
The section is a section key (e.g. 'Parameters:')
then the content
:param data: a list of strings containing the docstring's lines
:re... | Get the starting line number of next section.
It will return -1 if no section was found.
The section is a section key (e.g. 'Parameters:')
then the content
:param data: a list of strings containing the docstring's lines
:returns: the index of next section else -1 |
def _mini_purge(self, spiderid, appid, crawlid):
'''
Actually purges the crawlid from the queue
@param spiderid: the spider id
@param appid: the app id
@param crawlid: the crawl id
@return: The number of requests purged
'''
total_purged = 0
match... | Actually purges the crawlid from the queue
@param spiderid: the spider id
@param appid: the app id
@param crawlid: the crawl id
@return: The number of requests purged |
def list(self, toa=None, show_history=False):
"""Return all revisions for this stack
:param int toa: The time of action as a UTC timestamp
:param bool show_history: Whether to show historical revisions
"""
if not toa:
toa = time.mktime(datetime.datetime.now().timetup... | Return all revisions for this stack
:param int toa: The time of action as a UTC timestamp
:param bool show_history: Whether to show historical revisions |
def main(self, din):
"""
:param din: bit in
:return: True if 100% correlation
"""
self.shr = self.shr[1:] + [din]
if self.cooldown > 0:
self.cooldown = self.cooldown - 1
return False
if self.shr != self.HEADER:
return False
... | :param din: bit in
:return: True if 100% correlation |
def featurize(*features):
""" Put features into proper MRO order. """
from functools import cmp_to_key
def compare_subclass(left, right):
if issubclass(left, right):
return -1
elif issubclass(right, left):
return 1
return 0
sorted_features = sorted(featu... | Put features into proper MRO order. |
def constrained_by(self):
"""
returns a list of parameters that constrain this parameter
"""
if self._is_constraint is None:
return []
params = []
for var in self.is_constraint._vars:
param = var.get_parameter()
if param.uniqueid != sel... | returns a list of parameters that constrain this parameter |
def create_vs(self, name, ip, port, protocol, profile, pool_name):
'''
Create a virtual server
'''
vs = self.bigIP.LocalLB.VirtualServer
vs_def = vs.typefactory.create('Common.VirtualServerDefinition')
vs_def.name = name
vs_def.address = ip
vs_def.port = ... | Create a virtual server |
def GetFormatSpecification(cls):
"""Retrieves the format specification.
Returns:
FormatSpecification: format specification.
"""
format_specification = specification.FormatSpecification(cls.NAME)
format_specification.AddNewSignature(b'BAAD', offset=0)
format_specification.AddNewSignature(b... | Retrieves the format specification.
Returns:
FormatSpecification: format specification. |
def load_template_help(builtin):
"""Loads the help for a given template"""
help_file = "templates/%s-help.yml" % builtin
help_file = resource_filename(__name__, help_file)
help_obj = {}
if os.path.exists(help_file):
help_data = yaml.safe_load(open(help_file))
if 'name' in help_data:... | Loads the help for a given template |
def parsePropertyValue(self):
""" Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem."""
pvlist = []
while self.index < self.datalen:
match = s... | Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem. |
def _unpack_msg(self, *msg):
"""
Convert all message elements to string
"""
l = []
for m in msg:
l.append(str(m))
return " ".join(l) | Convert all message elements to string |
def enabled_flags(self):
"""Return the objects for each individual set flag."""
if not self.value:
yield self.__flags_members__[0]
return
val = self.value
while val:
lowest_bit = val & -val
val ^= lowest_bit
yield self.__flags_... | Return the objects for each individual set flag. |
def serverless(self):
"""
Access the Serverless Twilio Domain
:returns: Serverless Twilio Domain
:rtype: twilio.rest.serverless.Serverless
"""
if self._serverless is None:
from twilio.rest.serverless import Serverless
self._serverless = Serverless... | Access the Serverless Twilio Domain
:returns: Serverless Twilio Domain
:rtype: twilio.rest.serverless.Serverless |
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
... | See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. |
def merge(self, other):
"""Merge other (dict or OrderedSet) into this environment.
Only works for basic types: str, list, tuple, dict and OrderedSet.
"""
for key, value in other.items():
if not key in self:
self[key] = value
elif isinstance(value,... | Merge other (dict or OrderedSet) into this environment.
Only works for basic types: str, list, tuple, dict and OrderedSet. |
def _choose_host(self):
"""
This method randomly chooses a server from the server list given as
a parameter to the parent PythonSDK
:return: The selected host to which the Sender will attempt to
connect
"""
# If a host hasn't been chosen yet or there is o... | This method randomly chooses a server from the server list given as
a parameter to the parent PythonSDK
:return: The selected host to which the Sender will attempt to
connect |
def sequence(db, chrom, start, end):
"""
return the sequence for a region using the UCSC DAS
server. note the start is 1-based
each feature will have it's own .sequence method which sends
the correct start and end to this function.
>>> sequence('hg18', 'chr2', 2223, 2230)
'caacttag'
"""... | return the sequence for a region using the UCSC DAS
server. note the start is 1-based
each feature will have it's own .sequence method which sends
the correct start and end to this function.
>>> sequence('hg18', 'chr2', 2223, 2230)
'caacttag' |
def _stamped_deps(stamp_directory, func, dependencies, *args, **kwargs):
"""Run func, assumed to have dependencies as its first argument."""
if not isinstance(dependencies, list):
jobstamps_dependencies = [dependencies]
else:
jobstamps_dependencies = dependencies
kwargs.update({
... | Run func, assumed to have dependencies as its first argument. |
def forecast(self, ts, nfuture):
"""
Provided fitted values for timeseries ts as 1-step ahead forecasts, based on current
model parameters, and then provide `nFuture` periods of forecast. We assume AR terms
prior to the start of the series are equal to the model's intercept term (or 0.0,... | Provided fitted values for timeseries ts as 1-step ahead forecasts, based on current
model parameters, and then provide `nFuture` periods of forecast. We assume AR terms
prior to the start of the series are equal to the model's intercept term (or 0.0, if fit
without and intercept term).Meanwhile... |
def pose_inv(pose):
"""
Computes the inverse of a homogenous matrix corresponding to the pose of some
frame B in frame A. The inverse is the pose of frame A in frame B.
Args:
pose: numpy array of shape (4,4) for the pose to inverse
Returns:
numpy array of shape (4,4) for the invers... | Computes the inverse of a homogenous matrix corresponding to the pose of some
frame B in frame A. The inverse is the pose of frame A in frame B.
Args:
pose: numpy array of shape (4,4) for the pose to inverse
Returns:
numpy array of shape (4,4) for the inverse pose |
def keyserver(self):
    """
    The preferred key server specified in this signature, if any. Otherwise, an empty ``str``.
    """
    subpackets = self._signature.subpackets
    if 'PreferredKeyServer' not in subpackets:
        return ''
    # NOTE(review): the 'h_' key prefix appears to select the hashed copy of
    # the subpacket — confirm against the subpacket container implementation.
    return next(iter(subpackets['h_PreferredKeyServer'])).uri
def by_resource_user_and_perm(
cls, user_id, perm_name, resource_id, db_session=None
):
"""
return all instances by user name, perm name and resource id
:param user_id:
:param perm_name:
:param resource_id:
:param db_session:
:return:
"""
... | return all instances by user name, perm name and resource id
:param user_id:
:param perm_name:
:param resource_id:
:param db_session:
:return: |
def cache_info(self):
    """Report repertoire cache statistics."""
    caches = {
        'single_node_repertoire': self._single_node_repertoire_cache,
        'repertoire': self._repertoire_cache,
        'mice': self._mice_cache,
    }
    # Snapshot each cache's stats under its reporting key.
    return {name: cache.info() for name, cache in caches.items()}
def _set_pattern_columns(self, path_column):
"""Get a column of values for each field in pattern
"""
try:
# CategoricalDtype allows specifying known categories when
# creating objects. It was added in pandas 0.21.0.
from pandas.api.types import CategoricalDtyp... | Get a column of values for each field in pattern |
def set_feature_flag_users(self, user_id, feature, state=None):
"""
Set feature flag.
Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
a feature flag for the same feature in any state other than "allowed".
"""
p... | Set feature flag.
Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
a feature flag for the same feature in any state other than "allowed". |
def _make_before_request(self):
"""
Generate the before_request function to be added to the app.
Currently this function is static, however it is very likely we will
need to programmatically generate this function in the future.
"""
def before_request():
"""
... | Generate the before_request function to be added to the app.
Currently this function is static, however it is very likely we will
need to programmatically generate this function in the future. |
def get_field_type(field):
"""
Returns field type/possible values.
"""
if isinstance(field, core_filters.MappedMultipleChoiceFilter):
return ' | '.join(['"%s"' % f for f in sorted(field.mapped_to_model)])
if isinstance(field, OrderingFilter) or isinstance(field, ChoiceFilter):
return... | Returns field type/possible values. |
def _from_proto_sparse_tensor(sparse_tensor_proto, process_leafs):
"""Deserializes a `tf.SparseTensor` from `sparse_tensor_proto`.
Args:
sparse_tensor_proto: A proto representing a `tf.SparseTensor`.
process_leafs: A function to be applied to the leaf valued of the nested
structure.
Returns:
A... | Deserializes a `tf.SparseTensor` from `sparse_tensor_proto`.
Args:
sparse_tensor_proto: A proto representing a `tf.SparseTensor`.
process_leafs: A function to be applied to the leaf valued of the nested
structure.
Returns:
An instance of `tf.SparseTensor`. |
def angle(x, y):
    """Return the angle between vectors x and y in degrees.

    Args:
        x: First vector (array-like).
        y: Second vector (array-like).

    Returns:
        The angle between the vectors, in degrees, in the range [0, 180].
    """
    cos_theta = dot(x, y) / (norm(x) * norm(y))
    # Floating-point round-off can push the ratio marginally outside
    # [-1, 1], which would make arccos return NaN for (anti)parallel
    # vectors; clamp it back into the valid domain first.
    cos_theta = max(-1.0, min(1.0, cos_theta))
    return arccos(cos_theta) * 180. / pi
def get_changesets(self, project=None, max_comment_length=None, skip=None, top=None, orderby=None, search_criteria=None):
"""GetChangesets.
Retrieve Tfvc Changesets
:param str project: Project ID or project name
:param int max_comment_length: Include details about associated work items i... | GetChangesets.
Retrieve Tfvc Changesets
:param str project: Project ID or project name
:param int max_comment_length: Include details about associated work items in the response. Default: null
:param int skip: Number of results to skip. Default: null
:param int top: The maximum n... |
def timeline(self):
    """Get timeline, reloading the site if needed."""
    stored_rev = int(self.db.get('site:rev'))
    # Stored revision differs from ours — reload before serving.
    if stored_rev != self.revision:
        self.reload_site()
    return self._timeline
def _retry(n, f, *args, **kwargs):
'''Try to call f(*args, **kwargs) "n" times before giving up. Wait
2**n seconds before retries.'''
for i in range(n):
try:
return f(*args, **kwargs)
except Exception as exc:
if i == n - 1:
log.error(
... | Try to call f(*args, **kwargs) "n" times before giving up. Wait
2**n seconds before retries. |
def _pct_escape_handler(err):
'''
Encoding error handler that does percent-escaping of Unicode, to be used
with codecs.register_error
TODO: replace use of this with urllib.parse.quote as appropriate
'''
chunk = err.object[err.start:err.end]
replacements = _pct_encoded_replacements(chunk)
... | Encoding error handler that does percent-escaping of Unicode, to be used
with codecs.register_error
TODO: replace use of this with urllib.parse.quote as appropriate |
def _get_username_hostname():
'''Best attempt to get username and hostname, returns "na" if problem.'''
user = 'na'
host = 'na'
try:
user = getpass.getuser()
except Exception:
pass
try:
host = socket.gethostname()
except Exception:
pass
return user, host | Best attempt to get username and hostname, returns "na" if problem. |
def new_closure(vals):
    """Build a new closure holding ``vals``, one cell per value.

    Args:
        vals: A sequence of objects to capture.

    Returns:
        A tuple of cell objects whose ``cell_contents`` are the elements of
        ``vals`` in order, or ``None`` when ``vals`` is empty (a function
        with no free variables has a ``None`` closure).
    """
    if not vals:
        return None

    def _make_cell(value):
        # Capture ``value`` as a free variable of a nested function to
        # materialize a real cell object, instead of eval()-ing generated
        # lambda source (eval on constructed strings is fragile and a
        # security smell). ``__closure__`` exists on both Python 2.6+ and 3,
        # so no version branch is needed.
        def _inner():
            return value
        return _inner.__closure__[0]

    return tuple(_make_cell(v) for v in vals)
def start(self):
    """Start this gateway agent.

    Runs the prepare step, then installs and starts a periodic callback
    that invokes ``_disconnect_hanging_devices`` every 1000 ms on this
    agent's IO loop.
    """
    self._prepare()
    sweeper = tornado.ioloop.PeriodicCallback(
        self._disconnect_hanging_devices, 1000, self._loop)
    self._disconnector = sweeper
    sweeper.start()
def _unpack_model(self, om):
""" Returns data from the OPF model.
"""
buses = om.case.connected_buses
branches = om.case.online_branches
gens = om.case.online_generators
cp = om.get_cost_params()
# Bf = om._Bf
# Pfinj = om._Pfinj
return buses, bra... | Returns data from the OPF model. |
def has_attribute(self, attribute: str) -> bool:
"""Whether the node has an attribute with the given name.
Use only if is_mapping() returns True.
Args:
attribute: The name of the attribute to check for.
Returns:
True iff the attribute is present.
"""
... | Whether the node has an attribute with the given name.
Use only if is_mapping() returns True.
Args:
attribute: The name of the attribute to check for.
Returns:
True iff the attribute is present. |
def _push_render(self):
    """Render the plot with bokeh.io and push it to the notebook."""
    target = self.handle
    # Re-render into the existing notebook handle rather than a new cell.
    bokeh.io.push_notebook(handle=target)
    # Record the wall-clock time of this refresh.
    self.last_update = time.time()
def sum(x, axis=None, keepdims=False):
"""Reduction along axes with sum operation.
Args:
x (Variable): An input variable.
axis (None, int or tuple of ints): Axis or axes along which the sum is
calculated. Passing the default value `None` will reduce all dimensions.
keepdims ... | Reduction along axes with sum operation.
Args:
x (Variable): An input variable.
axis (None, int or tuple of ints): Axis or axes along which the sum is
calculated. Passing the default value `None` will reduce all dimensions.
keepdims (bool): Flag whether the reduced axes are kept... |
def read(self, request, filter=None, id=None):
""" Build a query based on the filter or the idenfier, send the query
to the database, and return a Feature or a FeatureCollection. """
ret = None
if id is not None:
o = self.Session().query(self.mapped_class).get(id)
... | Build a query based on the filter or the idenfier, send the query
to the database, and return a Feature or a FeatureCollection. |
def get_level_fmt(self, level):
"""Get format for log level."""
key = None
if level == logging.DEBUG:
key = 'debug'
elif level == logging.INFO:
key = 'info'
elif level == logging.WARNING:
key = 'warning'
elif level == logging.ERROR:
... | Get format for log level. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.