positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def getMibSymbol(self):
"""Returns MIB variable symbolic identification.
Returns
-------
str
MIB module name
str
MIB variable symbolic name
: :py:class:`~pysnmp.proto.rfc1902.ObjectName`
class instance representing MIB variable inst... | Returns MIB variable symbolic identification.
Returns
-------
str
MIB module name
str
MIB variable symbolic name
: :py:class:`~pysnmp.proto.rfc1902.ObjectName`
class instance representing MIB variable instance index.
Raises
... |
def fvga(a, i, g, n):
""" This function is for the future value of an annuity
with growth rate. It is the future value of a growing
stream of periodic investments.
a = Periodic Investment (1000)
i = interest rate as decimal (.0675)
g = the growth rate (.05)
n = the number of compound per... | This function is for the future value of an annuity
with growth rate. It is the future value of a growing
stream of periodic investments.
a = Periodic Investment (1000)
i = interest rate as decimal (.0675)
g = the growth rate (.05)
n = the number of compound periods (20)
Example: fv(1000, .... |
def idle_task(self):
'''handle missing parameters'''
self.check_new_target_system()
sysid = self.get_sysid()
self.pstate[sysid].vehicle_name = self.vehicle_name
self.pstate[sysid].fetch_check(self.master) | handle missing parameters |
def dict_subset(dict_, keys, default=util_const.NoParam):
r"""
Args:
dict_ (dict):
keys (list):
Returns:
dict: subset dictionary
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> dict_ = {'K': 3, '... | r"""
Args:
dict_ (dict):
keys (list):
Returns:
dict: subset dictionary
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> dict_ = {'K': 3, 'dcvs_clip_max': 0.2, 'p': 0.1}
>>> keys = ['K', 'dcvs_... |
def fix_germline_samplename(in_file, sample_name, data):
"""Replace germline sample names, originally from normal BAM file.
"""
out_file = "%s-fixnames%s" % utils.splitext_plus(in_file)
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
sample_f... | Replace germline sample names, originally from normal BAM file. |
def group_by(self, *args):
"""
This method lets you specify the grouping fields explicitly. The `args` must
be names of grouping fields or calculated fields that this queryset was
created with.
"""
for name in args:
assert name in self._fields or name in self.... | This method lets you specify the grouping fields explicitly. The `args` must
be names of grouping fields or calculated fields that this queryset was
created with. |
def face_encodings(face_image, known_face_locations=None, num_jitters=1):
"""
Given an image, return the 128-dimension face encoding for each face in the image.
:param face_image: The image that contains one or more faces
:param known_face_locations: Optional - the bounding boxes of each face if you al... | Given an image, return the 128-dimension face encoding for each face in the image.
:param face_image: The image that contains one or more faces
:param known_face_locations: Optional - the bounding boxes of each face if you already know them.
:param num_jitters: How many times to re-sample the face when cal... |
async def xack(self, name: str, group: str, stream_id: str) -> int:
"""
[NOTICE] Not officially released yet
XACK is the command that allows a consumer to mark a pending message as correctly processed.
:param name: name of the stream
:param group: name of the consumer group
... | [NOTICE] Not officially released yet
XACK is the command that allows a consumer to mark a pending message as correctly processed.
:param name: name of the stream
:param group: name of the consumer group
:param stream_id: id of the entry the consumer wants to mark
:return: numbe... |
def _set_mpls_adjust_bandwidth_lsp(self, v, load=False):
"""
Setter method for mpls_adjust_bandwidth_lsp, mapped from YANG variable /brocade_mpls_rpc/mpls_adjust_bandwidth_lsp (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_mpls_adjust_bandwidth_lsp is considered a... | Setter method for mpls_adjust_bandwidth_lsp, mapped from YANG variable /brocade_mpls_rpc/mpls_adjust_bandwidth_lsp (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_mpls_adjust_bandwidth_lsp is considered as a private
method. Backends looking to populate this variable sh... |
def update(self):
"""Update reviewers' anomalous scores and products' summaries.
Returns:
maximum absolute difference between old summary and new one, and
old anomalous score and new one.
"""
if self.updated:
return 0
res = super(BipartiteGraph, ... | Update reviewers' anomalous scores and products' summaries.
Returns:
maximum absolute difference between old summary and new one, and
old anomalous score and new one. |
def je(self):
r"""Execute operations, returns a string ( '' if the result is None, join='').
This works like :attr:`j` except it returns an empty string if the execution
result is None.
Examples:
>>> echo(None).je
''
"""
text = self._process()
... | r"""Execute operations, returns a string ( '' if the result is None, join='').
This works like :attr:`j` except it returns an empty string if the execution
result is None.
Examples:
>>> echo(None).je
'' |
def verify(path):
"""Verify that `path` is a zip file with Phasics TIFF files"""
valid = False
try:
zf = zipfile.ZipFile(path)
except (zipfile.BadZipfile, IsADirectoryError):
pass
else:
names = sorted(zf.namelist())
names = [nn for ... | Verify that `path` is a zip file with Phasics TIFF files |
def astype(self, dtype):
"""Return a copy of this element with new ``dtype``.
Parameters
----------
dtype :
Scalar data type of the returned space. Can be provided
in any way the `numpy.dtype` constructor understands, e.g.
as built-in type or as a str... | Return a copy of this element with new ``dtype``.
Parameters
----------
dtype :
Scalar data type of the returned space. Can be provided
in any way the `numpy.dtype` constructor understands, e.g.
as built-in type or as a string. Data types with non-trivial
... |
def load_subcommand(subparsers):
"""Load this subcommand
"""
parser_analyze = subparsers.add_parser('analyze', help='Analyze uwsgi log to get report')
parser_analyze.add_argument('-f', '--filepath', type=argparse.FileType('r'), dest='filepath',
help='Path of uwsgi log fil... | Load this subcommand |
def meta_wrapped(f):
"""
Add a field label, errors, and a description (if it exists) to
a field.
"""
@wraps(f)
def wrapped(self, field, *args, **kwargs):
html = "{label}{errors}{original}<small>{description}</small>".format(
label=field.label(class_='control-label'),
... | Add a field label, errors, and a description (if it exists) to
a field. |
def _md5_compute(self, f):
'''
Computes the checksum of the file
'''
md5 = hashlib.md5()
block_size = 16384
f.seek(0, 2)
remaining = f.tell()
f.seek(0)
while (remaining > block_size):
data = f.read(block_size)
remaining = r... | Computes the checksum of the file |
def choice(*es):
"""
Create a PEG function to match an ordered choice.
"""
msg = 'Expected one of: {}'.format(', '.join(map(repr, es)))
def match_choice(s, grm=None, pos=0):
errs = []
for e in es:
try:
return e(s, grm, pos)
except PegreError as... | Create a PEG function to match an ordered choice. |
def fcontext_add_or_delete_policy(action, name, filetype=None, sel_type=None, sel_user=None, sel_level=None):
'''
.. versionadded:: 2017.7.0
Adds or deletes the SELinux policy for a given filespec and other optional parameters.
Returns the result of the call to semanage.
Note that you don't have ... | .. versionadded:: 2017.7.0
Adds or deletes the SELinux policy for a given filespec and other optional parameters.
Returns the result of the call to semanage.
Note that you don't have to remove an entry before setting a new
one for a given filespec and filetype, as adding one with semanage
automat... |
def compile_temp(d, key, value):
"""
Compiles temporary dictionaries for metadata. Adds a new entry to an existing dictionary.
:param dict d:
:param str key:
:param any value:
:return dict:
"""
if not value:
d[key] = None
elif len(value) == 1:
d[key] = value[0]
el... | Compiles temporary dictionaries for metadata. Adds a new entry to an existing dictionary.
:param dict d:
:param str key:
:param any value:
:return dict: |
def wait(self, num_slaves, timeout=0):
"""his command blocks the current client until all the previous write
commands are successfully transferred and acknowledged by at least the
specified number of slaves. If the timeout, specified in milliseconds,
is reached, the command returns even ... | his command blocks the current client until all the previous write
commands are successfully transferred and acknowledged by at least the
specified number of slaves. If the timeout, specified in milliseconds,
is reached, the command returns even if the specified number of slaves
were not... |
def verify_env(
dirs,
user,
permissive=False,
pki_dir='',
skip_extra=False,
root_dir=ROOT_DIR):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
if salt.utils.platform.is_windows():
return win_v... | Verify that the named directories are in place and that the environment
can shake the salt |
def _friends_bootstrap_radius(args):
"""Internal method used to compute the radius (half-side-length) for each
ball (cube) used in :class:`RadFriends` (:class:`SupFriends`) using
bootstrapping."""
# Unzipping.
points, ftype = args
rstate = np.random
# Resampling.
npoints, ndim = points... | Internal method used to compute the radius (half-side-length) for each
ball (cube) used in :class:`RadFriends` (:class:`SupFriends`) using
bootstrapping. |
def convert_camel_case_to_snake_case(name):
"""Convert CamelCase to snake_case."""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() | Convert CamelCase to snake_case. |
def get_redis(**kwargs):
"""Returns a redis client instance.
Parameters
----------
redis_cls : class, optional
Defaults to ``redis.StrictRedis``.
url : str, optional
If given, ``redis_cls.from_url`` is used to instantiate the class.
**kwargs
Extra parameters to be passed... | Returns a redis client instance.
Parameters
----------
redis_cls : class, optional
Defaults to ``redis.StrictRedis``.
url : str, optional
If given, ``redis_cls.from_url`` is used to instantiate the class.
**kwargs
Extra parameters to be passed to the ``redis_cls`` class.
... |
def close(self):
"""Closes out the stream."""
_LOGGER.debug("Closing stream")
if not hasattr(self, "footer"):
raise SerializationError("Footer not read")
super(StreamDecryptor, self).close() | Closes out the stream. |
def has(self, querypart_name, value=None):
"""Returns True if `querypart_name` with `value` is set.
For example you can check if you already used condition by `sql.has('where')`.
If you want to check for more information, for example if that condition
also contain ID, you can do this b... | Returns True if `querypart_name` with `value` is set.
For example you can check if you already used condition by `sql.has('where')`.
If you want to check for more information, for example if that condition
also contain ID, you can do this by `sql.has('where', 'id')`. |
async def runItemCmdr(item, outp=None, **opts):
'''
Create a cmdr for the given item and run the cmd loop.
Example:
runItemCmdr(foo)
'''
cmdr = await getItemCmdr(item, outp=outp, **opts)
await cmdr.runCmdLoop() | Create a cmdr for the given item and run the cmd loop.
Example:
runItemCmdr(foo) |
def _make_options(x):
"""Standardize the options tuple format.
The returned tuple should be in the format (('label', value), ('label', value), ...).
The input can be
* an iterable of (label, value) pairs
* an iterable of values, and labels will be generated
"""
# Check if x is a mapping of... | Standardize the options tuple format.
The returned tuple should be in the format (('label', value), ('label', value), ...).
The input can be
* an iterable of (label, value) pairs
* an iterable of values, and labels will be generated |
def rowCount(self, index=None):
"""Get number of rows in the header."""
if self.axis == 0:
return max(1, self._shape[0])
else:
if self.total_rows <= self.rows_loaded:
return self.total_rows
else:
return self.rows_loaded | Get number of rows in the header. |
def getStrips(self, maxstrips=None):
"""Get comic strips."""
if maxstrips:
word = u"strip" if maxstrips == 1 else "strips"
msg = u'Retrieving %d %s' % (maxstrips, word)
else:
msg = u'Retrieving all strips'
if self.indexes:
if len(self.index... | Get comic strips. |
def eval_detection_voc(pred_boxlists, gt_boxlists, iou_thresh=0.5, use_07_metric=False):
"""Evaluate on voc dataset.
Args:
pred_boxlists(list[BoxList]): pred boxlist, has labels and scores fields.
gt_boxlists(list[BoxList]): ground truth boxlist, has labels field.
iou_thresh: iou thresh
... | Evaluate on voc dataset.
Args:
pred_boxlists(list[BoxList]): pred boxlist, has labels and scores fields.
gt_boxlists(list[BoxList]): ground truth boxlist, has labels field.
iou_thresh: iou thresh
use_07_metric: boolean
Returns:
dict represents the results |
def lock(self):
"""Lock the device."""
success = self.set_status(CONST.STATUS_LOCKCLOSED_INT)
if success:
self._json_state['status'] = CONST.STATUS_LOCKCLOSED
return success | Lock the device. |
def human_and_00(X, y, model_generator, method_name):
""" AND (false/false)
This tests how well a feature attribution method agrees with human intuition
for an AND operation combined with linear effects. This metric deals
specifically with the question of credit allocation for the following function
... | AND (false/false)
This tests how well a feature attribution method agrees with human intuition
for an AND operation combined with linear effects. This metric deals
specifically with the question of credit allocation for the following function
when all three inputs are true:
if fever: +2 points
... |
def request_get_next(request, default_next):
"""
get next url form request
order: POST.next GET.next HTTP_REFERER, default_next
"""
next_url = request.POST.get('next')\
or request.GET.get('next')\
or request.META.get('HTTP_REFERER')\
or default_next
return next_url | get next url form request
order: POST.next GET.next HTTP_REFERER, default_next |
def _ConvertMethodType(self, methodType):
"""
Convert vmodl.reflect.DynamicTypeManager.MethodTypeInfo to pyVmomi method
definition
"""
if methodType:
name = methodType.name
wsdlName = methodType.wsdlName
version = methodType.version
params = self._Filter... | Convert vmodl.reflect.DynamicTypeManager.MethodTypeInfo to pyVmomi method
definition |
def get_env_variable(var_name, default=None):
"""Get the environment variable or raise exception."""
try:
return os.environ[var_name]
except KeyError:
if default is not None:
return default
else:
error_msg = 'The environment variable {} was missing, abort...'\... | Get the environment variable or raise exception. |
def if_pandas_df_convert_to_numpy(obj):
"""Return a Numpy array from a Pandas dataframe.
Iterating over a DataFrame has weird side effects, such as the first
row being the column names. Converting to Numpy is more safe.
"""
if pd is not None and isinstance(obj, pd.DataFrame):
return obj.val... | Return a Numpy array from a Pandas dataframe.
Iterating over a DataFrame has weird side effects, such as the first
row being the column names. Converting to Numpy is more safe. |
def getTargetNamespace(self):
"""return targetNamespace
"""
parent = self
targetNamespace = 'targetNamespace'
tns = self.attributes.get(targetNamespace)
while not tns and parent and parent._parent is not None:
parent = parent._parent()
tns = parent... | return targetNamespace |
def flash(path_to_python=None, paths_to_microbits=None,
path_to_runtime=None, python_script=None, minify=False):
"""
Given a path to or source of a Python file will attempt to create a hex
file and then flash it onto the referenced BBC micro:bit.
If the path_to_python & python_script are unsp... | Given a path to or source of a Python file will attempt to create a hex
file and then flash it onto the referenced BBC micro:bit.
If the path_to_python & python_script are unspecified it will simply flash
the unmodified MicroPython runtime onto the device.
If used, the python_script argument should be... |
def create_geometry(self, input_geometry, dip, upper_depth, lower_depth,
mesh_spacing=1.0):
'''
If geometry is defined as a numpy array then create instance of
nhlib.geo.line.Line class, otherwise if already instance of class
accept class
:param input_geo... | If geometry is defined as a numpy array then create instance of
nhlib.geo.line.Line class, otherwise if already instance of class
accept class
:param input_geometry:
Trace (line) of the fault source as either
i) instance of nhlib.geo.line.Line class
ii) numpy... |
def upgrade_tools_all(call=None):
'''
To upgrade VMware Tools on all virtual machines present in
the specified provider
.. note::
If the virtual machine is running Windows OS, this function
will attempt to suppress the automatic reboot caused by a
VMware Tools upgrade.
CLI... | To upgrade VMware Tools on all virtual machines present in
the specified provider
.. note::
If the virtual machine is running Windows OS, this function
will attempt to suppress the automatic reboot caused by a
VMware Tools upgrade.
CLI Example:
.. code-block:: bash
s... |
def extract_causal_relations(self):
"""Extract causal relations as Statements."""
# Get the extractions that are labeled as directed and causal
relations = [e for e in self.doc.extractions if
'DirectedRelation' in e['labels'] and
'Causal' in e['labels']]... | Extract causal relations as Statements. |
def fromJSON(value):
"""loads the GP object from a JSON string """
j = json.loads(value)
v = GPString()
if "defaultValue" in j:
v.value = j['defaultValue']
else:
v.value = j['value']
if 'paramName' in j:
v.paramName = j['paramName']
... | loads the GP object from a JSON string |
def _estimate_progress(self):
"""
estimates the current progress that is then used in _receive_signal
:return: current progress in percent
"""
estimate = True
# ==== get the current subscript and the time it takes to execute it =====
current_subscript = self._cur... | estimates the current progress that is then used in _receive_signal
:return: current progress in percent |
def _on_timeout():
"""Invoked periodically to ensure that metrics that have been collected
are submitted to InfluxDB.
:rtype: tornado.concurrent.Future or None
"""
global _buffer_size
LOGGER.debug('No metrics submitted in the last %.2f seconds',
_timeout_interval / 1000.0)
... | Invoked periodically to ensure that metrics that have been collected
are submitted to InfluxDB.
:rtype: tornado.concurrent.Future or None |
def process_messages(self):
"""
Read from the incoming_message_mailbox and report to the storage backend
based on the first message found there.
Returns: None
"""
try:
msg = self.msgbackend.pop(self.incoming_message_mailbox)
self.handle_incoming_me... | Read from the incoming_message_mailbox and report to the storage backend
based on the first message found there.
Returns: None |
def button_clicked(self, button):
"""Action when button was clicked.
Parameters
----------
button : instance of QPushButton
which button was pressed
"""
if button is self.idx_ok:
fn = Path(self.filename)
xp_format = self.xp... | Action when button was clicked.
Parameters
----------
button : instance of QPushButton
which button was pressed |
def get(self, timeout=None):
"""Return status"""
status = self.status
if status >= COMPLETED:
return status
else:
self.wait(timeout)
return self.status | Return status |
def abort_expired_batches(self, request_timeout_ms, cluster):
"""Abort the batches that have been sitting in RecordAccumulator for
more than the configured request_timeout due to metadata being
unavailable.
Arguments:
request_timeout_ms (int): milliseconds to timeout
... | Abort the batches that have been sitting in RecordAccumulator for
more than the configured request_timeout due to metadata being
unavailable.
Arguments:
request_timeout_ms (int): milliseconds to timeout
cluster (ClusterMetadata): current metadata for kafka cluster
... |
def get_tick(self, name):
"""Check the config variables to see if there is a configurable tick.
Sensor Graph has a built-in 10 second tick that is sent every 10
seconds to allow for triggering timed events. There is a second
'user' tick that is generated internally by the sensorgraph c... | Check the config variables to see if there is a configurable tick.
Sensor Graph has a built-in 10 second tick that is sent every 10
seconds to allow for triggering timed events. There is a second
'user' tick that is generated internally by the sensorgraph compiler
and used for fast ope... |
def _on_interface_opened(self, success, result, failure_reason, context, next_characteristic=None):
"""Callback function called when the notification related to an interface has been enabled.
It is executed in the baBLE working thread: should not be blocking.
Args:
success (bool): A... | Callback function called when the notification related to an interface has been enabled.
It is executed in the baBLE working thread: should not be blocking.
Args:
success (bool): A bool indicating that the operation is successful or not
result (dict): Information (if successful)... |
def is_replication_enabled(host=None, core_name=None):
'''
SLAVE CALL
Check for errors, and determine if a slave is replicating or not.
host : str (None)
The solr host to query. __opts__['host'] is default.
core_name : str (None)
The name of the solr core if using cores. Leave this ... | SLAVE CALL
Check for errors, and determine if a slave is replicating or not.
host : str (None)
The solr host to query. __opts__['host'] is default.
core_name : str (None)
The name of the solr core if using cores. Leave this blank if you are
not using cores or if you want to check al... |
def build(self, title, text, img_url):
"""
:param title: Title of the card
:param text: Description of the card
:param img_url: Image of the card
"""
super(ImageCard, self).build()
self.title = Title(id=self.id + "-title", text=title, classname="card-title", size=... | :param title: Title of the card
:param text: Description of the card
:param img_url: Image of the card |
def evals_get(self, service_staff_id, start_date, end_date, session):
'''taobao.wangwang.eservice.evals.get 获取评价详细
根据用户id查询用户对应的评价详细情况, 主账号id可以查询店铺内子账号的评价 组管理员可以查询组内账号的评价 非管理员的子账号可以查自己的评价'''
request = TOPRequest('taobao.wangwang.eservice.evals.get')
request['service_staff_id'] =... | taobao.wangwang.eservice.evals.get 获取评价详细
根据用户id查询用户对应的评价详细情况, 主账号id可以查询店铺内子账号的评价 组管理员可以查询组内账号的评价 非管理员的子账号可以查自己的评价 |
def _CRsweep(A, B, Findex, Cindex, nu, thetacr, method):
"""Perform CR sweeps on a target vector.
Internal function called by CR. Performs habituated or concurrent
relaxation sweeps on target vector. Stops when either (i) very fast
convergence, CF < 0.1*thetacr, are observed, or at least a given number... | Perform CR sweeps on a target vector.
Internal function called by CR. Performs habituated or concurrent
relaxation sweeps on target vector. Stops when either (i) very fast
convergence, CF < 0.1*thetacr, are observed, or at least a given number
of sweeps have been performed and the relative change in CF... |
def _load_torrents_directory(self):
"""
Load torrents directory
If it does not exist yet, this request will cause the system to create
one
"""
r = self._req_lixian_get_id(torrent=True)
self._downloads_directory = self._load_directory(r['cid']) | Load torrents directory
If it does not exist yet, this request will cause the system to create
one |
def get_by_id(self, id):
"""Return user info by user id."""
with contextlib.closing(self.database.cursor()) as cursor:
cursor.execute('SELECT id, name FROM users WHERE id=?', (id,))
return cursor.fetchone() | Return user info by user id. |
def _recode_for_categories(codes, old_categories, new_categories):
"""
Convert a set of codes for to a new set of categories
Parameters
----------
codes : array
old_categories, new_categories : Index
Returns
-------
new_codes : array
Examples
--------
>>> old_cat = pd.... | Convert a set of codes for to a new set of categories
Parameters
----------
codes : array
old_categories, new_categories : Index
Returns
-------
new_codes : array
Examples
--------
>>> old_cat = pd.Index(['b', 'a', 'c'])
>>> new_cat = pd.Index(['a', 'b'])
>>> codes = n... |
def inc_n(self, n, exception=None): # type: (int, Optional[ParseError]) -> bool
"""
Increments the parser by n characters
if the end of the input has not been reached.
"""
return self._src.inc_n(n=n, exception=exception) | Increments the parser by n characters
if the end of the input has not been reached. |
def register_provider(cls, provider):
"""Register method to keep list of providers."""
def decorator(subclass):
"""Register as decorator function."""
cls._providers[provider] = subclass
subclass.name = provider
return subclass
return decorator | Register method to keep list of providers. |
def get_renderer(self, with_layout=True):
""" Get the default renderer """
if with_layout and self.is_lti():
return self._default_renderer_lti
elif with_layout:
return self._default_renderer
else:
return self._default_renderer_nolayout | Get the default renderer |
def parse(self):
'''parse is the base function for parsing the recipe, whether it be
a Dockerfile or Singularity recipe. The recipe is read in as lines,
and saved to a list if needed for the future. If the client has
it, the recipe type specific _parse function is called.
... | parse is the base function for parsing the recipe, whether it be
a Dockerfile or Singularity recipe. The recipe is read in as lines,
and saved to a list if needed for the future. If the client has
it, the recipe type specific _parse function is called.
Instructions for makin... |
def guestfs_conn_mount_ro(disk_path, disk_root, retries=5, wait=1):
"""
Open a GuestFS handle with `disk_path` and try mounting the root
filesystem. `disk_root` is a hint where it should be looked and will
only be used if GuestFS will not be able to deduce it independently.
Note that mounting a liv... | Open a GuestFS handle with `disk_path` and try mounting the root
filesystem. `disk_root` is a hint where it should be looked and will
only be used if GuestFS will not be able to deduce it independently.
Note that mounting a live guest, can lead to filesystem inconsistencies,
causing the mount operation... |
def compress_folder_dump(path, target):
'''
Compress folder dump to tar.gz file
'''
import tarfile
if not path or not os.path.isdir(path):
raise SystemExit(_error_codes.get(105))
name_out_file = (target + 'dump-' +
datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S'... | Compress folder dump to tar.gz file |
def setTags(self, tags):
"""Set the tags for current photo to list tags.
(flickr.photos.settags)
"""
method = 'flickr.photos.setTags'
tags = uniq(tags)
_dopost(method, auth=True, photo_id=self.id, tags=tags)
self._load_properties() | Set the tags for current photo to list tags.
(flickr.photos.settags) |
def _make_request(self, bbox, meta_info, timestamps):
""" Make OGC request to create input for cloud detector classifier
:param bbox: Bounding box
:param meta_info: Meta-info dictionary of input eopatch
:return: Requested data
"""
service_type = ServiceType(meta_info['se... | Make OGC request to create input for cloud detector classifier
:param bbox: Bounding box
:param meta_info: Meta-info dictionary of input eopatch
:return: Requested data |
def select_entry(self, *arguments):
"""
Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`).
"""
ma... | Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`). |
def generate_scaling_plot(timing_data, title, ylabel, description, plot_file):
"""
Generate a scaling plot.
Args:
timing_data: data returned from a `*_scaling` method
title: the title of the plot
ylabel: the y-axis label of the plot
description: a description of the plot
... | Generate a scaling plot.
Args:
timing_data: data returned from a `*_scaling` method
title: the title of the plot
ylabel: the y-axis label of the plot
description: a description of the plot
plot_file: the file to write out to
Returns:
an image element containing ... |
def copy_scubadir_file(self, name, source):
'''Copies source into the scubadir
Returns the container-path of the copied file
'''
dest = os.path.join(self.__scubadir_hostpath, name)
assert not os.path.exists(dest)
shutil.copy2(source, dest)
return os.path.join(se... | Copies source into the scubadir
Returns the container-path of the copied file |
def make_response(self, image, size, mode, filename=None, *args, **kwargs):
"""
:param image: image as bytes
:param size: requested maximum width/height size
:param mode: one of 'scale', 'fit' or 'crop'
:param filename: filename
"""
try:
fmt = get_form... | :param image: image as bytes
:param size: requested maximum width/height size
:param mode: one of 'scale', 'fit' or 'crop'
:param filename: filename |
def _index_list_of_values(d, k):
"""Returns d[k] or [d[k]] if the value is not a list"""
v = d[k]
if isinstance(v, list):
return v
return [v] | Returns d[k] or [d[k]] if the value is not a list |
def sanitize_qualifiers(repos=None, followers=None, language=None):
'''
qualifiers = c repos:+42 followers:+1000 language:
params = {'q': 'tom repos:>42 followers:>1000'}
'''
qualifiers = ''
if repos:
qualifiers += 'repos:{0} '.format(repos)
qualifiers = re.sub(r"([+])([=a-zA-Z0... | qualifiers = c repos:+42 followers:+1000 language:
params = {'q': 'tom repos:>42 followers:>1000'} |
def apply_ufunc(
func: Callable,
*args: Any,
input_core_dims: Optional[Sequence[Sequence]] = None,
output_core_dims: Optional[Sequence[Sequence]] = ((),),
exclude_dims: AbstractSet = frozenset(),
vectorize: bool = False,
join: str = 'exact',
dataset_join: str = 'exact',
dataset_fill_... | Apply a vectorized function for unlabeled arrays on xarray objects.
The function will be mapped over the data variable(s) of the input
arguments using xarray's standard rules for labeled computation, including
alignment, broadcasting, looping over GroupBy/Dataset variables, and
merging of coordinates.
... |
def choice_default_invalidator(self, obj):
"""Invalidated cached items when the Choice changes."""
invalid = [('Question', obj.question_id, True)]
for pk in obj.voters.values_list('pk', flat=True):
invalid.append(('User', pk, False))
return invalid | Invalidated cached items when the Choice changes. |
def _parse_files(self, msp_pth, chunk, db_type, celery_obj=False):
"""Parse the MSP files and insert into database
Args:
msp_pth (str): path to msp file or directory [required]
db_type (str): The type of database to submit to (either 'sqlite', 'mysql' or 'django_mysql') [require... | Parse the MSP files and insert into database
Args:
msp_pth (str): path to msp file or directory [required]
db_type (str): The type of database to submit to (either 'sqlite', 'mysql' or 'django_mysql') [required]
chunk (int): Chunks of spectra to parse data (useful to control... |
def tar_file(files, tarname):
'''Compress a file or directory into a tar file.'''
if isinstance(files, basestring):
files = [files]
o = tarfile.open(tarname, 'w:gz')
for file in files:
o.add(file)
o.close() | Compress a file or directory into a tar file. |
def is_verbose():
"""
Only safe to call within a click context.
"""
ctx = click.get_current_context()
state = ctx.ensure_object(CommandState)
return state.is_verbose() | Only safe to call within a click context. |
def begin(self, user_url, anonymous=False):
"""Start the OpenID authentication process. See steps 1-2 in
the overview at the top of this file.
@param user_url: Identity URL given by the user. This method
performs a textual transformation of the URL to try and
make sure i... | Start the OpenID authentication process. See steps 1-2 in
the overview at the top of this file.
@param user_url: Identity URL given by the user. This method
performs a textual transformation of the URL to try and
make sure it is normalized. For example, a user_url of
... |
def attributes(self, **kwargs): # pragma: no cover
"""Retrieve the attribute configuration object.
Retrieves a mapping that identifies the custom directory
attributes configured for the Directory SyncService instance,
and the mapping of the custom attributes to standard directory
... | Retrieve the attribute configuration object.
Retrieves a mapping that identifies the custom directory
attributes configured for the Directory SyncService instance,
and the mapping of the custom attributes to standard directory
attributes.
Args:
**kwargs: Supported :... |
def mapillary_tag_exists(self):
'''
Check existence of required Mapillary tags
'''
description_tag = "Image ImageDescription"
if description_tag not in self.tags:
return False
for requirement in ["MAPSequenceUUID", "MAPSettingsUserKey", "MAPCaptureTime", "MAPL... | Check existence of required Mapillary tags |
def get_groups(self, env, token):
"""Get groups for the given token.
:param env: The current WSGI environment dictionary.
:param token: Token to validate and return a group string for.
:returns: None if the token is invalid or a string containing a comma
separated lis... | Get groups for the given token.
:param env: The current WSGI environment dictionary.
:param token: Token to validate and return a group string for.
:returns: None if the token is invalid or a string containing a comma
separated list of groups the authenticated user is a membe... |
def segment(f, output, target_duration, mpegts):
"""Segment command."""
try:
target_duration = int(target_duration)
except ValueError:
exit('Error: Invalid target duration.')
try:
mpegts = int(mpegts)
except ValueError:
exit('Error: Invalid MPEGTS value.')
WebVT... | Segment command. |
def _parse_unrecognized_segment(self, fptr):
"""Looks like a valid marker, but not sure from reading the specs.
"""
msg = ("Unrecognized codestream marker 0x{marker_id:x} encountered at "
"byte offset {offset}.")
msg = msg.format(marker_id=self._marker_id, offset=fptr.tell... | Looks like a valid marker, but not sure from reading the specs. |
def _create_scales(hist: HistogramBase, vega: dict, kwargs: dict):
"""Find proper scales for axes."""
if hist.ndim == 1:
bins0 = hist.bins.astype(float)
else:
bins0 = hist.bins[0].astype(float)
xlim = kwargs.pop("xlim", "auto")
ylim = kwargs.pop("ylim", "auto")
if xlim is "auto... | Find proper scales for axes. |
def surface_or_abstract(cls, predstr):
    """Instantiate a Pred from either its surface or abstract symbol."""
    # Surface predicates start with '_' once surrounding quotes are removed.
    bare = predstr.strip('"').lstrip("'")
    if bare.startswith('_'):
        return cls.surface(predstr)
    return cls.abstract(predstr)
def syscall_noreturn(self, func):
'''
Call a syscall method. A syscall method is executed outside of any routines, directly
in the scheduler loop, which gives it chances to directly operate the event loop.
See :py:method::`vlcp.event.core.Scheduler.syscall`.
'''
matcher =... | Call a syscall method. A syscall method is executed outside of any routines, directly
in the scheduler loop, which gives it chances to directly operate the event loop.
See :py:method::`vlcp.event.core.Scheduler.syscall`. |
def included_length(self):
    """Surveyed length, not including "excluded" shots"""
    total = 0
    for shot in self.shots:
        if shot.is_included:
            total += shot.length
    return total
def _ordered_categories(df, categories):
"""
Make the columns in df categorical
Parameters:
-----------
categories: dict
Of the form {str: list},
where the key the column name and the value is
the ordered category list
"""
for col, cats in categories.items():
... | Make the columns in df categorical
Parameters:
-----------
categories: dict
Of the form {str: list},
where the key the column name and the value is
the ordered category list |
def multipublish(self, topic, messages, block=True, timeout=None,
raise_error=True):
"""Publish an iterable of messages to the given topic.
:param topic: the topic to publish to
:param messages: iterable of bytestrings to publish
:param block: wait for a connectio... | Publish an iterable of messages to the given topic.
:param topic: the topic to publish to
:param messages: iterable of bytestrings to publish
:param block: wait for a connection to become available before
publishing the message. If block is `False` and no connections
a... |
def from_dict(cls, data, read_only=False):
'''Recreate a feature collection from a dictionary.
The dictionary is of the format dumped by :meth:`to_dict`.
Additional information, such as whether the feature collection
should be read-only, is not included in this dictionary, and
i... | Recreate a feature collection from a dictionary.
The dictionary is of the format dumped by :meth:`to_dict`.
Additional information, such as whether the feature collection
should be read-only, is not included in this dictionary, and
is instead passed as parameters to this function. |
def measure(self, v, rf, off=None):
"""Create/convert a measure using the frame state set on the measures
server instance (via :meth:`do_frame`)
:param v: The measure to convert
:param rf: The frame reference to convert to
:param off: The optional offset for the measure
... | Create/convert a measure using the frame state set on the measures
server instance (via :meth:`do_frame`)
:param v: The measure to convert
:param rf: The frame reference to convert to
:param off: The optional offset for the measure |
def init_blueprint(self, blueprint, path="templates.yaml"):
"""Initialize a Flask Blueprint, similar to init_app, but without the access
to the application config.
Keyword Arguments:
blueprint {Flask Blueprint} -- Flask Blueprint instance to initialize
... | Initialize a Flask Blueprint, similar to init_app, but without the access
to the application config.
Keyword Arguments:
blueprint {Flask Blueprint} -- Flask Blueprint instance to initialize
(Default: {None})
path {str} -- path to templates... |
def populate_from_staging(self, staging_table, from_column_list, output_table):
"""
generate SQL to insert staging table records into
the core table based on column_list (If no column list
then insert sequentially)
"""
self.sql_text += 'INSERT INTO ' + output_table + ' (... | generate SQL to insert staging table records into
the core table based on column_list (If no column list
then insert sequentially) |
def disable(cls):
"""Restore sys.stderr and sys.stdout to their original objects. Resets colors to their original values.
:return: If streams restored successfully.
:rtype: bool
"""
# Skip if not on Windows.
if not IS_WINDOWS:
return False
# Restore ... | Restore sys.stderr and sys.stdout to their original objects. Resets colors to their original values.
:return: If streams restored successfully.
:rtype: bool |
def _at_dump_functions(self, calculator, rule, scope, block):
    """Implements @dump_functions"""
    # Debug helper: dump the rule namespace's function table to stderr.
    print(repr(rule.namespace._functions), file=sys.stderr)
def parse_file(fname):
"""Parse a python file into an AST.
This is a very thin wrapper around ast.parse
TODO: Handle encodings other than the default for Python 2
(issue #26)
"""
try:
with fopen(fname) as f:
fstr = f.read(... | Parse a python file into an AST.
This is a very thin wrapper around ast.parse
TODO: Handle encodings other than the default for Python 2
(issue #26) |
def vpc_peering_connection_present(name, requester_vpc_id=None, requester_vpc_name=None,
peer_vpc_id=None, peer_vpc_name=None, conn_name=None,
peer_owner_id=None, peer_region=None, region=None,
key=None, keyid=None,... | name
Name of the state
requester_vpc_id
ID of the requesting VPC. Exclusive with requester_vpc_name.
requester_vpc_name
Name tag of the requesting VPC. Exclusive with requester_vpc_id.
peer_vpc_id
        ID of the VPC to create the VPC peering connection with. This can be a VPC in
... |
def statement(self, days=60):
"""Download the :py:class:`ofxparse.Statement` given the time range
:param days: Number of days to look back at
:type days: integer
        :rtype: :py:class:`ofxparse.Statement`
"""
parsed = self.download_parsed(days=days)
return parsed.ac... | Download the :py:class:`ofxparse.Statement` given the time range
:param days: Number of days to look back at
:type days: integer
:rtype: :py:class:`ofxparser.Statement` |
def p_ansible_sentence(self, t):
    # PLY/yacc parser rule: the docstring below IS the grammar production
    # consumed by yacc, so it must not be reworded as documentation.
    """ansible_sentence : ANSIBLE VAR LPAREN features RPAREN"""
    # t[2] is the VAR token value (the ansible name) and t[4] the parsed
    # `features` value; the source line of the ANSIBLE keyword is recorded
    # for error reporting.
    t[0] = ansible(t[2], t[4], line=t.lineno(1))
def gpsFromUTC(year, month, day, hour, min, sec, leapSecs=14):
"""converts UTC to: gpsWeek, secsOfWeek, gpsDay, secsOfDay
a good reference is: http://www.oc.nps.navy.mil/~jclynch/timsys.html
This is based on the following facts (see reference above):
GPS time is basically measured in (atomic) second... | converts UTC to: gpsWeek, secsOfWeek, gpsDay, secsOfDay
a good reference is: http://www.oc.nps.navy.mil/~jclynch/timsys.html
This is based on the following facts (see reference above):
GPS time is basically measured in (atomic) seconds since
January 6, 1980, 00:00:00.0 (the GPS Epoch)
The... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.