positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def find(file_node, dirs=ICON_DIRS, default_name=None, file_ext='.png'):
"""
Iterating all icon dirs, try to find a file called like the node's
extension / mime subtype / mime type (in that order).
For instance, for an MP3 file ("audio/mpeg"), this would look for:
"mp3.png" / "au... | Iterating all icon dirs, try to find a file called like the node's
extension / mime subtype / mime type (in that order).
For instance, for an MP3 file ("audio/mpeg"), this would look for:
"mp3.png" / "audio/mpeg.png" / "audio.png" |
def storage_keys(self):
"""
Return a list of the keys for values stored for the module.
Keys will contain the following metadata entries:
- '_ctime': storage creation timestamp
- '_mtime': storage last modification timestamp
"""
if not self._module:
r... | Return a list of the keys for values stored for the module.
Keys will contain the following metadata entries:
- '_ctime': storage creation timestamp
- '_mtime': storage last modification timestamp |
def line_pos_from_number(self, line_number):
"""
Computes line position on Y-Axis (at the center of the line) from line
number.
:param line_number: The line number for which we want to know the
position in pixels.
:return: The center position of the l... | Computes line position on Y-Axis (at the center of the line) from line
number.
:param line_number: The line number for which we want to know the
position in pixels.
:return: The center position of the line. |
def setPendingKeyExchange(self, sequence, ourBaseKey, ourRatchetKey, ourIdentityKey):
"""
:type sequence: int
:type ourBaseKey: ECKeyPair
:type ourRatchetKey: ECKeyPair
:type ourIdentityKey: IdentityKeyPair
"""
structure = self.sessionStructure.PendingKeyExchange... | :type sequence: int
:type ourBaseKey: ECKeyPair
:type ourRatchetKey: ECKeyPair
:type ourIdentityKey: IdentityKeyPair |
def create_elb(self):
"""Create or Update the ELB after rendering JSON data from configs.
Asserts that the ELB task was successful.
"""
json_data = self.make_elb_json()
LOG.debug('Block ELB JSON Data:\n%s', pformat(json_data))
wait_for_task(json_data)
self.add_... | Create or Update the ELB after rendering JSON data from configs.
Asserts that the ELB task was successful. |
def _set_serializer_by_mime_type(self, mime_type):
"""
:param mime_type:
:return:
used by content_type_set to set get a reference to the appropriate serializer
"""
# ignore if binary response
if isinstance(self._app_iter, BinaryResponse):
self.logger... | :param mime_type:
:return:
used by content_type_set to set get a reference to the appropriate serializer |
def add_segments(self, segments):
"""Add a list of segments to the composition
:param segments: Segments to add to composition
:type segments: list of :py:class:`radiotool.composer.Segment`
"""
self.tracks.update([seg.track for seg in segments])
self.segments.extend(segm... | Add a list of segments to the composition
:param segments: Segments to add to composition
:type segments: list of :py:class:`radiotool.composer.Segment` |
def _prep_time_data(ds):
"""Prepare time coordinate information in Dataset for use in aospy.
1. If the Dataset contains a time bounds coordinate, add attributes
representing the true beginning and end dates of the time interval used
to construct the Dataset
2. If the Dataset contains a time b... | Prepare time coordinate information in Dataset for use in aospy.
1. If the Dataset contains a time bounds coordinate, add attributes
representing the true beginning and end dates of the time interval used
to construct the Dataset
2. If the Dataset contains a time bounds coordinate, overwrite the ... |
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
'''
Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between ... | Finds the loci of a given cluster size & maximum gap between cluster members.
Args
cluster_size (int): minimum number of genes in the cluster.
maxgap (int): max basepair gap between genes in the cluster.
Kwargs
locusview (bool): whether or not a map is generated for... |
def mute(func):
""" Decorator
Make stdout silent
"""
def _f(*args, **kwargs):
sys.stdout = open(os.devnull, 'w')
res = func(*args, **kwargs)
sys.stdout.close()
sys.stdout = sys.__stdout__
return res
return _f | Decorator
Make stdout silent |
def create(self):
"""
Calls various methods sequentially in order to fully build the
database.
"""
# Calls each of these methods in order. _populate_from_lines and
# _update_relations must be implemented in subclasses.
self._init_tables()
self._populate_f... | Calls various methods sequentially in order to fully build the
database. |
def create_project(type, schema, server, name, output, verbose):
"""Create a new project on an entity matching server.
See entity matching service documentation for details on mapping type and schema
Returns authentication details for the created project.
"""
if verbose:
log("Entity Matchin... | Create a new project on an entity matching server.
See entity matching service documentation for details on mapping type and schema
Returns authentication details for the created project. |
def argmin(self, axis=None, skipna=True, *args, **kwargs):
"""
Returns the indices of the minimum values along an axis.
See `numpy.ndarray.argmin` for more information on the
`axis` parameter.
See Also
--------
numpy.ndarray.argmin
"""
nv.validat... | Returns the indices of the minimum values along an axis.
See `numpy.ndarray.argmin` for more information on the
`axis` parameter.
See Also
--------
numpy.ndarray.argmin |
def get_payload(self):
"""Return Payload."""
ret = bytes([self.status.value])
ret += bytes([self.session_id >> 8 & 255, self.session_id & 255])
return ret | Return Payload. |
def timeout(timeout):
"""
A decorator to timeout a function. Decorated method calls are executed in a separate new thread
with a specified timeout.
Also check if a thread for the same function already exists before creating a new one.
Note: Compatible with Windows (thread based).
"""
def de... | A decorator to timeout a function. Decorated method calls are executed in a separate new thread
with a specified timeout.
Also check if a thread for the same function already exists before creating a new one.
Note: Compatible with Windows (thread based). |
def download_urls(cls, urls, force_download=False):
"""Downloads all CTD URLs that don't exist
:param iter[str] urls: iterable of URL of CTD
:param bool force_download: force method to download
"""
for url in urls:
file_path = cls.get_path_to_file_from_url(url)
... | Downloads all CTD URLs that don't exist
:param iter[str] urls: iterable of URL of CTD
:param bool force_download: force method to download |
def yaml_to_ordered_dict(stream, loader=yaml.SafeLoader):
"""Provides yaml.load alternative with preserved dictionary order.
Args:
stream (string): YAML string to load.
loader (:class:`yaml.loader`): PyYAML loader class. Defaults to safe
load.
Returns:
OrderedDict: Pars... | Provides yaml.load alternative with preserved dictionary order.
Args:
stream (string): YAML string to load.
loader (:class:`yaml.loader`): PyYAML loader class. Defaults to safe
load.
Returns:
OrderedDict: Parsed YAML. |
def phenotypesGenerator(self, request):
"""
Returns a generator over the (phenotypes, nextPageToken) pairs
defined by the (JSON string) request
"""
# TODO make paging work using SPARQL?
compoundId = datamodel.PhenotypeAssociationSetCompoundId.parse(
request.ph... | Returns a generator over the (phenotypes, nextPageToken) pairs
defined by the (JSON string) request |
def refreshWidgets(self):
"""
This function manually refreshed all widgets attached to this simulation.
You want to call this function if any particle data has been manually changed.
"""
if hasattr(self, '_widgets'):
for w in self._widgets:
w.... | This function manually refreshed all widgets attached to this simulation.
You want to call this function if any particle data has been manually changed. |
def _reindex_non_unique(self, target):
"""
Create a new index with target's values (move/add/delete values as
necessary) use with non-unique Index and a possibly non-unique target.
Parameters
----------
target : an iterable
Returns
-------
new_in... | Create a new index with target's values (move/add/delete values as
necessary) use with non-unique Index and a possibly non-unique target.
Parameters
----------
target : an iterable
Returns
-------
new_index : pd.Index
Resulting index.
indexer... |
def check_newline_after_last_paragraph(self, definition, docstring):
"""D209: Put multi-line docstring closing quotes on separate line.
Unless the entire docstring fits on a line, place the closing
quotes on a line by themselves.
"""
if docstring:
lines = [l for l i... | D209: Put multi-line docstring closing quotes on separate line.
Unless the entire docstring fits on a line, place the closing
quotes on a line by themselves. |
def collect_results(results_file):
"""Return the result (pass/fail) for json file."""
with open(results_file, 'r') as results:
data = json.load(results)
return data | Return the result (pass/fail) for json file. |
def _generate_recommendation(self,
query_analysis,
db_name,
collection_name):
"""Generates an ideal query recommendation"""
index_rec = '{'
for query_field in query_analysis['analyzedFields']:
... | Generates an ideal query recommendation |
def get_basket_items(request):
"""
Get all items in the basket
"""
bid = basket_id(request)
return BasketItem.objects.filter(basket_id=bid), bid | Get all items in the basket |
def parse_azimuth(azimuth):
"""
Parses an azimuth measurement in azimuth or quadrant format.
Parameters
-----------
azimuth : string or number
An azimuth measurement in degrees or a quadrant measurement of azimuth.
Returns
-------
azi : float
The azimuth in degrees cloc... | Parses an azimuth measurement in azimuth or quadrant format.
Parameters
-----------
azimuth : string or number
An azimuth measurement in degrees or a quadrant measurement of azimuth.
Returns
-------
azi : float
The azimuth in degrees clockwise from north (range: 0-360)
See... |
def generate_date_tail_boost_queries(
field, timedeltas_and_boosts, relative_to=None):
"""
Generate a list of RangeQueries usable to boost the scores of more
recent documents.
Example:
```
queries = generate_date_tail_boost_queries("publish_date", {
timedelta(days=90): 1,
... | Generate a list of RangeQueries usable to boost the scores of more
recent documents.
Example:
```
queries = generate_date_tail_boost_queries("publish_date", {
timedelta(days=90): 1,
timedelta(days=30): 2,
timedelta(days=10): 4,
})
s = Search(BoolQuery(must=.... |
def to_json(self):
"""
Returns a JSON of the entire DataFrame that can be reconstructed back with raccoon.from_json(input). Any object
that cannot be serialized will be replaced with the representation of the object using repr(). In that instance
the DataFrame will have a string represen... | Returns a JSON of the entire DataFrame that can be reconstructed back with raccoon.from_json(input). Any object
that cannot be serialized will be replaced with the representation of the object using repr(). In that instance
the DataFrame will have a string representation in place of the object and will ... |
def infer(self, input_data, input_label):
"""
Description : Print sentence for prediction result
"""
sum_losses = 0
len_losses = 0
for data, label in zip(input_data, input_label):
pred = self.net(data)
sum_losses += mx.nd.array(self.loss_fn(pred, l... | Description : Print sentence for prediction result |
def _get_notmuch_message(self, mid):
"""returns :class:`notmuch.database.Message` with given id"""
mode = Database.MODE.READ_ONLY
db = Database(path=self.path, mode=mode)
try:
return db.find_message(mid)
except:
errmsg = 'no message with id %s exists!' % m... | returns :class:`notmuch.database.Message` with given id |
def levels(self):
"""
A generator of (idx, label) sequences representing the category
hierarchy from the bottom up. The first level contains all leaf
categories, and each subsequent is the next level up.
"""
def levels(categories):
# yield all lower levels
... | A generator of (idx, label) sequences representing the category
hierarchy from the bottom up. The first level contains all leaf
categories, and each subsequent is the next level up. |
def get_orthogonal_selection(self, selection, out=None, fields=None):
"""Retrieve data by making a selection for each dimension of the array. For
example, if an array has 2 dimensions, allows selecting specific rows and/or
columns. The selection for each dimension can be either an integer (index... | Retrieve data by making a selection for each dimension of the array. For
example, if an array has 2 dimensions, allows selecting specific rows and/or
columns. The selection for each dimension can be either an integer (indexing a
single item), a slice, an array of integers, or a Boolean array whe... |
def reordc(iorder, ndim, lenvals, array):
"""
Re-order the elements of an array of character strings
according to a given order vector.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordc_c.html
:param iorder: Order vector to be used to re-order array.
:type iorder: Array of ints
... | Re-order the elements of an array of character strings
according to a given order vector.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordc_c.html
:param iorder: Order vector to be used to re-order array.
:type iorder: Array of ints
:param ndim: Dimension of array.
:type ndim: int... |
def create_random_population(num=100):
"""
create a list of people with randomly generated names and stats
"""
people = []
for _ in range(num):
nme = 'blah'
tax_min = random.randint(1,40)/100
tax_max = tax_min + random.randint(1,40)/100
tradition = random.randint(1,10... | create a list of people with randomly generated names and stats |
def exact(self, *args, **kwargs):
"""Compare attributes of pairs exactly.
Shortcut of :class:`recordlinkage.compare.Exact`::
from recordlinkage.compare import Exact
indexer = recordlinkage.Compare()
indexer.add(Exact())
"""
compare = Exact(*args, *... | Compare attributes of pairs exactly.
Shortcut of :class:`recordlinkage.compare.Exact`::
from recordlinkage.compare import Exact
indexer = recordlinkage.Compare()
indexer.add(Exact()) |
def upload_supervisor_app_conf(app_name, template_name=None, context=None):
"""Upload Supervisor app configuration from a template."""
default = {'app_name': app_name}
context = context or {}
default.update(context)
template_name = template_name or [u'supervisor/%s.conf' % app_name, u'supervisor/ba... | Upload Supervisor app configuration from a template. |
def translate_identifier(self, identifier, target_namespaces=None, translate_ncbi_namespace=None):
"""Given a string identifier, return a list of aliases (as
identifiers) that refer to the same sequence.
"""
namespace, alias = identifier.split(nsa_sep) if nsa_sep in identifier else (Non... | Given a string identifier, return a list of aliases (as
identifiers) that refer to the same sequence. |
def build(self, builder):
"""
Build XML by appending to builder
.. note:: Questions can contain translations
"""
builder.start("Question", {})
for translation in self.translations:
translation.build(builder)
builder.end("Question") | Build XML by appending to builder
.. note:: Questions can contain translations |
def send_text(self, message, opcode=OPCODE_TEXT):
"""
Important: Fragmented(=continuation) messages are not supported since
their usage cases are limited - when we don't know the payload length.
"""
# Validate message
if isinstance(message, bytes):
message = ... | Important: Fragmented(=continuation) messages are not supported since
their usage cases are limited - when we don't know the payload length. |
def remove_child_banks(self, bank_id):
"""Removes all children from a bank.
arg: bank_id (osid.id.Id): the ``Id`` of a bank
raise: NotFound - ``bank_id`` is not in hierarchy
raise: NullArgument - ``bank_id`` is ``null``
raise: OperationFailed - unable to complete request
... | Removes all children from a bank.
arg: bank_id (osid.id.Id): the ``Id`` of a bank
raise: NotFound - ``bank_id`` is not in hierarchy
raise: NullArgument - ``bank_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization fa... |
def notify(cls, user_or_email_, object_id=None, **filters):
"""Start notifying the given user or email address when this event
occurs and meets the criteria given in ``filters``.
Return the created (or the existing matching) Watch so you can call
:meth:`~tidings.models.Watch.activate()`... | Start notifying the given user or email address when this event
occurs and meets the criteria given in ``filters``.
Return the created (or the existing matching) Watch so you can call
:meth:`~tidings.models.Watch.activate()` on it if you're so inclined.
Implementations in subclasses ma... |
def reset(self):
""" (re)set all attributes to defaults (eg. empty sets or ``None``). """
# Use first matching element as title (0 or more xpath expressions)
self.title = OrderedSet()
# Use first matching element as body (0 or more xpath expressions)
self.body = OrderedSet()
... | (re)set all attributes to defaults (eg. empty sets or ``None``). |
def Input_synthesizePinchGesture(self, x, y, scaleFactor, **kwargs):
"""
Function path: Input.synthesizePinchGesture
Domain: Input
Method name: synthesizePinchGesture
WARNING: This function is marked 'Experimental'!
Parameters:
Required arguments:
'x' (type: number) -> X coordinate of the... | Function path: Input.synthesizePinchGesture
Domain: Input
Method name: synthesizePinchGesture
WARNING: This function is marked 'Experimental'!
Parameters:
Required arguments:
'x' (type: number) -> X coordinate of the start of the gesture in CSS pixels.
'y' (type: number) -> Y coordinate ... |
def increase_writes_in_units(
current_provisioning, units, max_provisioned_writes,
consumed_write_units_percent, log_tag):
""" Increase the current_provisioning with units units
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type units: int
:p... | Increase the current_provisioning with units units
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type units: int
:param units: How many units should we increase with
:returns: int -- New provisioning value
:type max_provisioned_writes: int
:param max... |
def find_modules(module_path):
"""Find all modules in the module (possibly package) represented by `module_path`.
Args:
module_path: A pathlib.Path to a Python package or module.
Returns: An iterable of paths Python modules (i.e. *py files).
"""
if module_path.is_file():
if module_... | Find all modules in the module (possibly package) represented by `module_path`.
Args:
module_path: A pathlib.Path to a Python package or module.
Returns: An iterable of paths Python modules (i.e. *py files). |
def getObject(self, url_or_requests_response, params=None):
'Take a url or some xml response from JottaCloud and wrap it up with the corresponding JFS* class'
if isinstance(url_or_requests_response, requests.models.Response):
# this is a raw xml response that we need to parse
url... | Take a url or some xml response from JottaCloud and wrap it up with the corresponding JFS* class |
def get(self, statediag):
"""
Args:
statediag (list): The states of the PDA
Returns:
list: A reduced list of states using BFS
"""
if len(statediag) < 1:
print 'PDA is empty and can not be reduced'
return statediag
newstatedi... | Args:
statediag (list): The states of the PDA
Returns:
list: A reduced list of states using BFS |
def on_trial_remove(self, trial_runner, trial):
"""Marks trial as completed if it is paused and has previously ran."""
if trial.status is Trial.PAUSED and trial in self._results:
self._completed_trials.add(trial) | Marks trial as completed if it is paused and has previously ran. |
def for_format(filename, format=None, writable=False, prefix_dir=None):
"""
Create a MeshIO instance for file `filename` with forced `format`.
Parameters
----------
filename : str
The name of the mesh file.
format : str
One of supported formats. If None,
:func:`MeshIO.an... | Create a MeshIO instance for file `filename` with forced `format`.
Parameters
----------
filename : str
The name of the mesh file.
format : str
One of supported formats. If None,
:func:`MeshIO.any_from_filename()` is called instead.
writable : bool
If True, verify th... |
def interconnect_all(self):
"""Propagate dependencies for provided instances"""
for dep in topologically_sorted(self._provides):
if hasattr(dep, '__injections__') and not hasattr(dep, '__injections_source__'):
self.inject(dep) | Propagate dependencies for provided instances |
async def container_load(self, container_type, params=None, container=None, obj=None):
"""
Loads container of elements from the reader. Supports the container ref.
Returns loaded container.
:param container_type:
:param params:
:param container:
:param obj:
... | Loads container of elements from the reader. Supports the container ref.
Returns loaded container.
:param container_type:
:param params:
:param container:
:param obj:
:return: |
def add_experiment(self, id, port, time, file_name, platform):
'''set {key:value} paris to self.experiment'''
self.experiments[id] = {}
self.experiments[id]['port'] = port
self.experiments[id]['startTime'] = time
self.experiments[id]['endTime'] = 'N/A'
self.experiments[id... | set {key:value} paris to self.experiment |
def execute(self, query, *parameters, **kwargs):
"""Same as query, but do not process results. Always returns `None`."""
cursor = self._cursor()
try:
self._execute(cursor, query, parameters, kwargs)
except:
raise
finally:
cursor.close() | Same as query, but do not process results. Always returns `None`. |
def delete(self, **kwds):
"""
Endpoint: /action/<id>/delete.json
Deletes this action.
Returns True if successful.
Raises a TroveboxError if not.
"""
result = self._client.action.delete(self, **kwds)
self._delete_fields()
return result | Endpoint: /action/<id>/delete.json
Deletes this action.
Returns True if successful.
Raises a TroveboxError if not. |
def get_root_path(self, name):
"""
Attempt to compute a root path for a (hopefully importable) name.
Based in part on Flask's `root_path` calculation. See:
https://github.com/mitsuhiko/flask/blob/master/flask/helpers.py#L777
"""
module = modules.get(name)
i... | Attempt to compute a root path for a (hopefully importable) name.
Based in part on Flask's `root_path` calculation. See:
https://github.com/mitsuhiko/flask/blob/master/flask/helpers.py#L777 |
def _on_add_library(self, *event):
"""Callback method handling the addition of a new library
"""
self.view['library_tree_view'].grab_focus()
if react_to_event(self.view, self.view['library_tree_view'], event):
temp_library_name = "<LIB_NAME_%s>" % self._lib_counter
... | Callback method handling the addition of a new library |
def print_update(self):
"""
print some status information in between.
"""
print("\r\n")
now = datetime.datetime.now()
print("Update info: (from: %s)" % now.strftime("%c"))
current_total_size = self.total_stined_bytes + self.total_new_bytes
if self.total_... | print some status information in between. |
def error(self):
"""Check if the async response is an error.
Take care to call `is_done` before calling `error`. Note that the error
messages are always encoded as strings.
:raises CloudUnhandledError: When not checking `is_done` first
:return: the error value/payload, if found... | Check if the async response is an error.
Take care to call `is_done` before calling `error`. Note that the error
messages are always encoded as strings.
:raises CloudUnhandledError: When not checking `is_done` first
:return: the error value/payload, if found.
:rtype: str |
def get_next_action(self, request, application, roles):
""" Retrieve the next state. """
application.reopen()
link, is_secret = base.get_email_link(application)
emails.send_invite_email(application, link, is_secret)
messages.success(
request,
"Sent an invi... | Retrieve the next state. |
def difference(self, other, sort=None):
"""
Compute set difference of two MultiIndex objects
Parameters
----------
other : MultiIndex
sort : False or None, default None
Sort the resulting MultiIndex if possible
.. versionadded:: 0.24.0
... | Compute set difference of two MultiIndex objects
Parameters
----------
other : MultiIndex
sort : False or None, default None
Sort the resulting MultiIndex if possible
.. versionadded:: 0.24.0
.. versionchanged:: 0.24.1
Changed the de... |
def _handle_heartbeat(self, sender, data):
"""
Handles a raw heart beat
:param sender: Sender (address, port) tuple
:param data: Raw packet data
"""
# Format of packet
parsed, data = self._unpack("<B", data)
format = parsed[0]
if format == PACKET_... | Handles a raw heart beat
:param sender: Sender (address, port) tuple
:param data: Raw packet data |
def iso_datetime(timestamp=None):
""" Convert UNIX timestamp to ISO datetime string.
@param timestamp: UNIX epoch value (default: the current time).
@return: Timestamp formatted as "YYYY-mm-dd HH:MM:SS".
"""
if timestamp is None:
timestamp = time.time()
return datetime.datetime.... | Convert UNIX timestamp to ISO datetime string.
@param timestamp: UNIX epoch value (default: the current time).
@return: Timestamp formatted as "YYYY-mm-dd HH:MM:SS". |
def export(request, page_id, export_unpublished=False):
"""
API endpoint of this source site to export a part of the page tree
rooted at page_id
Requests are made by a destination site's import_from_api view.
"""
try:
if export_unpublished:
root_page = Page.objects.get(id=pa... | API endpoint of this source site to export a part of the page tree
rooted at page_id
Requests are made by a destination site's import_from_api view. |
def hide(cls):
"""
Hide the log interface.
"""
cls.el.style.display = "none"
cls.overlay.hide()
cls.bind() | Hide the log interface. |
def _report_profile(self, command, lock_name, elapsed_time, memory):
"""
Writes a string to self.pipeline_profile_file.
"""
message_raw = str(command) + "\t " + \
str(lock_name) + "\t" + \
str(datetime.timedelta(seconds = round(elapsed_time, 2))) + "\t " + \
... | Writes a string to self.pipeline_profile_file. |
def iterative_stratification(node_label_matrix, training_set_size, number_of_categories, random_seed=0):
"""
Iterative data fold stratification/balancing for two folds.
Based on: Sechidis, K., Tsoumakas, G., & Vlahavas, I. (2011).
On the stratification of multi-label data.
In Ma... | Iterative data fold stratification/balancing for two folds.
Based on: Sechidis, K., Tsoumakas, G., & Vlahavas, I. (2011).
On the stratification of multi-label data.
In Machine Learning and Knowledge Discovery in Databases (pp. 145-158).
Springer Berlin Heidelberg.
Inp... |
def get_limits(self, limit_sum=None):
"""
Gets the current limit data if it is different from the data
indicated by limit_sum. The db argument is used for hydrating
the limit objects. Raises a NoChangeException if the
limit_sum represents no change, otherwise returns a tuple
... | Gets the current limit data if it is different from the data
indicated by limit_sum. The db argument is used for hydrating
the limit objects. Raises a NoChangeException if the
limit_sum represents no change, otherwise returns a tuple
consisting of the current limit_sum and a list of Li... |
def do_video(self, args):
"""Video management command demonstrates multiple layers of sub-commands being handled by AutoCompleter"""
func = getattr(args, 'func', None)
if func is not None:
# Call whatever subcommand function was selected
func(self, args)
else:
... | Video management command demonstrates multiple layers of sub-commands being handled by AutoCompleter |
def copy_assets(self, path='assets'):
"""
Copy assets into the destination directory.
"""
path = os.path.join(self.root_path, path)
for root, _, files in os.walk(path):
for file in files:
fullpath = os.path.join(root, file)
relpath = os.path.relpath(fullpath, path)
copy_to = os.path.join(self._... | Copy assets into the destination directory. |
def nvmlDeviceGetBoardId(handle):
r"""
/**
* Retrieves the device boardId from 0-N.
* Devices with the same boardId indicate GPUs connected to the same PLX. Use in conjunction with
* \ref nvmlDeviceGetMultiGpuBoard() to decide if they are on the same board as well.
* The boardId returned ... | r"""
/**
* Retrieves the device boardId from 0-N.
* Devices with the same boardId indicate GPUs connected to the same PLX. Use in conjunction with
* \ref nvmlDeviceGetMultiGpuBoard() to decide if they are on the same board as well.
* The boardId returned is a unique ID for the current configu... |
def reset(self, force_flush_cache: bool = False) -> None:
"""
Reset transaction back to original state, discarding all
uncompleted transactions.
"""
super(LDAPwrapper, self).reset()
if len(self._transactions) == 0:
raise RuntimeError("reset called outside a tr... | Reset transaction back to original state, discarding all
uncompleted transactions. |
def resource_request_send(self, request_id, uri_type, uri, transfer_type, storage, force_mavlink1=False):
'''
The autopilot is requesting a resource (file, binary, other type of
data)
request_id : Request ID. This ID should be re-used when ... | The autopilot is requesting a resource (file, binary, other type of
data)
request_id : Request ID. This ID should be re-used when sending back URI contents (uint8_t)
uri_type : The type of requested URI. 0 = a file via URL. 1 = a UAVCAN bi... |
def receive(self, msg):
'''
The message received from the queue specify a method of the
class the actor represents. This invokes it. If the
communication is an ASK, sends the result back
to the channel included in the message as an
ASKRESPONSE.
If it is a FUTURE,... | The message received from the queue specify a method of the
class the actor represents. This invokes it. If the
communication is an ASK, sends the result back
to the channel included in the message as an
ASKRESPONSE.
If it is a FUTURE, generates a FUTURERESPONSE
to send ... |
def match_handle(loc, tokens):
"""Process match blocks."""
if len(tokens) == 4:
matches, match_type, item, stmts = tokens
cond = None
elif len(tokens) == 5:
matches, match_type, item, cond, stmts = tokens
else:
raise CoconutInternalException("invalid match statement token... | Process match blocks. |
def to_string_short(self):
"""
see also :meth:`to_string`
:return: a shorter abreviated string reprentation of the parameter
"""
opt = np.get_printoptions()
np.set_printoptions(threshold=8, edgeitems=3, linewidth=opt['linewidth']-len(self.uniquetwig)-2)
str_ = su... | see also :meth:`to_string`
:return: a shorter abreviated string reprentation of the parameter |
def start(ctx, alias, description, f):
"""
Use it when you start working on the given activity. This will add the
activity and the current time to your entries file. When you're finished,
use the stop command.
"""
today = datetime.date.today()
try:
timesheet_collection = get_timeshe... | Use it when you start working on the given activity. This will add the
activity and the current time to your entries file. When you're finished,
use the stop command. |
def insertIndividual(self, individual):
"""
Inserts the specified individual into this repository.
"""
try:
models.Individual.create(
id=individual.getId(),
datasetId=individual.getParentContainer().getId(),
name=individual.getL... | Inserts the specified individual into this repository. |
def validate_valid_transition(enum, from_value, to_value):
"""
Validate that to_value is a valid choice and that to_value is a valid transition from from_value.
"""
validate_available_choice(enum, to_value)
if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value):
... | Validate that to_value is a valid choice and that to_value is a valid transition from from_value. |
def MessageSetItemEncoder(field_number):
"""Encoder for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
}
"""
start_bytes = b"".join([
TagBytes(... | Encoder for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
} |
def set_status(self, status):
    """Record *status* on this object and notify observers.

    Stores the value on ``self.status`` and then invokes every callback
    registered in ``self._update_status_callbacks``, passing this
    instance so each observer can read the fresh status.
    """
    self.status = status
    # Observers receive the whole object, not just the status value.
    for notify in self._update_status_callbacks:
        notify(self)
def upvoters(self):
"""่ทๅๆ็ซ ็็น่ต็จๆท
:return: ๆ็ซ ็็น่ต็จๆท๏ผ่ฟๅ็ๆๅจใ
"""
from .author import Author, ANONYMOUS
self._make_soup()
headers = dict(Default_Header)
headers['Host'] = 'zhuanlan.zhihu.com'
json = self._session.get(
Post_Get_Upvoter.format(self.s... | ่ทๅๆ็ซ ็็น่ต็จๆท
:return: ๆ็ซ ็็น่ต็จๆท๏ผ่ฟๅ็ๆๅจใ |
async def get(self, request):
"""Get collection of resources."""
form = await self.get_form(request)
ctx = dict(active=self, form=form, request=request)
if self.resource:
return self.app.ps.jinja2.render(self.template_item, **ctx)
return self.app.ps.jinja2.render(self... | Get collection of resources. |
def replace_cells(self, key, sorted_row_idxs):
"""Replaces cells in current selection so that they are sorted"""
row, col, tab = key
new_keys = {}
del_keys = []
selection = self.grid.actions.get_selection()
for __row, __col, __tab in self.grid.code_array:
... | Replaces cells in current selection so that they are sorted |
def unique(series: pd.Series) -> pd.Series:
    """Check element-wise that no value in *series* occurs more than once.

    :return: boolean Series — True where the value appears exactly once,
        False for every member of a duplicated group.
    """
    # ``keep=False`` marks *all* members of a duplicate group, so its
    # negation leaves True only on the genuinely unique entries.
    is_duplicated = series.duplicated(keep=False)
    return ~is_duplicated
def parse_requirements(file_):
"""Parse a requirements formatted file.
Traverse a string until a delimiter is detected, then split at said
delimiter, get module name by element index, create a dict consisting of
module:version, and add dict to list of parsed modules.
Args:
file_: File to p... | Parse a requirements formatted file.
Traverse a string until a delimiter is detected, then split at said
delimiter, get module name by element index, create a dict consisting of
module:version, and add dict to list of parsed modules.
Args:
file_: File to parse.
Raises:
OSerror: If... |
def codingthreads(self):
"""
Find CDS features in .gff files to filter out non-coding sequences from the analysis
"""
printtime('Extracting CDS features', self.start)
# Create and start threads
for i in range(self.cpus):
# Send the threads to the appropriate d... | Find CDS features in .gff files to filter out non-coding sequences from the analysis |
def phonenumber(anon, obj, field, val):
    """Produce a random US-style phone number for *field*.

    The original value ``val`` is discarded; the anonymizer's faker is
    asked for a replacement keyed on the field being anonymized.
    """
    faker = anon.faker
    return faker.phone_number(field=field)
def tag_syntax(self):
""" Parses this text with the syntactic analyzer (``self.__syntactic_parser``),
and stores the found syntactic analyses: into the layer LAYER_CONLL (if MaltParser
is used, default), or into the layer LAYER_VISLCG3 (if VISLCG3Parser is used).
"""
# ... | Parses this text with the syntactic analyzer (``self.__syntactic_parser``),
and stores the found syntactic analyses: into the layer LAYER_CONLL (if MaltParser
is used, default), or into the layer LAYER_VISLCG3 (if VISLCG3Parser is used). |
def async_lru(size=100):
""" An LRU cache for asyncio coroutines in Python 3.5
..
@async_lru(1024)
async def slow_coroutine(*args, **kwargs):
return await some_other_slow_coroutine()
..
"""
cache = collections.OrderedDict()
def decorator(fn):
... | An LRU cache for asyncio coroutines in Python 3.5
..
@async_lru(1024)
async def slow_coroutine(*args, **kwargs):
return await some_other_slow_coroutine()
.. |
def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param url: The URL to connect to.
:pa... | Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param url: The URL to connect to.
:param proxies: (optional) A Requests-style dictionary of p... |
def getTCPportConnStatus(self, ipv4=True, ipv6=True, include_listen=False,
**kwargs):
"""Returns the number of TCP endpoints discriminated by status.
@param ipv4: Include IPv4 ports in output if True.
@param ipv6: Include IPv6 ports in ou... | Returns the number of TCP endpoints discriminated by status.
@param ipv4: Include IPv4 ports in output if True.
@param ipv6: Include IPv6 ports in output if True.
@param include_listen: Include listening ports in output if True.
@param **kwargs: Keyword... |
def write(series, output, scale=None):
"""Write a `TimeSeries` to a WAV file
Parameters
----------
series : `TimeSeries`
the series to write
output : `file`, `str`
the file object or filename to write to
scale : `float`, optional
the factor to apply to scale the data t... | Write a `TimeSeries` to a WAV file
Parameters
----------
series : `TimeSeries`
the series to write
output : `file`, `str`
the file object or filename to write to
scale : `float`, optional
the factor to apply to scale the data to (-1.0, 1.0),
pass `scale=1` to not a... |
def fire(data, tag, timeout=None):
'''
Fire an event on the local minion event bus. Data must be formed as a dict.
CLI Example:
.. code-block:: bash
salt '*' event.fire '{"data":"my event data"}' 'tag'
'''
if timeout is None:
timeout = 60000
else:
timeout = timeout... | Fire an event on the local minion event bus. Data must be formed as a dict.
CLI Example:
.. code-block:: bash
salt '*' event.fire '{"data":"my event data"}' 'tag' |
def _get_containers(self):
    """Return container objects for each directory under the root.

    Lists the entries of ``self.abs_root`` and wraps every directory
    (as judged by the module-level ``is_dir`` helper) with
    ``self.cont_cls.from_path``.
    """
    root = self.abs_root
    containers = []
    for entry in os.listdir(root):
        # NOTE(review): ``is_dir`` is defined elsewhere in this module;
        # presumably an os.path.isdir-style predicate — confirm.
        if is_dir(os.path.join(root, entry)):
            containers.append(self.cont_cls.from_path(self, entry))
    return containers
def handle_data(self, data):
'''
handle_data - Internal for parsing
'''
if data:
inTag = self._inTag
if len(inTag) > 0:
if inTag[-1].tagName not in PRESERVE_CONTENTS_TAGS:
data = data.replace('\t', ' ').strip('\r\n')
... | handle_data - Internal for parsing |
def auc(x, y, reorder=False): #from sklearn, http://scikit-learn.org, licensed under BSD License
"""Compute Area Under the Curve (AUC) using the trapezoidal rule
This is a general fuction, given points on a curve. For computing the area
under the ROC-curve, see :func:`auc_score`.
Parameters
-----... | Compute Area Under the Curve (AUC) using the trapezoidal rule
This is a general fuction, given points on a curve. For computing the area
under the ROC-curve, see :func:`auc_score`.
Parameters
----------
x : array, shape = [n]
x coordinates.
y : array, shape = [n]
y coordinate... |
def get(self, account_id):
    """Fetch a single account by its identifier.

    Issues a request to ``/accounts/<account_id>`` through the wrapped
    client and returns the decoded JSON payload.
    """
    endpoint = '/accounts/{0}'.format(account_id)
    return self.client._make_request(endpoint).json()
def find_skill(self, param, author=None, skills=None):
# type: (str, str, List[SkillEntry]) -> SkillEntry
"""Find skill by name or url"""
if param.startswith('https://') or param.startswith('http://'):
repo_id = SkillEntry.extract_repo_id(param)
for skill in self.list():
... | Find skill by name or url |
def use_federated_repository_view(self):
"""Pass through to provider AssetLookupSession.use_federated_repository_view"""
self._repository_view = FEDERATED
# self._get_provider_session('asset_lookup_session') # To make sure the session is tracked
for session in self._get_provider_sessions... | Pass through to provider AssetLookupSession.use_federated_repository_view |
def unique_rows(data, digits=None):
"""
Returns indices of unique rows. It will return the
first occurrence of a row that is duplicated:
[[1,2], [3,4], [1,2]] will return [0,1]
Parameters
---------
data: (n,m) set of floating point data
digits: how many digits to consider for the purpos... | Returns indices of unique rows. It will return the
first occurrence of a row that is duplicated:
[[1,2], [3,4], [1,2]] will return [0,1]
Parameters
---------
data: (n,m) set of floating point data
digits: how many digits to consider for the purposes of uniqueness
Returns
--------
u... |
def barplot(bars, title='', upColor='blue', downColor='red'):
"""
Create candlestick plot for the given bars. The bars can be given as
a DataFrame or as a list of bar objects.
"""
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from matplotlib.patc... | Create candlestick plot for the given bars. The bars can be given as
a DataFrame or as a list of bar objects. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.