positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def random_quat(rand=None):
"""Return uniform random unit quaternion.
rand: array like or None
Three independent random variables that are uniformly distributed
between 0 and 1.
>>> q = random_quat()
>>> np.allclose(1.0, vector_norm(q))
True
>>> q = random_quat(np.random.random(3... | Return uniform random unit quaternion.
rand: array like or None
Three independent random variables that are uniformly distributed
between 0 and 1.
>>> q = random_quat()
>>> np.allclose(1.0, vector_norm(q))
True
>>> q = random_quat(np.random.random(3))
>>> q.shape
(4,) |
def get_cli_event_returns(
self,
jid,
minions,
timeout=None,
tgt='*',
tgt_type='glob',
verbose=False,
progress=False,
show_timeout=False,
show_jid=False,
**kwargs):
'''
Get... | Get the returns for the command line interface via the event system |
def prompt_and_select_link(self):
"""
Prompt the user to select a link from a list to open.
Return the link that was selected, or ``None`` if no link was selected.
"""
data = self.get_selected_item()
url_full = data.get('url_full')
permalink = data.get('permalink... | Prompt the user to select a link from a list to open.
Return the link that was selected, or ``None`` if no link was selected. |
def check_response(response):
""" Checks that a response is successful, raising the appropriate Exceptions otherwise. """
status_code = response.status_code
if 100 < status_code < 299:
return True
elif status_code == 401 or status_code == 403:
message = get_response_data(response)
... | Checks that a response is successful, raising the appropriate Exceptions otherwise. |
def get_z_variable(nc):
'''
Returns the name of the variable that defines the Z axis or height/depth
:param netCDF4.Dataset nc: netCDF dataset
'''
axis_z = nc.get_variables_by_attributes(axis='Z')
if axis_z:
return axis_z[0].name
valid_standard_names = ('depth', 'height', 'altitude'... | Returns the name of the variable that defines the Z axis or height/depth
:param netCDF4.Dataset nc: netCDF dataset |
def setGroups(self, groups, kerningGroupConversionRenameMaps=None):
""" Copy the groups into our font. """
skipping = []
for name, members in groups.items():
checked = []
for m in members:
if m in self.font:
checked.append(m)
... | Copy the groups into our font. |
def check_ups_output_current(the_session, the_helper, the_snmp_value):
"""
OID .1.3.6.1.2.1.33.1.4.4.1.3.1
MIB excerpt
The present output current.
"""
a_current = calc_output_current_from_snmpvalue(the_snmp_value)
the_helper.add_metric(
label=the_helper.options.type,
val... | OID .1.3.6.1.2.1.33.1.4.4.1.3.1
MIB excerpt
The present output current. |
def get_dataset(self, dataset_id, ds_info):
"""Read a GRIB message into an xarray DataArray."""
msg = self._get_message(ds_info)
ds_info = self.get_metadata(msg, ds_info)
fill = msg['missingValue']
data = msg.values.astype(np.float32)
if msg.valid_key('jScansPositively') ... | Read a GRIB message into an xarray DataArray. |
def schedule_next_requests(self):
"""Schedules a request if available"""
# TODO: While there is capacity, schedule a batch of redis requests.
for req in self.next_requests():
self.crawler.engine.crawl(req, spider=self) | Schedules a request if available |
def drain(self):
"""
Let the write buffer of the underlying transport a chance to be flushed.
"""
data = self._stream.getvalue()
if len(data):
yield from self._protocol.send(data)
self._stream = io.BytesIO(b'') | Let the write buffer of the underlying transport a chance to be flushed. |
def delete(self):
"""Deletes a selection if any else deletes the cursor cell
Refreshes grid after deletion
"""
if self.grid.IsSelection():
# Delete selection
self.grid.actions.delete_selection()
else:
# Delete cell at cursor
cur... | Deletes a selection if any else deletes the cursor cell
Refreshes grid after deletion |
def ofp_instruction_from_str(ofproto, action_str):
"""
Parse an ovs-ofctl style action string and return a list of
jsondict representations of OFPInstructionActions, which
can then be passed to ofproto_parser.ofp_instruction_from_jsondict.
Please note that this is for making transition from ovs-ofc... | Parse an ovs-ofctl style action string and return a list of
jsondict representations of OFPInstructionActions, which
can then be passed to ofproto_parser.ofp_instruction_from_jsondict.
Please note that this is for making transition from ovs-ofctl
easier. Please consider using OFPAction constructors whe... |
def _get_closest_week(self, metric_date):
"""
Gets the closest monday to the date provided.
"""
#find the offset to the closest monday
days_after_monday = metric_date.isoweekday() - 1
return metric_date - datetime.timedelta(days=days_after_monday) | Gets the closest monday to the date provided. |
def AddNodeTags(r, node, tags, dry_run=False):
"""
Adds tags to a node.
@type node: str
@param node: node to add tags to
@type tags: list of str
@param tags: tags to add to the node
@type dry_run: bool
@param dry_run: whether to perform a dry run
@rtype: int
@return: job id
... | Adds tags to a node.
@type node: str
@param node: node to add tags to
@type tags: list of str
@param tags: tags to add to the node
@type dry_run: bool
@param dry_run: whether to perform a dry run
@rtype: int
@return: job id |
def _exit_door(self, _input):
"""This function is passed to each SelectableObject as a callback
The SelectableObjects have to call it once there are ready"""
self.results.append(_input)
if self._ended:
return
self._ended = True
self._release_all() | This function is passed to each SelectableObject as a callback
The SelectableObjects have to call it once there are ready |
def parse(self, filePath, skipLines=0, separator = ',', stringSeparator = '"', lineSeparator = '\n') :
"""Loads a CSV file"""
self.filename = filePath
f = open(filePath)
if lineSeparator == '\n' :
lines = f.readlines()
else :
lines = f.read().split(lineSeparator)
f.flush()
f.close()
lines = ... | Loads a CSV file |
def first_image(self):
"""Ready-only attribute that provides the value of the first non-none image that's
not the thumbnail override field.
"""
# loop through image fields and grab the first non-none one
for model_field in self._meta.fields:
if isinstance(model_field,... | Ready-only attribute that provides the value of the first non-none image that's
not the thumbnail override field. |
def reset_to_default(self):
"""Reset to default values of the shortcuts making a confirmation."""
reset = QMessageBox.warning(self, _("Shortcuts reset"),
_("Do you want to reset "
"to default values?"),
... | Reset to default values of the shortcuts making a confirmation. |
def should_send(self, request):
"""Returns whether or not the request should be sent to the
modules, based on the filters."""
if self.filters.get('whitelist', None):
return request.tree.type in self.filters['whitelist']
elif self.filters.get('blacklist', None):
re... | Returns whether or not the request should be sent to the
modules, based on the filters. |
def storage_volume_attachments(self):
"""
Gets the StorageVolumeAttachments API client.
Returns:
StorageVolumeAttachments:
"""
if not self.__storage_volume_attachments:
self.__storage_volume_attachments = StorageVolumeAttachments(self.__connection)
... | Gets the StorageVolumeAttachments API client.
Returns:
StorageVolumeAttachments: |
def file_follow_durable( path,
min_dump_interval=10,
xattr_name='user.collectd.logtail.pos', xattr_update=True,
**follow_kwz ):
'''Records log position into xattrs after reading line every
min_dump_interval seconds.
Checksum of the last line at the position
is also recorded (so line itself don't have to ... | Records log position into xattrs after reading line every
min_dump_interval seconds.
Checksum of the last line at the position
is also recorded (so line itself don't have to fit into xattr) to make sure
file wasn't truncated between last xattr dump and re-open. |
def _tls_auth_decrypt(self, s):
"""
Provided with the record header and AEAD-ciphered data, return the
sliced and clear tuple (TLSInnerPlaintext, tag). Note that
we still return the slicing of the original input in case of decryption
failure. Also, if the integrity check fails, a... | Provided with the record header and AEAD-ciphered data, return the
sliced and clear tuple (TLSInnerPlaintext, tag). Note that
we still return the slicing of the original input in case of decryption
failure. Also, if the integrity check fails, a warning will be issued,
but we still return... |
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate | Validates that a field value is greater than or equal to the
value given to this validator. |
def nested_key_indices(nested_dict):
"""
Give an ordering to the outer and inner keys used in a dictionary that
maps to dictionaries.
"""
outer_keys, inner_keys = collect_nested_keys(nested_dict)
outer_key_indices = {k: i for (i, k) in enumerate(outer_keys)}
inner_key_indices = {k: i for (i,... | Give an ordering to the outer and inner keys used in a dictionary that
maps to dictionaries. |
def read_PIA0_A_control(self, cpu_cycles, op_address, address):
"""
read from 0xff01 -> PIA 0 A side control register
"""
value = 0xb3
log.error(
"%04x| read $%04x (PIA 0 A side Control reg.) send $%02x (%s) back.\t|%s",
op_address, address, value, byte2bi... | read from 0xff01 -> PIA 0 A side control register |
def driver_name(self):
"""
Returns the name of the driver that provides this tacho motor device.
"""
(self._driver_name, value) = self.get_cached_attr_string(self._driver_name, 'driver_name')
return value | Returns the name of the driver that provides this tacho motor device. |
def renameMenu( self ):
"""
Prompts the user to supply a new name for the menu.
"""
item = self.uiMenuTREE.currentItem()
name, accepted = QInputDialog.getText( self,
'Rename Menu',
... | Prompts the user to supply a new name for the menu. |
def get_response(self):
"""Generate the response block of this request.
Careful: it only sets the fields which can be set from the request
"""
res = IODWriteRes()
for field in ["seqNum", "ARUUID", "API", "slotNumber",
"subslotNumber", "index"]:
r... | Generate the response block of this request.
Careful: it only sets the fields which can be set from the request |
def handel_default(self) -> None:
"""
处理设置到body上的数据默认 headers
"""
raw_body = self._body
body = cast(Optional[bytes], None)
default_type = 2
charset = self._charset or self._default_charset
if raw_body is None:
pass
elif isinstance(raw_b... | 处理设置到body上的数据默认 headers |
def begin(self, transaction=None, headers=None, **keyword_headers):
"""
Begin a transaction.
:param str transaction: the identifier for the transaction (optional - if not specified
a unique transaction id will be generated)
:param dict headers: a map of any additional header... | Begin a transaction.
:param str transaction: the identifier for the transaction (optional - if not specified
a unique transaction id will be generated)
:param dict headers: a map of any additional headers the broker requires
:param keyword_headers: any additional headers the broker ... |
def _parse_timestamp(timestamp):
"""
Parse a given timestamp value, raising ValueError if None or Flasey
"""
if timestamp:
try:
return aniso8601.parse_datetime(timestamp)
except AttributeError:
# raised by aniso8601 if raw_timestamp... | Parse a given timestamp value, raising ValueError if None or Flasey |
def _get_input_readers(self, state):
"""Get input readers.
Args:
state: a MapreduceState model.
Returns:
A tuple: (a list of input readers, a model._HugeTaskPayload entity).
The payload entity contains the json serialized input readers.
(None, None) when input reader inplitting returne... | Get input readers.
Args:
state: a MapreduceState model.
Returns:
A tuple: (a list of input readers, a model._HugeTaskPayload entity).
The payload entity contains the json serialized input readers.
(None, None) when input reader inplitting returned no data to process. |
def toggleCollapseAfter( self ):
"""
Collapses the splitter after this handle.
"""
if ( self.isCollapsed() ):
self.uncollapse()
else:
self.collapse( XSplitterHandle.CollapseDirection.After ) | Collapses the splitter after this handle. |
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = self.sx * x
yr = self.sy * y
return xr, yr | Next point in iteration |
def send_video_note(self, chat_id, data, duration=None, length=None, reply_to_message_id=None, reply_markup=None,
disable_notification=None, timeout=None):
"""
Use this method to send video files, Telegram clients support mp4 videos.
:param chat_id: Integer : Unique ident... | Use this method to send video files, Telegram clients support mp4 videos.
:param chat_id: Integer : Unique identifier for the message recipient — User or GroupChat id
:param data: InputFile or String : Video note to send. You can either pass a file_id as String to resend a video that is already on the T... |
def analyze(output_dir, dataset, cloud=False, project_id=None):
"""Blocking version of analyze_async. See documentation of analyze_async."""
job = analyze_async(
output_dir=output_dir,
dataset=dataset,
cloud=cloud,
project_id=project_id)
job.wait()
print('Analyze: ' + str(job.state)) | Blocking version of analyze_async. See documentation of analyze_async. |
def corr_flat_dir(a1, a2):
'''
Returns the correlation coefficient between two flattened adjacency
matrices. Similarity metric for weighted matrices.
Parameters
----------
A1 : NxN np.ndarray
directed matrix 1
A2 : NxN np.ndarray
directed matrix 2
Returns
-------
... | Returns the correlation coefficient between two flattened adjacency
matrices. Similarity metric for weighted matrices.
Parameters
----------
A1 : NxN np.ndarray
directed matrix 1
A2 : NxN np.ndarray
directed matrix 2
Returns
-------
r : float
Correlation coeffi... |
def _do_smart_punctuation(self, text):
"""Fancifies 'single quotes', "double quotes", and apostrophes.
Converts --, ---, and ... into en dashes, em dashes, and ellipses.
Inspiration is: <http://daringfireball.net/projects/smartypants/>
See "test/tm-cases/smarty_pants.text" for a full di... | Fancifies 'single quotes', "double quotes", and apostrophes.
Converts --, ---, and ... into en dashes, em dashes, and ellipses.
Inspiration is: <http://daringfireball.net/projects/smartypants/>
See "test/tm-cases/smarty_pants.text" for a full discussion of the
support here and
<... |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'document_label') and self.document_label is not None:
_dict['document_label'] = self.document_label
if hasattr(self, 'location') and self.location is not None:
... | Return a json dictionary representing this model. |
def _get_model_table(self, part):
"""
Returns a list that represents the table.
:param part: The table header, table footer or table body.
:type part: hatemile.util.html.htmldomelement.HTMLDOMElement
:return: The list that represents the table.
:rtype: list(list(hatemile... | Returns a list that represents the table.
:param part: The table header, table footer or table body.
:type part: hatemile.util.html.htmldomelement.HTMLDOMElement
:return: The list that represents the table.
:rtype: list(list(hatemile.util.html.htmldomelement.HTMLDOMElement)) |
def _get_degree(num_nodes):
"""Get the degree of the current surface.
Args:
num_nodes (int): The number of control points for a
B |eacute| zier surface.
Returns:
int: The degree :math:`d` such that :math:`(d + 1)(d + 2)/2`
equals ``num_nodes`... | Get the degree of the current surface.
Args:
num_nodes (int): The number of control points for a
B |eacute| zier surface.
Returns:
int: The degree :math:`d` such that :math:`(d + 1)(d + 2)/2`
equals ``num_nodes``.
Raises:
ValueEr... |
def get_bond_length(sp1, sp2, bond_order=1):
"""
Get the bond length between two species.
Args:
sp1 (Specie): First specie.
sp2 (Specie): Second specie.
bond_order: For species with different possible bond orders,
this allows one to obtain the bond length for a particula... | Get the bond length between two species.
Args:
sp1 (Specie): First specie.
sp2 (Specie): Second specie.
bond_order: For species with different possible bond orders,
this allows one to obtain the bond length for a particular bond
order. For example, to get the C=C bon... |
def _populate(self, soup):
"""
Populate the list, assuming ``soup`` is a ``BeautifulSoup`` object.
"""
tables = soup.select('table[rules=all]')
if not tables:
return
trs = tables[0].select('tr')[1:]
if len(trs[0]) == 5:
# M1
se... | Populate the list, assuming ``soup`` is a ``BeautifulSoup`` object. |
def stacks_2_eqns(self,stacks):
"""returns equation strings from stacks"""
if stacks:
return list(map(lambda p: self.stack_2_eqn(p), stacks))
else:
return [] | returns equation strings from stacks |
def space_toolbar(settings_items, empty_space):
""" formats the toolbar """
counter = 0
for part in settings_items:
counter += len(part)
if len(settings_items) == 1:
spacing = ''
else:
spacing = empty_space[
:int(math.floor((len(empty_space) - counter) / (len(set... | formats the toolbar |
def destroys(self):
""" Returns which single registers (including f, flag)
this instruction changes.
Registers are: a, b, c, d, e, i, h, l, ixh, ixl, iyh, iyl, r
LD a, X => Destroys a
LD a, a => Destroys nothing
INC a => Destroys a, f
POP af => Destroys a, f, s... | Returns which single registers (including f, flag)
this instruction changes.
Registers are: a, b, c, d, e, i, h, l, ixh, ixl, iyh, iyl, r
LD a, X => Destroys a
LD a, a => Destroys nothing
INC a => Destroys a, f
POP af => Destroys a, f, sp
PUSH af => Destroys sp... |
def recover_all_handler(self):
"""
Relink the file handler association you just removed.
"""
for handler in self._handler_cache:
self.logger.addHandler(handler)
self._handler_cache = list() | Relink the file handler association you just removed. |
def data(self, name, subject_ids=None, visit_ids=None, session_ids=None,
**kwargs):
"""
Returns the Fileset(s) or Field(s) associated with the provided spec
name(s), generating derived filesets as required. Multiple names in a
list can be provided, to allow their workflows t... | Returns the Fileset(s) or Field(s) associated with the provided spec
name(s), generating derived filesets as required. Multiple names in a
list can be provided, to allow their workflows to be combined into a
single workflow.
Parameters
----------
name : str | List[str]
... |
def pub_date(soup):
"""
Return the publishing date in struct format
pub_date_date, pub_date_day, pub_date_month, pub_date_year, pub_date_timestamp
Default date_type is pub
"""
pub_date = first(raw_parser.pub_date(soup, date_type="pub"))
if pub_date is None:
pub_date = first(raw_parse... | Return the publishing date in struct format
pub_date_date, pub_date_day, pub_date_month, pub_date_year, pub_date_timestamp
Default date_type is pub |
def generate_by_hash(hashcode):
"""Generates an PIL image avatar based on the given
hash String. Acts as the main accessor to pagan."""
img = Image.new(IMAGE_MODE, IMAGE_SIZE, BACKGROUND_COLOR)
if len(hashcode) < 32:
print ("hashcode must have lenght >= 32, %s" % hashcode)
raise FalseHas... | Generates an PIL image avatar based on the given
hash String. Acts as the main accessor to pagan. |
def run_info(template):
""" Print information about a specific template. """
template.project_name = 'TowelStuff' # fake project name, always the same
name = template_name_from_class_name(template.__class__.__name__)
term = TerminalView()
term.print_info("Content of template {} with an example proje... | Print information about a specific template. |
def get_custom_implementations(self):
"""Retrieve a list of cutom implementations.
Yields:
(str, str, ImplementationProperty) tuples: The name of the attribute
an implementation lives at, the name of the related transition,
and the related implementation.
... | Retrieve a list of cutom implementations.
Yields:
(str, str, ImplementationProperty) tuples: The name of the attribute
an implementation lives at, the name of the related transition,
and the related implementation. |
def from_file(cls, f):
"""
Constructs a :class:`~mwxml.iteration.dump.Dump` from a `file` pointer.
:Parameters:
f : `file`
A plain text file pointer containing XML to process
"""
element = ElementIterator.from_file(f)
assert element.tag == "me... | Constructs a :class:`~mwxml.iteration.dump.Dump` from a `file` pointer.
:Parameters:
f : `file`
A plain text file pointer containing XML to process |
def _check_avail(cmd):
'''
Check to see if the given command can be run
'''
if isinstance(cmd, list):
cmd = ' '.join([six.text_type(x) if not isinstance(x, six.string_types) else x
for x in cmd])
bret = True
wret = False
if __salt__['config.get']('cmd_blacklis... | Check to see if the given command can be run |
def wr_py_nts(fout_py, nts, docstring=None, varname="nts"):
"""Save namedtuples into a Python module."""
if nts:
with open(fout_py, 'w') as prt:
prt.write('"""{DOCSTRING}"""\n\n'.format(DOCSTRING=docstring))
prt.write("# Created: {DATE}\n".format(DATE=str(datetime.date.today())))... | Save namedtuples into a Python module. |
def main():
"""
How to control a DMX light through an Anyma USB controller
"""
# Channel value list for channels 1-512
cv = [0 for v in range(0, 512)]
# Create an instance of the DMX controller and open it
print("Opening DMX controller...")
dev = pyudmx.uDMXDevice()
# This will... | How to control a DMX light through an Anyma USB controller |
def reverse_ip_whois(self, query=None, ip=None, country=None, server=None, include_total_count=False, page=1,
**kwargs):
"""Pass in an IP address or a list of free text query terms."""
if (ip and query) or not (ip or query):
raise ValueError('Query or IP Address (but... | Pass in an IP address or a list of free text query terms. |
def get_mac(self):
''' Obtain the device's mac address. '''
ifreq = struct.pack('16sH14s', self.name, AF_UNIX, b'\x00'*14)
res = fcntl.ioctl(sockfd, SIOCGIFHWADDR, ifreq)
address = struct.unpack('16sH14s', res)[2]
mac = struct.unpack('6B8x', address)
return ":".join(['%0... | Obtain the device's mac address. |
def generate_base(path: str) -> str:
""" Convert path, which can be a URL or a file path into a base URI
:param path: file location or url
:return: file location or url sans actual name
"""
if ':' in path:
parts = urlparse(path)
parts_dict = parts._asdict()
parts_dict['path'... | Convert path, which can be a URL or a file path into a base URI
:param path: file location or url
:return: file location or url sans actual name |
def _conversion_function(pt_wrapper, dtype=None, name=None, as_ref=False):
"""Allows PrettyTensors and Loss to work as a tensor."""
# Ignore as_ref to not create backward compatibility issues.
_ = name, as_ref
t = pt_wrapper.tensor
if dtype and not dtype.is_compatible_with(t.dtype):
raise ValueError(
... | Allows PrettyTensors and Loss to work as a tensor. |
def _text2bool(val):
"""
Converts strings to True/False depending on the 'truth' expressed by
the string. If the string can't be converted, the original value
will be returned.
See '__true_strings' and '__false_strings' for values considered
'true' or 'false respectively.
This is usable as... | Converts strings to True/False depending on the 'truth' expressed by
the string. If the string can't be converted, the original value
will be returned.
See '__true_strings' and '__false_strings' for values considered
'true' or 'false respectively.
This is usable as 'converter' for SCons' Variables... |
def gaussian_filter(data, sigma):
"""
Drop-in replacement for scipy.ndimage.gaussian_filter.
(note: results are only approximately equal to the output of
gaussian_filter)
"""
if np.isscalar(sigma):
sigma = (sigma,) * data.ndim
baseline = data.mean()
filtered = data - baseline
... | Drop-in replacement for scipy.ndimage.gaussian_filter.
(note: results are only approximately equal to the output of
gaussian_filter) |
def get_tag_cloud(context, steps=6, min_count=None,
template='zinnia/tags/tag_cloud.html'):
"""
Return a cloud of published tags.
"""
tags = Tag.objects.usage_for_queryset(
Entry.published.all(), counts=True,
min_count=min_count)
return {'template': template,
... | Return a cloud of published tags. |
def mass_3d(self, r, rho0, gamma):
"""
mass enclosed a 3d sphere or radius r
:param r:
:param a:
:param s:
:return:
"""
mass_3d = 4 * np.pi * rho0 /(-gamma + 3) * r ** (-gamma + 3)
return mass_3d | mass enclosed a 3d sphere or radius r
:param r:
:param a:
:param s:
:return: |
def find_n50(self):
"""
Calculate the N50 for each strain. N50 is defined as the largest contig such that at least half of the total
genome size is contained in contigs equal to or larger than this contig
"""
for sample in self.metadata:
# Initialise the N50 attribute... | Calculate the N50 for each strain. N50 is defined as the largest contig such that at least half of the total
genome size is contained in contigs equal to or larger than this contig |
def handle_exception(self, exception):
"""
Handle a unspecified exception and return the correct method that should be used
for handling it.
If the exception has the `can_redirect` property set to False, it is
rendered to the browser. Otherwise, it will be redirected to the loc... | Handle a unspecified exception and return the correct method that should be used
for handling it.
If the exception has the `can_redirect` property set to False, it is
rendered to the browser. Otherwise, it will be redirected to the location
provided in the `RedirectUri` object that is ... |
def write_branch_data(self, file):
""" Writes branch data to an Excel spreadsheet.
"""
branch_sheet = self.book.add_sheet("Branches")
for i, branch in enumerate(self.case.branches):
for j, attr in enumerate(BRANCH_ATTRS):
branch_sheet.write(i, j, getattr(bran... | Writes branch data to an Excel spreadsheet. |
def _collate_results(self, field=None):
"""For a list of objects associated with a classification result, return the results as a
DataFrame and dict of taxa info.
Parameters
----------
field : {'readcount_w_children', 'readcount', 'abundance'}
Which field to use for ... | For a list of objects associated with a classification result, return the results as a
DataFrame and dict of taxa info.
Parameters
----------
field : {'readcount_w_children', 'readcount', 'abundance'}
Which field to use for the abundance/count of a particular taxon in a samp... |
def addcomment(self, creditmemo_increment_id,
comment, email=True, include_in_email=False):
"""
Add new comment to credit memo
:param creditmemo_increment_id: Credit memo increment ID
:return: bool
"""
return bool(
self.call(
'sal... | Add new comment to credit memo
:param creditmemo_increment_id: Credit memo increment ID
:return: bool |
def load_corpus(self, path, config):
'''Load a dialogue corpus; eventually, support pickles and potentially other formats'''
# use the default dataset if no path is provided
# TODO -- change this to use a pre-saved dataset
if path == '':
path = self.default_path_to_corpus
... | Load a dialogue corpus; eventually, support pickles and potentially other formats |
def predict(self, X):
"""Rank samples according to survival times
Lower ranks indicate shorter survival, higher ranks longer survival.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
The input samples.
Returns
-------
y... | Rank samples according to survival times
Lower ranks indicate shorter survival, higher ranks longer survival.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
The input samples.
Returns
-------
y : ndarray, shape = (n_samples,)
... |
def _get_reference(self):
"""
Sets up necessary reference for robots, grippers, and objects.
"""
super()._get_reference()
# indices for joints in qpos, qvel
self.robot_joints = list(self.mujoco_robot.joints)
self._ref_joint_pos_indexes = [
self.sim.mo... | Sets up necessary reference for robots, grippers, and objects. |
def attach_image(field, nested_fields, page, record_keeper=None):
'''
Returns a function that attaches an image to page if it exists
Currenlty assumes that images have already been imported and info
has been stored in record_keeper
'''
if (field in nested_fields) and nested_fields[field]:
... | Returns a function that attaches an image to page if it exists
Currenlty assumes that images have already been imported and info
has been stored in record_keeper |
def plot_frequency_recency_matrix(
model,
T=1,
max_frequency=None,
max_recency=None,
title=None,
xlabel="Customer's Historical Frequency",
ylabel="Customer's Recency",
**kwargs
):
"""
Plot recency frequecy matrix as heatmap.
Plot a figure of expected transactions in T next u... | Plot recency frequecy matrix as heatmap.
Plot a figure of expected transactions in T next units of time by a customer's frequency and recency.
Parameters
----------
model: lifetimes model
A fitted lifetimes model.
T: fload, optional
Next units of time to make predictions for
ma... |
def add_qtl_to_map(qtlfile, mapfile, outputfile='map_with_qtls.csv'):
""" This function adds to a genetic map for each marker the number
of significant QTLs found.
:arg qtlfile, the output from MapQTL transformed to a csv file via
'parse_mapqtl_file' which contains the closest markers.
:arg map... | This function adds to a genetic map for each marker the number
of significant QTLs found.
:arg qtlfile, the output from MapQTL transformed to a csv file via
'parse_mapqtl_file' which contains the closest markers.
:arg mapfile, the genetic map with all the markers.
:kwarg outputfile, the name of... |
def download(self, bundle_uuid, replica, version="", download_dir="",
metadata_files=('*',), data_files=('*',),
num_retries=10, min_delay_seconds=0.25):
"""
Download a bundle and save it to the local filesystem as a directory.
:param str bundle_uuid: The uuid o... | Download a bundle and save it to the local filesystem as a directory.
:param str bundle_uuid: The uuid of the bundle to download
:param str replica: the replica to download from. The supported replicas are: `aws` for Amazon Web Services, and
`gcp` for Google Cloud Platform. [aws, gcp]
... |
def check_y(y, link, dist, min_samples=1, verbose=True):
"""
tool to ensure that the targets:
- are in the domain of the link function
- are numerical
- have at least min_samples
- is finite
Parameters
----------
y : array-like
link : Link object
dist : Distribution object
... | tool to ensure that the targets:
- are in the domain of the link function
- are numerical
- have at least min_samples
- is finite
Parameters
----------
y : array-like
link : Link object
dist : Distribution object
min_samples : int, default: 1
verbose : bool, default: True
... |
def facade(factory):
"""Declare a method as a facade factory."""
wrapper = FacadeDescriptor(factory.__name__, factory)
return update_wrapper(wrapper, factory) | Declare a method as a facade factory. |
def question_image_filepath(instance, filename):
"""
Function DocString
"""
return '/'.join(['images', str(instance.question_level), str(instance.question_level_id), binascii.b2a_hex(os.urandom(15)), filename]) | Function DocString |
def __repair_unconnected_nodes(self):
"""
Adds a (``dominance_relation``) edge from the sentence root node to all
previously unconnected nodes (token nodes, that either represent a
punctuation mark or are part of a headline 'sentence' that has no
full syntax structure annotation)... | Adds a (``dominance_relation``) edge from the sentence root node to all
previously unconnected nodes (token nodes, that either represent a
punctuation mark or are part of a headline 'sentence' that has no
full syntax structure annotation). |
def from_raw_message(cls, rawmessage):
"""Create a message from a raw byte stream."""
if (rawmessage[5] &
MESSAGE_FLAG_EXTENDED_0X10) == MESSAGE_FLAG_EXTENDED_0X10:
if len(rawmessage) >= ExtendedSend.receivedSize:
msg = ExtendedSend.from_raw_message(rawmessage... | Create a message from a raw byte stream. |
def is_distributed(partition_column, lower_bound, upper_bound):
""" Check if is possible distribute a query given that args
Args:
partition_column: column used to share the data between the workers
lower_bound: the minimum value to be requested from the partition_column
upper_bound: the... | Check if is possible distribute a query given that args
Args:
partition_column: column used to share the data between the workers
lower_bound: the minimum value to be requested from the partition_column
upper_bound: the maximum value to be requested from the partition_column
Returns:
... |
def vcf_writer(parser, keep, extract, args):
"""Writes the data in VCF format."""
# The output
output = sys.stdout if args.output == "-" else open(args.output, "w")
try:
# Getting the samples
samples = np.array(parser.get_samples(), dtype=str)
k = _get_sample_select(samples=samp... | Writes the data in VCF format. |
def network_sampling(n, filename, directory=None, snowball=False, user=None):
"""
Selects a few users and exports a CSV of indicators for them.
TODO: Returns the network/graph between the selected users.
Parameters
----------
n : int
Number of users to select.
filename : string
... | Selects a few users and exports a CSV of indicators for them.
TODO: Returns the network/graph between the selected users.
Parameters
----------
n : int
Number of users to select.
filename : string
File to export to.
directory: string
Directory to select users from if us... |
def _deleteCompletedMeasurement(self, measurementId):
"""
Deletes the named measurement from the completed measurement store if it exists.
:param measurementId:
:return:
String: error messages
Integer: count of measurements deleted
"""
message, cou... | Deletes the named measurement from the completed measurement store if it exists.
:param measurementId:
:return:
String: error messages
Integer: count of measurements deleted |
def _publish_metrics(self, prev_keys, key, data, publishfn=None):
"""Recursively publish keys"""
if key not in data:
return
value = data[key]
keys = prev_keys + [key]
if not publishfn:
publishfn = self.publish
if isinstance(value, dict):
... | Recursively publish keys |
def all_groupings(partition):
"""Return all possible groupings of states for a particular coarse graining
(partition) of a network.
Args:
partition (tuple[tuple]): A partition of micro-elements into macro
elements.
Yields:
tuple[tuple[tuple]]: A grouping of micro-states int... | Return all possible groupings of states for a particular coarse graining
(partition) of a network.
Args:
partition (tuple[tuple]): A partition of micro-elements into macro
elements.
Yields:
tuple[tuple[tuple]]: A grouping of micro-states into macro states of
system.
... |
def to_dict(self, experiment):
"""Create a Json-like object for an experiment. Extends the basic
object with subject, image group, and (optional) functional data
identifiers.
Parameters
----------
experiment : ExperimentHandle
Returns
-------
Jso... | Create a Json-like object for an experiment. Extends the basic
object with subject, image group, and (optional) functional data
identifiers.
Parameters
----------
experiment : ExperimentHandle
Returns
-------
Json Object
Json-like object, i.e... |
def safe_dump(data, stream=None, **kwds):
    """Serialize *data* to YAML using the Ordered Dict Yaml Dumper.

    Thin wrapper around :func:`yaml.dump` that pins ``Dumper=ODYD`` so
    mappings are emitted in insertion order; every other keyword argument
    is forwarded to :func:`yaml.dump` unchanged.
    """
    ordered_dumper = ODYD
    return yaml.dump(data, stream=stream, Dumper=ordered_dumper, **kwds)
def set_logger(self, **fields):
"""Change the name of the logger that log.* should call
Args:
**fields: Extra fields to be logged. Logger name will be:
".".join([<module_name>, <cls_name>] + fields_sorted_on_key)
"""
names = [self.__module__, self.__class__._... | Change the name of the logger that log.* should call
Args:
**fields: Extra fields to be logged. Logger name will be:
".".join([<module_name>, <cls_name>] + fields_sorted_on_key) |
def set_callbacks(self, worker_start_callback: callable, worker_end_callback: callable, are_async: bool = False):
"""
:param are_async: True if the callbacks execute asynchronously, posting any heavy work to another thread.
"""
# We are setting self.worker_start_callback and self.worker_... | :param are_async: True if the callbacks execute asynchronously, posting any heavy work to another thread. |
def get_physical_port(self):
"""Returns the link aggregation object or the ethernet port object."""
obj = None
if self.is_link_aggregation():
obj = UnityLinkAggregation.get(self._cli, self.get_id())
else:
obj = UnityEthernetPort.get(self._cli, self.get_id())
... | Returns the link aggregation object or the ethernet port object. |
def get_asset_notification_session(self, asset_receiver, proxy):
"""Gets the notification session for notifications pertaining to
asset changes.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
arg proxy (osid.proxy.Proxy): a proxy
... | Gets the notification session for notifications pertaining to
asset changes.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetNotificationSession) - an
... |
def add_resource(
self,
base_rule,
base_view,
alternate_view=None,
alternate_rule=None,
id_rule=None,
app=None,
):
"""Add route or routes for a resource.
:param str base_rule: The URL rule for the resource. This will be
prefixed by... | Add route or routes for a resource.
:param str base_rule: The URL rule for the resource. This will be
prefixed by the API prefix.
:param base_view: Class-based view for the resource.
:param alternate_view: If specified, an alternate class-based view for
the resource. Usu... |
def _isVerbExpansible( verbObj, clauseTokens, clauseID ):
'''
Kontrollib, kas tavaline verb on laiendatav etteantud osalauses:
*) verbi kontekstis (osalauses) on veel teisi verbe;
*) verb kuulub etteantud osalausesse;
*) tegemist ei ole olema-verbiga (neid vaatame mujal e... | Kontrollib, kas tavaline verb on laiendatav etteantud osalauses:
*) verbi kontekstis (osalauses) on veel teisi verbe;
*) verb kuulub etteantud osalausesse;
*) tegemist ei ole olema-verbiga (neid vaatame mujal eraldi);
*) tegemist pole maks|mas|mast|mata-verbiga;
*)... |
def _createConfig(self):
""" Creates a config tree item (CTI) hierarchy containing default children.
"""
rootItem = MainGroupCti('debug inspector')
if DEBUGGING:
# Some test config items.
import numpy as np
from argos.config.untypedcti import UntypedC... | Creates a config tree item (CTI) hierarchy containing default children. |
def generate_tensor_filename(self, field_name, file_num, compressed=True):
""" Generate a filename for a tensor. """
file_ext = TENSOR_EXT
if compressed:
file_ext = COMPRESSED_TENSOR_EXT
filename = os.path.join(self.filename, 'tensors', '%s_%05d%s' %(field_name, file_num, fil... | Generate a filename for a tensor. |
def estimate_chi2mixture(self, lrt):
"""
estimates the parameters of a mixture of a chi-squared random variable of degree
0 and a scaled chi-squared random variable of degree d
(1-mixture)*chi2(0) + (mixture)*scale*chi2(dof),
where
scale is the scaling paramet... | estimates the parameters of a mixture of a chi-squared random variable of degree
0 and a scaled chi-squared random variable of degree d
(1-mixture)*chi2(0) + (mixture)*scale*chi2(dof),
where
scale is the scaling parameter for the scales chi-square distribution
dof ... |
def begin(self):
"""
This method will implement the handshake of the
Bitcoin protocol. It will send the Version message,
and block until it receives a VerAck.
Once we receive the version, we'll send the verack,
and begin downloading.
"""
log.debug("handsha... | This method will implement the handshake of the
Bitcoin protocol. It will send the Version message,
and block until it receives a VerAck.
Once we receive the version, we'll send the verack,
and begin downloading. |
def load_block_type(self, block_type):
    """Resolve *block_type* to a concrete :class:`.XBlock` subclass.

    Delegates to :meth:`XBlock.load_class`, passing along this object's
    ``default_class`` fallback and ``select`` plugin-selection policy.
    """
    fallback = self.default_class
    selector = self.select
    return XBlock.load_class(block_type, fallback, selector)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.