positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def vector_dot(vector1, vector2):
""" Computes the dot-product of the input vectors.
:param vector1: input vector 1
:type vector1: list, tuple
:param vector2: input vector 2
:type vector2: list, tuple
:return: result of the dot product
:rtype: float
"""
try:
if vector1 is No... | Computes the dot-product of the input vectors.
:param vector1: input vector 1
:type vector1: list, tuple
:param vector2: input vector 2
:type vector2: list, tuple
:return: result of the dot product
:rtype: float |
def connect(self, keyfile=None):
"""Connect to the node via ssh using the paramiko library.
:return: :py:class:`paramiko.SSHClient` - ssh connection or None on
failure
"""
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
... | Connect to the node via ssh using the paramiko library.
:return: :py:class:`paramiko.SSHClient` - ssh connection or None on
failure |
def end(self, folder=None):
"""End the simulation and destroy the current simulation environment.
"""
ret = self.env.destroy(folder=folder)
self._end_time = time.time()
self._log(logging.DEBUG, "Simulation run with {} steps took {:.3f}s to"
" complete, while act... | End the simulation and destroy the current simulation environment. |
def marshall(self):
"""Return the measurement in the line protocol format.
:rtype: str
"""
return '{},{} {} {}'.format(
self._escape(self.name),
','.join(['{}={}'.format(self._escape(k), self._escape(v))
for k, v in self.tags.items()]),
... | Return the measurement in the line protocol format.
:rtype: str |
def _download_mirbase(args, version="CURRENT"):
"""
Download files from mirbase
"""
if not args.hairpin or not args.mirna:
logger.info("Working with version %s" % version)
hairpin_fn = op.join(op.abspath(args.out), "hairpin.fa.gz")
mirna_fn = op.join(op.abspath(args.out), "miRNA.... | Download files from mirbase |
def total_amount(qs) -> Total:
"""Sums the amounts of the objects in the queryset, keeping each currency separate.
:param qs: A querystring containing objects that have an amount field of type Money.
:return: A Total object.
"""
aggregate = qs.values('amount_currency').annotate(sum=Sum('amount'))
... | Sums the amounts of the objects in the queryset, keeping each currency separate.
:param qs: A querystring containing objects that have an amount field of type Money.
:return: A Total object. |
def aa3_to_aa1(seq):
"""convert string of 3-letter amino acids to 1-letter amino acids
>>> aa3_to_aa1("CysAlaThrSerAlaArgGluLeuAlaMetGlu")
'CATSARELAME'
>>> aa3_to_aa1(None)
"""
if seq is None:
return None
return "".join(aa3_to_aa1_lut[aa3]
for aa3 in [seq[i:i +... | convert string of 3-letter amino acids to 1-letter amino acids
>>> aa3_to_aa1("CysAlaThrSerAlaArgGluLeuAlaMetGlu")
'CATSARELAME'
>>> aa3_to_aa1(None) |
def next(transport, wizard, step, data):
"""
Validate step and go to the next one (or finish the wizard)
:param transport: Transport object
:param wizard: Wizard block name
:param step: Current step number
:param data: form data for the step
"""
step = int(step)
wizard = blocks.get(... | Validate step and go to the next one (or finish the wizard)
:param transport: Transport object
:param wizard: Wizard block name
:param step: Current step number
:param data: form data for the step |
def validate_line_list(dist, attr, value):
"""
Validate that the value is compatible
"""
# does not work as reliably in Python 2.
if isinstance(value, str):
value = value.split()
value = list(value)
try:
check = (' '.join(value)).split()
if check == value:
... | Validate that the value is compatible |
def modify_url_for_impersonation(cls, url, impersonate_user, username):
"""
Modify the SQL Alchemy URL object with the user to impersonate if applicable.
:param url: SQLAlchemy URL object
:param impersonate_user: Bool indicating if impersonation is enabled
:param username: Effect... | Modify the SQL Alchemy URL object with the user to impersonate if applicable.
:param url: SQLAlchemy URL object
:param impersonate_user: Bool indicating if impersonation is enabled
:param username: Effective username |
def Detect(self, baseline, host_data):
"""Run host_data through detectors and return them if a detector triggers.
Args:
baseline: The base set of rdf values used to evaluate whether an issue
exists.
host_data: The rdf values passed back by the filters.
Returns:
A CheckResult mess... | Run host_data through detectors and return them if a detector triggers.
Args:
baseline: The base set of rdf values used to evaluate whether an issue
exists.
host_data: The rdf values passed back by the filters.
Returns:
A CheckResult message containing anomalies if any detectors iden... |
def get_batch(self):
"""Returns the Batch
"""
context = self.context
parent = api.get_parent(context)
if context.portal_type == "Batch":
return context
elif parent.portal_type == "Batch":
return parent
return None | Returns the Batch |
def generate_map(map, name='url_map'):
"""
Generates a JavaScript function containing the rules defined in
this map, to be used with a MapAdapter's generate_javascript
method. If you don't pass a name the returned JavaScript code is
an expression that returns a function. Otherwise it's a standalon... | Generates a JavaScript function containing the rules defined in
this map, to be used with a MapAdapter's generate_javascript
method. If you don't pass a name the returned JavaScript code is
an expression that returns a function. Otherwise it's a standalone
script that assigns the function with that na... |
def known_dists():
'''Return a list of all Distributions exporting udata.* entrypoints'''
return (
dist for dist in pkg_resources.working_set
if any(k in ENTRYPOINTS for k in dist.get_entry_map().keys())
) | Return a list of all Distributions exporting udata.* entrypoints |
def modify_cache_parameter_group(name, region=None, key=None, keyid=None, profile=None,
**args):
'''
Update a cache parameter group in place.
Note that due to a design limitation in AWS, this function is not atomic -- a maximum of 20
params may be modified in one underl... | Update a cache parameter group in place.
Note that due to a design limitation in AWS, this function is not atomic -- a maximum of 20
params may be modified in one underlying boto call. This means that if more than 20 params
need to be changed, the update is performed in blocks of 20, which in turns means ... |
def release():
"check release before upload to PyPI"
sh("paver bdist_wheel")
wheels = path("dist").files("*.whl")
if not wheels:
error("\n*** ERROR: No release wheel was built!")
sys.exit(1)
if any(".dev" in i for i in wheels):
error("\n*** ERROR: You're still using a 'dev' v... | check release before upload to PyPI |
def extract_vars(template):
""" Extract variables from template. Variables are enclosed in
double curly braces.
"""
keys = set()
for match in re.finditer(r"\{\{ (?P<key>\w+) \}\}", template.getvalue()):
keys.add(match.groups()[0])
return sorted(list(keys)) | Extract variables from template. Variables are enclosed in
double curly braces. |
def get_interface_switchport_output_switchport_acceptable_frame_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_in... | Auto Generated Code |
def upper_diag_self_prodx(list_):
"""
upper diagnoal of cartesian product of self and self.
Weird name. fixme
Args:
list_ (list):
Returns:
list:
CommandLine:
python -m utool.util_alg --exec-upper_diag_self_prodx
Example:
>>> # ENABLE_DOCTEST
>>> fr... | upper diagnoal of cartesian product of self and self.
Weird name. fixme
Args:
list_ (list):
Returns:
list:
CommandLine:
python -m utool.util_alg --exec-upper_diag_self_prodx
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_alg import * # NOQA
>>>... |
def nl_complete_msg(sk, msg):
"""Finalize Netlink message.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/nl.c#L450
This function finalizes a Netlink message by completing the message with desirable flags and values depending on the
socket configuration.
- If not yet filled out, the source... | Finalize Netlink message.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/nl.c#L450
This function finalizes a Netlink message by completing the message with desirable flags and values depending on the
socket configuration.
- If not yet filled out, the source address of the message (`nlmsg_pid`)... |
def update_user(self, user_id, **kwargs):
"""Update a user."""
body = self._formdata(kwargs, FastlyUser.FIELDS)
content = self._fetch("/user/%s" % user_id, method="PUT", body=body)
return FastlyUser(self, content) | Update a user. |
def make_psf_kernel(psf, npix, cdelt, xpix, ypix, psf_scale_fn=None, normalize=False):
"""
Generate a kernel for a point-source.
Parameters
----------
psf : `~fermipy.irfs.PSFModel`
npix : int
Number of pixels in X and Y dimensions.
cdelt : float
Pixel size in degrees.
... | Generate a kernel for a point-source.
Parameters
----------
psf : `~fermipy.irfs.PSFModel`
npix : int
Number of pixels in X and Y dimensions.
cdelt : float
Pixel size in degrees. |
def _bundleable(desc):
"""Creates a function that transforms an API call into a bundling call.
It transform a_func from an API call that receives the requests and returns
the response into a callable that receives the same request, and
returns a :class:`bundling.Event`.
The returned Event object c... | Creates a function that transforms an API call into a bundling call.
It transform a_func from an API call that receives the requests and returns
the response into a callable that receives the same request, and
returns a :class:`bundling.Event`.
The returned Event object can be used to obtain the event... |
def getStickXY(TableName):
"""
Get X and Y for fine plotting of a stick spectrum.
Usage: X,Y = getStickXY(TableName).
"""
cent,intens = getColumns(TableName,('nu','sw'))
n = len(cent)
cent_ = zeros(n*3)
intens_ = zeros(n*3)
for i in range(n):
intens_[3*i] = 0
intens_[... | Get X and Y for fine plotting of a stick spectrum.
Usage: X,Y = getStickXY(TableName). |
def sinwave(n=4,inc=.25):
"""
Returns a DataFrame with the required format for
a surface (sine wave) plot
Parameters:
-----------
n : int
Ranges for X and Y axis (-n,n)
n_y : int
Size of increment along the axis
"""
x=np.arange(-n,n,inc)
y=np.arange(-n,n,inc)
X,Y=np.meshgrid(x,y)
R = np.sqrt(X**2... | Returns a DataFrame with the required format for
a surface (sine wave) plot
Parameters:
-----------
n : int
Ranges for X and Y axis (-n,n)
n_y : int
Size of increment along the axis |
def _oversized_subqueries(
self,
coordinate,
radiusArcsec):
"""
*subdivide oversized query*
**Key Arguments:**
# -
**Return:**
- None
.. todo::
- @review: when complete, clean _oversized_subqueries method... | *subdivide oversized query*
**Key Arguments:**
# -
**Return:**
- None
.. todo::
- @review: when complete, clean _oversized_subqueries method
- @review: when complete add logging |
def estimate_frequency_for_zero(self, sample_rate: float, nbits=42) -> float:
"""
Calculates the frequency of at most nbits logical zeros and returns the mean of these frequencies
:param nbits:
:return:
"""
return self.__estimate_frequency_for_bit(False, sample_rate, nbi... | Calculates the frequency of at most nbits logical zeros and returns the mean of these frequencies
:param nbits:
:return: |
def all_devices(cl_device_type=None, platform=None):
"""Get multiple device environments, optionally only of the indicated type.
This will only fetch devices that support double point precision.
Args:
cl_device_type (cl.device_type.* or string): The type of the device we want,
... | Get multiple device environments, optionally only of the indicated type.
This will only fetch devices that support double point precision.
Args:
cl_device_type (cl.device_type.* or string): The type of the device we want,
can be a opencl device type or a string matching 'GP... |
def read_umi_tools(filename: PathLike, dtype: str='float32') -> AnnData:
"""Read a gzipped condensed count matrix from umi_tools.
Parameters
----------
filename
File name to read from.
"""
# import pandas for conversion of a dict of dicts into a matrix
# import gzip to read a gzippe... | Read a gzipped condensed count matrix from umi_tools.
Parameters
----------
filename
File name to read from. |
def update(self, callback_method=values.unset, callback_url=values.unset,
friendly_name=values.unset):
"""
Update the TriggerInstance
:param unicode callback_method: The HTTP method to use to call callback_url
:param unicode callback_url: The URL we call when the trigger ... | Update the TriggerInstance
:param unicode callback_method: The HTTP method to use to call callback_url
:param unicode callback_url: The URL we call when the trigger fires
:param unicode friendly_name: A string to describe the resource
:returns: Updated TriggerInstance
:rtype: t... |
def _python_to_mod_new(changes: Changeset) -> Dict[str, List[List[bytes]]]:
""" Convert a LdapChanges object to a modlist for add operation. """
table: LdapObjectClass = type(changes.src)
fields = table.get_fields()
result: Dict[str, List[List[bytes]]] = {}
for name, field in fields.items():
... | Convert a LdapChanges object to a modlist for add operation. |
def pow(self, other, axis="columns", level=None, fill_value=None):
"""Pow this DataFrame against another DataFrame/Series/scalar.
Args:
other: The object to use to apply the pow against this.
axis: The axis to pow over.
level: The Multilevel index level to appl... | Pow this DataFrame against another DataFrame/Series/scalar.
Args:
other: The object to use to apply the pow against this.
axis: The axis to pow over.
level: The Multilevel index level to apply pow over.
fill_value: The value to fill NaNs with.
Re... |
def set_wd_noise(self, wd_noise):
"""Add White Dwarf Background Noise
This adds the White Dwarf (WD) Background noise. This can either do calculations with,
without, or with and without WD noise.
Args:
wd_noise (bool or str, optional): Add or remove WD background noise. Fir... | Add White Dwarf Background Noise
This adds the White Dwarf (WD) Background noise. This can either do calculations with,
without, or with and without WD noise.
Args:
wd_noise (bool or str, optional): Add or remove WD background noise. First option is to
have only cal... |
def populate(self, **values):
"""Populate values to fields. Skip non-existing."""
values = values.copy()
fields = list(self.iterate_with_name())
for _, structure_name, field in fields:
if structure_name in values:
field.__set__(self, values.pop(structure_name)... | Populate values to fields. Skip non-existing. |
def __reset_unique_identities(self):
"""Clear identities relationships and enrollments data"""
self.log("Reseting unique identities...")
self.log("Clearing identities relationships")
nids = 0
uidentities = api.unique_identities(self.db)
for uidentity in uidentities:
... | Clear identities relationships and enrollments data |
def get_all(kind='2'):
'''
Get All the records.
'''
return TabPost.select().where(
(TabPost.kind == kind) &
(TabPost.valid == 1)
).order_by(
TabPost.time_update.desc()
) | Get All the records. |
def remove_entry(self, fs_entry):
"""Removes an FSEntry object from this METS document.
Any children of this FSEntry will also be removed. This will be removed
as a child of it's parent, if any.
:param metsrw.mets.FSEntry fs_entry: FSEntry to remove from the METS
"""
tr... | Removes an FSEntry object from this METS document.
Any children of this FSEntry will also be removed. This will be removed
as a child of it's parent, if any.
:param metsrw.mets.FSEntry fs_entry: FSEntry to remove from the METS |
def get(self, id, domain='messages'):
"""
Gets a message translation.
@rtype: str
@return: The message translation
"""
assert isinstance(id, (str, unicode))
assert isinstance(domain, (str, unicode))
if self.defines(id, domain):
return self.me... | Gets a message translation.
@rtype: str
@return: The message translation |
def is_connected(self, use_cached=True):
"""Return True if the device is currrently connect and False if not"""
device_json = self.get_device_json(use_cached)
return int(device_json.get("dpConnectionStatus")) > 0 | Return True if the device is currrently connect and False if not |
def _check_age(self, pub, min_interval=timedelta(seconds=0)):
"""Check the age of the receiver.
"""
now = datetime.utcnow()
if (now - self._last_age_check) <= min_interval:
return
LOGGER.debug("%s - checking addresses", str(datetime.utcnow()))
self._last_age_... | Check the age of the receiver. |
def _serialize(self, value, attr, obj):
"""Convert the Arrow object into a string."""
if isinstance(value, arrow.arrow.Arrow):
value = value.datetime
return super(ArrowField, self)._serialize(value, attr, obj) | Convert the Arrow object into a string. |
def create_image(self, image_file, caption):
""" Create an image with a caption """
suffix = 'png'
if image_file:
img = Image.open(os.path.join(self.gallery, image_file))
width, height = img.size
ratio = width/WIDTH
img = img.resize((int(width // r... | Create an image with a caption |
def requestAccountUpdates(self, subscribe=True):
"""
Register to account updates
https://www.interactivebrokers.com/en/software/api/apiguide/java/reqaccountupdates.htm
"""
if self.subscribeAccount != subscribe:
self.subscribeAccount = subscribe
self.ibConn... | Register to account updates
https://www.interactivebrokers.com/en/software/api/apiguide/java/reqaccountupdates.htm |
def feedback_results_to_measurements_frame(feedback_result):
'''
Extract measured data from `FeedbackResults` instance into
`pandas.DataFrame`.
'''
index = pd.Index(feedback_result.time * 1e-3, name='seconds')
df_feedback = pd.DataFrame(np.column_stack([feedback_result.V_fb,
... | Extract measured data from `FeedbackResults` instance into
`pandas.DataFrame`. |
def check_acknowledgment(self, ds):
'''
Check if acknowledgment/acknowledgment attribute is present. Because
acknowledgement has its own check, we are keeping it out of the Global
Attributes (even though it is a Global Attr).
:param netCDF4.Dataset ds: An open netCDF dataset
... | Check if acknowledgment/acknowledgment attribute is present. Because
acknowledgement has its own check, we are keeping it out of the Global
Attributes (even though it is a Global Attr).
:param netCDF4.Dataset ds: An open netCDF dataset |
def get_hex_color_range(start_color, end_color, quantity):
"""
Generates a list of quantity Hex colors from start_color to end_color.
:param start_color: Hex or plain English color for start of range
:param end_color: Hex or plain English color for end of range
:param quantity: ... | Generates a list of quantity Hex colors from start_color to end_color.
:param start_color: Hex or plain English color for start of range
:param end_color: Hex or plain English color for end of range
:param quantity: Number of colours to return
:return: A list of Hex color values |
def start(self, activity, action):
'''
Mark an action as started
:param activity: The virtualenv activity name
:type activity: ``str``
:param action: The virtualenv action
:type action: :class:`tox.session.Action`
'''
try:
self._start_actio... | Mark an action as started
:param activity: The virtualenv activity name
:type activity: ``str``
:param action: The virtualenv action
:type action: :class:`tox.session.Action` |
def _get_index_nd(self, key):
"""Returns an index array for use in scatter_nd and gather_nd."""
def _is_advanced_index(index):
"""The definition of advanced index here includes integers as well, while
integers are considered as basic index type when the key contains only
... | Returns an index array for use in scatter_nd and gather_nd. |
def newEntry(self, ident = "", seq = "", plus = "", qual = "") :
"""Appends an empty entry at the end of the CSV and returns it"""
e = FastqEntry()
self.data.append(e)
return e | Appends an empty entry at the end of the CSV and returns it |
def monotonic(values, mode="<", atol=1.e-8):
"""
Returns False if values are not monotonic (decreasing|increasing).
mode is "<" for a decreasing sequence, ">" for an increasing sequence.
Two numbers are considered equal if they differ less that atol.
.. warning:
Not very efficient for large... | Returns False if values are not monotonic (decreasing|increasing).
mode is "<" for a decreasing sequence, ">" for an increasing sequence.
Two numbers are considered equal if they differ less that atol.
.. warning:
Not very efficient for large data sets.
>>> values = [1.2, 1.3, 1.4]
>>> mon... |
def run_query(self, collection_name, query):
""" method runs query on a specified collection and return a list of filtered Job records """
cursor = self.ds.filter(collection_name, query)
return [Job.from_json(document) for document in cursor] | method runs query on a specified collection and return a list of filtered Job records |
def get_default_config_help(self):
"""
Returns the default collector help text
"""
config_help = super(UsersCollector, self).get_default_config_help()
config_help.update({
})
return config_help | Returns the default collector help text |
def noperiodic(r_array, periodic, reference=None):
'''Rearrange the array of coordinates *r_array* in a way that doensn't
cross the periodic boundary.
Parameters
----------
r_array : :class:`numpy.ndarray`, (Nx3)
Array of 3D coordinates.
periodic: :class:`numpy.ndarray`, ... | Rearrange the array of coordinates *r_array* in a way that doensn't
cross the periodic boundary.
Parameters
----------
r_array : :class:`numpy.ndarray`, (Nx3)
Array of 3D coordinates.
periodic: :class:`numpy.ndarray`, (3)
Periodic boundary dimensions.
reference... |
def _linkFeature(self, feature):
"""
Link a feature with its parents.
"""
parentNames = feature.attributes.get("Parent")
if parentNames is None:
self.roots.add(feature)
else:
for parentName in parentNames:
self._linkToParent(feature... | Link a feature with its parents. |
def _contour(f, vertexlabels=None, contourfunc=None, **kwargs):
'''Workhorse function for the above, where ``contourfunc`` is the contour
plotting function to use for actual plotting.'''
if contourfunc is None:
contourfunc = plt.tricontour
if vertexlabels is None:
vertexlabels = ('1','2... | Workhorse function for the above, where ``contourfunc`` is the contour
plotting function to use for actual plotting. |
def handleOneNodeMsg(self, wrappedMsg):
"""
Validate and process one message from a node.
:param wrappedMsg: Tuple of message and the name of the node that sent
the message
"""
try:
vmsg = self.validateNodeMsg(wrappedMsg)
if vmsg:
... | Validate and process one message from a node.
:param wrappedMsg: Tuple of message and the name of the node that sent
the message |
def effective_FPS(self):
"""
Calculates the effective frames-per-second - this should largely
correlate to the desired FPS supplied in the constructor, but no
guarantees are given.
:returns: The effective frame rate.
:rtype: float
"""
if self.start_time i... | Calculates the effective frames-per-second - this should largely
correlate to the desired FPS supplied in the constructor, but no
guarantees are given.
:returns: The effective frame rate.
:rtype: float |
def _check_transition_target(self, transition):
"""Checks the validity of a transition target
Checks whether the transition target is valid.
:param rafcon.core.transition.Transition transition: The transition to be checked
:return bool validity, str message: validity is True, when the ... | Checks the validity of a transition target
Checks whether the transition target is valid.
:param rafcon.core.transition.Transition transition: The transition to be checked
:return bool validity, str message: validity is True, when the transition is valid, False else. message gives
... |
def fit(self, X, y=None):
"""Fit FeatureSetSelector for feature selection
Parameters
----------
X: array-like of shape (n_samples, n_features)
The training input samples.
y: array-like, shape (n_samples,)
The target values (integers that correspond to cla... | Fit FeatureSetSelector for feature selection
Parameters
----------
X: array-like of shape (n_samples, n_features)
The training input samples.
y: array-like, shape (n_samples,)
The target values (integers that correspond to classes in classification, real numbers ... |
def findOptimalResults(expName, suite, outFile):
"""
Go through every experiment in the specified folder. For each experiment, find
the iteration with the best validation score, and return the metrics
associated with that iteration.
"""
writer = csv.writer(outFile)
headers = ["testAccuracy", "bgAccuracy",... | Go through every experiment in the specified folder. For each experiment, find
the iteration with the best validation score, and return the metrics
associated with that iteration. |
def readlink(self, path):
"""
Return the target of a symbolic link (shortcut). You can use
L{symlink} to create these. The result may be either an absolute or
relative pathname.
@param path: path of the symbolic link file
@type path: str
@return: target path
... | Return the target of a symbolic link (shortcut). You can use
L{symlink} to create these. The result may be either an absolute or
relative pathname.
@param path: path of the symbolic link file
@type path: str
@return: target path
@rtype: str |
def load_variants(adapter, vcf_obj, case_obj, skip_case_id=False, gq_treshold=None,
max_window=3000, variant_type='snv'):
"""Load variants for a family into the database.
Args:
adapter (loqusdb.plugins.Adapter): initialized plugin
case_obj(Case): dict with case information
... | Load variants for a family into the database.
Args:
adapter (loqusdb.plugins.Adapter): initialized plugin
case_obj(Case): dict with case information
nr_variants(int)
skip_case_id (bool): whether to include the case id on variant level
or not
gq_t... |
async def reseed_init(self, next_seed: str = None) -> str:
"""
Begin reseed operation: generate new key. Raise WalletState if wallet is closed.
:param next_seed: incoming replacement seed (default random)
:return: new verification key
"""
LOGGER.debug('Wallet.reseed_ini... | Begin reseed operation: generate new key. Raise WalletState if wallet is closed.
:param next_seed: incoming replacement seed (default random)
:return: new verification key |
def tag(self, name, formatter=None):
"""Return instance of Tag.
Args:
name (str): The value for this tag.
formatter (method, optional): A method that take a tag value and returns a
formatted tag.
Returns:
obj: An instance of Tag.
"""
... | Return instance of Tag.
Args:
name (str): The value for this tag.
formatter (method, optional): A method that take a tag value and returns a
formatted tag.
Returns:
obj: An instance of Tag. |
def validate_bool_kwarg(value, arg_name):
""" Ensures that argument passed in arg_name is of type bool. """
if not (is_bool(value) or value is None):
raise ValueError('For argument "{arg}" expected type bool, received '
'type {typ}.'.format(arg=arg_name,
... | Ensures that argument passed in arg_name is of type bool. |
def _can_construct_from_str(strict_mode: bool, from_type: Type, to_type: Type) -> bool:
"""
Returns true if the provided types are valid for constructor_with_str_arg conversion
Explicitly declare that we are not able to convert primitive types (they already have their own converters)
:param strict_mode... | Returns true if the provided types are valid for constructor_with_str_arg conversion
Explicitly declare that we are not able to convert primitive types (they already have their own converters)
:param strict_mode:
:param from_type:
:param to_type:
:return: |
def is_used(self, regs, i, top=None):
""" Checks whether any of the given regs are required from the given point
to the end or not.
"""
if i < 0:
i = 0
if self.lock:
return True
regs = list(regs) # make a copy
if top is None:
... | Checks whether any of the given regs are required from the given point
to the end or not. |
async def wait_done(self) -> int:
"""Coroutine to wait for subprocess run completion.
Returns:
The exit code of the subprocess.
"""
await self._done_running_evt.wait()
if self._exit_code is None:
raise SublemonLifetimeError(
'Subprocess e... | Coroutine to wait for subprocess run completion.
Returns:
The exit code of the subprocess. |
def _refresh_multi_axis(self):
""" If linked axis' are used, setup and link them """
d = self.declaration
#: Create a separate viewbox
self.viewbox = pg.ViewBox()
#: If this is the first nested plot, use the parent right axis
_plots = [c for c in self.pa... | If linked axis' are used, setup and link them |
def moving_average(iterable, n):
"""
From Python collections module documentation
moving_average([40, 30, 50, 46, 39, 44]) --> 40.0 42.0 45.0 43.0
"""
it = iter(iterable)
d = collections.deque(itertools.islice(it, n - 1))
d.appendleft(0)
s = sum(d)
for elem in it:
s += elem ... | From Python collections module documentation
moving_average([40, 30, 50, 46, 39, 44]) --> 40.0 42.0 45.0 43.0 |
def from_unit_cube(self, x):
"""
Used by multinest
:param x: 0 < x < 1
:param lower_bound:
:param upper_bound:
:return:
"""
mu = self.mu.value
sigma = self.sigma.value
sqrt_two = 1.414213562
if x < 1e-16 or (1 - x) < 1e-16:
... | Used by multinest
:param x: 0 < x < 1
:param lower_bound:
:param upper_bound:
:return: |
def init_with_context(self, context):
"""
Initializes the status list.
"""
super(CacheStatusGroup, self).init_with_context(context)
if 'dashboardmods' in settings.INSTALLED_APPS:
import dashboardmods
memcache_mods = dashboardmods.get_memcache_dash_modules... | Initializes the status list. |
def add_resource_types(resource_i, types):
"""
Save a reference to the types used for this resource.
@returns a list of type_ids representing the type ids
on the resource.
"""
if types is None:
return []
existing_type_ids = []
if resource_i.types:
for t in resource_i.t... | Save a reference to the types used for this resource.
@returns a list of type_ids representing the type ids
on the resource. |
def _get_stmt_by_group(self, stmt_type, stmts_this_type, eh):
"""Group Statements of `stmt_type` by their hierarchical relations."""
# Dict of stmt group key tuples, indexed by their first Agent
stmt_by_first = collections.defaultdict(lambda: [])
# Dict of stmt group key tuples, indexed ... | Group Statements of `stmt_type` by their hierarchical relations. |
def _gen_keep_files(name, require, walk_d=None):
'''
Generate the list of files that need to be kept when a dir based function
like directory or recurse has a clean.
'''
def _is_child(path, directory):
'''
Check whether ``path`` is child of ``directory``
'''
path = os... | Generate the list of files that need to be kept when a dir based function
like directory or recurse has a clean. |
def _sample_item(self, **kwargs):
"""Sample an item from the pool according to the instrumental
distribution
"""
t = self.t_
if 'fixed_stratum' in kwargs:
stratum_idx = kwargs['fixed_stratum']
else:
stratum_idx = None
if stratum_idx is not ... | Sample an item from the pool according to the instrumental
distribution |
def uyirmei_constructed( mei_idx, uyir_idx):
""" construct uyirmei letter give mei index and uyir index """
idx,idy = mei_idx,uyir_idx
assert ( idy >= 0 and idy < uyir_len() )
assert ( idx >= 0 and idx < 6+mei_len() )
return grantha_agaram_letters[mei_idx]+accent_symbols[uyir_idx] | construct uyirmei letter give mei index and uyir index |
def _delete_vlan_profile(self, handle, vlan_id, ucsm_ip):
"""Deletes VLAN Profile from UCS Manager."""
vlan_name = self.make_vlan_name(vlan_id)
vlan_profile_dest = (const.VLAN_PATH + const.VLAN_PROFILE_PATH_PREFIX +
vlan_name)
try:
handle.StartTra... | Deletes VLAN Profile from UCS Manager. |
def tail_messages(self, topic="", passive=False, **kw):
"""
Subscribe to messages published on the sockets listed in :ref:`conf-endpoints`.
Args:
topic (six.text_type): The topic to subscribe to. The default is to
subscribe to all topics.
passive (bool): ... | Subscribe to messages published on the sockets listed in :ref:`conf-endpoints`.
Args:
topic (six.text_type): The topic to subscribe to. The default is to
subscribe to all topics.
passive (bool): If ``True``, bind to the :ref:`conf-endpoints` sockets
inste... |
def _jws_header(keyid, algorithm):
"""Produce a base64-encoded JWS header."""
data = {
'typ': 'JWT',
'alg': algorithm.name,
# 'kid' is used to indicate the public part of the key
# used during signing.
'kid': keyid
}
datajson = json.dumps(data, sort_keys=True).en... | Produce a base64-encoded JWS header. |
def energy_coefficients(m1, m2, s1z=0, s2z=0, phase_order=-1, spin_order=-1):
""" Return the energy coefficients. This assumes that the system has aligned spins only.
"""
implemented_phase_order = 7
implemented_spin_order = 7
if phase_order > implemented_phase_order:
raise ValueError("pN coe... | Return the energy coefficients. This assumes that the system has aligned spins only. |
def retrieve_data_directory(self):
"""
Retrieve the data directory
Look first into config_filename_global
then into config_filename_user. The latter takes preeminence.
"""
args = self.args
try:
if args['datadirectory']:
aux.ensure_dir(a... | Retrieve the data directory
Look first into config_filename_global
then into config_filename_user. The latter takes preeminence. |
def get_range(self):
""" Get range """
if not self.page:
return (1, self.last_blocks[self.coinid])
# Get start of the range
start = self.page * self.limit
# Get finish of the range
end = (self.page + 1) * self.limit
if start > self.last_blocks[self.coinid]:
return (1,1)
if end > self.last_bloc... | Get range |
def create_vault_ec2_client_configuration(self, access_key, secret_key, endpoint=None, mount_point='aws-ec2'):
"""POST /auth/<mount_point>/config/client
Configure the credentials required to perform API calls to AWS as well as custom endpoints to talk to AWS APIs.
The instance identity document... | POST /auth/<mount_point>/config/client
Configure the credentials required to perform API calls to AWS as well as custom endpoints to talk to AWS APIs.
The instance identity document fetched from the PKCS#7 signature will provide the EC2 instance ID. The
credentials configured using this endpoin... |
def parent(self, resource):
"""Set parent resource
:param resource: parent resource
:type resource: Resource
:raises ResourceNotFound: resource not found on the API
"""
resource.check()
self['parent_type'] = resource.type
self['parent_uuid'] = resource.u... | Set parent resource
:param resource: parent resource
:type resource: Resource
:raises ResourceNotFound: resource not found on the API |
def _get_view_result(view, raw_result, **kwargs):
""" Get view results helper. """
if raw_result:
return view(**kwargs)
if kwargs:
return Result(view, **kwargs)
return view.result | Get view results helper. |
def set_header(self, name, format, *args):
    """
    Set node header; these are provided to other nodes during discovery
    and come in each ENTER message.

    Thin wrapper delegating to the C binding ``lib.zyre_set_header``;
    ``format`` and ``*args`` are forwarded verbatim (printf-style).
    """
    node_handle = self._as_parameter_
    return lib.zyre_set_header(node_handle, name, format, *args)
and come in each ENTER message. |
def scatter2d(data, **kwargs):
"""Create a 2D scatter plot
Builds upon `matplotlib.pyplot.scatter` with nice defaults
and handles categorical colors / legends better.
Parameters
----------
data : array-like, shape=[n_samples, n_features]
Input data. Only the first two components will b... | Create a 2D scatter plot
Builds upon `matplotlib.pyplot.scatter` with nice defaults
and handles categorical colors / legends better.
Parameters
----------
data : array-like, shape=[n_samples, n_features]
Input data. Only the first two components will be used.
c : list-like or None, opt... |
def _read_waypoints_v110(self, file):
'''read a version 110 waypoint'''
comment = ''
for line in file:
if line.startswith('#'):
comment = line[1:].lstrip()
continue
line = line.strip()
if not line:
continue
... | read a version 110 waypoint |
def cartesian_to_barycentric_3D(tri, xy):
'''
cartesian_to_barycentric_3D(tri,xy) is identical to cartesian_to_barycentric_2D(tri,xy) except
it works on 3D data. Note that if tri is a 3 x 3 x n, a 3 x n x 3 or an n x 3 x 3 matrix, the
first dimension must always be the triangle vertices and the second 3... | cartesian_to_barycentric_3D(tri,xy) is identical to cartesian_to_barycentric_2D(tri,xy) except
it works on 3D data. Note that if tri is a 3 x 3 x n, a 3 x n x 3 or an n x 3 x 3 matrix, the
first dimension must always be the triangle vertices and the second 3-sized dimension must be
the (x,y,z) coordinates. |
def _found_barcode(self, record, sample, barcode=None):
"""Hook called when barcode is found"""
assert record.id == self.current_record['sequence_name']
self.current_record['sample'] = sample | Hook called when barcode is found |
def daterange(start, stop, step=1, inclusive=False):
"""In the spirit of :func:`range` and :func:`xrange`, the `daterange`
generator that yields a sequence of :class:`~datetime.date`
objects, starting at *start*, incrementing by *step*, until *stop*
is reached.
When *inclusive* is True, the final d... | In the spirit of :func:`range` and :func:`xrange`, the `daterange`
generator that yields a sequence of :class:`~datetime.date`
objects, starting at *start*, incrementing by *step*, until *stop*
is reached.
When *inclusive* is True, the final date may be *stop*, **if**
*step* falls evenly on it. By ... |
def _take_ownership(self):
    """Make the Python instance take ownership of the GIBaseInfo. i.e.
    unref if the python instance gets gc'ed.
    """
    # Truthiness guard: a null/invalid wrapper takes no ownership.
    if not self:
        return
    ptr = cast(self.value, GIBaseInfo)
    # Register a finalizer so the underlying info is unref'd at GC time.
    _UnrefFinalizer.track(self, ptr)
    self.__owns = True
unref if the python instance gets gc'ed. |
def just_load_srno(srno, prm_filename=None):
"""Simply load an dataset based on serial number (srno).
This convenience function reads a dataset based on a serial number. This
serial number (srno) must then be defined in your database. It is mainly
used to check that things are set up correctly.
Ar... | Simply load an dataset based on serial number (srno).
This convenience function reads a dataset based on a serial number. This
serial number (srno) must then be defined in your database. It is mainly
used to check that things are set up correctly.
Args:
prm_filename: name of parameter file (op... |
def validate(self, data):
"""Validate data. Raise NotValid error for invalid data."""
validated = self._validated(data)
errors = []
for validator in self.additional_validators:
if not validator(validated):
errors.append(
"%s invalidated by ... | Validate data. Raise NotValid error for invalid data. |
def from_spcm(filepath, name=None, *, delimiter=",", parent=None, verbose=True) -> Data:
"""Create a ``Data`` object from a Becker & Hickl spcm file (ASCII-exported, ``.asc``).
If provided, setup parameters are stored in the ``attrs`` dictionary of the ``Data`` object.
See the `spcm`__ software hompage fo... | Create a ``Data`` object from a Becker & Hickl spcm file (ASCII-exported, ``.asc``).
If provided, setup parameters are stored in the ``attrs`` dictionary of the ``Data`` object.
See the `spcm`__ software hompage for more info.
__ http://www.becker-hickl.com/software/spcm.htm
Parameters
---------... |
def sigma_to_pressure(sigma, psfc, ptop):
r"""Calculate pressure from sigma values.
Parameters
----------
sigma : ndarray
The sigma levels to be converted to pressure levels.
psfc : `pint.Quantity`
The surface pressure value.
ptop : `pint.Quantity`
The pressure value a... | r"""Calculate pressure from sigma values.
Parameters
----------
sigma : ndarray
The sigma levels to be converted to pressure levels.
psfc : `pint.Quantity`
The surface pressure value.
ptop : `pint.Quantity`
The pressure value at the top of the model domain.
Returns
... |
def stackplot_t(tarray, seconds=None, start_time=None, ylabels=None):
"""
will plot a stack of traces one above the other assuming
tarray.shape = numSamples, numRows
"""
data = tarray
numSamples, numRows = tarray.shape
# data = np.random.randn(numSamples,numRows) # test data
# data.shape = numS... | will plot a stack of traces one above the other assuming
tarray.shape = numSamples, numRows |
def find_files(directory=".", ext=None, name=None,
match_case=False, disable_glob=False, depth=None,
abspath=False, enable_scandir=False):
"""
Walk through a file directory and return an iterator of files
that match requirements. Will autodetect if name has glob as magic
ch... | Walk through a file directory and return an iterator of files
that match requirements. Will autodetect if name has glob as magic
characters.
Note: For the example below, you can use find_files_list to return as a
list, this is simply an easy way to show the output.
.. code:: python
list(r... |
def sync(self):
    """
    Syncs the parent app changes with the current app instance.
    :return: Synced App object.
    """
    sync_url = self._URL['sync'].format(id=self.id)
    payload = self._api.post(url=sync_url).json()
    return App(api=self._api, **payload)
:return: Synced App object. |
Subsets and Splits
No community queries yet.
The top public SQL queries from the community will appear here once they become available.