positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def cmyk_to_rgb(Class, c, m, y, k):
"""CMYK in % to RGB in 0-255
based on https://www.openprocessing.org/sketch/46231#
"""
c = float(c)/100.0
m = float(m)/100.0
y = float(y)/100.0
k = float(k)/100.0
nc = (c * (1-k)) + k
nm = (m * (1-k)) ... | CMYK in % to RGB in 0-255
based on https://www.openprocessing.org/sketch/46231# |
def epoch_info(self, training_info: TrainingInfo, global_idx: int, local_idx: int) -> EpochInfo:
""" Create Epoch info """
raise NotImplementedError | Create Epoch info |
def create_subscriber(self):
'''Create a subscriber instance using specified addresses and
message types.
'''
if self.subscriber is None:
if self.topics:
self.subscriber = NSSubscriber(self.services, self.topics,
... | Create a subscriber instance using specified addresses and
message types. |
def _consolidate(self, inplace=False):
"""
Compute NDFrame with "consolidated" internals (data of each dtype
grouped together in a single ndarray).
Parameters
----------
inplace : boolean, default False
If False return new object, otherwise modify existing ob... | Compute NDFrame with "consolidated" internals (data of each dtype
grouped together in a single ndarray).
Parameters
----------
inplace : boolean, default False
If False return new object, otherwise modify existing object
Returns
-------
consolidated ... |
def matrix(self):
"""The current calibration matrix for this device.
Returns:
(bool, (float, float, float, float, float, float)): :obj:`False` if
no calibration is set and
the returned matrix is the identity matrix, :obj:`True`
otherwise. :obj:`tuple` representing the first two rows of
a 3x3 matrix ... | The current calibration matrix for this device.
Returns:
(bool, (float, float, float, float, float, float)): :obj:`False` if
no calibration is set and
the returned matrix is the identity matrix, :obj:`True`
otherwise. :obj:`tuple` representing the first two rows of
a 3x3 matrix as described in :meth:`... |
def restart(self, soft=False):
"""
Restarts client. If soft is True, the client attempts to re-subscribe
to all channels which it was previously subscribed to.
:return:
"""
log.info("BitfinexWSS.restart(): Restarting client..")
super(BitfinexWSS, self).restart()
... | Restarts client. If soft is True, the client attempts to re-subscribe
to all channels which it was previously subscribed to.
:return: |
def add_edge(self, x, y, label=None):
"""Add an edge from distribution *x* to distribution *y* with the given
*label*.
:type x: :class:`distutils2.database.InstalledDistribution` or
:class:`distutils2.database.EggInfoDistribution`
:type y: :class:`distutils2.database.In... | Add an edge from distribution *x* to distribution *y* with the given
*label*.
:type x: :class:`distutils2.database.InstalledDistribution` or
:class:`distutils2.database.EggInfoDistribution`
:type y: :class:`distutils2.database.InstalledDistribution` or
:class:`... |
def increment(self, size: int):
'''Increment the number of files downloaded.
Args:
size: The size of the file
'''
assert size >= 0, size
self.files += 1
self.size += size
self.bandwidth_meter.feed(size) | Increment the number of files downloaded.
Args:
size: The size of the file |
def _save(self):
"""
save the cache index, in case it was modified.
Saves the index table and the file name repository in the file
`index.dat`
"""
if self.__modified_flag:
self.__filename_rep.update_id_counter()
indexfilename = os.path.join(self.... | save the cache index, in case it was modified.
Saves the index table and the file name repository in the file
`index.dat` |
def clear_from(self, timestamp):
"""Clear all data from `timestamp` onwards. Note that the timestamp
is rounded down to the nearest block boundary"""
block_size = self.config.block_size
offset, remainder = timestamp // block_size, timestamp % block_size
if remainder:
... | Clear all data from `timestamp` onwards. Note that the timestamp
is rounded down to the nearest block boundary |
def initiate_migration(self):
"""
Initiates a pending migration that is already scheduled for this Linode
Instance
"""
self._client.post('{}/migrate'.format(Instance.api_endpoint), model=self) | Initiates a pending migration that is already scheduled for this Linode
Instance |
def cross(triangles):
"""
Returns the cross product of two edges from input triangles
Parameters
--------------
triangles: (n, 3, 3) float
Vertices of triangles
Returns
--------------
crosses : (n, 3) float
Cross product of two edge vectors
"""
vectors = np.diff(tri... | Returns the cross product of two edges from input triangles
Parameters
--------------
triangles: (n, 3, 3) float
Vertices of triangles
Returns
--------------
crosses : (n, 3) float
Cross product of two edge vectors |
async def get_creds(self) -> dict:
"""
Gets the credentials from a disclosed proof
Example:
msg_id = '1'
phone_number = '8019119191'
connection = await Connection.create(source_id)
await connection.connect(phone_number)
disclosed_proof = await DisclosedPro... | Gets the credentials from a disclosed proof
Example:
msg_id = '1'
phone_number = '8019119191'
connection = await Connection.create(source_id)
await connection.connect(phone_number)
disclosed_proof = await DisclosedProof.create_with_msgid(source_id, connection, msg_id)
... |
def backup_db(self):
"""
" Generate a xxxxx.backup.json.
"""
with self.db_mutex:
if os.path.exists(self.json_db_path):
try:
shutil.copy2(self.json_db_path, self.backup_json_db_path)
except (IOError, OSError):
... | " Generate a xxxxx.backup.json. |
def owns_data_key(self, data_key):
"""Determines if data_key object is owned by this RawMasterKey.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encr... | Determines if data_key object is owned by this RawMasterKey.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encryption_sdk.structures.EncryptedDataKey`
... |
def get_documents(self, subtypes=None, refresh=False):
"""Return list of author's publications using ScopusSearch, which
fit a specified set of document subtypes.
"""
search = ScopusSearch('au-id({})'.format(self.identifier), refresh)
if subtypes:
return [p for p in s... | Return list of author's publications using ScopusSearch, which
fit a specified set of document subtypes. |
def search(query, tld='com', lang='en', num=10, start=0, stop=None, pause=2.0,
only_standard=False):
"""
Search the given query string using Google.
@type query: str
@param query: Query string. Must NOT be url-encoded.
@type tld: str
@param tld: Top level domain.
@... | Search the given query string using Google.
@type query: str
@param query: Query string. Must NOT be url-encoded.
@type tld: str
@param tld: Top level domain.
@type lang: str
@param lang: Languaje.
@type num: int
@param num: Number of results per page.
@type s... |
def get_group_tokens(root):
"""Function to extract tokens in hyphenated groups (saunameheks-tallimeheks).
Parameters
----------
root: str
The root form.
Returns
-------
list of (list of str)
List of grouped root tokens.
"""
global all_markers
if root in all_mark... | Function to extract tokens in hyphenated groups (saunameheks-tallimeheks).
Parameters
----------
root: str
The root form.
Returns
-------
list of (list of str)
List of grouped root tokens. |
def load(cls, path, prefix, network=None):
r"""
Load data from the \'dat\' files located in specified folder.
Parameters
----------
path : string
The full path to the folder containing the set of \'dat\' files.
prefix : string
The file name prefi... | r"""
Load data from the \'dat\' files located in specified folder.
Parameters
----------
path : string
The full path to the folder containing the set of \'dat\' files.
prefix : string
The file name prefix on each file. The data files are stored
... |
def screenshot(self):
"""
Take screenshot with session check
Returns:
PIL.Image
"""
b64data = self.http.get('/screenshot').value
raw_data = base64.b64decode(b64data)
from PIL import Image
buff = io.BytesIO(raw_data)
return Image.open(b... | Take screenshot with session check
Returns:
PIL.Image |
def set_runtime_value_bool(self, ihcid: int, value: bool) -> bool:
""" Set bool runtime value with re-authenticate if needed"""
if self.client.set_runtime_value_bool(ihcid, value):
return True
self.re_authenticate()
return self.client.set_runtime_value_bool(ihcid, value) | Set bool runtime value with re-authenticate if needed |
def _unicode_to_native(s):
"""Convert string from unicode to native format (required in Python 2)."""
if six.PY2:
return s.encode("utf-8") if isinstance(s, unicode) else s
else:
return s | Convert string from unicode to native format (required in Python 2). |
def set_freq(self, fout, freq):
"""
Sets new output frequency, required parameters are real current frequency at output and new required frequency.
"""
hsdiv_tuple = (4, 5, 6, 7, 9, 11) # possible dividers
n1div_tuple = (1,) + tuple(range(2,129,2)) #
fdco_min =... | Sets new output frequency, required parameters are real current frequency at output and new required frequency. |
def decode(self,
initial_state: State,
transition_function: TransitionFunction,
supervision: SupervisionType) -> Dict[str, torch.Tensor]:
"""
Takes an initial state object, a means of transitioning from state to state, and a
supervision signal, and us... | Takes an initial state object, a means of transitioning from state to state, and a
supervision signal, and uses the supervision to train the transition function to pick
"good" states.
This function should typically return a ``loss`` key during training, which the ``Model``
will use as i... |
def _set_igp_sync(self, v, load=False):
"""
Setter method for igp_sync, mapped from YANG variable /mpls_state/rsvp/igp_sync (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_igp_sync is considered as a private
method. Backends looking to populate this varia... | Setter method for igp_sync, mapped from YANG variable /mpls_state/rsvp/igp_sync (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_igp_sync is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_igp_sy... |
def run_main(args: argparse.Namespace, do_exit=True) -> None:
"""Runs the checks and exits.
To extend this tool, use this function and set do_exit to False
to get returned the status code.
"""
if args.init:
generate()
return None # exit after generate instead of starting to lint
... | Runs the checks and exits.
To extend this tool, use this function and set do_exit to False
to get returned the status code. |
def validate_config(self, values, argv=None, strict=False):
"""Validate all config values through the command-line parser.
This takes all supplied options (which could have been retrieved from a
number of sources (such as CLI, env vars, etc...) and then validates
them by running them th... | Validate all config values through the command-line parser.
This takes all supplied options (which could have been retrieved from a
number of sources (such as CLI, env vars, etc...) and then validates
them by running them through argparser (and raises SystemExit on
failure).
:r... |
def has_entities(status):
"""
Returns true if a Status object has entities.
Args:
status: either a tweepy.Status object or a dict returned from Twitter API
"""
try:
if sum(len(v) for v in status.entities.values()) > 0:
return True
except AttributeError:
if s... | Returns true if a Status object has entities.
Args:
status: either a tweepy.Status object or a dict returned from Twitter API |
async def officers(self, root):
"""Regional Officers. Does not include the Founder or
the Delegate, unless they have additional titles as Officers.
In the correct order.
Returns
-------
an :class:`ApiQuery` of a list of :class:`Officer`
"""
officers = s... | Regional Officers. Does not include the Founder or
the Delegate, unless they have additional titles as Officers.
In the correct order.
Returns
-------
an :class:`ApiQuery` of a list of :class:`Officer` |
def get_starsep_RaDecDeg(ra1_deg, dec1_deg, ra2_deg, dec2_deg):
"""Calculate separation."""
sep = deltaStarsRaDecDeg(ra1_deg, dec1_deg, ra2_deg, dec2_deg)
sgn, deg, mn, sec = degToDms(sep)
if deg != 0:
txt = '%02d:%02d:%06.3f' % (deg, mn, sec)
else:
txt = '%02d:%06.3f' % (mn, sec)
... | Calculate separation. |
def prepare(self):
"""Un-serialize data from data attribute and add instance_id key if necessary
:return: None
"""
# Maybe the Brok is a old daemon one or was already prepared
# if so, the data is already ok
if hasattr(self, 'prepared') and not self.prepared:
... | Un-serialize data from data attribute and add instance_id key if necessary
:return: None |
def runWizard( self ):
"""
Runs the current wizard.
"""
plugin = self.currentPlugin()
if ( plugin and plugin.runWizard(self) ):
self.accept() | Runs the current wizard. |
def get_mac_acl_for_intf_input_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_mac_acl_for_intf = ET.Element("get_mac_acl_for_intf")
config = get_mac_acl_for_intf
input = ET.SubElement(get_mac_acl_for_intf, "input")
int... | Auto Generated Code |
def mosaic_info(name, pretty):
'''Get information for a specific mosaic'''
cl = clientv1()
echo_json_response(call_and_wrap(cl.get_mosaic_by_name, name), pretty) | Get information for a specific mosaic |
def _IsHomeDir(self, subject, token):
"""Checks user access permissions for paths under aff4:/users."""
h = CheckAccessHelper("IsHomeDir")
h.Allow("aff4:/users/%s" % token.username)
h.Allow("aff4:/users/%s/*" % token.username)
try:
return h.CheckAccess(subject, token)
except access_control... | Checks user access permissions for paths under aff4:/users. |
def _is_valid_inherit_element(self, element):
"""
Check that the children of element can be manipulated to apply the CSS
properties.
:param element: The element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
:return: True if the children of element can ... | Check that the children of element can be manipulated to apply the CSS
properties.
:param element: The element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
:return: True if the children of element can be manipulated to apply
the CSS properties or Fal... |
def _normalize_correlation_data(self, corr_data, norm_unit):
"""Normalize the correlation data if necessary.
Fisher-transform and then z-score the data for every norm_unit samples
if norm_unit > 1.
Parameters
----------
corr_data: the correlation data
... | Normalize the correlation data if necessary.
Fisher-transform and then z-score the data for every norm_unit samples
if norm_unit > 1.
Parameters
----------
corr_data: the correlation data
in shape [num_samples, num_processed_voxels, num_voxels]
norm_... |
def change_user_password(self, ID, data):
"""Change password of a User."""
# http://teampasswordmanager.com/docs/api-users/#change_password
log.info('Change user %s password' % ID)
self.put('users/%s/change_password.json' % ID, data) | Change password of a User. |
async def set_review(self, **params):
"""Writes review for content
Accepts:
- cid
- review
- public_key
- rating
- txid
- coinid
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
cid = i... | Writes review for content
Accepts:
- cid
- review
- public_key
- rating
- txid
- coinid |
def topological_sort(dag):
"""
topological sort
:param dag: directed acyclic graph
:type dag: dict
.. seealso:: `Topographical Sorting
<http://en.wikipedia.org/wiki/Topological_sorting>`_,
`Directed Acyclic Graph (DAG)
<https://en.wikipedia.org/wiki/Directed_acyclic_graph>`... | topological sort
:param dag: directed acyclic graph
:type dag: dict
.. seealso:: `Topographical Sorting
<http://en.wikipedia.org/wiki/Topological_sorting>`_,
`Directed Acyclic Graph (DAG)
<https://en.wikipedia.org/wiki/Directed_acyclic_graph>`_ |
def get_gradebook_ids_by_grade_system(self, grade_system_id):
"""Gets the list of ``Gradebook`` ``Ids`` mapped to a ``GradeSystem``.
arg: grade_system_id (osid.id.Id): ``Id`` of a
``GradeSystem``
return: (osid.id.IdList) - list of gradebook ``Ids``
raise: NotFound -... | Gets the list of ``Gradebook`` ``Ids`` mapped to a ``GradeSystem``.
arg: grade_system_id (osid.id.Id): ``Id`` of a
``GradeSystem``
return: (osid.id.IdList) - list of gradebook ``Ids``
raise: NotFound - ``grade_system_id`` is not found
raise: NullArgument - ``grade_... |
def get_group_node_size(node):
"""
Shared getter for AddrmapNode and RegfileNode's "size" property
"""
# After structural placement, children are sorted
if( not node.inst.children
or (not isinstance(node.inst.children[-1], comp.AddressableComponent))
):
# No addressable child exi... | Shared getter for AddrmapNode and RegfileNode's "size" property |
def mod_root(a, p):
""" Return a root of `a' modulo p """
if a == 0:
return 0
if not mod_issquare(a, p):
raise ValueError
n = 2
while mod_issquare(n, p):
n += 1
q = p - 1
r = 0
while not q.getbit(r):
r += 1
q = q >> r
y = pow(n, q, p)
h = q >> ... | Return a root of `a' modulo p |
def cells(self):
"""The number of cells in the MOC.
This gives the total number of cells at all orders,
with cells from every order counted equally.
>>> m = MOC(0, (1, 2))
>>> m.cells
2
"""
n = 0
for (order, cells) in self:
n += len... | The number of cells in the MOC.
This gives the total number of cells at all orders,
with cells from every order counted equally.
>>> m = MOC(0, (1, 2))
>>> m.cells
2 |
def push_all(self, record_shard_pairs):
"""Push multiple (record, shard) pairs at once, with only one :meth:`heapq.heapify` call to maintain order.
:param record_shard_pairs: list of ``(record, shard)`` tuples
(see :func:`~bloop.stream.buffer.RecordBuffer.push`).
"""
# Faste... | Push multiple (record, shard) pairs at once, with only one :meth:`heapq.heapify` call to maintain order.
:param record_shard_pairs: list of ``(record, shard)`` tuples
(see :func:`~bloop.stream.buffer.RecordBuffer.push`). |
def log_tail(self, nlines=10):
"""
Return the last ``nlines`` lines of the log file
"""
log_path = os.path.join(self.working_dir, self.log_name)
with open(log_path) as fp:
d = collections.deque(maxlen=nlines)
d.extend(fp)
return ''.join(d) | Return the last ``nlines`` lines of the log file |
def _create_default_config_file(self):
"""
If config file does not exists create and set default values.
"""
logger.info('Initialize Maya launcher, creating config file...\n')
self.add_section(self.DEFAULTS)
self.add_section(self.PATTERNS)
self.add_section(... | If config file does not exists create and set default values. |
def parse_args():
"""
Parse the command line arguments
"""
parser = argparse.ArgumentParser(description = 'Decrypt AEADs',
add_help = True,
formatter_class = argparse.ArgumentDefaultsHelpFormatter,
... | Parse the command line arguments |
def __load_settings_from_file(self):
"""
Loads settings info from the settings json file
:returns: True if the settings info is valid
:rtype: boolean
"""
filename = self.get_base_path() + 'settings.json'
if not exists(filename):
raise OneLogin_Saml2_... | Loads settings info from the settings json file
:returns: True if the settings info is valid
:rtype: boolean |
def _proc_gnusparse_00(self, next, pax_headers, buf):
"""Process a GNU tar extended sparse header, version 0.0.
"""
offsets = []
for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
offsets.append(int(match.group(1)))
numbytes = []
for match in re... | Process a GNU tar extended sparse header, version 0.0. |
def createProfile(self, profile=None, clearLayout=True):
"""
Prompts the user to create a new profile.
"""
if profile:
prof = profile
elif not self.viewWidget() or clearLayout:
prof = XViewProfile()
else:
prof = self.viewWidget... | Prompts the user to create a new profile. |
def mask(args):
"""
%prog mask fastafile
Mask the contaminants. By default, this will compare against UniVec_Core and
Ecoli.fasta. Merge the contaminant results, and use `maskFastaFromBed`. Can
perform FASTA tidy if requested.
"""
p = OptionParser(mask.__doc__)
p.add_option("--db",
... | %prog mask fastafile
Mask the contaminants. By default, this will compare against UniVec_Core and
Ecoli.fasta. Merge the contaminant results, and use `maskFastaFromBed`. Can
perform FASTA tidy if requested. |
def orphan_entry(self, rval: RawObject) -> "ArrayEntry":
"""Return an isolated entry of the receiver.
Args:
rval: Raw object to be used for the returned entry.
"""
val = self.entry_from_raw(rval)
return ArrayEntry(0, EmptyList(), EmptyList(), val, None, self,
... | Return an isolated entry of the receiver.
Args:
rval: Raw object to be used for the returned entry. |
def progress(count, total, suffix=''):
'''
Display progress bar
sources: https://gist.github.com/vladignatyev/06860ec2040cb497f0f3
'''
bar_len = 60
filled_len = int(round(bar_len * count / float(total)))
percents = round(100.0 * count / float(total), 1)
bar = '=' * filled_len + '-' * (ba... | Display progress bar
sources: https://gist.github.com/vladignatyev/06860ec2040cb497f0f3 |
def use_plenary_composition_view(self):
"""Pass through to provider CompositionLookupSession.use_plenary_composition_view"""
self._object_views['composition'] = PLENARY
# self._get_provider_session('composition_lookup_session') # To make sure the session is tracked
for session in self._g... | Pass through to provider CompositionLookupSession.use_plenary_composition_view |
def get_cdd_only_candidate_models(
data, minimum_non_zero_cdd, minimum_total_cdd, beta_cdd_maximum_p_value, weights_col
):
""" Return a list of all possible candidate cdd-only models.
Parameters
----------
data : :any:`pandas.DataFrame`
A DataFrame containing at least the column ``meter_val... | Return a list of all possible candidate cdd-only models.
Parameters
----------
data : :any:`pandas.DataFrame`
A DataFrame containing at least the column ``meter_value`` and 1 to n
columns with names of the form ``cdd_<balance_point>``. All columns
with names of this form will be use... |
def remove_child(self, child):
"""
Removes a child from this node (parent and child
nodes still exit but are no longer connected).
"""
try:
self.children.remove(child)
except ValueError as e:
raise TreeError("child not found")
else:
... | Removes a child from this node (parent and child
nodes still exit but are no longer connected). |
def use_plenary_log_view(self):
"""Pass through to provider LogEntryLogSession.use_plenary_log_view"""
self._log_view = PLENARY
# self._get_provider_session('log_entry_log_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
... | Pass through to provider LogEntryLogSession.use_plenary_log_view |
def submit_task(self,
function_descriptor,
args,
actor_id=None,
actor_handle_id=None,
actor_counter=0,
actor_creation_id=None,
actor_creation_dummy_object_id=None,
... | Submit a remote task to the scheduler.
Tell the scheduler to schedule the execution of the function with
function_descriptor with arguments args. Retrieve object IDs for the
outputs of the function from the scheduler and immediately return them.
Args:
function_descriptor: T... |
def restore_repository_from_recycle_bin(self, repository_details, project, repository_id):
"""RestoreRepositoryFromRecycleBin.
[Preview API] Recover a soft-deleted Git repository. Recently deleted repositories go into a soft-delete state for a period of time before they are hard deleted and become unrec... | RestoreRepositoryFromRecycleBin.
[Preview API] Recover a soft-deleted Git repository. Recently deleted repositories go into a soft-delete state for a period of time before they are hard deleted and become unrecoverable.
:param :class:`<GitRecycleBinRepositoryDetails> <azure.devops.v5_1.git.models.GitRec... |
def grad_and_hess(self):
"""
Computes self's gradient and Hessian. Used if the
optimization method for a NormApprox doesn't
use gradients and hessians, for instance fmin.
"""
for i in xrange(self.len):
di = self.diff(i)
self.grad[i] = di
... | Computes self's gradient and Hessian. Used if the
optimization method for a NormApprox doesn't
use gradients and hessians, for instance fmin. |
def create_repository(cls, repository_data):
"""Create a standalone, in-memory repository.
Using this function bypasses the `package_repository_manager` singleton.
This is usually desired however, since in-memory repositories are for
temporarily storing programmatically created packages... | Create a standalone, in-memory repository.
Using this function bypasses the `package_repository_manager` singleton.
This is usually desired however, since in-memory repositories are for
temporarily storing programmatically created packages, which we do not
want to cache and that do not ... |
def validate(self, uri):
""" Check an URI for compatibility with this specification. Return True if the URI is compatible.
:param uri: an URI to check
:return: bool
"""
requirement = self.requirement()
uri_component = uri.component(self.component())
if uri_component is None:
return requirement != WU... | Check an URI for compatibility with this specification. Return True if the URI is compatible.
:param uri: an URI to check
:return: bool |
def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
headers = HTTPHeaderDict(headers if headers is not None else {})
skip_accept_encoding = 'acce... | Alternative to the common request method, which sends the
body with chunked encoding and not as one block |
def insert_element_to_dict_of_dicts(dict_of_dicts: Dict[str, Dict[str, str]], first_key: str, second_key: str, contents):
"""
Utility method
:param dict_of_dicts:
:param first_key:
:param second_key:
:param contents:
:return:
"""
if first_key not in dict_of_dicts.keys():
di... | Utility method
:param dict_of_dicts:
:param first_key:
:param second_key:
:param contents:
:return: |
def set_task_object(self,
task_id,
task_progress_object):
"""
Defines a new progress bar with the given information using a TaskProgress object.
:param task_id: Unique identifier for this progress bar. Will erase if already existing... | Defines a new progress bar with the given information using a TaskProgress object.
:param task_id: Unique identifier for this progress bar. Will erase if already existing.
:param task_progress_object: TaskProgress object holding the progress bar information. |
def logsumexp(a, axis=None, b=None, use_numexpr=True):
"""Compute the log of the sum of exponentials of input elements.
Parameters
----------
a : array_like
Input array.
axis : None or int, optional, default=None
Axis or axes over which the sum is taken. By default `axis` is None,
... | Compute the log of the sum of exponentials of input elements.
Parameters
----------
a : array_like
Input array.
axis : None or int, optional, default=None
Axis or axes over which the sum is taken. By default `axis` is None,
and all elements are summed.
b : array-like, option... |
def smart_reroot(treefile, outgroupfile, outfile, format=0):
"""
simple function to reroot Newick format tree using ete2
Tree reading format options see here:
http://packages.python.org/ete2/tutorial/tutorial_trees.html#reading-newick-trees
"""
tree = Tree(treefile, format=format)
leaves = ... | simple function to reroot Newick format tree using ete2
Tree reading format options see here:
http://packages.python.org/ete2/tutorial/tutorial_trees.html#reading-newick-trees |
def is_hidden_container(self, key, val):
"""
The key is not one of the Mapfile keywords, and its
values are a list
"""
if key in ("layers", "classes", "styles", "symbols", "labels",
"outputformats", "features", "scaletokens",
"composites") a... | The key is not one of the Mapfile keywords, and its
values are a list |
def run(self, force=False):
""" Run all pending tasks; 'force' will run all tasks whether they're
pending or not. """
now = time.time()
for func, spec in self.tasks.items():
if force or now >= spec.get('next_run', 0):
func()
spec['next_run'] = ... | Run all pending tasks; 'force' will run all tasks whether they're
pending or not. |
def fdate(self, *cols, precision: str="S", format: str=None):
"""
Convert column values to formated date string
:param \*cols: names of the colums
:type \*cols: str, at least one
:param precision: time precision: Y, M, D, H, Min S, defaults to "S"
:type precision: str, o... | Convert column values to formated date string
:param \*cols: names of the colums
:type \*cols: str, at least one
:param precision: time precision: Y, M, D, H, Min S, defaults to "S"
:type precision: str, optional
:param format: python date format, defaults to None
:type ... |
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if six.PY... | A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class. |
def _init_map(self, record_types=None):
"""Initialize map for form"""
OsidForm._init_map(self)
self._my_map['displayName'] = dict(self._display_name_default)
self._my_map['description'] = dict(self._description_default)
self._my_map['genusTypeId'] = self._genus_type_default
... | Initialize map for form |
def process_rollout(self, batch_info, rollout: Rollout):
""" Process rollout for ALGO before any chunking/shuffling """
assert isinstance(rollout, Trajectories), "A2C requires trajectory rollouts"
advantages = discount_bootstrap_gae(
rewards_buffer=rollout.transition_tensors['rewar... | Process rollout for ALGO before any chunking/shuffling |
def start(self):
""" Run the commands"""
self.check_dependencies()
self.args = self.parser.parse_args()
# Python 3 doesn't set the cmd if no args are given
if not hasattr(self.args, 'cmd'):
self.parser.print_help()
return
cmd = self.args.cmd
... | Run the commands |
def addUsage_Label(self,usage_label):
'''Appends one Usage_Label to usage_labels
'''
if isinstance(usage_label, Usage_Label):
self.usage_labels.append(usage_label)
else:
raise (Usage_LabelError,
'usage_label Type should be Usage_Label, not %s' %... | Appends one Usage_Label to usage_labels |
def _list_like_func(self, func, axis, *args, **kwargs):
"""Apply list-like function across given axis.
Args:
func: The function to apply.
axis: Target axis to apply the function along.
Returns:
A new PandasQueryCompiler.
"""
func_prepared = s... | Apply list-like function across given axis.
Args:
func: The function to apply.
axis: Target axis to apply the function along.
Returns:
A new PandasQueryCompiler. |
def serialize(self, method="urlencoded", lev=0, **kwargs):
"""
Convert this instance to another representation. Which representation
is given by the choice of serialization method.
:param method: A serialization method. Presently 'urlencoded', 'json',
'jwt' and 'dic... | Convert this instance to another representation. Which representation
is given by the choice of serialization method.
:param method: A serialization method. Presently 'urlencoded', 'json',
'jwt' and 'dict' is supported.
:param lev:
:param kwargs: Extra key word arg... |
def solve_tuple(expr, vars):
"""Build a tuple from subexpressions."""
result = tuple(solve(x, vars).value for x in expr.children)
return Result(result, ()) | Build a tuple from subexpressions. |
def _iter_grouped(self):
"""Iterate over each element in this group"""
for indices in self._group_indices:
yield self._obj.isel(**{self._group_dim: indices}) | Iterate over each element in this group |
def deprecate(name, alternative, version, alt_name=None,
klass=None, stacklevel=2, msg=None):
"""
Return a new function that emits a deprecation warning on use.
To use this method for a deprecated function, another function
`alternative` with the same signature must exist. The deprecated
... | Return a new function that emits a deprecation warning on use.
To use this method for a deprecated function, another function
`alternative` with the same signature must exist. The deprecated
function will emit a deprecation warning, and in the docstring
it will contain the deprecation directive with th... |
def is_zero_bytes_file(self, path):
    """Return True if file <path> is zero bytes in size, else return False."""
    probe_cmd = [self._hadoop_cmd, 'fs', '-test', '-z',
                 self._full_hdfs_path(path)]
    # `hadoop fs -test -z` exits with status 0 exactly when the file is empty.
    return self._getReturnCodeCmd(probe_cmd) == 0
def output(self, stream, value):
"""SPL output port assignment expression.
Arguments:
stream(Stream): Output stream the assignment is for.
value(str): SPL expression used for an output assignment. This can be a string, a constant, or an :py:class:`Expression`.
Returns:
... | SPL output port assignment expression.
Arguments:
stream(Stream): Output stream the assignment is for.
value(str): SPL expression used for an output assignment. This can be a string, a constant, or an :py:class:`Expression`.
Returns:
Expression: Output assignment ex... |
def getTimeZone(lat, lon):
"""Get timezone for a given lat/lon
"""
#Need to fix for Python 2.x and 3.X support
import urllib.request, urllib.error, urllib.parse
import xml.etree.ElementTree as ET
#http://api.askgeo.com/v1/918/aa8292ec06199d1207ccc15be3180213c984832707f0cbf3d3859db279b4b324/query... | Get timezone for a given lat/lon |
def init_hardware(self, serial=None, device_number=ANY_MODULE):
"""
Initializes the device with the corresponding serial or device number.
:param int or None serial: Serial number of the USB-CANmodul.
:param int device_number: Device number (0 – 254, or :const:`ANY_MODULE` for the first... | Initializes the device with the corresponding serial or device number.
:param int or None serial: Serial number of the USB-CANmodul.
:param int device_number: Device number (0 – 254, or :const:`ANY_MODULE` for the first device). |
def get(self, index):
"""
Constructs a SyncListItemContext
:param index: The index
:returns: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemContext
:rtype: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemContext
"""
r... | Constructs a SyncListItemContext
:param index: The index
:returns: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemContext
:rtype: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemContext |
def error_messages(self, driver_id=None):
"""Get the error messages for all drivers or a specific driver.
Args:
driver_id: The specific driver to get the errors for. If this is
None, then this method retrieves the errors for all drivers.
Returns:
A dicti... | Get the error messages for all drivers or a specific driver.
Args:
driver_id: The specific driver to get the errors for. If this is
None, then this method retrieves the errors for all drivers.
Returns:
A dictionary mapping driver ID to a list of the error messag... |
def scroll(self, query=None, scroll='5m', size=100, unpack=True):
"""Scroll an index with the specified search query.
Works as a generator. Will yield `size` results per iteration until all hits are returned.
"""
query = self.match_all if query is None else query
response = self... | Scroll an index with the specified search query.
Works as a generator. Will yield `size` results per iteration until all hits are returned. |
def _unbind_topics(self, topics):
"""Unsubscribe to all of the topics we needed for communication with device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we have connected to.
"""
self.client.unsubscribe(topics.status)
... | Unsubscribe to all of the topics we needed for communication with device
Args:
topics (MQTTTopicValidator): The topic validator for this device that
we have connected to. |
def generate_adsorption_structures(self, molecule, repeat=None, min_lw=5.0,
reorient=True, find_args={}):
"""
Function that generates all adsorption structures for a given
molecular adsorbate. Can take repeat argument or minimum
length/width of pre... | Function that generates all adsorption structures for a given
molecular adsorbate. Can take repeat argument or minimum
length/width of precursor slab as an input
Args:
molecule (Molecule): molecule corresponding to adsorbate
repeat (3-tuple or list): repeat argument for... |
def linesubst(line, variables):
"""
In a string, substitute '{{varname}}' occurrences with the value of
variables['varname'], '\\' being an escaping char...
If at first you don't understand this function, draw its finite state
machine and everything will become crystal clear :)
"""
# trivial... | In a string, substitute '{{varname}}' occurrences with the value of
variables['varname'], '\\' being an escaping char...
If at first you don't understand this function, draw its finite state
machine and everything will become crystal clear :) |
def _reset_timeout(self):
"""Reset timeout for date keep alive."""
if self._timeout:
self._timeout.cancel()
self._timeout = self.loop.call_later(self.client.timeout,
self.transport.close) | Reset timeout for date keep alive. |
def count_in_category(x='call_type', filter_dict=None, model=DEFAULT_MODEL, app=DEFAULT_APP, sort=True, limit=1000):
"""
Count the number of records for each discrete (categorical) value of a field and return a dict of two lists, the field values and the counts.
>>> x, y = count_in_category(x='call_type', ... | Count the number of records for each discrete (categorical) value of a field and return a dict of two lists, the field values and the counts.
>>> x, y = count_in_category(x='call_type', filter_dict={'model__startswith': 'LC60'}, limit=5, sort=1)
>>> len(x) == len(y) == 5
True
>>> y[1] >= y[0]
True |
def laplacian_eigenmaps(adjacency_matrix, k):
"""
Performs spectral graph embedding using the graph symmetric normalized Laplacian matrix.
Introduced in: Belkin, M., & Niyogi, P. (2003).
Laplacian eigenmaps for dimensionality reduction and data representation.
Neural c... | Performs spectral graph embedding using the graph symmetric normalized Laplacian matrix.
Introduced in: Belkin, M., & Niyogi, P. (2003).
Laplacian eigenmaps for dimensionality reduction and data representation.
Neural computation, 15(6), 1373-1396.
Inputs: - A in R^(nx... |
def _load_model(self):
"""Loads robot and optionally add grippers."""
super()._load_model()
self.mujoco_robot = Baxter()
if self.has_gripper_right:
self.gripper_right = gripper_factory(self.gripper_right_name)
if not self.gripper_visualization:
sel... | Loads robot and optionally add grippers. |
def lv_to_pypsa(network):
"""
Convert LV grid topology to PyPSA representation
Includes grid topology of all LV grids of :attr:`~.grid.grid.Grid.lv_grids`
Parameters
----------
network : Network
eDisGo grid container
Returns
-------
dict of :pandas:`pandas.DataFrame<datafr... | Convert LV grid topology to PyPSA representation
Includes grid topology of all LV grids of :attr:`~.grid.grid.Grid.lv_grids`
Parameters
----------
network : Network
eDisGo grid container
Returns
-------
dict of :pandas:`pandas.DataFrame<dataframe>`
A DataFrame for each typ... |
def top(self, objects: Set[Object]) -> Set[Object]:
"""
Return the topmost objects (i.e. minimum y_loc). The comparison is done separately for each
box.
"""
objects_per_box = self._separate_objects_by_boxes(objects)
return_set: Set[Object] = set()
for _, box_objec... | Return the topmost objects (i.e. minimum y_loc). The comparison is done separately for each
box. |
def filter_genes_cv_deprecated(X, Ecutoff, cvFilter):
"""Filter genes by coefficient of variance and mean.
See `filter_genes_dispersion`.
Reference: Weinreb et al. (2017).
"""
if issparse(X):
raise ValueError('Not defined for sparse input. See `filter_genes_dispersion`.')
mean_filter =... | Filter genes by coefficient of variance and mean.
See `filter_genes_dispersion`.
Reference: Weinreb et al. (2017). |
def reset_flags(self):
    """Reset the C/Z/P/S flags to an "unknown state".

    None marks a flag whose current value cannot be assumed.
    """
    self.C = self.Z = self.P = self.S = None
def render_scene(self):
    """Render the scene once for the current frame."""
    self.init_gl() # should be a no-op after the first frame is rendered
    glfw.make_context_current(self.window)
    self.renderer.render_scene()
    # Done rendering.
    # glfw.swap_buffers(self.window) # avoid double buffering to avoid stalling
    # NOTE(review): the trailing comment above was truncated in this snapshot;
    # presumably it refers to stalling on vsync — confirm against the original.
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.