positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def compute_search_volume_in_bins(found, total, ndbins, sim_to_bins_function):
"""
Calculate search sensitive volume by integrating efficiency in distance bins
No cosmological corrections are applied: flat space is assumed.
The first dimension of ndbins must be bins over injected distance.
sim_to_b... | Calculate search sensitive volume by integrating efficiency in distance bins
No cosmological corrections are applied: flat space is assumed.
The first dimension of ndbins must be bins over injected distance.
sim_to_bins_function must maps an object to a tuple indexing the ndbins. |
def _split(self, rect):
"""
Split all max_rects intersecting the rectangle rect into up to
4 new max_rects.
Arguments:
rect (Rectangle): Rectangle
Returns:
split (Rectangle list): List of rectangles resulting from the split
"""
ma... | Split all max_rects intersecting the rectangle rect into up to
4 new max_rects.
Arguments:
rect (Rectangle): Rectangle
Returns:
split (Rectangle list): List of rectangles resulting from the split |
def authenticate_keystone(self, keystone_ip, username, password,
api_version=False, admin_port=False,
user_domain_name=None, domain_name=None,
project_domain_name=None, project_name=None):
"""Authenticate with Keystone"""
... | Authenticate with Keystone |
def _add_genetic_models(self, variant_obj, info_dict):
"""Add the genetic models found
Args:
variant_obj (puzzle.models.Variant)
info_dict (dict): A info dictionary
"""
genetic_models_entry = info_dict.get('GeneticModels')
if genetic_mode... | Add the genetic models found
Args:
variant_obj (puzzle.models.Variant)
info_dict (dict): A info dictionary |
def isfile(self, version=None, *args, **kwargs):
'''
Check whether the path exists and is a file
'''
version = _process_version(self, version)
path = self.get_version_path(version)
self.authority.fs.isfile(path, *args, **kwargs) | Check whether the path exists and is a file |
def get_magnet(self, url):
"""Get magnet from torrent page. Url already got domain."""
content_most_rated = requests.get(url)
rated_soup = BeautifulSoup(content_most_rated.content, 'lxml')
if self.page == 'torrent_project':
self.magnet = rated_soup.find(
'a',... | Get magnet from torrent page. Url already got domain. |
def _build(self, input_sequence, state):
"""Connects the BidirectionalRNN module into the graph.
Args:
input_sequence: tensor (time, batch, [feature_1, ..]). It must be
time_major.
state: tuple of states for the forward and backward cores.
Returns:
A dict with forward/backard s... | Connects the BidirectionalRNN module into the graph.
Args:
input_sequence: tensor (time, batch, [feature_1, ..]). It must be
time_major.
state: tuple of states for the forward and backward cores.
Returns:
A dict with forward/backard states and output sequences:
"outputs":{... |
def store_from(self, last_level_store):
"""Set level where to store to."""
assert isinstance(last_level_store, Cache), \
"last_level needs to be a Cache object."
assert last_level_store.store_to is None, \
"last_level_store must be a last level cache (.store_to is None)."... | Set level where to store to. |
def compute_edges(ast: BELAst, spec: BELSpec) -> Edges:
"""Compute edges"""
edges = []
if ast.bel_object.__class__.__name__ == "BELAst":
edges.append(ast.bel_object)
process_ast(edges, ast, spec)
return edges | Compute edges |
def getattr_in_cls_list(cls_list, attr, default):
""" Search for an attribute (attr) in class list (cls_list). Returns
attribute value if exists or None if not. """
for cls in cls_list:
if hasattr(cls, attr):
return getattr(cls, attr)
return default | Search for an attribute (attr) in class list (cls_list). Returns
attribute value if exists or None if not. |
def get_components(self):
""" Returns all the applications from the store """
components = []
for app_id in self.components:
components.append(self.components[app_id])
return components | Returns all the applications from the store |
def update_repository(self, new_repository_info, repository_id, project=None):
"""UpdateRepository.
[Preview API] Updates the Git repository with either a new repo name or a new default branch.
:param :class:`<GitRepository> <azure.devops.v5_1.git.models.GitRepository>` new_repository_info: Spec... | UpdateRepository.
[Preview API] Updates the Git repository with either a new repo name or a new default branch.
:param :class:`<GitRepository> <azure.devops.v5_1.git.models.GitRepository>` new_repository_info: Specify a new repo name or a new default branch of the repository
:param str repositor... |
def weather_at_places(self, pattern, searchtype, limit=None):
"""
Queries the OWM Weather API for the currently observed weather in all the
locations whose name is matching the specified text search parameters.
A twofold search can be issued: *'accurate'* (exact matching) and
*'l... | Queries the OWM Weather API for the currently observed weather in all the
locations whose name is matching the specified text search parameters.
A twofold search can be issued: *'accurate'* (exact matching) and
*'like'* (matches names that are similar to the supplied pattern).
:param pa... |
def clean_previous_run(self):
"""Clean variables from previous configuration
:return: None
"""
# Execute the base class treatment...
super(Alignak, self).clean_previous_run()
# Clean all lists
self.pollers.clear()
self.reactionners.clear()
self.b... | Clean variables from previous configuration
:return: None |
def set_lights(self, button: bool = None, rails: bool = None):
""" Control the robot lights.
:param button: If specified, turn the button light on (`True`) or
off (`False`). If not specified, do not change the
button light.
:param rails: If specifie... | Control the robot lights.
:param button: If specified, turn the button light on (`True`) or
off (`False`). If not specified, do not change the
button light.
:param rails: If specified, turn the rail lights on (`True`) or
off (`False`).... |
def get_subnet(self, subnet_id, **kwargs):
"""Returns information about a single subnet.
:param string id: Either the ID for the subnet or its network
identifier
:returns: A dictionary of information about the subnet
"""
if 'mask' not in kwargs:
... | Returns information about a single subnet.
:param string id: Either the ID for the subnet or its network
identifier
:returns: A dictionary of information about the subnet |
def sort_single_reference(ref_entry):
"""Sorts a dictionary containing data for a single reference into a standard order
"""
# yapf: disable
_keyorder = [
# Schema stuff
# This function gets called on the schema 'entry', too
'schema_type', 'schema_version',
# Type of th... | Sorts a dictionary containing data for a single reference into a standard order |
def cleanup(self):
"""
Stops any running entities in the prefix and uninitializes it, usually
you want to do this if you are going to remove the prefix afterwards
Returns:
None
"""
with LogTask('Stop prefix'):
self.stop()
with LogTask("Tag... | Stops any running entities in the prefix and uninitializes it, usually
you want to do this if you are going to remove the prefix afterwards
Returns:
None |
def bootstrap_vi(version=None, venvargs=None):
'''
Bootstrap virtualenv into current directory
:param str version: Virtualenv version like 13.1.0 or None for latest version
:param list venvargs: argv list for virtualenv.py or None for default
'''
if not version:
version = get_latest_vir... | Bootstrap virtualenv into current directory
:param str version: Virtualenv version like 13.1.0 or None for latest version
:param list venvargs: argv list for virtualenv.py or None for default |
def addIVMInputs(imageObjectList,ivmlist):
""" Add IVM filenames provided by user to outputNames dictionary for each input imageObject.
"""
if ivmlist is None:
return
for img,ivmname in zip(imageObjectList,ivmlist):
img.updateIVMName(ivmname) | Add IVM filenames provided by user to outputNames dictionary for each input imageObject. |
def _print_summary(case, summary):
""" Show some statistics from the run """
for dof, data in summary.items():
b4b = data["Bit for Bit"]
conf = data["Configurations"]
stdout = data["Std. Out Files"]
print(" " + case + " " + str(dof))
print(" --------------------")
... | Show some statistics from the run |
def bind_filter(self, direction, filter_name):
"""
Adds a packet filter to this NIO.
Filter "freq_drop" drops packets.
Filter "capture" captures packets.
:param direction: "in", "out" or "both"
:param filter_name: name of the filter to apply
"""
if direc... | Adds a packet filter to this NIO.
Filter "freq_drop" drops packets.
Filter "capture" captures packets.
:param direction: "in", "out" or "both"
:param filter_name: name of the filter to apply |
def _extract_where(cls, queryset):
"""
Was this a queryset with filters/excludes/expressions set? If so,
extract the WHERE clause from the ORM output so we can use it in the
handler queries.
"""
if not cls._is_simple_query(queryset.query):
raise ValueError("Th... | Was this a queryset with filters/excludes/expressions set? If so,
extract the WHERE clause from the ORM output so we can use it in the
handler queries. |
def search_agents(self, start=0, limit=100, filter={}, **kwargs):
'''
search_agents(self, start=0, limit=100, filter={}, **kwargs)
Search agents
:Parameters:
* *start* (`int`) -- start index to retrieve from. Default is 0
* *limit* (`int`) -- maximum number of entities ... | search_agents(self, start=0, limit=100, filter={}, **kwargs)
Search agents
:Parameters:
* *start* (`int`) -- start index to retrieve from. Default is 0
* *limit* (`int`) -- maximum number of entities to retrieve. Default is 100
* *filter* (`object`) -- free text search pattern ... |
def _slugify(text, delim=u'-'):
"""Generates an ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = word.encode('utf-8')
if word:
result.append(word)
slugified = delim.join([i.decode('utf-8') for i in result])
return re.sub('[^a-zA-Z0-9\\s\\-... | Generates an ASCII-only slug. |
def _prepare_version(self):
"""Setup the application version"""
if config.VERSION not in self._config:
self._config[config.VERSION] = __version__ | Setup the application version |
def _calc_covariance(r, pmut, tol=1e-14):
"""Calculate the covariance matrix of the fitted parameters
Parameters:
r - n-by-n matrix, the full upper triangle of R
pmut - n-vector, defines the permutation of R
tol - scalar, relative column scale for determining rank
deficiency. Default 1e-14.
Returns:
co... | Calculate the covariance matrix of the fitted parameters
Parameters:
r - n-by-n matrix, the full upper triangle of R
pmut - n-vector, defines the permutation of R
tol - scalar, relative column scale for determining rank
deficiency. Default 1e-14.
Returns:
cov - n-by-n matrix, the covariance matrix C
Give... |
def add_to_recent(self, notebook):
"""
Add an entry to recent notebooks.
We only maintain the list of the 20 most recent notebooks.
"""
if notebook not in self.recent_notebooks:
self.recent_notebooks.insert(0, notebook)
self.recent_notebooks = sel... | Add an entry to recent notebooks.
We only maintain the list of the 20 most recent notebooks. |
def get_label_vocab(vocab_path):
"""Returns a list of label strings loaded from the provided path."""
if vocab_path:
try:
with tf.io.gfile.GFile(vocab_path, 'r') as f:
return [line.rstrip('\n') for line in f]
except tf.errors.NotFoundError as err:
tf.logging.error('error reading vocab fi... | Returns a list of label strings loaded from the provided path. |
def generateThumbnail(self):
"""Generates a square thumbnail"""
source = ROOT / self.source.name
thumbnail = source.parent / '_{}.jpg'.format(source.namebase)
# -- Save thumbnail and put into queue
poster = source.parent / '__{}.jpg'.format(source.namebase)
cmd = [FROG_F... | Generates a square thumbnail |
def check_autosign(self, keyid, autosign_grains=None):
'''
Checks if the specified keyid should automatically be signed.
'''
if self.opts['auto_accept']:
return True
if self.check_signing_file(keyid, self.opts.get('autosign_file', None)):
return True
... | Checks if the specified keyid should automatically be signed. |
def _process_callbacks(self):
"""
Process callbacks from `call_from_executor` in eventloop.
"""
# Flush all the pipe content.
os.read(self._schedule_pipe[0], 1024)
# Process calls from executor.
calls_from_executor, self._calls_from_executor = self._calls_from_ex... | Process callbacks from `call_from_executor` in eventloop. |
def synthesize(self, s, method='chebyshev', order=30):
r"""Convenience wrapper around :meth:`filter`.
Will be an alias to `adjoint().filter()` in the future.
"""
if s.shape[-1] != self.Nf:
raise ValueError('Last dimension (#features) should be the number '
... | r"""Convenience wrapper around :meth:`filter`.
Will be an alias to `adjoint().filter()` in the future. |
def from_path(cls, path):
"""Creates rule instance from path.
:type path: pathlib.Path
:rtype: Rule
"""
name = path.name[:-3]
with logs.debug_time(u'Importing rule: {};'.format(name)):
rule_module = load_source(name, str(path))
priority = getattr... | Creates rule instance from path.
:type path: pathlib.Path
:rtype: Rule |
def mission_write_partial_list_encode(self, target_system, target_component, start_index, end_index):
'''
This message is sent to the MAV to write a partial list. If start
index == end index, only one item will be transmitted
/ updated. If the start index ... | This message is sent to the MAV to write a partial list. If start
index == end index, only one item will be transmitted
/ updated. If the start index is NOT 0 and above the
current list size, this request should be REJECTED!
target_system : Sy... |
def _validate_inputs(self, inputdict):
""" Validate input links.
"""
# Check code
try:
code = inputdict.pop(self.get_linkname('code'))
except KeyError:
raise InputValidationError("No code specified for this "
"calcula... | Validate input links. |
def transform(self, X):
"""Transform data by mapping it into the latent space."""
# Note: This maps to mean of distribution, we could alternatively
# sample from Gaussian distribution
return self.sess.run(self.z_mean, feed_dict={self.x: X}) | Transform data by mapping it into the latent space. |
def holtWinters(request):
"""
Performs Holt Winters Smoothing on the given post data.
Expects the following values set in the post of the request:
smoothingFactor - float
trendSmoothingFactor - float
seasonSmoothingFactor - float
seasonLength - integer
valuesToForecas... | Performs Holt Winters Smoothing on the given post data.
Expects the following values set in the post of the request:
smoothingFactor - float
trendSmoothingFactor - float
seasonSmoothingFactor - float
seasonLength - integer
valuesToForecast - integer
data - two dimensi... |
def _parse_sparkml(spark, scope, model, global_inputs, output_dict):
'''
This is a delegate function. It doesn't nothing but invoke the correct parsing function according to the input
model's type.
:param scope: Scope object
:param model: A spark-ml object (e.g., OneHotEncoder and LogisticRegression... | This is a delegate function. It doesn't nothing but invoke the correct parsing function according to the input
model's type.
:param scope: Scope object
:param model: A spark-ml object (e.g., OneHotEncoder and LogisticRegression)
:param inputs: A list of variables
:return: The output variables produc... |
def plot_all(self, fig=None, iabscissa=1, iteridx=None,
foffset=1e-19, x_opt=None, fontsize=9):
"""
plot data from a `CMADataLogger` (using the files written by the logger).
Arguments
---------
`fig`
figure number, by default 425
`iab... | plot data from a `CMADataLogger` (using the files written by the logger).
Arguments
---------
`fig`
figure number, by default 425
`iabscissa`
``0==plot`` versus iteration count,
``1==plot`` versus function evaluation number
... |
def read_names(rows, source_id=1):
"""Return an iterator of rows ready to insert into table
"names". Adds columns "is_primary" (identifying the primary name
for each tax_id with a vaule of 1) and "is_classified" (always None).
* rows - iterator of lists (eg, output from read_archive or read_dmp)
* ... | Return an iterator of rows ready to insert into table
"names". Adds columns "is_primary" (identifying the primary name
for each tax_id with a vaule of 1) and "is_classified" (always None).
* rows - iterator of lists (eg, output from read_archive or read_dmp)
* unclassified_regex - a compiled re matchin... |
def starts(self, layer):
"""Retrieve start positions of elements if given layer."""
starts = []
for data in self[layer]:
starts.append(data[START])
return starts | Retrieve start positions of elements if given layer. |
def _get_kernel_from_bayesian_model(self, model):
"""
Computes the Gibbs transition models from a Bayesian Network.
'Probabilistic Graphical Model Principles and Techniques', Koller and
Friedman, Section 12.3.3 pp 512-513.
Parameters:
-----------
model: BayesianM... | Computes the Gibbs transition models from a Bayesian Network.
'Probabilistic Graphical Model Principles and Techniques', Koller and
Friedman, Section 12.3.3 pp 512-513.
Parameters:
-----------
model: BayesianModel
The model from which probabilities will be computed. |
def create_request_with_query(self, kind, query, size="thumb", fmt="json"):
"""api/data.[fmt], api/images/[size].[fmt] api/files.[fmt]
kind = ['data', 'images', 'files']
"""
if kind == "data" or kind == "files":
url = "{}/{}.{}".format(base_url, kind, fmt)
elif kin... | api/data.[fmt], api/images/[size].[fmt] api/files.[fmt]
kind = ['data', 'images', 'files'] |
def clean_dict(d, test=lambda v: v):
"""
Return only keys that meet the test
:param d: Dictionary
:param test: the test to run on the value (example override is: "lambda v: v is not None")
:return: Cleaned dictionary
"""
return {k: v for k, v in d.items() if test(v)} | Return only keys that meet the test
:param d: Dictionary
:param test: the test to run on the value (example override is: "lambda v: v is not None")
:return: Cleaned dictionary |
def _get_column(in_file, out_file, column, data=None):
"""Subset one column from a file
"""
with file_transaction(data, out_file) as tx_out_file:
with open(in_file) as in_handle:
with open(tx_out_file, 'w') as out_handle:
for line in in_handle:
cols = ... | Subset one column from a file |
def create_user(name, groups=None, key_file=None):
"""Create a user. Adds a key file to authorized_keys if given."""
groups = groups or []
if not user_exists(name):
for group in groups:
if not group_exists(group):
sudo(u"addgroup %s" % group)
groups = groups and ... | Create a user. Adds a key file to authorized_keys if given. |
def async_event_handler(self, event: dict) -> None:
"""Receive event from websocket and identifies where the event belong.
{
"t": "event",
"e": "changed",
"r": "sensors",
"id": "12",
"state": { "buttonevent": 2002 }
}
"""
... | Receive event from websocket and identifies where the event belong.
{
"t": "event",
"e": "changed",
"r": "sensors",
"id": "12",
"state": { "buttonevent": 2002 }
} |
def pos(self):
"""
Lazy-loads the part of speech tag for this word
:getter: Returns the plain string value of the POS tag for the word
:type: str
"""
if self._pos is None:
poses = self._element.xpath('POS/text()')
if len(poses) > 0:
... | Lazy-loads the part of speech tag for this word
:getter: Returns the plain string value of the POS tag for the word
:type: str |
def extract_native_client_tarball(dir):
r'''
Download a native_client.tar.xz file from TaskCluster and extract it to dir.
'''
assert_valid_dir(dir)
target_tarball = os.path.join(dir, 'native_client.tar.xz')
if os.path.isfile(target_tarball) and os.stat(target_tarball).st_size == 0:
retu... | r'''
Download a native_client.tar.xz file from TaskCluster and extract it to dir. |
def calculate_size(name, new_value):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
data_size += BOOLEAN_SIZE_IN_BYTES
if new_value is not None:
data_size += calculate_size_data(new_value)
return data_size | Calculates the request payload size |
def get_notification(self, subscription_id, notification_id):
"""GetNotification.
Get a specific notification for a subscription.
:param str subscription_id: ID for a subscription.
:param int notification_id:
:rtype: :class:`<Notification> <azure.devops.v5_0.service_hooks.models.... | GetNotification.
Get a specific notification for a subscription.
:param str subscription_id: ID for a subscription.
:param int notification_id:
:rtype: :class:`<Notification> <azure.devops.v5_0.service_hooks.models.Notification>` |
def list():
"""
List available format.
"""
choice_len = max(map(len, _input_choices.keys()))
tmpl = " {:<%d}: {}\n" % choice_len
text = ''.join(map(
lambda k_v: tmpl.format(k_v[0], k_v[1][0]), six.iteritems(_input_choices)))
click.echo(text) | List available format. |
def indices(self, data):
'''Generate patch start indices
Parameters
----------
data : dict of np.ndarray
As produced by pumpp.transform
Yields
------
start : int >= 0
The start index of a sample patch
'''
duration = self.d... | Generate patch start indices
Parameters
----------
data : dict of np.ndarray
As produced by pumpp.transform
Yields
------
start : int >= 0
The start index of a sample patch |
def modify(
login, password=None, password_hashed=False,
domain=None, profile=None, script=None,
drive=None, homedir=None, fullname=None,
account_desc=None, account_control=None,
machine_sid=None, user_sid=None,
reset_login_hours=False, reset_bad_password_count=False,
):
'''
Modify user ... | Modify user account
login : string
login name
password : string
password
password_hashed : boolean
set if password is a nt hash instead of plain text
domain : string
users domain
profile : string
profile path
script : string
logon script
drive... |
def queries_start_with(queries, prefixes):
"""Check if any queries start with any item from *prefixes*."""
for query in sqlparse.split(queries):
if query and query_starts_with(query, prefixes) is True:
return True
return False | Check if any queries start with any item from *prefixes*. |
def geocode(
self,
query,
country_codes=None,
exactly_one=True,
timeout=DEFAULT_SENTINEL,
):
"""
Return a location point by address.
:param str query: The address or query you wish to geocode.
For a structured query, p... | Return a location point by address.
:param str query: The address or query you wish to geocode.
For a structured query, provide a dictionary whose keys
are one of: `country`, `state`, `city`, `zipcode`, `street`, `address`,
`houseNumber` or `subNumber`.
:param coun... |
def select_entry(self, core_element_id, by_cursor=True):
"""Selects the row entry belonging to the given core_element_id by cursor or tree selection"""
path = self.get_path_for_core_element(core_element_id)
if path:
if by_cursor:
self.tree_view.set_cursor(path)
... | Selects the row entry belonging to the given core_element_id by cursor or tree selection |
def rename_proteins(prot_in, prot_out=None, chunk_size=DEFAULT_CHUNK_SIZE):
"""Rename prodigal output files
Rename output files from prodigal according to the following naming
scheme: >contigX_chunkY__geneZ
Chunk numbering starts at 0 and gene identification is taken from prodigal.
Parameters
... | Rename prodigal output files
Rename output files from prodigal according to the following naming
scheme: >contigX_chunkY__geneZ
Chunk numbering starts at 0 and gene identification is taken from prodigal.
Parameters
----------
prot_in : file, str or pathlib.Path
The input protein file ... |
def _do_photometry(self, param_tab, n_start=1):
"""
Helper function which performs the iterations of the photometry
process.
Parameters
----------
param_names : list
Names of the columns which represent the initial guesses.
For example, ['x_0', '... | Helper function which performs the iterations of the photometry
process.
Parameters
----------
param_names : list
Names of the columns which represent the initial guesses.
For example, ['x_0', 'y_0', 'flux_0'], for intial guesses on
the center positi... |
def is_promisc(ip, fake_bcast="ff:ff:00:00:00:00", **kargs):
"""Try to guess if target is in Promisc mode. The target is provided by its ip.""" # noqa: E501
responses = srp1(Ether(dst=fake_bcast) / ARP(op="who-has", pdst=ip), type=ETH_P_ARP, iface_hint=ip, timeout=1, verbose=0, **kargs) # noqa: E501
ret... | Try to guess if target is in Promisc mode. The target is provided by its ip. |
def _check_triangular_bounds(self, var, coords=None, axis='x', nans=None):
"""
Checks whether the bounds in the variable attribute are triangular
Parameters
----------
%(CFDecoder.get_cell_node_coord.parameters)s
Returns
-------
bool or None
... | Checks whether the bounds in the variable attribute are triangular
Parameters
----------
%(CFDecoder.get_cell_node_coord.parameters)s
Returns
-------
bool or None
True, if unstructered, None if it could not be determined
xarray.Coordinate or None
... |
def _process_elem_attrs(elem, dic, subdic, container=dict, attrs="@attrs",
**options):
"""
:param elem: ET Element object or None
:param dic: <container> (dict[-like]) object converted from elem
:param subdic: Sub <container> object converted from elem
:param options:
... | :param elem: ET Element object or None
:param dic: <container> (dict[-like]) object converted from elem
:param subdic: Sub <container> object converted from elem
:param options:
Keyword options, see the description of :func:`elem_to_container` for
more details.
:return: None but updatin... |
def wrap(self, data, many):
"""Wrap response in envelope."""
if not many:
return data
else:
data = {'parts': data}
multipart = self.context.get('multipart')
if multipart:
data.update(MultipartObjectSchema(context={
... | Wrap response in envelope. |
def _maybe_connect(self, node_id):
"""Idempotent non-blocking connection attempt to the given node id."""
with self._lock:
conn = self._conns.get(node_id)
if conn is None:
broker = self.cluster.broker_metadata(node_id)
assert broker, 'Broker id %s... | Idempotent non-blocking connection attempt to the given node id. |
def rest_name(cls):
""" Represents a singular REST name
"""
if cls.__name__ == "NURESTRootObject" or cls.__name__ == "NURESTObject":
return "Not Implemented"
if cls.__rest_name__ is None:
raise NotImplementedError('%s has no defined name. Implement rest_name prop... | Represents a singular REST name |
def msi_conformant_version():
'''
An msi installer uninstalls/replaces a lower "internal version" of itself.
"internal version" is ivMAJOR.ivMINOR.ivBUILD with max values 255.255.65535.
Using the build nr allows continuous integration of the installer.
"Display version" is indipendent and free forma... | An msi installer uninstalls/replaces a lower "internal version" of itself.
"internal version" is ivMAJOR.ivMINOR.ivBUILD with max values 255.255.65535.
Using the build nr allows continuous integration of the installer.
"Display version" is indipendent and free format: Year.Month.Bugfix as in Salt 2016.11.3.... |
def _read_header(fid):
"""Based on neo/rawio/axonrawio.py, but I only kept of data with no-gaps
and in one segment.
"""
fid.seek(0, SEEK_SET)
fFileSignature = fid.read(4)
assert fFileSignature == b'ABF2', 'only format ABF2 is currently supported'
header = {}
for key, offset, fmt in head... | Based on neo/rawio/axonrawio.py, but I only kept of data with no-gaps
and in one segment. |
def get(cls, format):
"""
Gets an emitter, returns the class and a content-type.
"""
if cls.EMITTERS.has_key(format):
return cls.EMITTERS.get(format)
raise ValueError("No emitters found for type %s" % format) | Gets an emitter, returns the class and a content-type. |
def pop(self):
"""Pop a request"""
method_frame, header, body = self.server.basic_get(queue=self.key)
if body:
return self._decode_request(body) | Pop a request |
def transmit(self, bytes, protocol=None):
'''Gain exclusive access to card during APDU transmission for if this
decorator decorates a PCSCCardConnection.'''
data, sw1, sw2 = CardConnectionDecorator.transmit(
self, bytes, protocol)
return data, sw1, sw2 | Gain exclusive access to card during APDU transmission for if this
decorator decorates a PCSCCardConnection. |
def system_find_global_workflows(input_params={}, always_retry=True, **kwargs):
"""
Invokes the /system/findGlobalWorkflows API method.
"""
return DXHTTPRequest('/system/findGlobalWorkflows', input_params, always_retry=always_retry, **kwargs) | Invokes the /system/findGlobalWorkflows API method. |
def delete(self, refobj):
"""Delete the content of the given refobj
:param refobj: the refobj that represents the content that should be deleted
:type refobj: refobj
:returns: None
:rtype: None
:raises: None
"""
refobjinter = self.get_refobjinter()
... | Delete the content of the given refobj
:param refobj: the refobj that represents the content that should be deleted
:type refobj: refobj
:returns: None
:rtype: None
:raises: None |
def stage_wbem_connection(self, wbem_connection):
"""
Log connection information. This includes the connection id (conn_id)
that is output with the log entry. This entry is logged if either
http or api loggers are enable. It honors both the logger and
detail level of either api l... | Log connection information. This includes the connection id (conn_id)
that is output with the log entry. This entry is logged if either
http or api loggers are enable. It honors both the logger and
detail level of either api logger if defined or http logger if defined.
If the api logger ... |
def get_randomness_stream(self, decision_point: str, for_initialization: bool=False) -> RandomnessStream:
"""Provides a new source of random numbers for the given decision point.
Parameters
----------
decision_point :
A unique identifier for a stream of random numbers. Typi... | Provides a new source of random numbers for the given decision point.
Parameters
----------
decision_point :
A unique identifier for a stream of random numbers. Typically represents
a decision that needs to be made each time step like 'moves_left' or
'gets_d... |
def dms2dec(dms):
"""
Convert latitude from degrees,minutes,seconds in string or 3-array
format to decimal degrees.
"""
DEGREE = 360.
HOUR = 24.
MINUTE = 60.
SECOND = 3600.
# Be careful here, degree needs to be a float so that negative zero
# can have its signbit set:
# http:... | Convert latitude from degrees,minutes,seconds in string or 3-array
format to decimal degrees. |
def get_default_field_names(self, declared_fields, model_info):
"""
Return the default list of field names that will be used if the
`Meta.fields` option is not specified.
"""
return (
[model_info.pk.name] +
list(declared_fields.keys()) +
list(m... | Return the default list of field names that will be used if the
`Meta.fields` option is not specified. |
def list_parse(name_list):
"""Parse a comma-separated list of values, or a filename (starting with @)
containing a list value on each line.
"""
if name_list and name_list[0] == '@':
value = name_list[1:]
if not os.path.exists(value):
log.warning('The file %s does not exist' ... | Parse a comma-separated list of values, or a filename (starting with @)
containing a list value on each line. |
def convert_path(pathname):
"""Return 'pathname' as a name that will work on the native filesystem.
The path is split on '/' and put back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to th... | Return 'pathname' as a name that will work on the native filesystem.
The path is split on '/' and put back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to the local
convention before we ca... |
def _check_data_flow_ports(self, data_flow):
"""Checks the validity of the ports of a data flow
Checks whether the ports of a data flow are existing and whether it is allowed to connect these ports.
:param rafcon.core.data_flow.DataFlow data_flow: The data flow to be checked
:return bo... | Checks the validity of the ports of a data flow
Checks whether the ports of a data flow are existing and whether it is allowed to connect these ports.
:param rafcon.core.data_flow.DataFlow data_flow: The data flow to be checked
:return bool validity, str message: validity is True, when the dat... |
def imagetransformer_base_8l_8h_big_cond_dr03_dan():
"""big 1d model for conditional image generation.2.99 on cifar10."""
hparams = imagetransformer_sep_channels_8l()
hparams.block_width = 256
hparams.block_length = 256
hparams.hidden_size = 512
hparams.num_heads = 8
hparams.filter_size = 2048
hparams.b... | big 1d model for conditional image generation.2.99 on cifar10. |
def override_args(**kwargs):
"""Creates a custom getter that applies specified named arguments.
Args:
**kwargs: Overriding arguments for the custom getter to use in preference
the named arguments it's called with.
Returns:
Custom getter.
"""
override_kwargs = kwargs
def custom_getter(gette... | Creates a custom getter that applies specified named arguments.
Args:
**kwargs: Overriding arguments for the custom getter to use in preference
the named arguments it's called with.
Returns:
Custom getter. |
def refine(video, **kwargs):
"""Refine a video by searching `OMDb API <http://omdbapi.com/>`_.
Several :class:`~subliminal.video.Episode` attributes can be found:
* :attr:`~subliminal.video.Episode.series`
* :attr:`~subliminal.video.Episode.year`
* :attr:`~subliminal.video.Episode.series_imd... | Refine a video by searching `OMDb API <http://omdbapi.com/>`_.
Several :class:`~subliminal.video.Episode` attributes can be found:
* :attr:`~subliminal.video.Episode.series`
* :attr:`~subliminal.video.Episode.year`
* :attr:`~subliminal.video.Episode.series_imdb_id`
Similarly, for a :class:`... |
def _add_linux_ethernet(self, port_info, bridge_name):
"""
Use raw sockets on Linux.
If interface is a bridge we connect a tap to it
"""
interface = port_info["interface"]
if gns3server.utils.interfaces.is_interface_bridge(interface):
network_interfaces = [i... | Use raw sockets on Linux.
If interface is a bridge we connect a tap to it |
def _getitem_with_mask(self, key, fill_value=dtypes.NA):
"""Index this Variable with -1 remapped to fill_value."""
# TODO(shoyer): expose this method in public API somewhere (isel?) and
# use it for reindex.
# TODO(shoyer): add a sanity check that all other integers are
# non-neg... | Index this Variable with -1 remapped to fill_value. |
def register_notification(self, notification):
"""
Registers given notification.
:param notification: Notification to register.
:type notification: Notification
:return: Method success.
:rtype: bool
"""
LOGGER.debug("> Registering notification: '{0}'.".f... | Registers given notification.
:param notification: Notification to register.
:type notification: Notification
:return: Method success.
:rtype: bool |
def WriteFileHeader(self, arcname=None, compress_type=None, st=None):
"""Writes a file header."""
if not self._stream:
raise ArchiveAlreadyClosedError(
"Attempting to write to a ZIP archive that was already closed.")
self.cur_zinfo = self._GenerateZipInfo(
arcname=arcname, compress... | Writes a file header. |
def searchitem(self, item, threshold=None):
"""Search the index for items whose key exceeds the threshold
similarity to the key of the given item.
:return: list of pairs of (item, similarity) by decreasing similarity.
>>> from ngram import NGram
>>> n = NGram([(0, "SPAM"), (1, ... | Search the index for items whose key exceeds the threshold
similarity to the key of the given item.
:return: list of pairs of (item, similarity) by decreasing similarity.
>>> from ngram import NGram
>>> n = NGram([(0, "SPAM"), (1, "SPAN"), (2, "EG"),
... (3, "SPANN")], key=lamb... |
def GetTSKVsPartByPathSpec(tsk_volume, path_spec):
"""Retrieves the TSK volume system part object from the TSK volume object.
Args:
tsk_volume (pytsk3.Volume_Info): TSK volume information.
path_spec (PathSpec): path specification.
Returns:
tuple: contains:
pytsk3.TSK_VS_PART_INFO: TSK volume ... | Retrieves the TSK volume system part object from the TSK volume object.
Args:
tsk_volume (pytsk3.Volume_Info): TSK volume information.
path_spec (PathSpec): path specification.
Returns:
tuple: contains:
pytsk3.TSK_VS_PART_INFO: TSK volume system part information or
None on error.
... |
def weld_unique(array, weld_type):
"""Return the unique elements of the array.
Parameters
----------
array : numpy.ndarray or WeldObject
Input array.
weld_type : WeldType
Type of each element in the input array.
Returns
-------
WeldObject
Representation of this ... | Return the unique elements of the array.
Parameters
----------
array : numpy.ndarray or WeldObject
Input array.
weld_type : WeldType
Type of each element in the input array.
Returns
-------
WeldObject
Representation of this computation. |
def page_templates(mapping):
"""Like the *page_template* decorator but manage multiple paginations.
You can map multiple templates to *querystring_keys* using the *mapping*
dict, e.g.::
@page_templates({
'page_contents1.html': None,
'page_contents2.html': 'go_to_page',
... | Like the *page_template* decorator but manage multiple paginations.
You can map multiple templates to *querystring_keys* using the *mapping*
dict, e.g.::
@page_templates({
'page_contents1.html': None,
'page_contents2.html': 'go_to_page',
})
def myview(request):
... |
def get_version():
"""
Get project version (using versioneer)
:return: string containing version
"""
setup_versioneer()
clean_cache()
import versioneer
version = versioneer.get_version()
parsed_version = parse_version(version)
if '*@' in str(parsed_version):
import time
... | Get project version (using versioneer)
:return: string containing version |
def getDefaultApplicationForMimeType(self, pchMimeType, pchAppKeyBuffer, unAppKeyBufferLen):
    """Return the app key that will open this mime type.

    Thin pass-through: looks up the native entry point of the same name
    on ``self.function_table`` and forwards all three arguments to it,
    returning whatever the native call returns.
    """
    native_call = self.function_table.getDefaultApplicationForMimeType
    return native_call(pchMimeType, pchAppKeyBuffer, unAppKeyBufferLen)
def is_subdomain(self, other):
"""Test if the object is a subdomain of the other.
:param other: the object to which we compare this instance
:returns: True if this instance is a subdomain of the other
"""
compared = other.value if hasattr(other, 'value') else other
try:
... | Test if the object is a subdomain of the other.
:param other: the object to which we compare this instance
:returns: True if this instance is a subdomain of the other |
def norm(self, limits=None):
"""Unity-based normalization to scale data into 0-1 range.
(values - min) / (max - min)
Args:
limits: tuple of (min, max) defining the normalization range
"""
kwargs = {}
if limits is not None:
kwargs = {'min': li... | Unity-based normalization to scale data into 0-1 range.
(values - min) / (max - min)
Args:
limits: tuple of (min, max) defining the normalization range |
def convert_units(self, desired, guess=False):
"""
Convert the units of the mesh into a specified unit.
Parameters
----------
desired : string
Units to convert to (eg 'inches')
guess : boolean
If self.units are not defined should we
guess th... | Convert the units of the mesh into a specified unit.
Parameters
----------
desired : string
Units to convert to (eg 'inches')
guess : boolean
If self.units are not defined should we
guess the current units of the document and then convert? |
def next(self):
"""Iterator protocol: get next item or raise StopIteration."""
if self._fut is None:
self._fut = self._iter.getq()
try:
try:
# The future result is set by this class's _extended_callback
# method.
# pylint: disable=unpacking-non-sequence
(ent,
... | Iterator protocol: get next item or raise StopIteration. |
def Kdiag(self, X):
"""Compute the diagonal of the covariance matrix associated to X."""
Kdiag = np.zeros(X.shape[0])
ly=1/self.lengthscale_Y
lu=np.sqrt(3)/self.lengthscale_U
Vu = self.variance_U
Vy=self.variance_Y
k1 = (2*lu+ly)/(lu+ly)**2
k2 = (ly-2*lu... | Compute the diagonal of the covariance matrix associated to X. |
def close(self):
    """Close underlying stream.

    The tabix handle is closed only if it is present and not already
    closed; the plain stream is closed whenever one is attached.
    """
    tabix = self.tabix_file
    if tabix and not tabix.closed:
        tabix.close()
    stream = self.stream
    if stream:
        stream.close()
def upgrade():
"""Upgrade database."""
with op.batch_alter_table('accounts_user_session_activity') as batch_op:
batch_op.add_column(sa.Column('browser', sa.String(80), nullable=True))
batch_op.add_column(
sa.Column('browser_version', sa.String(30), nullable=True))
batch_op.ad... | Upgrade database. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.