positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def get_scale(self, gg):
"""
Create a scale
"""
# This method does some introspection to save users from
# scale mismatch error. This could happen when the
# aesthetic is mapped to a categorical but the limits
# are not provided in categorical form. We only handle... | Create a scale |
def get_num_gpu():
"""
Returns:
int: #available GPUs in CUDA_VISIBLE_DEVICES, or in the system.
"""
def warn_return(ret, message):
try:
import tensorflow as tf
except ImportError:
return ret
built_with_cuda = tf.test.is_built_with_cuda()
... | Returns:
int: #available GPUs in CUDA_VISIBLE_DEVICES, or in the system. |
def _query(cls, *args, **kwds):
"""Create a Query object for this class.
Args:
distinct: Optional bool, short hand for group_by = projection.
*args: Used to apply an initial filter
**kwds: are passed to the Query() constructor.
Returns:
A Query object.
"""
# Validating dist... | Create a Query object for this class.
Args:
distinct: Optional bool, short hand for group_by = projection.
*args: Used to apply an initial filter
**kwds: are passed to the Query() constructor.
Returns:
A Query object. |
def override_default_templates(self):
"""
Override the default emails already defined by other apps
"""
if plugs_mail_settings['OVERRIDE_TEMPLATE_DIR']:
dir_ = plugs_mail_settings['OVERRIDE_TEMPLATE_DIR']
for file_ in os.listdir(dir_):
if file_.end... | Override the default emails already defined by other apps |
def load_config(args, config_path=".inlineplz.yml"):
"""Load inline-plz config from yaml config file with reasonable defaults."""
config = {}
try:
with open(config_path) as configfile:
config = yaml.safe_load(configfile) or {}
if config:
print("Loaded config f... | Load inline-plz config from yaml config file with reasonable defaults. |
def get_appliances(self, location_id):
"""Get the appliances added for a specified location.
Args:
location_id (string): identifiying string of appliance
Returns:
list: dictionary objects containing appliances data
"""
url = "https://api.neur.io/v1/appliances"
headers = self.__gen... | Get the appliances added for a specified location.
Args:
location_id (string): identifiying string of appliance
Returns:
list: dictionary objects containing appliances data |
def cancelAllPendingResults( self ):
"""Cancel all pending results. Note that this only affects the
notebook's record, not any job running in a lab."""
for k in self._results.keys():
rs = self._results[k]
self._results[k] = [ j for j in rs if isinstance(j, dict) ]
... | Cancel all pending results. Note that this only affects the
notebook's record, not any job running in a lab. |
def _DownloadScript(self, url, dest_dir):
"""Download the contents of the URL to the destination.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script.
"""
... | Download the contents of the URL to the destination.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script. |
def automaster(config='/etc/auto_salt'):
'''
List the contents of the auto master
CLI Example:
.. code-block:: bash
salt '*' mount.automaster
'''
ret = {}
if not os.path.isfile(config):
return ret
with salt.utils.files.fopen(config) as ifile:
for line in ifile:... | List the contents of the auto master
CLI Example:
.. code-block:: bash
salt '*' mount.automaster |
def single_download_photos(photos):
"""Use single process to download photos
:param photos: The photos to be downloaded
:type photos: list of dicts
"""
global counter
counter = len(photos)
for photo in photos:
download_photo(photo) | Use single process to download photos
:param photos: The photos to be downloaded
:type photos: list of dicts |
def cci(self, n, array=False):
"""CCI指标"""
result = talib.CCI(self.high, self.low, self.close, n)
if array:
return result
return result[-1] | CCI指标 |
def update(self, other):
'''
Add all pileup elements from other into self.
'''
assert self.locus == other.locus
self.elements.update(other.elements) | Add all pileup elements from other into self. |
def print_params(self, allpars=False, loglevel=logging.INFO):
"""Print information about the model parameters (values,
errors, bounds, scale)."""
pars = self.get_params()
o = '\n'
o += '%4s %-20s%10s%10s%10s%10s%10s%5s\n' % (
'idx', 'parname', 'value', 'error',
... | Print information about the model parameters (values,
errors, bounds, scale). |
def send(self, obj):
"""Send object"""
buf = io.BytesIO()
ForkingPickler(buf, pickle.HIGHEST_PROTOCOL).dump(obj)
self.send_bytes(buf.getvalue()) | Send object |
def make_url(contents, domain=DEFAULT_DOMAIN, force_gist=False,
size_for_gist=MAX_URL_LEN):
"""
Returns the URL to open given the domain and contents.
If the file contents are large, an anonymous gist will be created.
Parameters
----------
contents
* string - assumed to be... | Returns the URL to open given the domain and contents.
If the file contents are large, an anonymous gist will be created.
Parameters
----------
contents
* string - assumed to be GeoJSON
* an object that implements __geo_interface__
A FeatureCollection will be constructed wi... |
def _transform(self, data, transform, step_size):
'''
Transform the data. If the transform is not supported by this series,
returns the data unaltered.
'''
if transform=='mean':
total = sum( k*v for k,v in data.items() )
count = sum( data.values() )
data = float(total)/float(count)... | Transform the data. If the transform is not supported by this series,
returns the data unaltered. |
def reset_next_ids(classes):
"""
For each class in the list, if the .next_id attribute is not None
(meaning the table has an ID generator associated with it), set
.next_id to 0. This has the effect of reseting the ID generators,
and is useful in applications that process multiple documents and
add new rows to ta... | For each class in the list, if the .next_id attribute is not None
(meaning the table has an ID generator associated with it), set
.next_id to 0. This has the effect of reseting the ID generators,
and is useful in applications that process multiple documents and
add new rows to tables in those documents. Calling t... |
def lin_sim_calc(goid1, goid2, sim_r, termcnts):
'''
Computes Lin's similarity measure using pre-calculated Resnik's similarities.
'''
if sim_r is not None:
info = get_info_content(goid1, termcnts) + get_info_content(goid2, termcnts)
if info != 0:
return (2*sim_r)/info | Computes Lin's similarity measure using pre-calculated Resnik's similarities. |
def _get_dense_tensor(self, inputs, weight_collections=None, trainable=None):
"""Returns a `Tensor`."""
del weight_collections
text_batch = tf.reshape(inputs.get(self), shape=[-1])
m = module.Module(self.module_spec, trainable=self.trainable and trainable)
return m(text_batch) | Returns a `Tensor`. |
def bexpcube_moon(self, **kwargs):
""" return the name of a binned exposure cube file
"""
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
kwargs_copy['dataset'] = kwargs.get('dataset', self.dataset(**kwargs))
kwargs_copy['component'] = kwargs.get(
... | return the name of a binned exposure cube file |
def is_android_raw(raw):
"""
Returns a string that describes the type of file, for common Android
specific formats
"""
val = None
# We do not check for META-INF/MANIFEST.MF,
# as you also want to analyze unsigned APKs...
# AndroidManifest.xml should be in every APK.
# classes.dex an... | Returns a string that describes the type of file, for common Android
specific formats |
def logging_raslog_message_msgId_msgId(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
raslog = ET.SubElement(logging, "raslog")
message = ET.SubElement(raslog, ... | Auto Generated Code |
def chain_to_quadratic(chain, target_adjacency, chain_strength):
"""Determine the quadratic biases that induce the given chain.
Args:
chain (iterable):
The variables that make up a chain.
target_adjacency (dict/:class:`networkx.Graph`):
Should be a dict of the form {s: ... | Determine the quadratic biases that induce the given chain.
Args:
chain (iterable):
The variables that make up a chain.
target_adjacency (dict/:class:`networkx.Graph`):
Should be a dict of the form {s: Ns, ...} where s is a variable
in the target graph and Ns is... |
def norm_score(self):
"""Return the normalized score.
Equals 1.0 for a z-score of 0, falling to 0.0 for extremely positive
or negative values.
"""
cdf = (1.0 + math.erf(self.score / math.sqrt(2.0))) / 2.0
return 1 - 2*math.fabs(0.5 - cdf) | Return the normalized score.
Equals 1.0 for a z-score of 0, falling to 0.0 for extremely positive
or negative values. |
def sample_image(d, data, u, v, w, i=-1, verbose=0, imager='xy', wres=100):
""" Samples one integration and returns image
i is integration to image. Default is mid int.
"""
if i == -1:
i = len(data)/2
if imager == 'xy':
image = rtlib.imgonefullxy(n.outer(u, d['freq']/d['freq_orig']... | Samples one integration and returns image
i is integration to image. Default is mid int. |
def answering_questions(self, attempt, validation_token, quiz_submission_id, access_code=None, quiz_questions=None):
"""
Answering questions.
Provide or update an answer to one or more QuizQuestions.
"""
path = {}
data = {}
params = {}
# REQUIR... | Answering questions.
Provide or update an answer to one or more QuizQuestions. |
def generate_substitution_structures(self, atom, target_species=[],
sub_both_sides=False, range_tol=1e-2,
dist_from_surf=0):
"""
Function that performs substitution-type doping on the surface and
returns all po... | Function that performs substitution-type doping on the surface and
returns all possible configurations where one dopant is substituted
per surface. Can substitute one surface or both.
Args:
atom (str): atom corresponding to substitutional dopant
sub_both_sides (b... |
def wrap_object(func, before, after):
'''
before/after call will encapsulate callable object
'''
def _wrapper(*args, **kwargs):
before()
try:
return func(*args, **kwargs)
except Exception as e:
raise e
finally:
after()
return _wrapp... | before/after call will encapsulate callable object |
def gather_candidates(self):
"""Gather candidates from the slave environments.
The candidates are stored in :attr:`candidates`, overriding any
previous candidates.
"""
async def slave_task(addr):
r_manager = await self.env.connect(addr)
return await r_man... | Gather candidates from the slave environments.
The candidates are stored in :attr:`candidates`, overriding any
previous candidates. |
def do_title(s):
"""Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase.
"""
return ''.join(
[item[0].upper() + item[1:].lower()
for item in _word_beginning_split_re.split(soft_unicode(s))
if item]) | Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase. |
def sec_to_public_pair(sec, generator=None, strict=True):
"""Convert a public key in sec binary format to a public pair."""
byte_count = (generator.p().bit_length() + 7) >> 3 if generator else (len(sec) - 1)
x = from_bytes_32(sec[1:1 + byte_count])
sec0 = sec[:1]
if len(sec) == 1 + byte_count * 2:
... | Convert a public key in sec binary format to a public pair. |
def hypot(x, y, context=None):
"""
Return the Euclidean norm of x and y, i.e., the square root of the sum of
the squares of x and y.
"""
return _apply_function_in_current_context(
BigFloat,
mpfr.mpfr_hypot,
(
BigFloat._implicit_convert(x),
BigFloat._i... | Return the Euclidean norm of x and y, i.e., the square root of the sum of
the squares of x and y. |
def _init_az_api(self):
"""
Initialise client objects for talking to Azure API.
This is in a separate function so to be called by ``__init__``
and ``__setstate__``.
"""
with self.__lock:
if self._resource_client is None:
log.debug("Making Azur... | Initialise client objects for talking to Azure API.
This is in a separate function so to be called by ``__init__``
and ``__setstate__``. |
def _exists(fs, path):
"""
Check that the given path exists on the filesystem.
Note that unlike `os.path.exists`, we *do* propagate file system errors
other than a non-existent path or non-existent directory component.
E.g., should EPERM or ELOOP be raised, an exception will bubble up.
"""
... | Check that the given path exists on the filesystem.
Note that unlike `os.path.exists`, we *do* propagate file system errors
other than a non-existent path or non-existent directory component.
E.g., should EPERM or ELOOP be raised, an exception will bubble up. |
def _prepareSubForm(self, liveForm):
"""
Utility for turning liveforms into subforms, and compacting them as
necessary.
@param liveForm: a liveform.
@type liveForm: L{LiveForm}
@return: a sub form.
@rtype: L{LiveForm}
"""
liveForm = liveForm.asSu... | Utility for turning liveforms into subforms, and compacting them as
necessary.
@param liveForm: a liveform.
@type liveForm: L{LiveForm}
@return: a sub form.
@rtype: L{LiveForm} |
def upcoming(self, chamber, congress=CURRENT_CONGRESS):
"Shortcut for upcoming bills"
path = "bills/upcoming/{chamber}.json".format(chamber=chamber)
return self.fetch(path) | Shortcut for upcoming bills |
def img2img_transformer_base_tpu():
"""Hparams for training img2img_transformer on tpu."""
hparams = img2img_transformer_base()
update_hparams_for_tpu(hparams)
hparams.batch_size = 2
hparams.num_heads = 4 # heads are expensive on tpu
hparams.num_decoder_layers = 8
hparams.num_encoder_layers = 4
hparam... | Hparams for training img2img_transformer on tpu. |
def disable(name, **kwargs):
'''
Disable the named service to start at boot
CLI Example:
.. code-block:: bash
salt '*' service.disable <service name> <runlevels=single-runlevel>
salt '*' service.disable <service name> <runlevels=[runlevel1,runlevel2]>
'''
levels = []
if 'r... | Disable the named service to start at boot
CLI Example:
.. code-block:: bash
salt '*' service.disable <service name> <runlevels=single-runlevel>
salt '*' service.disable <service name> <runlevels=[runlevel1,runlevel2]> |
def has_control_chars(i):
""" Returns true if the passed token is an unknown string
or a constant string having control chars (inverse, etc
"""
if not hasattr(i, 'type_'):
return False
if i.type_ != Type.string:
return False
if i.token in ('VAR',... | Returns true if the passed token is an unknown string
or a constant string having control chars (inverse, etc |
def get_sort_order(molecules):
"""
Count up the total number of scores whose values are positve and negative.
If a greater number are negative, then sort in ascending order (e.g. for binding energy estimates)
Otherwise, sort in descending order (e.g. for similarity values)
"""
neg_count = 0
pos_count =... | Count up the total number of scores whose values are positve and negative.
If a greater number are negative, then sort in ascending order (e.g. for binding energy estimates)
Otherwise, sort in descending order (e.g. for similarity values) |
def get_marker_size(self):
"""
Gets the size of a message marker.
:return: QSize
"""
h = self.get_marker_height()
if h < 1:
h = 1
return QtCore.QSize(self.sizeHint().width() / 2, h) | Gets the size of a message marker.
:return: QSize |
def find_hal(self, atoms):
"""Look for halogen bond acceptors (Y-{O|P|N|S}, with Y=C,P,S)"""
data = namedtuple('hal_acceptor', 'o o_orig_idx y y_orig_idx')
a_set = []
# All oxygens, nitrogen, sulfurs with neighboring carbon, phosphor, nitrogen or sulfur
for a in [at for at in ato... | Look for halogen bond acceptors (Y-{O|P|N|S}, with Y=C,P,S) |
def status(self, status_id, raise_exception_on_failure=False):
"""Return the status of the generation job."""
query = {"output": "json", "user_credentials": self.api_key}
resp = requests.get(
"%sstatus/%s" % (self._url, status_id), params=query, timeout=self._timeout
)
... | Return the status of the generation job. |
def _divide_widths(self, cli, width):
"""
Return the widths for all columns.
Or None when there is not enough space.
"""
if not self.children:
return []
# Calculate widths.
given_dimensions = self.get_dimensions(cli) if self.get_dimensions else None
... | Return the widths for all columns.
Or None when there is not enough space. |
def check_aggregate(self, variable, components=None, exclude_on_fail=False,
multiplier=1, **kwargs):
"""Check whether a timeseries matches the aggregation of its components
Parameters
----------
variable: str
variable to be checked for matching aggreg... | Check whether a timeseries matches the aggregation of its components
Parameters
----------
variable: str
variable to be checked for matching aggregation of sub-categories
components: list of str, default None
list of variables, defaults to all sub-categories of `... |
def send_mail(self, subject, to, template, **template_ctx):
"""
Utility method to send mail with the `mail` template context.
"""
if not self.mail:
from warnings import warn
warn('Attempting to send mail without the mail bundle installed! '
'Pleas... | Utility method to send mail with the `mail` template context. |
def error_view(template_dir=None):
"""
Create the Error view
Must be instantiated
import error_view
ErrorView = error_view()
:param template_dir: The directory containing the view pages
:return:
"""
if not template_dir:
template_dir = "Pylot/Error"
template_page = "%s/... | Create the Error view
Must be instantiated
import error_view
ErrorView = error_view()
:param template_dir: The directory containing the view pages
:return: |
def fractal_dimension(image):
'''Estimates the fractal dimension of an image with box counting.
Counts pixels with value 0 as empty and everything else as non-empty.
Input image has to be grayscale.
See, e.g `Wikipedia <https://en.wikipedia.org/wiki/Fractal_dimension>`_.
:param image: numpy.ndarra... | Estimates the fractal dimension of an image with box counting.
Counts pixels with value 0 as empty and everything else as non-empty.
Input image has to be grayscale.
See, e.g `Wikipedia <https://en.wikipedia.org/wiki/Fractal_dimension>`_.
:param image: numpy.ndarray
:returns: estimation of fractal... |
def _verify_params(self):
"""Verifies the parameters don't use any reserved parameter.
Raises:
ValueError: If a reserved parameter is used.
"""
reserved_in_use = self._RESERVED_PARAMS.intersection(self.extra_params)
if reserved_in_use:
raise ValueError("U... | Verifies the parameters don't use any reserved parameter.
Raises:
ValueError: If a reserved parameter is used. |
def iters(cls, batch_size=32, device=0, root='.data', vectors=None, **kwargs):
"""Create iterator objects for splits of the SST dataset.
Arguments:
batch_size: Batch_size
device: Device to create batches on. Use - 1 for CPU and None for
the currently active GPU d... | Create iterator objects for splits of the SST dataset.
Arguments:
batch_size: Batch_size
device: Device to create batches on. Use - 1 for CPU and None for
the currently active GPU device.
root: The root directory that the dataset's zip archive will be
... |
def read_raw(self, params=None):
"""Get information about the current entity.
Make an HTTP GET call to ``self.path('self')``. Return the response.
:return: A ``requests.response`` object.
"""
path_type = self._meta.get('read_type', 'self')
return client.get(
... | Get information about the current entity.
Make an HTTP GET call to ``self.path('self')``. Return the response.
:return: A ``requests.response`` object. |
def save_vlen(self, key, data):
"""
Save a sequence of variable-length arrays
:param key: name of the dataset
:param data: data to store as a list of arrays
"""
shape = (None,) + data[0].shape[:-1]
try:
dset = self[key]
except KeyError:
... | Save a sequence of variable-length arrays
:param key: name of the dataset
:param data: data to store as a list of arrays |
def cli(ctx, feature_id, start, end, organism="", sequence=""):
"""Set the boundaries of a genomic feature
Output:
A standard apollo feature dictionary ({"features": [{...}]})
"""
return ctx.gi.annotations.set_boundaries(feature_id, start, end, organism=organism, sequence=sequence) | Set the boundaries of a genomic feature
Output:
A standard apollo feature dictionary ({"features": [{...}]}) |
def get_txn_outputs(raw_tx_hex, output_addr_list, coin_symbol):
'''
Used to verify a transaction hex does what's expected of it.
Must supply a list of output addresses so that the library can try to
convert from script to address using both pubkey and script.
Returns a list of the following form:
... | Used to verify a transaction hex does what's expected of it.
Must supply a list of output addresses so that the library can try to
convert from script to address using both pubkey and script.
Returns a list of the following form:
[{'value': 12345, 'address': '1abc...'}, ...]
Uses @vbuterin's ... |
def _GetSignatureMatchParserNames(self, file_object):
"""Determines if a file-like object matches one of the known signatures.
Args:
file_object (file): file-like object whose contents will be checked
for known signatures.
Returns:
list[str]: parser names for which the contents of th... | Determines if a file-like object matches one of the known signatures.
Args:
file_object (file): file-like object whose contents will be checked
for known signatures.
Returns:
list[str]: parser names for which the contents of the file-like object
matches their known signatures. |
def deprecated_for(replace_message):
"""
Decorate a deprecated function, with info about what to use instead, like:
@deprecated_for("toBytes()")
def toAscii(arg):
...
"""
def decorator(to_wrap):
@functools.wraps(to_wrap)
def wrapper(*args, **kwargs):
warnings... | Decorate a deprecated function, with info about what to use instead, like:
@deprecated_for("toBytes()")
def toAscii(arg):
... |
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
... | Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int |
def device_selected(self, index):
"""Handler for selecting a device from the list in the UI"""
device = self.devicelist_model.itemFromIndex(index)
print(device.device.addr)
self.btnConnect.setEnabled(True) | Handler for selecting a device from the list in the UI |
def process_error_labels(value):
""" Process the error labels of a dependent variable 'value' to ensure uniqueness. """
observed_error_labels = {}
for error in value.get('errors', []):
label = error.get('label', 'error')
if label not in observed_error_labels:
... | Process the error labels of a dependent variable 'value' to ensure uniqueness. |
def destroy_digidoc_session(self):
""" Closes DigiDocService session and clears request.session[I{DIGIDOC_SESSION_KEY}]
"""
# cleanup data too
self.destroy_digidoc_session_data()
try:
session = self.request.session[self.DIGIDOC_SESSION_KEY]
if session:
... | Closes DigiDocService session and clears request.session[I{DIGIDOC_SESSION_KEY}] |
def listdir(*paths, glob=None):
'''
List the (optionally glob filtered) full paths from a dir.
Args:
*paths ([str,...]): A list of path elements
glob (str): An optional fnmatch glob str
'''
path = genpath(*paths)
names = os.listdir(path)
if glob is not None:
names =... | List the (optionally glob filtered) full paths from a dir.
Args:
*paths ([str,...]): A list of path elements
glob (str): An optional fnmatch glob str |
def expose_endpoints (module, *args):
"""
Expose methods to the given module for each API endpoint
"""
for op in args:
# Capture the closure state
def create_method (o):
return lambda exp: send_request(o, exp)
setattr(sys.modules[__name__], op, create_me... | Expose methods to the given module for each API endpoint |
def _fetch_stock_data(self, stock_list):
"""获取股票信息"""
pool = multiprocessing.pool.ThreadPool(len(stock_list))
try:
res = pool.map(self.get_stocks_by_range, stock_list)
finally:
pool.close()
return [d for d in res if d is not None] | 获取股票信息 |
def check_stops(pfeed, *, as_df=False, include_warnings=False):
"""
Analog of :func:`check_frequencies` for ``pfeed.stops``
"""
# Use gtfstk's stop validator
if pfeed.stops is not None:
stop_times = pd.DataFrame(columns=['stop_id'])
feed = gt.Feed(stops=pfeed.stops, stop_times=stop_t... | Analog of :func:`check_frequencies` for ``pfeed.stops`` |
def _new_open_bin(self, width=None, height=None, rid=None):
"""
Extract the next empty bin and append it to open bins
Returns:
PackingAlgorithm: Initialized empty packing bin.
None: No bin big enough for the rectangle was found
"""
factories_to_delete = s... | Extract the next empty bin and append it to open bins
Returns:
PackingAlgorithm: Initialized empty packing bin.
None: No bin big enough for the rectangle was found |
def replace_namespaced_config_map(self, name, namespace, body, **kwargs): # noqa: E501
"""replace_namespaced_config_map # noqa: E501
replace the specified ConfigMap # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass... | replace_namespaced_config_map # noqa: E501
replace the specified ConfigMap # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_config_map(name, namespace, body, async... |
def sampling_query(sql, fields=None, count=5, sampling=None):
"""Returns a sampling query for the SQL object.
Args:
sql: the SQL object to sample
fields: an optional list of field names to retrieve.
count: an optional count of rows to retrieve which is used if a specific
sampling is... | Returns a sampling query for the SQL object.
Args:
sql: the SQL object to sample
fields: an optional list of field names to retrieve.
count: an optional count of rows to retrieve which is used if a specific
sampling is not specified.
sampling: an optional sampling strategy to appl... |
def read(filename, backed=False, sheet=None, ext=None, delimiter=None,
first_column_names=False, backup_url=None, cache=False, **kwargs) -> AnnData:
"""Read file and return :class:`~anndata.AnnData` object.
To speed up reading, consider passing `cache=True`, which creates an hdf5
cache file.
... | Read file and return :class:`~anndata.AnnData` object.
To speed up reading, consider passing `cache=True`, which creates an hdf5
cache file.
Parameters
----------
filename : `str`
If the filename has no file extension, it is interpreted as a key for
generating a filename via `sc.se... |
def calculate_best_chunk_size(self, data_length):
"""
Calculates the best chunk size for a list of length data_length. The current implemented formula is more or
less an empirical result for multiprocessing case on one machine.
:param data_length: A length which defines how man... | Calculates the best chunk size for a list of length data_length. The current implemented formula is more or
less an empirical result for multiprocessing case on one machine.
:param data_length: A length which defines how many calculations there need to be.
:type data_length: int
... |
def pop(self, sexp):
'''
Notes: Sequence works a bit different than other nodes.
This method (like others) expectes a list. However, sequence matches
against the list, whereas other nodes try to match against elements
of the list.
'''
for t in self.terms:
... | Notes: Sequence works a bit different than other nodes.
This method (like others) expectes a list. However, sequence matches
against the list, whereas other nodes try to match against elements
of the list. |
def manifests_parse(self):
'''parse manifests present on system'''
self.manifests = []
for manifest_path in self.find_manifests():
if self.manifest_path_is_old(manifest_path):
print("fw: Manifest (%s) is old; consider 'manifest download'" % (manifest_path))
... | parse manifests present on system |
def plot(self,axis=None,**kargs):
"""
- plot(axis=None, **kwarg): Finally, sphviewer.Scene class has its own plotting method.
It shows the scene as seen by the camera. It is to say, it plots the particles according
to their aparent coordinates; axis makes a reference to an existing axis... | - plot(axis=None, **kwarg): Finally, sphviewer.Scene class has its own plotting method.
It shows the scene as seen by the camera. It is to say, it plots the particles according
to their aparent coordinates; axis makes a reference to an existing axis. In case axis is None,
the plot is made on th... |
def from_connection_string(s):
"""
Credential input format:
<domain>/<username>/<secret_type>:<secret>@<dc_ip_or_hostname>
"""
cred = KerberosCredential()
cred.domain, t = s.split('/', 1)
cred.username, t = t.split('/', 1)
secret_type, t = t.split(':', 1)
secret, target = t.rsplit('@', 1)
st =... | Credential input format:
<domain>/<username>/<secret_type>:<secret>@<dc_ip_or_hostname> |
def _get_keys_defdict(self):
'''Get the keys and the default dictionary of the given function's
arguments
'''
# inspect argspecs
argspec = inspect.getargspec(self.func)
keys, defvals = argspec.args, argspec.defaults
# convert to (list_of_argkeys, dict_of_default_... | Get the keys and the default dictionary of the given function's
arguments |
def _delete_upload_id(conn: Connection, table: Table, upload_id: int) -> int:
"""Remove all table records with the supplied upload_id
:param conn: sql connection
:param table: table to modify
:param upload_id: target upload_id
:return: number of records removed
"""
... | Remove all table records with the supplied upload_id
:param conn: sql connection
:param table: table to modify
:param upload_id: target upload_id
:return: number of records removed |
def refresh_queues(self, fatal=False):
""" Updates the list of currently known queues and subqueues """
try:
queues = []
prefixes = [q for q in self.config["queues"] if q.endswith("/")]
known_subqueues = Queue.all_known(prefixes=prefixes)
for q in self.c... | Updates the list of currently known queues and subqueues |
def get_thumbnail_paths(self):
"""
Helper function used to avoid processing thumbnail files during `os.walk`.
"""
thumbnail_path_tuples = []
# channel thumbnail
channel_info = self.get_channel_info()
chthumbnail_path = channel_info.get('thumbnail_chan_path', None)... | Helper function used to avoid processing thumbnail files during `os.walk`. |
async def set_lock(self, resource, lock_identifier, lock_timeout):
"""
Lock this instance and set lock expiration time to lock_timeout
:param resource: redis key to set
:param lock_identifier: uniquie id of lock
:param lock_timeout: timeout for lock in seconds
:raises: Lo... | Lock this instance and set lock expiration time to lock_timeout
:param resource: redis key to set
:param lock_identifier: uniquie id of lock
:param lock_timeout: timeout for lock in seconds
:raises: LockError if lock is not acquired |
def load_word_file(filename):
"""Loads a words file as a list of lines"""
words_file = resource_filename(__name__, "words/%s" % filename)
handle = open(words_file, 'r')
words = handle.readlines()
handle.close()
return words | Loads a words file as a list of lines |
def pwm_scan(self, fa, cutoff=0.9, nreport=50, scan_rc=True):
"""Scan sequences with this motif.
Scan sequences from a FASTA object with this motif. Less efficient
than using a Scanner object. By setting the cutoff to 0.0 and
nreport to 1, the best match for every sequence will be ret... | Scan sequences with this motif.
Scan sequences from a FASTA object with this motif. Less efficient
than using a Scanner object. By setting the cutoff to 0.0 and
nreport to 1, the best match for every sequence will be returned.
Only the position of the matches is returned.
Par... |
def correlation(a, b):
    """Return the correlation distance between vectors ``a`` and ``b``.

    Args:
        a: First vector (any array-like: list, tuple, or ndarray).
        b: Second vector (any array-like).

    Returns:
        numpy.ndarray: A 1x1 distance matrix from :func:`scipy.spatial.distance.cdist`
        using the ``'correlation'`` metric (0.0 for perfectly correlated inputs,
        2.0 for perfectly anti-correlated).
    """
    # np.asarray accepts lists, tuples, and existing arrays (no copy for the
    # latter) — generalizes the original list-only isinstance conversion,
    # which raised AttributeError for tuples.
    a = np.asarray(a).reshape(1, -1)
    b = np.asarray(b).reshape(1, -1)
    return cdist(a, b, 'correlation')
def format_additional_features_server_configurations(result):
'''
Formats the AdditionalFeaturesServerConfigurations object removing arguments that are empty
'''
from collections import OrderedDict
# Only display parameters that have content
order_dict = OrderedDict()
if result.is_rservices_... | Formats the AdditionalFeaturesServerConfigurations object removing arguments that are empty |
def parse_md_to_rst(file):
"""Read Markdown file and convert to ReStructured Text."""
try:
from m2r import parse_from_file
return parse_from_file(file).replace(
"artwork/", "http://198.27.119.65/"
)
except ImportError:
# m2r may not be installed in user environmen... | Read Markdown file and convert to ReStructured Text. |
def relationships_strict(instance):
"""Ensure that only the relationship types defined in the specification are
used.
"""
# Don't check objects that aren't relationships or that are custom objects
if (instance['type'] != 'relationship' or
instance['type'] not in enums.TYPES):
ret... | Ensure that only the relationship types defined in the specification are
used. |
def handleEvent(self, eventObj):
"""This method should be called every time through the main loop.
It handles all of the dragging
Parameters:
| eventObj - the event object obtained by calling pygame.event.get()
Returns:
| False most of the time
... | This method should be called every time through the main loop.
It handles all of the dragging
Parameters:
| eventObj - the event object obtained by calling pygame.event.get()
Returns:
| False most of the time
| True when the user finishes dragging ... |
def get_time_slide_id(self, offsetdict, create_new = None, superset_ok = False, nonunique_ok = False):
"""
Return the time_slide_id corresponding to the offset vector
described by offsetdict, a dictionary of instrument/offset
pairs.
If the optional create_new argument is None (the default),
then the table ... | Return the time_slide_id corresponding to the offset vector
described by offsetdict, a dictionary of instrument/offset
pairs.
If the optional create_new argument is None (the default),
then the table must contain a matching offset vector. The
return value is the ID of that vector. If the table does
not c... |
def decrypt(self, esp, key, icv_size=None):
"""
Decrypt an ESP packet
@param esp: an encrypted ESP packet
@param key: the secret key used for encryption
@param icv_size: the length of the icv used for integrity check
@return: a valid ESP packet encryp... | Decrypt an ESP packet
@param esp: an encrypted ESP packet
@param key: the secret key used for encryption
@param icv_size: the length of the icv used for integrity check
@return: a valid ESP packet encrypted with this algorithm
@raise IPSecIntegrityError: if t... |
def request(self, action, data={}, headers={}, method='GET'):
"""
Append the user authentication details to every incoming request
"""
data = self.merge(data, {'user': self.username, 'password': self.password, 'api_id': self.apiId})
return Transport.request(self, action, data, he... | Append the user authentication details to every incoming request |
def _check_position(self, feature, info):
"""
Takes the featur and the info dict and checks for the forced position
:param feature:
:param info:
:return:
"""
pos = info.get('position')
if pos is not None:
feature_pos = self.get_feature_position... | Takes the featur and the info dict and checks for the forced position
:param feature:
:param info:
:return: |
def dragRel(xOffset=0, yOffset=0, duration=0.0, tween=linear, button='left', pause=None, _pause=True, mouseDownUp=True):
"""Performs a mouse drag (mouse movement while a button is held down) to a
point on the screen, relative to its current position.
The x and y parameters detail where the mouse event happ... | Performs a mouse drag (mouse movement while a button is held down) to a
point on the screen, relative to its current position.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the... |
def hook(self, name):
    """Return a decorator that registers a callback under hook *name*."""
    def register(callback):
        # Record the callback, then hand it back unchanged so the
        # decoration is transparent to its caller.
        self.hooks.add(name, callback)
        return callback
    return register
def put(self, namespacePrefix):
"""Update a specific configuration namespace"""
self.reqparse.add_argument('name', type=str, required=True)
self.reqparse.add_argument('sortOrder', type=int, required=True)
args = self.reqparse.parse_args()
ns = db.ConfigNamespace.find_one(ConfigN... | Update a specific configuration namespace |
def normalize(self, timestamp, steps=0):
    '''
    Normalize a timestamp according to the interval configuration. Optionally
    can be used to calculate the timestamp N steps away.
    '''
    # Round-trip through the bucket representation; any stepping happens in
    # bucket space before converting back to a timestamp.
    # So far, the only commonality with RelativeTime
    bucket = self.to_bucket(timestamp, steps)
    return self.from_bucket(bucket)
can be used to calculate the timestamp N steps away. |
def returns_true_or_raises(f):
"""A safety net.
Decorator for functions that are only allowed to return True or raise
an exception.
Args:
f: A function whose only expected return value is True.
Returns:
A wrapped functions whose guaranteed only return value is True.
"""
@f... | A safety net.
Decorator for functions that are only allowed to return True or raise
an exception.
Args:
f: A function whose only expected return value is True.
Returns:
A wrapped functions whose guaranteed only return value is True. |
def set_log_level(logger, level): # type: (logging.Logger, int) -> None
"""Dynamic reconfiguration of the log level"""
if level > 2:
level = 2
if level < -1:
level = -1
levels = {
-1: logging.ERROR,
0: logging.WARN,
1: logging.INFO,
2: logging.DEBUG
... | Dynamic reconfiguration of the log level |
def _CreateBudget(client):
"""Creates the budget.
Args:
client: an AdWordsClient instance.
Returns:
a suds.sudsobject.Object representation of the created budget.
"""
budget_service = client.GetService('BudgetService', version='v201809')
# Create the campaign budget
operation = {
'operand... | Creates the budget.
Args:
client: an AdWordsClient instance.
Returns:
a suds.sudsobject.Object representation of the created budget. |
def show_current_number(parser, token):
"""Show the current page number, or insert it in the context.
This tag can for example be useful to change the page title according to
the current page number.
To just show current page number:
.. code-block:: html+django
{% show_current_number %}
... | Show the current page number, or insert it in the context.
This tag can for example be useful to change the page title according to
the current page number.
To just show current page number:
.. code-block:: html+django
{% show_current_number %}
If you use multiple paginations in the sam... |
def call_pre_hook(awsclient, cloudformation):
"""Invoke the pre_hook BEFORE the config is read.
:param awsclient:
:param cloudformation:
"""
# TODO: this is deprecated!! move this to glomex_config_reader
# no config available
if not hasattr(cloudformation, 'pre_hook'):
# hook is not... | Invoke the pre_hook BEFORE the config is read.
:param awsclient:
:param cloudformation: |
def get_by_username(cls, username):
    """Get profile by username.
    :param username: A username to query for (case insensitive).
    """
    # Lowercase up front so the comparison is case-insensitive, then
    # require exactly one matching row.
    normalized = username.lower()
    query = cls.query.filter(UserProfile._username == normalized)
    return query.one()
:param username: A username to query for (case insensitive). |
def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Ignore the template inputs when initially reading the job template.
Look up each TemplateInput entity separately
and afterwords add them to the JobTemplate entity."""
if attrs is None:
attrs = self.... | Ignore the template inputs when initially reading the job template.
Look up each TemplateInput entity separately
and afterwords add them to the JobTemplate entity. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.