code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def on_save(self, event):
dlg = wx.FileDialog(None, self.settings.get_title(), '', "", '*.*',
wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
if dlg.ShowModal() == wx.ID_OK:
self.settings.save(dlg.GetPath()) | called on save button |
def _ewp_flags_set(self, ewp_dic_subset, project_dic, flag_type, flag_dic):
try:
if flag_type in project_dic['misc'].keys():
index_option = self._get_option(ewp_dic_subset, flag_dic['enable'])
self._set_option(ewp_dic_subset[index_option], '1')
index_o... | Flags from misc to set to ewp project |
def getGenomeList() :
import rabaDB.filters as rfilt
f = rfilt.RabaQuery(Genome_Raba)
names = []
for g in f.iterRun() :
names.append(g.name)
return names | Return the names of all imported genomes |
def fit(fqdn, result, *argl, **argd):
global _machines
out = None
if len(argl) > 0:
machine = argl[0]
key = id(machine)
_machines[key] = (machine, argl[0], argl[1])
if isclassifier(machine):
out = classify_fit(fqdn, result, *argl, **argd)
elif isregressor(... | Analyzes the result of a generic fit operation performed by `sklearn`.
Args:
fqdn (str): full-qualified name of the method that was called.
result: result of calling the method with `fqdn`.
argl (tuple): positional arguments passed to the method call.
argd (dict): keyword arguments ... |
def save_assessment_offered(self, assessment_offered_form, *args, **kwargs):
if assessment_offered_form.is_for_update():
return self.update_assessment_offered(assessment_offered_form, *args, **kwargs)
else:
return self.create_assessment_offered(assessment_offered_form, *args, **k... | Pass through to provider AssessmentOfferedAdminSession.update_assessment_offered |
def pop_events(self, regex_pattern, timeout):
if not self.started:
raise IllegalStateError(
"Dispatcher needs to be started before popping.")
deadline = time.time() + timeout
while True:
results = self._match_and_pop(regex_pattern)
if len(resul... | Pop events whose names match a regex pattern.
If such event(s) exist, pop one event from each event queue that
satisfies the condition. Otherwise, wait for an event that satisfies
the condition to occur, with timeout.
Results are sorted by timestamp in ascending order.
Args:
... |
def assert_looks_like(first, second, msg=None):
first = _re.sub("\s+", " ", first.strip())
second = _re.sub("\s+", " ", second.strip())
if first != second:
raise AssertionError(msg or "%r does not look like %r" % (first, second)) | Compare two strings if all contiguous whitespace is coalesced. |
def _collect_infos(dirname):
for r, _ds, fs in walk(dirname):
if not islink(r) and r != dirname:
i = ZipInfo()
i.filename = join(relpath(r, dirname), "")
i.file_size = 0
i.compress_size = 0
i.CRC = 0
yield i.filename, i
for f in... | Utility function used by ExplodedZipFile to generate ZipInfo
entries for all of the files and directories under dirname |
def needs_fully_loaded(method):
@functools.wraps(method)
def inner(self, *args, **kwargs):
if not self.fully_loaded:
loaded_yaml = yaml_loader.YamlLoader.load_yaml_by_path(self.path)
self.parsed_yaml = loaded_yaml
self.fully_loaded = True
return method(self, *... | Wraps all publicly callable methods of YamlAssistant. If the assistant was loaded
from cache, this decorator will fully load it first time a publicly callable method
is used. |
def _extract_subdomain(host):
host = host.split(':')[0]
try:
socket.inet_aton(host)
except socket.error:
return '.'.join(host.split('.')[:-2]) | Returns a subdomain from a host. This host is typically the
HTTP_HOST request envvar. If the host is an IP address, `None` is
returned
:param host: Request's target host |
def get_cur_file_size(fp, position_to_eof=False):
if not position_to_eof:
cur_pos = fp.tell()
fp.seek(0, os.SEEK_END)
cur_file_size = fp.tell()
if not position_to_eof:
fp.seek(cur_pos, os.SEEK_SET)
return cur_file_size | Returns size of file, optionally leaving fp positioned at EOF. |
def _get_local_files(local_dir, pattern=''):
local_files = {}
if pattern:
cwd = os.getcwd()
os.chdir(local_dir)
patterns = pattern.split('|')
local_list = set([])
for p in patterns: local_list = local_list | set(glob(p))
for path in local_list:
dir, fi... | Returns a dictionary with directories as keys, and filenames as values
for filenames matching the glob ``pattern`` under the ``local_dir``
``pattern can contain the Boolean OR | to evaluated multiple patterns into
a combined set. |
def _sort_policy(doc):
if isinstance(doc, list):
return sorted([_sort_policy(i) for i in doc])
elif isinstance(doc, (dict, OrderedDict)):
return dict([(k, _sort_policy(v)) for k, v in six.iteritems(doc)])
return doc | List-type sub-items in policies don't happen to be order-sensitive, but
compare operations will render them unequal, leading to non-idempotent
state runs. We'll sort any list-type subitems before comparison to reduce
the likelihood of false negatives. |
def parse (cls, line, lineno, log, cmddict=None):
delay = -1
token = line.split()[0]
start = line.find(token)
pos = SeqPos(line, lineno, start + 1, start + len(token))
try:
delay = float(token)
except ValueError:
msg = 'String "%s" could not be interpreted as a numeric time delay.'... | Parses the SeqDelay from a line of text. Warning and error
messages are logged via the SeqMsgLog log. |
def to_dict(self, data=True):
d = self.variable.to_dict(data=data)
d.update({'coords': {}, 'name': self.name})
for k in self.coords:
d['coords'][k] = self.coords[k].variable.to_dict(data=data)
return d | Convert this xarray.DataArray into a dictionary following xarray
naming conventions.
Converts all variables and attributes to native Python objects.
Useful for coverting to json. To avoid datetime incompatibility
use decode_times=False kwarg in xarrray.open_dataset.
Parameters
... |
def _init_cfg_interfaces(self, cb, intf_list=None, all_intf=True):
if not all_intf:
self.intf_list = intf_list
else:
self.intf_list = sys_utils.get_all_run_phy_intf()
self.cb = cb
self.intf_attr = {}
self.cfg_lldp_interface_list(self.intf_list) | Configure the interfaces during init time. |
def keys(self, namespace, prefix=None, limit=None, offset=None):
params = [namespace]
query = 'SELECT key FROM gauged_keys WHERE namespace = %s'
if prefix is not None:
query += ' AND key LIKE %s'
params.append(prefix + '%')
if limit is not None:
query ... | Get keys from a namespace |
def _split_ns_command(cmd_token):
namespace = None
cmd_split = cmd_token.split(".", 1)
if len(cmd_split) == 1:
command = cmd_split[0]
else:
namespace = cmd_split[0]
command = cmd_split[1]
if not namespace:
namespace = ""
return namespace.lower(), command.lower() | Extracts the name space and the command name of the given command token.
:param cmd_token: The command token
:return: The extracted (name space, command) tuple |
def read(self, path, filename=None, offset=None, size=-1):
storageScheme, key = self.getkey(path, filename=filename)
if offset or (size > -1):
if not offset:
offset = 0
if size > -1:
sizeStr = offset + size - 1
else:
siz... | Read a file specified by path. |
def load_shared_data(path: typing.Union[str, None]) -> dict:
if path is None:
return dict()
if not os.path.exists(path):
raise FileNotFoundError('No such shared data file "{}"'.format(path))
try:
with open(path, 'r') as fp:
data = json.load(fp)
except Exception:
... | Load shared data from a JSON file stored on disk |
def drawHUD(self):
self.win.move(self.height - 2, self.x_pad)
self.win.clrtoeol()
self.win.box()
self.addstr(2, self.x_pad + 1, "Population: %i" % len(self.grid))
self.addstr(3, self.x_pad + 1, "Generation: %s" % self.current_gen)
self.addstr(3, self.x_grid - 21, "s: star... | Draw information on population size and current generation |
def tile_y_size(self, zoom):
warnings.warn(DeprecationWarning("tile_y_size is deprecated"))
validate_zoom(zoom)
return round(self.y_size / self.matrix_height(zoom), ROUND) | Height of a tile in SRID units at zoom level.
- zoom: zoom level |
def load_fixture(fixture_file):
utils.check_for_local_server()
local_url = config["local_server"]["url"]
server = Server(local_url)
fixture = json.load(fixture_file)
for db_name, _items in fixture.items():
db = server[db_name]
with click.progressbar(
_items, label=db_name... | Populate the database from a JSON file. Reads the JSON file FIXTURE_FILE
and uses it to populate the database. Fuxture files should consist of a
dictionary mapping database names to arrays of objects to store in those
databases. |
def print_mem(unit="MB"):
try:
import psutil
B = float(psutil.Process(os.getpid()).memory_info().vms)
KB = B / 1024
MB = KB / 1024
GB = MB / 1024
result = vars()[unit]
print_info("memory usage: %.2f(%s)" % (result, unit))
return result
except Impor... | Show the proc-mem-cost with psutil, use this only for lazinesssss.
:param unit: B, KB, MB, GB. |
def item(self, name, fuzzy_threshold=100):
match = process.extractOne(
name,
self._items.keys(),
score_cutoff=(fuzzy_threshold-1),
)
if match:
exact_name = match[0]
item = self._items[exact_name]
item.decrypt_with(self)
... | Extract a password from an unlocked Keychain using fuzzy
matching. ``fuzzy_threshold`` can be an integer between 0 and
100, where 100 is an exact match. |
def set_affinity(pid, cpuset):
_cpuset = cpu_set_t()
__CPU_ZERO(_cpuset)
for i in cpuset:
if i in range(0, sizeof(cpu_set_t) * 8):
__CPU_SET(i, _cpuset)
if libnuma.sched_setaffinity(pid, sizeof(cpu_set_t), byref(_cpuset)) < 0:
raise RuntimeError() | Sets the CPU affinity mask of the process whose ID is pid to the value specified by mask.
If pid is zero, then the calling process is used.
@param pid: process PID (0 == current process)
@type pid: C{int}
@param cpuset: set of CPU ids
@type cpuset: C{set} |
def _get_rnn_layer(mode, num_layers, input_size, hidden_size, dropout, weight_dropout):
if mode == 'rnn_relu':
rnn_block = functools.partial(rnn.RNN, activation='relu')
elif mode == 'rnn_tanh':
rnn_block = functools.partial(rnn.RNN, activation='tanh')
elif mode == 'lstm':
rnn_block =... | create rnn layer given specs |
def get_new_term_doc_mat(self, doc_domains):
assert len(doc_domains) == self.term_doc_matrix.get_num_docs()
doc_domain_set = set(doc_domains)
num_terms = self.term_doc_matrix.get_num_terms()
num_domains = len(doc_domain_set)
domain_mat = lil_matrix((num_domains, num_terms), dtype=int)
X = self.term_doc_matr... | Combines documents together that are in the same domain
Parameters
----------
doc_domains : array-like
Returns
-------
scipy.sparse.csr_matrix |
def _compute(self, feed_dict, shard):
try:
descriptor, enq = self._tfrun(self._tf_expr[shard], feed_dict=feed_dict)
self._inputs_waiting.decrement(shard)
except Exception as e:
montblanc.log.exception("Compute Exception")
raise | Call the tensorflow compute |
def interface(self, value):
self._interface = value
if isinstance(value, int):
self._device_number = value
else:
self._serial_number = value | Sets the interface used to connect to the device.
:param value: may specify either the serial number or the device index
:type value: string or int |
def codemirror_field_css_assets(*args):
manifesto = CodemirrorAssetTagRender()
manifesto.register_from_fields(*args)
return mark_safe(manifesto.css_html()) | Tag to render CodeMirror CSS assets needed for all given fields.
Example:
::
{% load djangocodemirror_tags %}
{% codemirror_field_css_assets form.myfield1 form.myfield2 %} |
def fix_timezone_separator(cls, timestr):
tz_sep = cls.TIMEZONE_SEPARATOR.match(timestr)
if tz_sep is not None:
return tz_sep.group(1) + tz_sep.group(2) + ':' + tz_sep.group(3)
return timestr | Replace invalid timezone separator to prevent
`dateutil.parser.parse` to raise.
:return: the new string if invalid separators were found,
`None` otherwise |
def add_package(
self,
package,
node_paths=None,
type_option=PackageInstallationTypeOption.PROD,
version_option=None):
args=self._get_add_package_args(
package,
type_option=type_option,
version_option=version_option)
return self.run_command(args=args, node_paths=node_paths... | Returns a command that when executed will add a node package to current node module.
:param package: string. A valid npm/yarn package description. The accepted forms are
package-name, package-name@version, package-name@tag, file:/folder, file:/path/to.tgz
https://url/to.tgz
:param node_paths: A l... |
def run(self):
with utils.ChangeDir(self.dirname):
sys.path.insert(0, self.dirname)
sys.argv[1:] = self.args
runpy.run_module(self.not_suffixed(self.filename),
run_name='__main__',
alter_sys=True) | Executes the code of the specified module. |
def configure(self, args):
for plug in self._plugins:
plug_name = self.plugin_name(plug)
plug.enabled = getattr(args, "plugin_%s" % plug_name, False)
if plug.enabled and getattr(plug, "configure", None):
if callable(getattr(plug, "configure", None)):
... | Configure the set of plugins with the given args.
After configuration, disabled plugins are removed from the plugins list. |
def setHandler(self,handler,cbfn):
if handler == "async-responses":
self.async_responses_callback = cbfn
elif handler == "registrations-expired":
self.registrations_expired_callback = cbfn
elif handler == "de-registrations":
self.de_registrations_callback = cbfn
elif handler == "reg-updates":
self.r... | Register a handler for a particular notification type.
These are the types of notifications that are acceptable.
| 'async-responses'
| 'registrations-expired'
| 'de-registrations'
| 'reg-updates'
| 'registrations'
| 'notifications'
:param str handler: name of the notification type
:param fnptr cb... |
def banner(message, width=30, style='banner', out=sys.stdout):
out.write(header([message], width=max(width, len(message)), style=style) + '\n')
out.flush() | Prints a banner message
Parameters
----------
message : string
The message to print in the banner
width : int
The minimum width of the banner (Default: 30)
style : string
A line formatting style (Default: 'banner')
out : writer
An object that has write() and f... |
def login_required(self, f):
@wraps(f)
def wrapped_f(*args, **kwargs):
if current_user.anonymous:
msg = ("Rejected User '%s access to '%s' as user"
" could not be authenticated.")
self.logger.warn(msg % (
current_user... | Require a user to be validated by Identity to access an endpoint.
:raises: FlaskKeystoneUnauthorized
This method will gate a particular endpoint to only be accessed by
:class:`FlaskKeystone.User`'s. This means that a valid token will need
to be passed to grant access. If a User is not ... |
def tune(self, verbose=None):
if not self._tune:
return False
else:
self.w_tune.append(
abs(self.stochastic.last_value - self.stochastic.value))
self.w = 2 * (sum(self.w_tune) / len(self.w_tune))
return True | Tuning initial slice width parameter |
def on(event, *args, **kwargs):
def wrapper(func):
for i, arg in args:
kwargs[i] = arg
func.event = Event(event, kwargs)
return func
return wrapper | Event method wrapper for bot mixins. When a bot is constructed,
its metaclass inspects all members of all base classes, and
looks for methods marked with an event attribute which is assigned
via this wrapper. It then stores all the methods in a dict
that maps event names to lists of these methods, which... |
def _default_format(self, occur):
if self.text or self.children:
return self.start_tag() + "%s" + self.end_tag()
return self.start_tag(empty=True) | Return the default serialization format. |
def _open(filename=None, mode='r'):
if not filename or filename == '-':
if not mode or 'r' in mode:
file = sys.stdin
elif 'w' in mode:
file = sys.stdout
else:
raise ValueError('Invalid mode for file: {}'.format(mode))
else:
file = open(filename... | Open a file or ``sys.stdout`` depending on the provided filename.
Args:
filename (str): The path to the file that should be opened. If
``None`` or ``'-'``, ``sys.stdout`` or ``sys.stdin`` is
returned depending on the desired mode. Defaults to ``None``.
mode (str): The mode t... |
def command(self, function=None, prefix=None, unobserved=False):
captured_f = self.capture(function, prefix=prefix)
captured_f.unobserved = unobserved
self.commands[function.__name__] = captured_f
return captured_f | Decorator to define a new command for this Ingredient or Experiment.
The name of the command will be the name of the function. It can be
called from the command-line or by using the run_command function.
Commands are automatically also captured functions.
The command can be given a pr... |
def config(self, charm_id, channel=None):
url = '{}/{}/meta/charm-config'.format(self.url, _get_path(charm_id))
data = self._get(_add_channel(url, channel))
return data.json() | Get the config data for a charm.
@param charm_id The charm's id.
@param channel Optional channel name. |
def ApplyPluginToMultiTypeCollection(plugin, output_collection,
source_urn=None):
for chunk in plugin.Start():
yield chunk
for stored_type_name in sorted(output_collection.ListStoredTypes()):
stored_cls = rdfvalue.RDFValue.classes[stored_type_name]
def GetValues():
... | Applies instant output plugin to a multi-type collection.
Args:
plugin: InstantOutputPlugin instance.
output_collection: MultiTypeCollection instance.
source_urn: If not None, override source_urn for collection items. This has
to be used when exporting flow results - their GrrMessages don't have
... |
def plot_state_histogram(self, ax):
title = "Estimated state"
nqc = int(round(np.log2(self.rho_est.data.shape[0])))
labels = ut.basis_labels(nqc)
return ut.state_histogram(self.rho_est, ax, title) | Visualize the complex matrix elements of the estimated state.
:param matplotlib.Axes ax: A matplotlib Axes object to plot into. |
def _build_xpath_expr(attrs):
if 'class_' in attrs:
attrs['class'] = attrs.pop('class_')
s = ["@{key}={val!r}".format(key=k, val=v) for k, v in attrs.items()]
return '[{expr}]'.format(expr=' and '.join(s)) | Build an xpath expression to simulate bs4's ability to pass in kwargs to
search for attributes when using the lxml parser.
Parameters
----------
attrs : dict
A dict of HTML attributes. These are NOT checked for validity.
Returns
-------
expr : unicode
An XPath expression th... |
def normalize_missing(xs):
if isinstance(xs, dict):
for k, v in xs.items():
xs[k] = normalize_missing(v)
elif isinstance(xs, (list, tuple)):
xs = [normalize_missing(x) for x in xs]
elif isinstance(xs, six.string_types):
if xs.lower() in ["none", "null"]:
xs = ... | Normalize missing values to avoid string 'None' inputs. |
def literalize(self):
if self.isliteral:
return self
args = tuple(arg.literalize() for arg in self.args)
if all(arg is self.args[i] for i, arg in enumerate(args)):
return self
return self.__class__(*args) | Return an expression where NOTs are only occurring as literals.
Applied recursively to subexpressions. |
def LoadFromFile(cls, script_path):
_name, dev = ComponentRegistry().load_extension(script_path, class_filter=VirtualTile, unique=True)
return dev | Import a virtual tile from a file rather than an installed module
script_path must point to a python file ending in .py that contains exactly one
VirtualTile class definition. That class is loaded and executed as if it
were installed.
To facilitate development, if there is a proxy obj... |
def save_config(self):
if not os.path.exists(self._conf_dir):
os.makedirs(self._conf_dir)
conf_file = os.path.join(self._conf_dir, "dql.json")
with open(conf_file, "w") as ofile:
json.dump(self.conf, ofile, indent=2) | Save the conf file |
def get_broks_from_satellites(self):
for satellites in [self.conf.brokers, self.conf.schedulers,
self.conf.pollers, self.conf.reactionners, self.conf.receivers]:
for satellite in satellites:
if not satellite.reachable:
continue
... | Get broks from my all internal satellite links
The arbiter get the broks from ALL the known satellites
:return: None |
def get_diagonalizing_basis(list_of_pauli_terms):
qubit_ops = set(reduce(lambda x, y: x + y,
[list(term._ops.items()) for term in list_of_pauli_terms]))
qubit_ops = sorted(list(qubit_ops), key=lambda x: x[0])
return PauliTerm.from_list(list(map(lambda x: tuple(reversed(x)), qubit_ops)... | Find the Pauli Term with the most non-identity terms
:param list_of_pauli_terms: List of Pauli terms to check
:return: The highest weight Pauli Term
:rtype: PauliTerm |
def convert_coordinates(self, points, axisorder='blr'):
return convert_coordinates_sequence(points,self._boundary_scale,
self._axis_limits, axisorder) | Convert data coordinates to simplex coordinates for plotting
in the case that axis limits have been applied. |
def _get_column_dtype(llwcol):
try:
dtype = llwcol.dtype
if dtype is numpy.dtype('O'):
raise AttributeError
return dtype
except AttributeError:
try:
llwtype = llwcol.parentNode.validcolumns[llwcol.Name]
except AttributeError:
try:
... | Get the data type of a LIGO_LW `Column`
Parameters
----------
llwcol : :class:`~ligo.lw.table.Column`, `numpy.ndarray`, iterable
a LIGO_LW column, a numpy array, or an iterable
Returns
-------
dtype : `type`, None
the object data type for values in the given column, `None` is
... |
def search_results_total(html, xpath, check, delimiter):
for container in html.findall(xpath):
if check in container.findtext('.'):
text = container.findtext('.').split(delimiter)
total = int(text[-1].strip())
return total | Get the total number of results from the DOM of a search index. |
def set_replication_enabled(status, host=None, core_name=None):
if not _is_master() and _get_none_or_value(host) is None:
return _get_return_dict(False,
errors=['Only minions configured as master can run this'])
cmd = 'enablereplication' if status else 'disablereplication'
if _get_no... | MASTER ONLY
Sets the master to ignore poll requests from the slaves. Useful when you
don't want the slaves replicating during indexing or when clearing the
index.
status : boolean
Sets the replication status to the specified state.
host : str (None)
The solr host to query. __opts__[... |
def eps(self, file, scale=1, module_color=(0, 0, 0),
background=None, quiet_zone=4):
builder._eps(self.code, self.version, file, scale, module_color,
background, quiet_zone) | This method writes the QR code out as an EPS document. The
code is drawn by only writing the data modules corresponding to a 1.
They are drawn using a line, such that contiguous modules in a row
are drawn with a single line.
The *file* parameter is used to specify where to write the doc... |
def ec2_credentials_create(user_id=None, name=None,
tenant_id=None, tenant=None,
profile=None, **connection_args):
kstone = auth(profile, **connection_args)
if name:
user_id = user_get(name=name, profile=profile,
**connecti... | Create EC2-compatible credentials for user per tenant
CLI Examples:
.. code-block:: bash
salt '*' keystone.ec2_credentials_create name=admin tenant=admin
salt '*' keystone.ec2_credentials_create \
user_id=c965f79c4f864eaaa9c3b41904e67082 \
tenant_id=722787eb540849158668370dc6... |
def create_graphics(self):
rnftools.utils.shell('"{}" "{}"'.format("gnuplot", self._gp_fn))
if self.render_pdf_method is not None:
svg_fn = self._svg_fn
pdf_fn = self._pdf_fn
svg42pdf(svg_fn, pdf_fn, method=self.render_pdf_method) | Create images related to this BAM file using GnuPlot. |
def list_(prefix='', region=None, key=None, keyid=None, profile=None):
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
def extract_name(queue_url):
return _urlparse(queue_url).path.split('/')[2]
try:
r = conn.list_queues(QueueNamePrefix=prefix)
urls = r.get('Qu... | Return a list of the names of all visible queues.
.. versionadded:: 2016.11.0
CLI Example:
.. code-block:: bash
salt myminion boto_sqs.list region=us-east-1 |
def display_info(self):
if self.moc is None:
print('No MOC information present')
return
if self.moc.name is not None:
print('Name:', self.moc.name)
if self.moc.id is not None:
print('Identifier:', self.moc.id)
print('Order:', self.moc.order... | Display basic information about the running MOC. |
def open(self, print_matlab_welcome=False):
if self.process and not self.process.returncode:
raise MatlabConnectionError('Matlab(TM) process is still active. Use close to '
'close it')
self.process = subprocess.Popen(
[self.matlab_p... | Opens the matlab process. |
def createNode(self, cls, name, *args, **kw):
m = self.findNode(name)
if m is None:
m = cls(name, *args, **kw)
self.addNode(m)
return m | Add a node of type cls to the graph if it does not already exist
by the given name |
def _query(self, text):
params = (
('v', self.api_version),
('query', text),
('lang', self.language),
('sessionId', self.session_id),
('timezone', self.timezone),
)
if self.query_response:
self.previous_query_response = self... | Takes natural language text and information as query parameters and returns information as JSON. |
def fn_std(self, a, axis=None):
return numpy.nanstd(self._to_ndarray(a), axis=axis) | Compute the standard deviation of an array, ignoring NaNs.
:param a: The array.
:return: The standard deviation of the array. |
def set_level(self, level):
for handler in self.__coloredlogs_handlers:
handler.setLevel(level=level)
self.logger.setLevel(level=level) | Set the logging level of this logger.
:param level: must be an int or a str. |
def get_clusters(self, platform, retry_contexts, all_clusters):
possible_cluster_info = {}
candidates = set(copy.copy(all_clusters))
while candidates and not possible_cluster_info:
wait_for_any_cluster(retry_contexts)
for cluster in sorted(candidates, key=attrgetter('prio... | return clusters sorted by load. |
def cmd_tr(self, x=None, y=None, xy=None, ch=None):
viewer = self.get_viewer(ch)
if viewer is None:
self.log("No current viewer/channel.")
return
fx, fy, sxy = viewer.get_transforms()
if x is None and y is None and xy is None:
self.log("x=%s y=%s xy=%s... | tr x=0|1 y=0|1 xy=0|1 ch=chname
Transform the image for the given viewer/channel by flipping
(x=1 and/or y=1) or swapping axes (xy=1).
If no value is given, reports the current rotation. |
def report(self, name, **kwargs):
group_obj = Report(name, **kwargs)
return self._group(group_obj) | Add Report data to Batch object.
Args:
name (str): The name for this Group.
file_name (str): The name for the attached file for this Group.
date_added (str, kwargs): The date timestamp the Indicator was created.
file_content (str;method, kwargs): The file content... |
def override_temp(replacement):
pkg_resources.py31compat.makedirs(replacement, exist_ok=True)
saved = tempfile.tempdir
tempfile.tempdir = replacement
try:
yield
finally:
tempfile.tempdir = saved | Monkey-patch tempfile.tempdir with replacement, ensuring it exists |
def checkASN(filename):
extnType = filename[filename.rfind('_')+1:filename.rfind('.')]
if isValidAssocExtn(extnType):
return True
else:
return False | Determine if the filename provided to the function belongs to
an association.
Parameters
----------
filename: string
Returns
-------
validASN : boolean value |
def get_state(self):
D = {}
for key in self._state_props:
D[key] = getattr(self, key)
return D | Get the current view state of the camera
Returns a dict of key-value pairs. The exact keys depend on the
camera. Can be passed to set_state() (of this or another camera
of the same type) to reproduce the state. |
def __get_connection_info():
conn_info = {}
try:
conn_info['hostname'] = __opts__['mysql_auth']['hostname']
conn_info['username'] = __opts__['mysql_auth']['username']
conn_info['password'] = __opts__['mysql_auth']['password']
conn_info['database'] = __opts__['mysql_auth']['databa... | Grab MySQL Connection Details |
def ancestor(self, index):
if not isinstance(index, int):
self.log_exc(u"index is not an integer", None, True, TypeError)
if index < 0:
self.log_exc(u"index cannot be negative", None, True, ValueError)
parent_node = self
for i in range(index):
if paren... | Return the ``index``-th ancestor.
The 0-th ancestor is the node itself,
the 1-th ancestor is its parent node,
etc.
:param int index: the number of levels to go up
:rtype: :class:`~aeneas.tree.Tree`
:raises: TypeError if ``index`` is not an int
:raises: ValueErro... |
def delete(python_data: LdapObject, database: Optional[Database] = None) -> None:
dn = python_data.get_as_single('dn')
assert dn is not None
database = get_database(database)
connection = database.connection
connection.delete(dn) | Delete a LdapObject from the database. |
def get_key_policy(key_id, policy_name, region=None, key=None, keyid=None,
profile=None):
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
r = {}
try:
key_policy = conn.get_key_policy(key_id, policy_name)
r['key_policy'] = salt.serializers.json.deseri... | Get the policy for the specified key.
CLI example::
salt myminion boto_kms.get_key_policy 'alias/mykey' mypolicy |
def wncomd(left, right, window):
assert isinstance(window, stypes.SpiceCell)
assert window.dtype == 1
left = ctypes.c_double(left)
right = ctypes.c_double(right)
result = stypes.SpiceCell.double(window.size)
libspice.wncomd_c(left, right, ctypes.byref(window), result)
return result | Determine the complement of a double precision window with
respect to a specified interval.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wncomd_c.html
:param left: left endpoints of complement interval.
:type left: float
:param right: right endpoints of complement interval.
:type ... |
def decode_header(header, normalize=False):
regex = r'"(=\?.+?\?.+?\?[^ ?]+\?=)"'
value = re.sub(regex, r'\1', header)
logging.debug("unquoted header: |%s|", value)
valuelist = email.header.decode_header(value)
decoded_list = []
for v, enc in valuelist:
v = string_decode(v, enc)
... | decode a header value to a unicode string
values are usually a mixture of different substrings
encoded in quoted printable using different encodings.
This turns it into a single unicode string
:param header: the header value
:type header: str
:param normalize: replace trailing spaces after new... |
def sitespeptidesproteins(df, site_localization_probability=0.75):
sites = filters.filter_localization_probability(df, site_localization_probability)['Sequence window']
peptides = set(df['Sequence window'])
proteins = set([str(p).split(';')[0] for p in df['Proteins']])
return len(sites), len(peptides), ... | Generate summary count of modified sites, peptides and proteins in a processed dataset ``DataFrame``.
Returns the number of sites, peptides and proteins as calculated as follows:
- `sites` (>0.75; or specified site localization probability) count of all sites > threshold
- `peptides` the set of `Sequence ... |
def register(cls, *args, **kwargs):
if cls.app is None:
return register(*args, handler=cls, **kwargs)
return cls.app.register(*args, handler=cls, **kwargs) | Register view to handler. |
def get_merged_filter(self):
track = set()
follow = set()
for handler in self.handlers:
track.update(handler.filter.track)
follow.update(handler.filter.follow)
return TweetFilter(track=list(track), follow=list(follow)) | Return merged filter from list of handlers
:return: merged filter
:rtype: :class:`~responsebot.models.TweetFilter` |
def parse_PISCES_output(pisces_output, path=False):
pisces_dict = {}
if path:
pisces_path = Path(pisces_output)
pisces_content = pisces_path.read_text().splitlines()[1:]
else:
pisces_content = pisces_output.splitlines()[1:]
for line in pisces_content:
pdb = line.split()[0... | Takes the output list of a PISCES cull and returns in a usable dictionary.
Notes
-----
Designed for outputs of protein sequence redundancy culls conducted using the PISCES server.
http://dunbrack.fccc.edu/PISCES.php
G. Wang and R. L. Dunbrack, Jr. PISCES: a protein sequence culling server. Bioinfor... |
def get_sequence_rules(self):
collection = JSONClientValidated('assessment_authoring',
collection='SequenceRule',
runtime=self._runtime)
result = collection.find(self._view_filter()).sort('_id', DESCENDING)
return ... | Gets all ``SequenceRules``.
return: (osid.assessment.authoring.SequenceRuleList) - the
returned ``SequenceRule`` list
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be imple... |
def shell_process(command, input_data=None, background=False, exitcode=False):
data = None
try:
kwargs = {
'shell': isinstance(command, basestring),
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE
}
if not input_data is None:
kwargs['s... | Shells a process with the given shell command.
`command`
Shell command to spawn.
`input_data`
String to pipe to process as input.
`background`
Set to ``True`` to fork process into background.
NOTE: This exits immediately with no result returned.
... |
def push(item, remote_addr, trg_queue, protocol=u'jsonrpc'):
if protocol == u'jsonrpc':
try:
server = Server(remote_addr, encoding=_c.FSQ_CHARSET)
return server.enqueue(item.id, trg_queue, item.item.read())
except Exception, e:
raise FSQPushError(e)
raise Valu... | Enqueue an FSQWorkItem at a remote queue |
def get_global_shelf_fpath(appname='default', ensure=False):
    """Return the filepath to the global shelf for *appname*.

    ``ensure`` is forwarded to the cache-dir lookup (presumably creating
    the directory when True — confirm against get_global_cache_dir).
    """
    cache_dir = get_global_cache_dir(appname, ensure=ensure)
    return join(cache_dir, meta_util_constants.global_cache_fname)
def _add_plots_to_output(out, data):
out["plot"] = {}
diagram_plot = _add_diagram_plot(out, data)
if diagram_plot:
out["plot"]["diagram"] = diagram_plot
scatter = _add_scatter_plot(out, data)
if scatter:
out["plot"]["scatter"] = scatter
scatter_global = _add_global_scatter_plot(o... | Add CNVkit plots summarizing called copy number values. |
def partof(self, ns1, id1, ns2, id2):
    """Return True if one entity is "partof" another.

    Parameters
    ----------
    ns1 : str
        Namespace code for the first entity.
    id1 : str
        URI for the first entity.
    ns2 : str
        Namespace code for the second entity.
    id2 : str
        URI for the second entity.
    """
    # The relation callback ignores the graph argument and only
    # expands the node's "partof" objects.
    def related(node, graph):
        return self.partof_objects(node)

    return self.directly_or_indirectly_related(
        ns1, id1, ns2, id2, self.partof_closure, related)
Parameters
----------
ns1 : str
Namespace code for an entity.
id1 : str
URI for an entity.
ns2 : str
Namespace code for an entity.
id2 : str
URI for an entity.
Returns... |
def get_dataset(self, X, y=None):
if is_dataset(X):
return X
dataset = self.dataset
is_initialized = not callable(dataset)
kwargs = self._get_params_for('dataset')
if kwargs and is_initialized:
raise TypeError("Trying to pass an initialized Dataset while "... | Get a dataset that contains the input data and is passed to
the iterator.
Override this if you want to initialize your dataset
differently.
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
... |
def get(cls, pid_value, pid_type=None, **kwargs):
    """Get a persistent identifier for this provider.

    :param pid_value: Persistent identifier value.
    :param pid_type: Persistent identifier type; defaults to ``cls.pid_type``.
    :param kwargs: Extra keyword arguments forwarded to the provider
        constructor.
    """
    pid = PersistentIdentifier.get(
        pid_type or cls.pid_type, pid_value, pid_provider=cls.pid_provider)
    return cls(pid, **kwargs)
:param pid_type: Persistent identifier type. (Default: configured
:attr:`invenio_pidstore.providers.base.BaseProvider.pid_type`)
:param pid_value: Persistent identifier value.
:param kwargs: See
:meth:`invenio_pidstore.provi... |
def getBlocks(sentences, n):
    """Split *sentences* into consecutive blocks of at most ``n`` items.

    :param sentences: List of strings where each string is a sentence.
    :type sentences: list
    :param n: Maximum block size; the final block may be shorter.
    :type n: int
    :returns: Blocks of up to ``n`` sentences each, in original order.
    :rtype: list of lists
    """
    # Slicing past the end is safe: the last block simply holds the remainder.
    return [sentences[i:i + n] for i in range(0, len(sentences), n)]
:param sentences: List of strings where each string is a sentence.
:type sentences: list
:param n: Maximum blocksize for sentences, i.e. a block will be composed of
``n`` sentences.
:type n: int.
:returns: Blocks of n sentences.
:rtype: list-o... |
def _rel_path(self, path, basepath=None):
basepath = basepath or self.src_dir
return path[len(basepath) + 1:] | trim off basepath |
def cal_k_bm3(p, k):
    """Calculate bulk modulus at high pressure.

    :param p: pressure
    :param k: [v0, k0, k0p]
    :return: bulk modulus at pressure ``p``
    """
    # Convert pressure to volume, then volume to bulk modulus.
    return cal_k_bm3_from_v(cal_v_bm3(p, k), k)
:param p: pressure
:param k: [v0, k0, k0p]
:return: bulk modulus at high pressure |
def LaplaceCentreWeight(self):
    """Centre weighting matrix for TV Laplacian.

    Interior points get weight ``2 * len(self.axes)``; each boundary
    face along a TV axis reduces the weight there by one.
    """
    # Broadcastable shape: full extent along TV axes, singleton elsewhere.
    shape = [1] * self.S.ndim
    for axis in self.axes:
        shape[axis] = self.S.shape[axis]
    weights = np.full(shape, 2 * len(self.axes), dtype=self.dtype)
    for axis in self.axes:
        # Select the first and last slice along this axis.
        index = [slice(None)] * axis + [[0, -1]]
        weights[tuple(index)] -= 1.0
    return weights
def new_filename(data, file_kind, ext):
nb_key = file_kind + "number"
if nb_key not in data.keys():
data[nb_key] = -1
if not data["override externals"]:
file_exists = True
while file_exists:
data[nb_key] = data[nb_key] + 1
filename, name = _gen_filename(data, ... | Returns an available filename.
:param file_kind: Name under which numbering is recorded, such as 'img' or
'table'.
:type file_kind: str
:param ext: Filename extension.
:type ext: str
:returns: (filename, rel_filepath) where filename is a path in the
filesystem ... |
def get_fallback_languages():
    """Retrieve the fallback languages for the active language from settings.

    Looks up the full language code first, then falls back to the
    two-letter prefix; returns an empty list when neither is configured.
    """
    lang = translation.get_language()
    fallbacks = settings.FALLBACK_LANGUAGES
    return fallbacks.get(lang) or fallbacks.get(lang[:2], [])
def error(self, argparser, target, message):
warnings.warn(
'Runtime.error is deprecated and will be removed by calmjs-4.0.0',
DeprecationWarning)
details = self.get_argparser_details(argparser)
argparser = details.subparsers[target] if details else self.argparser
... | This was used as part of the original non-recursive lookup for
the target parser. |
def has_changed(self, field_name: str = None) -> bool:
changed = self._diff_with_initial.keys()
if self._meta.get_field(field_name).get_internal_type() == 'ForeignKey':
if not field_name.endswith('_id'):
field_name = field_name+'_id'
if field_name in changed:
... | Check if a field has changed since the model was instantiated. |
def spec(self, name):
    """Return the spec registered under *name*.

    Parameters
    ----------
    name : Str | BaseData | Parameter
        A parameter, fileset or field, or the name of one; objects are
        reduced to their ``.name`` before lookup.
    """
    if isinstance(name, (BaseData, Parameter)):
        name = name.name
    # Fall back to bound specs when no parameter spec is registered.
    if name not in self._param_specs:
        return self.bound_spec(name)
    return self._param_specs[name]
field spec or a spec or parameter that has either
been passed to the study as an input or can be derived.
Parameters
----------
name : Str | BaseData | Parameter
A parameter, fileset or field or name of one |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.