code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def _read_apps(self):
apps = {}
for cfgfile in glob.iglob(os.path.join(self.confdir, '*.conf')):
name = os.path.basename(cfgfile)[0:-5]
try:
app = AppLogParser(name, cfgfile, self.args, self.logdir,
self.fields, self.name_cache, ... | Read the configuration of applications returning a dictionary
:return: A dictionary with application names as keys and configuration \
object as values. |
def hostname(hn, ft, si):
if not hn or not hn.fqdn:
hn = ft
if hn and hn.fqdn:
fqdn = hn.fqdn
hostname = hn.hostname if hn.hostname else fqdn.split(".")[0]
domain = hn.domain if hn.domain else ".".join(fqdn.split(".")[1:])
return Hostname(fqdn, hostname, domain)
else:... | Check hostname, facter and systemid to get the fqdn, hostname and domain.
Prefer hostname to facter and systemid.
Returns:
insights.combiners.hostname.Hostname: A named tuple with `fqdn`,
`hostname` and `domain` components.
Raises:
Exception: If no hostname can be found in any of ... |
def calculate_entropy(self, entropy_string):
total = 0
for char in entropy_string:
if char.isalpha():
prob = self.frequency[char.lower()]
total += - math.log(prob) / math.log(2)
logging.debug("Entropy score: {0}".format(total))
return total | Calculates the entropy of a string based on known frequency of
English letters.
Args:
entropy_string: A str representing the string to calculate.
Returns:
A negative float with the total entropy of the string (higher
is better). |
def _maybe_match_name(a, b):
a_has = hasattr(a, 'name')
b_has = hasattr(b, 'name')
if a_has and b_has:
if a.name == b.name:
return a.name
else:
return None
elif a_has:
return a.name
elif b_has:
return b.name
return None | Try to find a name to attach to the result of an operation between
a and b. If only one of these has a `name` attribute, return that
name. Otherwise return a consensus name if they match of None if
they have different names.
Parameters
----------
a : object
b : object
Returns
---... |
def _parse_os_release(*os_release_files):
ret = {}
for filename in os_release_files:
try:
with salt.utils.files.fopen(filename) as ifile:
regex = re.compile('^([\\w]+)=(?:\'|")?(.*?)(?:\'|")?$')
for line in ifile:
match = regex.match(line.s... | Parse os-release and return a parameter dictionary
See http://www.freedesktop.org/software/systemd/man/os-release.html
for specification of the file format. |
def parse_source_file(filename):
with open(filename, 'rb') as fid:
content = fid.read()
content = content.replace(b'\r\n', b'\n')
try:
node = ast.parse(content)
return node, content.decode('utf-8')
except SyntaxError:
return None, content.decode('utf-8') | Parse source file into AST node
Parameters
----------
filename : str
File path
Returns
-------
node : AST node
content : utf-8 encoded string |
def _download_mirbase(args, version="CURRENT"):
if not args.hairpin or not args.mirna:
logger.info("Working with version %s" % version)
hairpin_fn = op.join(op.abspath(args.out), "hairpin.fa.gz")
mirna_fn = op.join(op.abspath(args.out), "miRNA.str.gz")
if not file_exists(hairpin_fn):... | Download files from mirbase |
def parse_band_log(self, message):
if "payload" in message and hasattr(message["payload"], "name"):
record = message["payload"]
for k in dir(record):
if k.startswith("workflows_exc_"):
setattr(record, k[14:], getattr(record, k))
del... | Process incoming logging messages from the service. |
def _read_para_hip_signature_2(self, code, cbit, clen, *, desc, length, version):
_algo = self._read_unpack(2)
_sign = self._read_fileng(clen-2)
hip_signature_2 = dict(
type=desc,
critical=cbit,
length=clen,
algorithm=_HI_ALGORITHM.get(_algo, 'Unas... | Read HIP HIP_SIGNATURE_2 parameter.
Structure of HIP HIP_SIGNATURE_2 parameter [RFC 7401]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-... |
def save_script_file_for_state_and_source_path(state, state_path_full, as_copy=False):
from rafcon.core.states.execution_state import ExecutionState
if isinstance(state, ExecutionState):
source_script_file = os.path.join(state.script.path, state.script.filename)
destination_script_file = os.path... | Saves the script file for a state to the directory of the state.
The script name will be set to the SCRIPT_FILE constant.
:param state: The state of which the script file should be saved
:param str state_path_full: The path to the file system storage location of the state
:param bool as_copy: Temporar... |
def _find_start_time(hdr, s_freq):
start_time = hdr['stc']['creation_time']
for one_stamp in hdr['stamps']:
if one_stamp['segment_name'].decode() == hdr['erd']['filename']:
offset = one_stamp['start_stamp']
break
erd_time = (hdr['erd']['creation_time'] -
timed... | Find the start time, usually in STC, but if that's not correct, use ERD
Parameters
----------
hdr : dict
header with stc (and stamps) and erd
s_freq : int
sampling frequency
Returns
-------
datetime
either from stc or from erd
Notes
-----
Sometimes, but... |
def _Execute(self, options):
whitelist = dict(
name=options["name"],
description=options.get("description", "<empty>"))
return self._agent.client.compute.security_groups.create(**whitelist) | Handles security groups operations. |
def set_value(self, pymux, value):
try:
value = int(value)
if value < 0:
raise ValueError
except ValueError:
raise SetOptionError('Expecting an integer.')
else:
setattr(pymux, self.attribute_name, value) | Take a string, and return an integer. Raise SetOptionError when the
given text does not parse to a positive integer. |
def genlmsg_valid_hdr(nlh, hdrlen):
if not nlmsg_valid_hdr(nlh, GENL_HDRLEN):
return False
ghdr = genlmsghdr(nlmsg_data(nlh))
if genlmsg_len(ghdr) < NLMSG_ALIGN(hdrlen):
return False
return True | Validate Generic Netlink message headers.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/genl.c#L117
Verifies the integrity of the Netlink and Generic Netlink headers by enforcing the following requirements:
- Valid Netlink message header (`nlmsg_valid_hdr()`)
- Presence of a complete Gene... |
def changed_path(self):
"Find any changed path and update all changed modification times."
result = None
for path in self.paths_to_modification_times:
lastmod = self.paths_to_modification_times[path]
mod = os.path.getmtime(path)
if mod > lastmod:
... | Find any changed path and update all changed modification times. |
def save(self, expires=None):
if expires is None:
expires = self.expires
s = self.serialize()
key = self._key(self._all_keys())
_cache.set(key, s, expires) | Save a copy of the object into the cache. |
def populateFromRow(self, referenceSetRecord):
self._dataUrl = referenceSetRecord.dataurl
self._description = referenceSetRecord.description
self._assemblyId = referenceSetRecord.assemblyid
self._isDerived = bool(referenceSetRecord.isderived)
self._md5checksum = referenceSetRecor... | Populates this reference set from the values in the specified DB
row. |
def _rev(repo):
try:
repo_info = dict(six.iteritems(CLIENT.info(repo['repo'])))
except (pysvn._pysvn.ClientError, TypeError,
KeyError, AttributeError) as exc:
log.error(
'Error retrieving revision ID for svnfs remote %s '
'(cachedir: %s): %s',
repo... | Returns revision ID of repo |
def prepare(self):
super(RequestHandler, self).prepare()
if self.request.headers.get('content-type', '').startswith(self.JSON):
self.request.body = escape.json_decode(self.request.body) | Prepare the incoming request, checking to see the request is sending
JSON content in the request body. If so, the content is decoded and
assigned to the json_arguments attribute. |
def find_config(directory_or_file, debug=False):
directory_or_file = os.path.realpath(directory_or_file)
if os.path.isfile(directory_or_file):
if debug:
print('using config file {}'.format(directory_or_file),
file=sys.stderr)
return directory_or_file
directory =... | Return configuration filename.
If `directory_or_file` is a file, return the real-path of that file. If it
is a directory, find the configuration (any file name in CONFIG_FILES) in
that directory or its ancestors. |
def run(self, host, port=25, with_ssl=False):
try:
dns_rec = self._lookup(host, port)
self._connect(dns_rec)
if with_ssl:
self._wrap_ssl()
banner = self._get_banner()
self._check_banner(banner)
except Exception:
exc_... | Executes a single health check against a remote host and port. This
method may only be called once per object.
:param host: The hostname or IP address of the SMTP server to check.
:type host: str
:param port: The port number of the SMTP server to check.
:type port: int
:... |
def launchDashboardOverlay(self, pchAppKey):
fn = self.function_table.launchDashboardOverlay
result = fn(pchAppKey)
return result | Launches the dashboard overlay application if it is not already running. This call is only valid for
dashboard overlay applications. |
def _parse_use(self, string):
result = {}
for ruse in self.RE_USE.finditer(string):
name = ruse.group("name").split("!")[0].strip()
if name.lower() == "mpi":
continue
if ruse.group("only"):
only = ruse.group("only").split(",")
... | Extracts use dependencies from the innertext of a module. |
def sphericalAngSep(ra0, dec0, ra1, dec1, radians=False):
if radians==False:
ra0 = np.radians(ra0)
dec0 = np.radians(dec0)
ra1 = np.radians(ra1)
dec1 = np.radians(dec1)
deltaRa= ra1-ra0
deltaDec= dec1-dec0
val = haversine(deltaDec)
val += np.cos(dec0) * np.cos(dec1)... | Compute the spherical angular separation between two
points on the sky.
//Taken from http://www.movable-type.co.uk/scripts/gis-faq-5.1.html
NB: For small distances you can probably use
sqrt( dDec**2 + cos^2(dec)*dRa)
where dDec = dec1 - dec0 and
dRa = ra1 - ra0
... |
def __add_token_annotation_tier(self, tier):
for i, event in enumerate(tier.iter('event')):
anno_key = '{0}:{1}'.format(self.ns, tier.attrib['category'])
anno_val = event.text if event.text else ''
self.node[event.attrib['start']][anno_key] = anno_val | adds a tier to the document graph, in which each event annotates
exactly one token. |
def _prepare_value_nd(self, value, vshape):
if isinstance(value, numeric_types):
value_nd = full(shape=vshape, val=value, ctx=self.context, dtype=self.dtype)
elif isinstance(value, NDArray):
value_nd = value.as_in_context(self.context)
if value_nd.dtype != self.dtype:... | Given value and vshape, create an `NDArray` from value with the same
context and dtype as the current one and broadcast it to vshape. |
def _read_stderr(self):
f = open(self.stderr_file, 'rb')
try:
stderr_text = f.read()
if not stderr_text:
return ''
encoding = get_coding(stderr_text)
stderr_text = to_text_string(stderr_text, encoding)
return stderr_text... | Read the stderr file of the kernel. |
def internal_get_description(dbg, seq, thread_id, frame_id, expression):
try:
frame = dbg.find_frame(thread_id, frame_id)
description = pydevd_console.get_description(frame, thread_id, frame_id, expression)
description = pydevd_xml.make_valid_xml_value(quote(description, '/>_= \t'))
... | Fetch the variable description stub from the debug console |
def get_epoch_namespace_lifetime_grace_period( block_height, namespace_id ):
epoch_config = get_epoch_config( block_height )
if epoch_config['namespaces'].has_key(namespace_id):
return epoch_config['namespaces'][namespace_id]['NAMESPACE_LIFETIME_GRACE_PERIOD']
else:
return epoch_config['name... | what's the namespace lifetime grace period for this epoch? |
def promise(cls, fn, *args, **kwargs):
task = cls.task(target=fn, args=args, kwargs=kwargs)
task.start()
return task | Used to build a task based on a callable function and the arguments.
Kick it off and start execution of the task.
:param fn: callable
:param args: tuple
:param kwargs: dict
:return: SynchronousTask or AsynchronousTask |
def tuple_of(*generators):
class TupleOfGenerators(ArbitraryInterface):
@classmethod
def arbitrary(cls):
return tuple([
arbitrary(generator) for generator in generators
if generator is not tuple
])
TupleOfGenerators.__name__ = ''.join([
... | Generates a tuple by generating values for each of the specified
generators.
This is a class factory, it makes a class which is a closure around the
specified generators. |
def make_ttv_yaml(corpora, path_to_ttv_file, ttv_ratio=DEFAULT_TTV_RATIO, deterministic=False):
dataset = get_dataset(corpora)
data_sets = make_ttv(dataset, ttv_ratio=ttv_ratio, deterministic=deterministic)
def get_for_ttv(key):
return (
data_sets['test'][key],
data_sets['tra... | Create a test, train, validation from the corpora given and saves it as a YAML filename.
Each set will be subject independent, meaning that no one subject can have data in more than one
set
# Arguments;
corpora: a list of the paths to corpora used (these have to be formatted accoring to no... |
def verify_logout_request(cls, logout_request, ticket):
try:
session_index = cls.get_saml_slos(logout_request)
session_index = session_index[0].text
if session_index == ticket:
return True
else:
return False
except (Attribut... | verifies the single logout request came from the CAS server
returns True if the logout_request is valid, False otherwise |
def matrix(ctx, scenario_name, subcommand):
args = ctx.obj.get('args')
command_args = {
'subcommand': subcommand,
}
s = scenarios.Scenarios(
base.get_configs(args, command_args), scenario_name)
s.print_matrix() | List matrix of steps used to test instances. |
def validate(self, val):
if self.validation:
self.type.validate(val)
if self.custom_validator is not None:
self.custom_validator(val)
return True | Validate values according to the requirement |
def request_with_retries_on_post_search(self, session, url, query, json_input, stream, headers):
status_code = 500
if '/v1/search' in url:
retry_count = 10
else:
retry_count = 1
while status_code in (500, 502, 503, 504) and retry_count > 0:
try:
... | Submit a request and retry POST search requests specifically.
We don't currently retry on POST requests, and this is intended as a temporary fix until
the swagger is updated and changes applied to prod. In the meantime, this function will add
retries specifically for POST search (and any other... |
def get_active_keys_to_keycode_list(self):
try:
_libxdo.xdo_get_active_keys_to_keycode_list
except AttributeError:
raise NotImplementedError()
keys = POINTER(charcodemap_t)
nkeys = ctypes.c_int(0)
_libxdo.xdo_get_active_keys_to_keycode_list(
se... | Get a list of active keys. Uses XQueryKeymap |
def log(self, level, message):
if self.log_fd is not None:
prefix = struct.pack('ii', level, len(message))
os.write(self.log_fd, prefix)
os.write(self.log_fd, message) | Write a log message via the child process.
The child process must already exist; call :meth:`live_log_child`
to make sure. If it has died in a way we don't expect then
this will raise :const:`signal.SIGPIPE`. |
def _postrun(self, result):
logger.debug(
"{}.PostRun: {}[{}]".format(
self.__class__.__name__, self.__class__.path, self.uuid
),
extra=dict(
kmsg=Message(
self.uuid, entrypoint=self.__class__.path,
param... | To execute after exection
:param kser.result.Result result: Execution result
:return: Execution result
:rtype: kser.result.Result |
def use_db(path, mode=WorkDB.Mode.create):
database = WorkDB(path, mode)
try:
yield database
finally:
database.close() | Open a DB in file `path` in mode `mode` as a context manager.
On exiting the context the DB will be automatically closed.
Args:
path: The path to the DB file.
mode: The mode in which to open the DB. See the `Mode` enum for
details.
Raises:
FileNotFoundError: If `mode` is `Mode.o... |
def reject_entry(request, entry_id):
return_url = request.GET.get('next', reverse('dashboard'))
try:
entry = Entry.no_join.get(pk=entry_id)
except:
message = 'No such log entry.'
messages.error(request, message)
return redirect(return_url)
if entry.status == Entry.UNVERIF... | Admins can reject an entry that has been verified or approved but not
invoiced to set its status to 'unverified' for the user to fix. |
def list_changes(self):
if not self.is_attached():
raise ItsdbError('changes are not tracked for detached tables.')
return [(i, self[i]) for i, row in enumerate(self._records)
if row is not None] | Return a list of modified records.
This is only applicable for attached tables.
Returns:
A list of `(row_index, record)` tuples of modified records
Raises:
:class:`delphin.exceptions.ItsdbError`: when called on a
detached table |
def decode(addr):
hrpgot, data = bech32_decode(addr)
if hrpgot not in BECH32_VERSION_SET:
return (None, None)
decoded = convertbits(data[1:], 5, 8, False)
if decoded is None or len(decoded) < 2 or len(decoded) > 40:
return (None, None)
if data[0] > 16:
return (None, None)
... | Decode a segwit address. |
def asQuartusTcl(self, buff: List[str], version: str, component: "Component",
packager: "IpPackager", thisIf: 'Interface'):
name = packager.getInterfaceLogicalName(thisIf)
self.quartus_tcl_add_interface(buff, thisIf, packager)
clk = thisIf._getAssociatedClk()
if clk ... | Add interface to Quartus tcl
:param buff: line buffer for output
:param version: Quartus version
:param intfName: name of top interface
:param component: component object from ipcore generator
:param packager: instance of IpPackager which is packagin current design
:para... |
def hasnew(self,allowempty=False):
for e in self.select(New,None,False, False):
if not allowempty and len(e) == 0: continue
return True
return False | Does the correction define new corrected annotations? |
def MakeOdds(self):
for hypo, prob in self.Items():
if prob:
self.Set(hypo, Odds(prob))
else:
self.Remove(hypo) | Transforms from probabilities to odds.
Values with prob=0 are removed. |
def get_self_host(request_data):
if 'http_host' in request_data:
current_host = request_data['http_host']
elif 'server_name' in request_data:
current_host = request_data['server_name']
else:
raise Exception('No hostname defined')
if ':' in current_host... | Returns the current host.
:param request_data: The request as a dict
:type: dict
:return: The current host
:rtype: string |
def schedCoro(self, coro):
import synapse.lib.provenance as s_provenance
if __debug__:
assert s_coro.iscoro(coro)
import synapse.lib.threads as s_threads
assert s_threads.iden() == self.tid
task = self.loop.create_task(coro)
if asyncio.current_task():
... | Schedules a free-running coroutine to run on this base's event loop. Kills the coroutine if Base is fini'd.
It does not pend on coroutine completion.
Precondition:
This function is *not* threadsafe and must be run on the Base's event loop
Returns:
asyncio.Task: An asyn... |
def get_statistics_24h(self, endtime):
js = json.dumps(
{'attrs': ["bytes", "num_sta", "time"], 'start': int(endtime - 86400) * 1000, 'end': int(endtime - 3600) * 1000})
params = urllib.urlencode({'json': js})
return self._read(self.api_url + 'stat/report/hourly.system', params) | Return statistical data last 24h from time |
def check_jobs(jobs):
if jobs == 0:
raise click.UsageError("Jobs must be >= 1 or == -1")
elif jobs < 0:
import multiprocessing
jobs = multiprocessing.cpu_count()
return jobs | Validate number of jobs. |
def when_matches(self, path, good_value, bad_values=None, timeout=None,
event_timeout=None):
future = self.when_matches_async(path, good_value, bad_values)
self.wait_all_futures(
future, timeout=timeout, event_timeout=event_timeout) | Resolve when an path value equals value
Args:
path (list): The path to wait to
good_value (object): the value to wait for
bad_values (list): values to raise an error on
timeout (float): time in seconds to wait for responses, wait
forever if None
... |
def fit(self, data, parent_node=None, estimator=None):
if not parent_node:
if not self.parent_node:
raise ValueError("parent node must be specified for the model")
else:
parent_node = self.parent_node
if parent_node not in data.columns:
... | Computes the CPD for each node from a given data in the form of a pandas dataframe.
If a variable from the data is not present in the model, it adds that node into the model.
Parameters
----------
data : pandas DataFrame object
A DataFrame object with column names same as th... |
def create_index(self):
es = self._init_connection()
if not es.indices.exists(index=self.index):
es.indices.create(index=self.index, body=self.settings) | Override to provide code for creating the target index.
By default it will be created without any special settings or mappings. |
def _from_dict(cls, _dict):
args = {}
if 'environment_id' in _dict:
args['environment_id'] = _dict.get('environment_id')
if 'collection_id' in _dict:
args['collection_id'] = _dict.get('collection_id')
if 'queries' in _dict:
args['queries'] = [
... | Initialize a TrainingDataSet object from a json dictionary. |
def addLabel(self, aminoAcidLabels, excludingModifications=None):
if excludingModifications is not None:
self.excludingModifictions = True
labelEntry = {'aminoAcidLabels': aminoAcidLabels,
'excludingModifications': excludingModifications
}
... | Adds a new labelstate.
:param aminoAcidsLabels: Describes which amino acids can bear which
labels. Possible keys are the amino acids in one letter code and
'nTerm', 'cTerm'. Possible values are the modifications ids from
:attr:`maspy.constants.aaModMass` as strings or a list... |
def detach_all_classes(self):
classes = list(self._observers.keys())
for cls in classes:
self.detach_class(cls) | Detach from all tracked classes. |
def classproperty(func):
doc = func.__doc__
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func, doc) | Use as a decorator on a method definition to make it a class-level attribute.
This decorator can be applied to a method, a classmethod, or a staticmethod. This decorator will
bind the first argument to the class object.
Usage:
>>> class Foo(object):
... @classproperty
... def name(cls):
... retu... |
def sort_cards(cards, ranks=None):
ranks = ranks or DEFAULT_RANKS
if ranks.get("suits"):
cards = sorted(
cards,
key=lambda x: ranks["suits"][x.suit] if x.suit != None else 0
)
if ranks.get("values"):
cards = sorted(
cards,
key=lambda x:... | Sorts a given list of cards, either by poker ranks, or big two ranks.
:arg cards:
The cards to sort.
:arg dict ranks:
The rank dict to reference for sorting. If ``None``, it will
default to ``DEFAULT_RANKS``.
:returns:
The sorted cards. |
def from_sequence(chain, list_of_residues, sequence_type = None):
s = Sequence(sequence_type)
count = 1
for ResidueAA in list_of_residues:
s.add(Residue(chain, count, ResidueAA, sequence_type))
count += 1
return s | Takes in a chain identifier and protein sequence and returns a Sequence object of Residues, indexed from 1. |
def rule_variable(field_type, label=None, options=None):
options = options or []
def wrapper(func):
if not (type(field_type) == type and issubclass(field_type, BaseType)):
raise AssertionError("{0} is not instance of BaseType in"\
" rule_variable field_type".format(field_... | Decorator to make a function into a rule variable |
def start_in_keepedalive_processes(obj, nb_process):
processes = []
readers_pipes = []
writers_pipes = []
for i in range(nb_process):
local_read_pipe, local_write_pipe = Pipe(duplex=False)
process_read_pipe, process_write_pipe = Pipe(duplex=False)
readers_pipes.append(local_read_... | Start nb_process and keep them alive. Send job to them multiple times, then close thems. |
def ring_position(self):
if self.type != EventType.TABLET_PAD_RING:
raise AttributeError(_wrong_prop.format(self.type))
return self._libinput.libinput_event_tablet_pad_get_ring_position(
self._handle) | The current position of the ring, in degrees
counterclockwise from the northern-most point of the ring in
the tablet's current logical orientation.
If the source is
:attr:`~libinput.constant.TabletPadRingAxisSource.FINGER`,
libinput sends a terminating event with a ring value of -1 when
the finger is lifte... |
def init_services(service_definitions, service_context, state_db,
client_authn_factory=None):
service = {}
for service_name, service_configuration in service_definitions.items():
try:
kwargs = service_configuration['kwargs']
except KeyError:
kwargs = {}
... | Initiates a set of services
:param service_definitions: A dictionary cotaining service definitions
:param service_context: A reference to the service context, this is the same
for all service instances.
:param state_db: A reference to the state database. Shared by all the
services.
:par... |
def _capabilities_dict(envs, tags):
capabilities = {
'browserName': envs['SELENIUM_BROWSER'],
'acceptInsecureCerts': bool(envs.get('SELENIUM_INSECURE_CERTS', False)),
'video-upload-on-pass': False,
'sauce-advisor': False,
'capture-html': True,
'record-screenshots': Tr... | Convert the dictionary of environment variables to
a dictionary of desired capabilities to send to the
Remote WebDriver.
`tags` is a list of string tags to apply to the SauceLabs job. |
def pch_emitter(target, source, env):
validate_vars(env)
pch = None
obj = None
for t in target:
if SCons.Util.splitext(str(t))[1] == '.pch':
pch = t
if SCons.Util.splitext(str(t))[1] == '.obj':
obj = t
if not obj:
obj = SCons.Util.splitext(str(pch))[0]... | Adds the object file target. |
def get_configdir(name):
configdir = os.environ.get('%sCONFIGDIR' % name.upper())
if configdir is not None:
return os.path.abspath(configdir)
p = None
h = _get_home()
if ((sys.platform.startswith('linux') or
sys.platform.startswith('darwin')) and h is not None):
p = os.path.... | Return the string representing the configuration directory.
The directory is chosen as follows:
1. If the ``name.upper() + CONFIGDIR`` environment variable is supplied,
choose that.
2a. On Linux, choose `$HOME/.config`.
2b. On other platforms, choose `$HOME/.matplotlib`.
3. If the chosen... |
def _load_params(params, logger=logging):
if isinstance(params, str):
cur_path = os.path.dirname(os.path.realpath(__file__))
param_file_path = os.path.join(cur_path, params)
logger.info('Loading params from file %s' % param_file_path)
save_dict = nd_load(param_file_path)
arg_... | Given a str as a path to the .params file or a pair of params,
returns two dictionaries representing arg_params and aux_params. |
def clean(self, force=False):
if self.is_finalized and not force:
self.warn("Can't clean; bundle is finalized")
return False
self.log('---- Cleaning ----')
self.state = self.STATES.CLEANING
self.dstate = self.STATES.BUILDING
self.commit()
self.clea... | Clean generated objects from the dataset, but only if there are File contents
to regenerate them |
def call_mr_transform(data, opt='', path='./',
remove_files=True):
r
if not import_astropy:
raise ImportError('Astropy package not found.')
if (not isinstance(data, np.ndarray)) or (data.ndim != 2):
raise ValueError('Input data must be a 2D numpy array.')
executable... | r"""Call mr_transform
This method calls the iSAP module mr_transform
Parameters
----------
data : np.ndarray
Input data, 2D array
opt : list or str, optional
Options to be passed to mr_transform
path : str, optional
Path for output files (default is './')
remove_fil... |
def walk_recursive(f, data):
results = {}
if isinstance(data, list):
return [walk_recursive(f, d) for d in data]
elif isinstance(data, dict):
results = funcy.walk_keys(f, data)
for k, v in data.iteritems():
if isinstance(v, dict):
results[f(k)] = walk_recu... | Recursively apply a function to all dicts in a nested dictionary
:param f: Function to apply
:param data: Dictionary (possibly nested) to recursively apply
function to
:return: |
def set_loader(self, loader, destructor, state):
return lib.zcertstore_set_loader(self._as_parameter_, loader, destructor, state) | Override the default disk loader with a custom loader fn. |
def _raise_error_if_not_drawing_classifier_input_sframe(
dataset, feature, target):
from turicreate.toolkits._internal_utils import _raise_error_if_not_sframe
_raise_error_if_not_sframe(dataset)
if feature not in dataset.column_names():
raise _ToolkitError("Feature column '%s' does not exist" % ... | Performs some sanity checks on the SFrame provided as input to
`turicreate.drawing_classifier.create` and raises a ToolkitError
if something in the dataset is missing or wrong. |
def get_extensions(self, data=False):
ext_list = [key for key in
self.__dict__ if type(self.__dict__[key]) is Extension]
for key in ext_list:
if data:
yield getattr(self, key)
else:
yield key | Yields the extensions or their names
Parameters
----------
data : boolean, optional
If True, returns a generator which yields the extensions.
If False, returns a generator which yields the names of
the extensions (default)
Returns
-------
... |
def linsert(self, key, pivot, value, before=False):
where = b'AFTER' if not before else b'BEFORE'
return self.execute(b'LINSERT', key, where, pivot, value) | Inserts value in the list stored at key either before or
after the reference value pivot. |
def to_placeholder(self, name=None, db_type=None):
if name is None:
placeholder = self.unnamed_placeholder
else:
placeholder = self.named_placeholder.format(name)
if db_type:
return self.typecast(placeholder, db_type)
else:
return placeholder | Returns a placeholder for the specified name, by applying the instance's format strings.
:name: if None an unamed placeholder is returned, otherwise a named placeholder is returned.
:db_type: if not None the placeholder is typecast. |
def round(self, decimals=0, *args, **kwargs):
nv.validate_round(args, kwargs)
result = com.values_from_object(self).round(decimals)
result = self._constructor(result, index=self.index).__finalize__(self)
return result | Round each value in a Series to the given number of decimals.
Parameters
----------
decimals : int
Number of decimal places to round to (default: 0).
If decimals is negative, it specifies the number of
positions to the left of the decimal point.
Retu... |
def _update_to_s3_uri(property_key, resource_property_dict, s3_uri_value="s3://bucket/value"):
uri_property = resource_property_dict.get(property_key, ".")
if isinstance(uri_property, dict) or SamTemplateValidator.is_s3_uri(uri_property):
return
resource_property_dict[property_key] =... | Updates the 'property_key' in the 'resource_property_dict' to the value of 's3_uri_value'
Note: The function will mutate the resource_property_dict that is pass in
Parameters
----------
property_key str, required
Key in the resource_property_dict
resource_property_d... |
def sudo(command, show=True, *args, **kwargs):
if show:
print_command(command)
with hide("running"):
return _sudo(command, *args, **kwargs) | Runs a command as sudo on the remote server. |
def _get_inline_fragment(ast):
if not ast.selection_set:
return None
fragments = [
ast_node
for ast_node in ast.selection_set.selections
if isinstance(ast_node, InlineFragment)
]
if not fragments:
return None
if len(fragments) > 1:
raise GraphQLCompila... | Return the inline fragment at the current AST node, or None if no fragment exists. |
def SetValue(self, row, col, value):
self.dataframe.iloc[row, col] = value | Set value in the pandas DataFrame |
def total_bytes_billed(self):
result = self._job_statistics().get("totalBytesBilled")
if result is not None:
result = int(result)
return result | Return total bytes billed from job statistics, if present.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesBilled
:rtype: int or None
:returns: total bytes processed by the job, or None if job is not
yet complete. |
def register_provider(cls, provider):
def decorator(subclass):
cls._providers[provider] = subclass
subclass.name = provider
return subclass
return decorator | Register method to keep list of providers. |
def set_blacklisted_filepaths(self, filepaths, remove_from_stored=True):
filepaths = util.to_absolute_paths(filepaths)
self.blacklisted_filepaths = filepaths
if remove_from_stored:
self.plugin_filepaths = util.remove_from_set(self.plugin_filepaths,
... | Sets internal blacklisted filepaths to filepaths.
If `remove_from_stored` is `True`, any `filepaths` in
`self.plugin_filepaths` will be automatically removed.
Recommend passing in absolute filepaths but method will attempt
to convert to absolute filepaths based on current working direct... |
def Reset(self):
    """Return the FSM to its 'Start' state and drop accumulated results.

    The state-machine definition itself is preserved; only the current
    state cursor, the result list, and the per-record bookkeeping
    (via ``_ClearAllRecord``) are reset.
    """
    self._cur_state_name = 'Start'
    self._cur_state = self.states['Start']
    self._result = []
    self._ClearAllRecord()
def addattr(self, attrname, value=None, persistent=True):
    """Attach *attrname* (set to *value*) to this object.

    When *persistent* is True, the attribute name is also recorded in
    ``self.__persistent_attributes__`` (once), so views/copies carry it
    over; non-persistent attributes are not recorded.
    """
    setattr(self, attrname, value)
    if not persistent:
        return
    registry = self.__persistent_attributes__
    if attrname not in registry:
        registry.append(attrname)
be made a persistent attribute. Persistent attributes are copied
whenever a view or copy of this array is created. Otherwise, new views
or copies of this will not have the attribute. |
def _compute_intensity(ccube, bexpcube):
    """Build an intensity HpxMap: counts divided by the geometric-mean exposure.

    The exposure is taken as the geometric mean of adjacent planes of
    ``bexpcube`` along the first axis (sqrt of the element-wise product).
    """
    exposure = np.sqrt(bexpcube.data[:-1, 0:] * bexpcube.data[1:, 0:])
    return HpxMap(ccube.data / exposure, ccube.hpx)
def set_created_date(self, date=None):
    """Stamp the IOC root with a created date.

    :param date: xsdDate-formatted value (YYYY-MM-DDTHH:MM:SS).  When
        omitted, the underlying helper applies its default (per the
        original contract, the current date).
    :returns: True on success.
    :raises IOCParseError: if *date* is supplied but not xsdDate-formatted.
    """
    if date and not re.match(DATE_REGEX, date):
        raise IOCParseError('Created date is not valid. Must be in the form YYYY-MM-DDTHH:MM:SS')
    ioc_et.set_root_created_date(self.root, date)
    return True
User may specify the date they want to set as well.
:param date: Date value to set the created date to. This should be in the xsdDate form.
This defaults to the current date if it is not provided.
xsdDate form: YYYY-MM-DDTHH:MM:SS
... |
def os_walk(top, *args, **kwargs):
    """Wrapper around ``os.walk`` that decodes every yielded entry to unicode.

    On Python 2 under Windows the root path is passed through untouched;
    otherwise it is coerced to ``str`` before walking.  Tuples inside the
    yielded entries are preserved during decoding.
    """
    if six.PY2 and salt.utils.platform.is_windows():
        walk_root = top
    else:
        walk_root = salt.utils.stringutils.to_str(top)
    for entry in os.walk(walk_root, *args, **kwargs):
        yield salt.utils.data.decode(entry, preserve_tuples=True)
unicode. |
def commit(self):
    """Commit the current edit session.

    Executes the pending edit via the edits API with the built parameters,
    prints the committed edit id, and clears ``self.edit_id``.
    """
    request = self.edits().commit(**self.build_params()).execute()
    # Use print as a function: the original Python-2 print statement is a
    # SyntaxError on Python 3.  Message text is unchanged.
    print('Edit "%s" has been committed' % (request['id']))
    self.edit_id = None
def simplex_projection(v, b=1):
    r"""Project vector *v* onto the simplex ``{w : w >= 0, sum(w) = b}``.

    Solves :math:`\min_w \|w - v\|_2^2` subject to ``w`` lying on the
    (scaled) probability simplex, following Duchi et al., "Efficient
    projections onto the l1-ball for learning in high dimensions",
    ICML 2008.

    Parameters
    ----------
    v : array-like
        Input vector.
    b : float, optional
        Target sum of the projected vector (default 1).

    Returns
    -------
    numpy.ndarray
        The Euclidean projection of ``v`` onto the simplex.
    """
    # Fix: the original body contained a stray bare `r` statement (the
    # orphaned raw-docstring prefix), which raised NameError at runtime.
    v = np.asarray(v)
    p = len(v)
    # Negative entries can never survive the projection; zero them first.
    v = (v > 0) * v
    u = np.sort(v)[::-1]          # magnitudes sorted descending
    sv = np.cumsum(u)
    # rho: last index whose entry stays positive under the running threshold.
    rho = np.where(u > (sv - b) / np.arange(1, p + 1))[0][-1]
    theta = np.max([0, (sv[rho] - b) / (rho + 1)])
    w = (v - theta)
    w[w < 0] = 0
    return w
Implemented according to the paper: Efficient projections onto the
l1-ball for learning in high dimensions, John Duchi, et al. ICML 2008.
Implementation Time: 2011 June 17 by Bin@libin AT pmail.ntu.edu.sg
Optimization Problem: min_{w}\| w - v \|_{2}^{2}
... |
def _aux_types(self):
aux_types = []
num_aux = self._num_aux
for i in range(num_aux):
aux_types.append(self._aux_type(i))
return aux_types | The data types of the aux data for the BaseSparseNDArray. |
def escape_newlines(s: str) -> str:
    """Backslash-escape CR, LF, and backslash characters in *s*.

    Counterpart of :func:`unescape_newlines`.  The backslash is replaced
    first so that the escapes introduced for CR/LF are not themselves
    re-escaped.  Empty/None input is returned unchanged.
    """
    if not s:
        return s
    for raw, escaped in (("\\", r"\\"), ("\n", r"\n"), ("\r", r"\r")):
        s = s.replace(raw, escaped)
    return s
Its counterpart is :func:`unescape_newlines`.
``s.encode("string_escape")`` and ``s.encode("unicode_escape")`` are
alternatives, but they mess around with quotes, too (specifically,
backslash-escaping single quotes). |
def uncontract_general(basis, use_copy=True):
if use_copy:
basis = copy.deepcopy(basis)
for k, el in basis['elements'].items():
if not 'electron_shells' in el:
continue
newshells = []
for sh in el['electron_shells']:
if len(sh['coefficients']) == 1 or len(... | Removes the general contractions from a basis set
The input basis set is not modified. The returned basis
may have functions with coefficients of zero and may have duplicate
shells.
If use_copy is True, the input basis set is not modified. |
def Images2Rgbd(rgb, d):
    """Translate a pair of ROS Image messages into a JderobotTypes Rgbd.

    @param rgb: ROS color Image to translate
    @param d: ROS depth Image to translate
    @return: Rgbd holding both converted images and the color image's
        timestamp in (float) seconds
    """
    result = Rgbd()
    result.color = imageMsg2Image(rgb)
    result.depth = imageMsg2Image(d)
    stamp = rgb.header.stamp
    result.timeStamp = stamp.secs + stamp.nsecs * 1e-9
    return result
@param rgb: ROS color Image to translate
@param d: ROS depth image to translate
@type rgb: ImageROS
@type d: ImageROS
@return a Rgbd translated from Images |
def validate_value(self, value):
if self.readonly:
raise ValidationError(self.record, "Cannot set readonly field '{}'".format(self.name))
if value not in (None, self._unset):
if self.supported_types and not isinstance(value, tuple(self.supported_types)):
raise Val... | Validate value is an acceptable type during set_python operation |
def between(self, objs1: List[float], objs2: List[float], n=1):
from desdeo.preference.base import ReferencePoint
objs1_arr = np.array(objs1)
objs2_arr = np.array(objs2)
segments = n + 1
diff = objs2_arr - objs1_arr
solutions = []
for x in range(1, segments):
... | Generate `n` solutions which attempt to trade-off `objs1` and `objs2`.
Parameters
----------
objs1
First boundary point for desired objective function values
objs2
Second boundary point for desired objective function values
n
Number of solut... |
def process_item(self, item):
group, value = item['group'], item['value']
if group in self._groups:
cur_val = self._groups[group]
self._groups[group] = max(cur_val, value)
else:
self._src.tracking = False
new_max = value
for rec in self... | Calculate new maximum value for each group,
for "new" items only. |
def PmfProbLess(pmf1, pmf2):
    """Probability that a value drawn from pmf1 is less than one from pmf2.

    Args:
        pmf1: Pmf object
        pmf2: Pmf object

    Returns:
        float probability
    """
    # Sum p1*p2 over every pair with v1 < v2; start at 0.0 so the empty
    # case still yields a float, matching the original accumulator.
    return sum(
        (p1 * p2
         for v1, p1 in pmf1.Items()
         for v2, p2 in pmf2.Items()
         if v1 < v2),
        0.0,
    )
Args:
pmf1: Pmf object
pmf2: Pmf object
Returns:
float probability |
def _GetStatus(self, two_factor=False):
params = ['status']
if two_factor:
params += ['--twofactor']
retcode = self._RunOsLoginControl(params)
if retcode is None:
if self.oslogin_installed:
self.logger.warning('OS Login not installed.')
self.oslogin_installed = False
re... | Check whether OS Login is installed.
Args:
two_factor: bool, True if two factor should be enabled.
Returns:
bool, True if OS Login is installed. |
def load_remote_settings(self, remote_bucket, remote_file):
if not self.session:
boto_session = boto3.Session()
else:
boto_session = self.session
s3 = boto_session.resource('s3')
try:
remote_env_object = s3.Object(remote_bucket, remote_file).get()
... | Attempt to read a file from s3 containing a flat json object. Adds each
key->value pair as environment variables. Helpful for keeping
sensitive or stage-specific configuration variables in s3 instead of
version control. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.