code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def number_of_states(dtrajs, only_used=False):
    r"""Return the number of states in the given discrete trajectories.

    Parameters
    ----------
    dtrajs : array_like or list of array_like
        Discretized trajectory or list of discretized trajectories.
    only_used : bool, default False
        If False, return ``max + 1`` where ``max`` is the largest state
        index that occurs; if True, count only states that actually occur.
    """
    dtrajs = _ensure_dtraj_list(dtrajs)
    if only_used:
        # Count only states with a nonzero occurrence count.
        return np.count_nonzero(count_states(dtrajs))
    highest = 0
    for traj in dtrajs:
        highest = max(highest, np.max(traj))
    return highest + 1
r"""returns the number of states in the given trajectories. Parameters ---------- dtraj : array_like or list of array_like Discretized trajectory or list of discretized trajectories only_used = False : boolean If False, will return max+1, where max is the largest index used. If ...
def add_exception(self, exception, stack, remote=False): self._check_ended() self.add_fault_flag() if hasattr(exception, '_recorded'): setattr(self, 'cause', getattr(exception, '_cause_id')) return exceptions = [] exceptions.append(Throwable(exception, sta...
Add an exception to trace entities. :param Exception exception: the caught exception. :param list stack: the output from python built-in `traceback.extract_stack()`. :param bool remote: If False it means it's a client error instead of a downstream service.
def num_unused_cpus(thresh=10):
    """Count CPUs whose current utilization is below *thresh* percent."""
    import psutil
    per_cpu = psutil.cpu_percent(percpu=True)
    # Booleans sum to an int; count the idle-enough cores.
    return sum(1 for usage in per_cpu if usage < thresh)
Returns the number of cpus with utilization less than `thresh` percent
def stft(func=None, **kwparams): from numpy.fft import fft, ifft return stft.base(transform=fft, inverse_transform=ifft)(func, **kwparams)
Short Time Fourier Transform for complex data. Same to the default STFT strategy, but with new defaults. This is the same to: .. code-block:: python stft.base(transform=numpy.fft.fft, inverse_transform=numpy.fft.ifft) See ``stft.base`` docs for more.
def get_share_file (filename, devel_dir=None): paths = [get_share_dir()] if devel_dir is not None: paths.insert(0, devel_dir) for path in paths: fullpath = os.path.join(path, filename) if os.path.isfile(fullpath): return fullpath msg = "%s not found in %s; check your ...
Return a filename in the share directory. @param devel_dir: directory to search when developing @ptype devel_dir: string @param filename: filename to search for @ptype filename: string @return: the found filename or None @rtype: string @raises: ValueError if not found
def _raise_exception(self, eobj, edata=None):
    """Raise the pre-defined exception described by *eobj*.

    :param eobj: mapping with "type" (exception class) and "msg" keys.
    :param edata: optional data used to format the message via
        ``self._format_msg``; when absent the raw message is used.
    """
    _, _, tbobj = sys.exc_info()
    message = self._format_msg(eobj["msg"], edata) if edata else eobj["msg"]
    _rwtb(eobj["type"], message, tbobj)
Raise exception by name.
def set_empty_text(self): self.buffer.insert_with_tags_by_name( self.buffer.get_start_iter(), self.empty_text, 'empty-text')
Display the empty text
def insert(self, state, token): if token == EndSymbol(): return self[state][EndSymbol()] from pydsl.check import check symbol_list = [x for x in self[state] if isinstance(x, TerminalSymbol) and check(x.gd, [token])] if not symbol_list: return {"action":"Fail"} ...
change internal state, return action
def accumulate(self, axis: AxisIdentifier) -> HistogramBase: new_one = self.copy() axis_id = self._get_axis(axis) new_one._frequencies = np.cumsum(new_one.frequencies, axis_id[0]) return new_one
Calculate cumulative frequencies along a certain axis. Returns ------- new_hist: Histogram of the same type & size
def version(self, bundle: str, date: dt.datetime) -> models.Version: return (self.Version.query .join(models.Version.bundle) .filter(models.Bundle.name == bundle, models.Version.created_at == date) ...
Fetch a version from the store.
def clear_all_flair(self):
    """Remove all user flair on this subreddit.

    :returns: The json response from the server when there is flair to
        clear, otherwise None.
    """
    entries = [{'user': row['user']} for row in self.get_flair_list(limit=None)]
    if not entries:
        return None
    return self.set_flair_csv(entries)
Remove all user flair on this subreddit. :returns: The json response from the server when there is flair to clear, otherwise returns None.
def contains(self, value): str_value = StringConverter.to_nullable_string(value) for element in self: str_element = StringConverter.to_string(element) if str_value == None and str_element == None: return True if str_value == None or str_element == None...
Checks if this array contains a value. The check uses direct comparison between elements and the specified value. :param value: a value to be checked :return: true if this array contains the value or false otherwise.
def unit_system_id(self): if self._unit_system_id is None: hash_data = bytearray() for k, v in sorted(self.lut.items()): hash_data.extend(k.encode("utf8")) hash_data.extend(repr(v).encode("utf8")) m = md5() m.update(hash_data) ...
This is a unique identifier for the unit registry created from a FNV hash. It is needed to register a dataset's code unit system in the unit system registry.
def prepare_gag_lsm(self, lsm_precip_data_var, lsm_precip_type, interpolation_type=None): if self.l2g is None: raise ValueError("LSM converter not loaded ...") for unif_precip_card in self.UNIFORM_PRECIP_CARDS: self.project_manager.deleteCard(unif_precip_card, self.db_session) ...
Prepares Gage output for GSSHA simulation Parameters: lsm_precip_data_var(list or str): String of name for precipitation variable name or list of precip variable names. See: :func:`~gsshapy.grid.GRIDtoGSSHA.lsm_precip_to_gssha_precip_gage`. lsm_precip_type(str): Type of precipitation. ...
def __get_supported_file_types_string(self): languages = ["All Files (*)"] for language in self.__languages_model.languages: languages.append("{0} Files ({1})".format(language.name, " ".join(language.extensions.split("|")).replace("\\", "...
Returns the supported file types dialog string.
def extractOne(query, choices, processor=default_processor, scorer=default_scorer, score_cutoff=0):
    """Find the single best match above a score in a list of choices.

    Convenience wrapper around extractWithoutOrder that returns only the
    highest-scoring choice, or None when nothing clears *score_cutoff*.

    Args:
        query: A string to match against.
        choices: A list or dictionary of choices.
        processor, scorer, score_cutoff: forwarded to extractWithoutOrder.
    """
    candidates = list(extractWithoutOrder(query, choices, processor, scorer, score_cutoff))
    if not candidates:
        return None
    # Each candidate is a (choice, score[, key]) tuple; compare on score.
    return max(candidates, key=lambda scored: scored[1])
Find the single best match above a score in a list of choices. This is a convenience method which returns the single best choice. See extract() for the full arguments list. Args: query: A string to match against choices: A list or dictionary of choices, suitable for use with ex...
def path_list(self, sep=os.pathsep):
    """Split on *sep* and return the pieces as pathlib.Path objects."""
    from pathlib import Path
    return list(map(Path, self.split(sep)))
Return list of Path objects.
async def zrange(self, name, start, end, desc=False, withscores=False, score_cast_func=float): if desc: return await self.zrevrange(name, start, end, withscores, score_cast_func) pieces = ['ZRANGE', name, start, end] if wit...
Return a range of values from sorted set ``name`` between ``start`` and ``end`` sorted in ascending order. ``start`` and ``end`` can be negative, indicating the end of the range. ``desc`` a boolean indicating whether to sort the results descendingly ``withscores`` indicates to return ...
def connect_edges(graph): paths = [] for start, end in graph.array(graph.kdims): start_ds = graph.nodes[:, :, start] end_ds = graph.nodes[:, :, end] if not len(start_ds) or not len(end_ds): raise ValueError('Could not find node positions for all edges') start = start_...
Given a Graph element containing abstract edges compute edge segments directly connecting the source and target nodes. This operation just uses internal HoloViews operations and will be a lot slower than the pandas equivalent.
def delete_duplicates(seq):
    """Remove duplicates from an iterable, preserving first-seen order.

    Args:
        seq: Iterable of hashable items.

    Returns:
        list: unique items in order of first appearance.
    """
    unique = []
    seen = set()
    for item in seq:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique
Remove duplicates from an iterable, preserving the order. Args: seq: Iterable of various type. Returns: list: List of unique objects.
def emit_after(self, event: str) -> Callable:
    """Decorator factory: emit *event* after the wrapped function returns.

    :param event: Name of the event.
    :return: A decorator for callables.

    .. note:: The subscribed callables are invoked without the wrapped
       function's arguments or result.
    """
    def decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            result = func(*args, **kwargs)
            self.emit(event)
            return result
        return inner
    return decorator
Decorator that emits events after the function is completed. :param event: Name of the event. :type event: str :return: Callable .. note: This plainly just calls functions without passing params into the subscribed callables. This is great if you want to do som...
def _cldf2lexstat( dataset, segments='segments', transcription='value', row='parameter_id', col='language_id'): D = _cldf2wld(dataset) return lingpy.LexStat(D, segments=segments, transcription=transcription, row=row, col=col)
Read LexStat object from cldf dataset.
def auc(y_true, y_pred, round=True):
    """Area under the ROC curve.

    Masks NaN entries first; returns np.nan when fewer than two distinct
    label values remain (ROC AUC is undefined in that case).
    """
    y_true, y_pred = _mask_value_nan(y_true, y_pred)
    if round:
        y_true = y_true.round()
    degenerate = len(y_true) == 0 or len(np.unique(y_true)) < 2
    if degenerate:
        return np.nan
    return skm.roc_auc_score(y_true, y_pred)
Area under the ROC curve
def notify_systemd(): try: import systemd.daemon except ImportError: if salt.utils.path.which('systemd-notify') \ and systemd_notify_call('--booted'): notify_socket = os.getenv('NOTIFY_SOCKET') if notify_socket: if notify_socket.startswith(...
Notify systemd that this process has started
def unconsumed_ranges(self): res = IntervalTree() prev = None ranges = sorted([x for x in self.range_set], key=lambda x: x.begin) for rng in ranges: if prev is None: prev = rng continue res.add(Interval(prev.end, rng.begin)) ...
Return an IntervalTree of unconsumed ranges, of the format (start, end] with the end value not being included
def _compute_hparam_info_from_values(self, name, values): result = api_pb2.HParamInfo(name=name, type=api_pb2.DATA_TYPE_UNSET) distinct_values = set( _protobuf_value_to_string(v) for v in values if _protobuf_value_type(v)) for v in values: v_type = _protobuf_value_type(v) if not v_type: ...
Builds an HParamInfo message from the hparam name and list of values. Args: name: string. The hparam name. values: list of google.protobuf.Value messages. The list of values for the hparam. Returns: An api_pb2.HParamInfo message.
def do_p(self, arg):
    """p expression

    Print the value of the expression.
    """
    # NOTE(review): errors from evaluating or printing the expression are
    # deliberately swallowed -- presumably self._getval already reports
    # evaluation errors to the user, as in stdlib pdb; TODO confirm.
    # NOTE(review): stdlib bdb has no safe_repr; `bdb` here is presumably a
    # project module or patched import -- verify at the file's imports.
    try:
        self.message(bdb.safe_repr(self._getval(arg)))
    except Exception:
        pass
p expression Print the value of the expression.
def get_default_ca_certs(): if not hasattr(get_default_ca_certs, '_path'): for path in get_default_ca_cert_paths(): if os.path.exists(path): get_default_ca_certs._path = path break else: get_default_ca_certs._path = None return get_default_...
Try to find out system path with ca certificates. This path is cached and returned. If no path is found out, None is returned.
def standard_lstm_lm_200(dataset_name=None, vocab=None, pretrained=False, ctx=cpu(), root=os.path.join(get_home_dir(), 'models'), **kwargs): r predefined_args = {'embed_size': 200, 'hidden_size': 200, 'mode': 'lstm', '...
r"""Standard 2-layer LSTM language model with tied embedding and output weights. Both embedding and hidden dimensions are 200. Parameters ---------- dataset_name : str or None, default None The dataset name on which the pre-trained model is trained. Options are 'wikitext-2'. If specifi...
def append(self, key, value=None, dir=False, ttl=None, timeout=None):
    """Create a new automatically increasing key in the given directory key.

    All arguments are forwarded unchanged to the underlying adapter.
    """
    adapter = self.adapter
    return adapter.append(key, value, dir=dir, ttl=ttl, timeout=timeout)
Creates a new automatically increasing key in the given directory key.
def authnkey(self) -> dict:
    """Public keys flagged for authentication, keyed by identifier."""
    return {ident: key for ident, key in self._pubkey.items() if key.authn}
Accessor for public keys marked as authentication keys, by identifier.
def compute_eigenvalues(in_prefix, out_prefix): with open(out_prefix + ".parameters", "w") as o_file: print >>o_file, "genotypename: " + in_prefix + ".bed" print >>o_file, "snpname: " + in_prefix + ".bim" print >>o_file, "indivname: " + in_prefix + ".fam" print >>o_f...
Computes the Eigenvalues using smartpca from Eigensoft. :param in_prefix: the prefix of the input files. :param out_prefix: the prefix of the output files. :type in_prefix: str :type out_prefix: str Creates a "parameter file" used by smartpca and runs it.
def get_hooks(self): if self.__hooks is None and self.hooks_class_name is not None: hooks_class = util.for_name(self.hooks_class_name) if not isinstance(hooks_class, type): raise ValueError("hooks_class_name must refer to a class, got %s" % type(hooks_class).__name__) ...
Returns a hooks.Hooks class or None if no hooks class has been set.
def install_module( self, target=None, package_manager=None, install_optional=False, production_only=False, force=False, node_paths=None, frozen_lockfile=None, workunit_name=None, workunit_labels=None): package_manager = package_manager or self.get_package_manager(target=target) command = package_...
Installs node module using requested package_manager.
def create_table(self, table_name, obj=None, **kwargs):
    """Dispatch to ImpalaClient.create_table.

    See that method's docstring for the full argument list; this just adds
    the current database name before forwarding.
    """
    client = self.client
    return client.create_table(table_name, obj=obj, database=self.name, **kwargs)
Dispatch to ImpalaClient.create_table. See that function's docstring for more
def _set_timeouts(self, timeouts): (send_timeout, recv_timeout) = (None, None) try: (send_timeout, recv_timeout) = timeouts except TypeError: raise EndpointError( '`timeouts` must be a pair of numbers (2, 3) which represent ' 'the timeout v...
Set socket timeouts for send and receive respectively
def create_roadmap_doc(dat, opFile): op = format_title('Roadmap for AIKIF') for h1 in dat['projects']: op += format_h1(h1) if dat[h1] is None: op += '(No details)\n' else: for h2 in dat[h1]: op += '\n' + format_h2(h2) if dat[h1][h2]...
takes a dictionary read from a yaml file and converts it to the roadmap documentation
def join_struct_arrays(arrays): sizes = np.array([a.itemsize for a in arrays]) offsets = np.r_[0, sizes.cumsum()] shape = arrays[0].shape joint = np.empty(shape + (offsets[-1],), dtype=np.uint8) for a, size, offset in zip(arrays, sizes, offsets): joint[...,offset:offset+size] = np.atleast_1d...
Takes a list of possibly structured arrays, concatenates their dtypes, and returns one big array with that dtype. Does the inverse of ``separate_struct_array``. :param list arrays: List of ``np.ndarray``s
def estimate_band_connection(prev_eigvecs, eigvecs, prev_band_order): metric = np.abs(np.dot(prev_eigvecs.conjugate().T, eigvecs)) connection_order = [] for overlaps in metric: maxval = 0 for i in reversed(range(len(metric))): val = overlaps[i] if i in connection_orde...
A function to order the phonon eigenvectors taken from phonopy
def write(self, datapoint): if not isinstance(datapoint, DataPoint): raise TypeError("First argument must be a DataPoint object") datapoint._stream_id = self.get_stream_id() if self._cached_data is not None and datapoint.get_data_type() is None: datapoint._data_type = sel...
Write some raw data to a stream using the DataPoint API This method will mutate the datapoint provided to populate it with information available from the stream as it is available (but without making any new HTTP requests). For instance, we will add in information about the stream data ...
def _resource(resource, pretty: bool = None, **data): data = clean_data(data) ctx = click.get_current_context() if ctx.obj.get("env_prefix"): data["env_prefix"] = ctx.obj["env_prefix"] rsp = resource(**data) dump = partial(json.dumps, indent=4) if pretty else partial(json.dumps) click.ec...
The callback func that will be hooked to the generic resource commands
def ascolumn(x, dtype = None):
    """Convert ``x`` into a ``column``-type ``numpy.ndarray`` (shape (n, 1)).

    Arrays that are already 2-D (or higher) are returned unchanged.
    """
    arr = asarray(x, dtype)
    if arr.ndim >= 2:
        return arr
    return arr.reshape(len(arr), 1)
Convert ``x`` into a ``column``-type ``numpy.ndarray``.
def get_next(self):
    """Advance the internal counter and return the next generated name."""
    self._counter_curr += 1
    return "{0}{1}{2}".format(self._base_name, self._separator, self._counter_curr)
Return next name.
def insert(self, rectangle): rectangle = np.asanyarray(rectangle, dtype=np.float64) for child in self.child: if child is not None: attempt = child.insert(rectangle) if attempt is not None: return attempt if self.occupied: ...
Insert a rectangle into the bin. Parameters ------------- rectangle: (2,) float, size of rectangle to insert
def make_int(value, missing=-1):
    """Convert *value* to an int; blank strings and None map to *missing*.

    :param value: int-like value, numeric string, or None.
    :param missing: value returned for empty input (default -1).
    :return: ``int(value)``, or *missing* for None and blank/whitespace-only
        strings.
    :raises ValueError: if *value* is a non-blank, non-numeric string.
    """
    # isinstance(value, str) replaces the former six.string_types check;
    # the codebase is Python 3 (async def elsewhere), so `str` suffices.
    if value is None:
        return missing
    if isinstance(value, str) and not value.strip():
        return missing
    return int(value)
Convert a string value to an int; blank strings ('') and None are converted to `missing`.
def separator(self, *args, **kwargs): levelOverride = kwargs.get('level') or self._lastlevel self._log(levelOverride, '', 'separator', args, kwargs)
Prints a separator to the log. This can be used to separate blocks of log messages. The separator will default its log level to the level of the last message printed unless specified with the level= kwarg. The length and type of the separator string is determined by the current style. ...
def create_paired_device(self, dev_id, agent_path, capability, cb_notify_device, cb_notify_error): return self._interface.CreatePairedDevice(dev_id, agent_path, capability, ...
Creates a new object path for a remote device. This method will connect to the remote device and retrieve all SDP records and then initiate the pairing. If a previously :py:meth:`create_device` was used successfully, this method will only initiate the pairing. Compared to :py:m...
def get_request_headers(self, *args, **kwds): if self.request_headers: return self._unpack_headers(self.request_headers)
A convenience method for obtaining the headers that were sent to the S3 server. The AWS S3 API depends upon setting headers. This method is provided as a convenience for debugging issues with the S3 communications.
def _pixel_to_tile(x: float, y: float) -> Tuple[float, float]: xy = tcod.ffi.new("double[2]", (x, y)) tcod.lib.TCOD_sys_pixel_to_tile(xy, xy + 1) return xy[0], xy[1]
Convert pixel coordinates to tile coordinates.
def post_slack_message(message=None, channel=None, username=None, icon_emoji=None): LOG.debug('Slack Channel: %s\nSlack Message: %s', channel, message) slack = slacker.Slacker(SLACK_TOKEN) try: slack.chat.post_message(channel=channel, text=message, username=username, icon_emoji=icon_emoji) L...
Format the message and post to the appropriate slack channel. Args: message (str): Message to post to slack channel (str): Desired channel. Must start with #
def getAllReadGroupSets(self):
    """Yield every read group set across all datasets on the server."""
    for dataset in self.getAllDatasets():
        yield from self._client.search_read_group_sets(dataset_id=dataset.id)
Returns all readgroup sets on the server.
def add_field_like(self, name, like_array):
    """Add a new field with the dtype and trailing shape of *like_array*.

    The first dimension is replaced by this Datamat's own length; the new
    field starts fully masked.
    """
    shape = (len(self),) + like_array.shape[1:]
    new_data = ma.empty(shape, like_array.dtype)
    new_data.mask = True
    self.add_field(name, new_data)
Add a new field to the Datamat with the dtype of the like_array and the shape of the like_array except for the first dimension which will be instead the field-length of this Datamat.
def _make_ssh_forward_server(self, remote_address, local_bind_address): _Handler = self._make_ssh_forward_handler_class(remote_address) try: if isinstance(local_bind_address, string_types): forward_maker_class = self._make_unix_ssh_forward_server_class else: ...
Make SSH forward proxy Server class
def create(self, neighbors): data = {'neighbors': neighbors} return super(ApiV4Neighbor, self).post('api/v4/neighbor/', data)
Method to create neighbors :param neighbors: List containing neighbors desired to be created on database :return: None
def unescape(s, unicode_action="replace"):
    """Unescape HTML entities in *s* and coerce the result to ASCII.

    Non-ASCII characters are handled per *unicode_action* (a codecs error
    handler name such as "replace" or "ignore"); newlines are removed and
    surrounding whitespace is stripped.

    :param s: HTML-escaped text.
    :param unicode_action: codec error handler for non-ASCII characters.
    :return: plain ASCII str.
    """
    # html.unescape replaces the Python-2-only HTMLParser.HTMLParser()
    # .unescape() used before; decoding back to str fixes the py3
    # bytes/str mixing (.encode returned bytes, then str .replace failed).
    import html
    s = html.unescape(s)
    s = s.encode('ascii', unicode_action).decode('ascii')
    s = s.replace("\n", "").strip()
    return s
Unescape HTML strings, and convert &amp; etc.
def class_balancing_sample_weights(y):
    """Compute per-sample weights that balance class frequencies.

    Each class present in `y` receives total weight ``1 / n_classes``,
    split evenly among its samples, so every present class contributes
    equally overall.

    Parameters
    ----------
    y: NumPy array, 1D dtype=int
        sample classes, values must be 0 or positive

    Returns
    -------
    NumPy array, 1D dtype=float
        weight for each sample in `y`
    """
    counts = np.bincount(y)
    n_classes = np.count_nonzero(counts)
    # Divide only where a class actually occurs: the previous version
    # computed 1/(0*n) for absent classes, emitting divide-by-zero warnings
    # and inf entries (its isnan->0 cleanup never fired, since 1/0 is inf,
    # not nan). Absent classes now simply get weight 0, which is never
    # indexed by `y` anyway, so returned values are unchanged.
    cls_weight = np.zeros(len(counts), dtype=float)
    present = counts > 0
    cls_weight[present] = 1.0 / (counts[present].astype(float) * n_classes)
    return cls_weight[y]
Compute sample weight given an array of sample classes. The weights are assigned on a per-class basis and the per-class weights are inversely proportional to their frequency. Parameters ---------- y: NumPy array, 1D dtype=int sample classes, values must be 0 or posit...
def ucnstring_to_unicode(ucn_string): ucn_string = ucnstring_to_python(ucn_string).decode('utf-8') assert isinstance(ucn_string, text_type) return ucn_string
Return ucnstring as Unicode.
def start(sync_event_source, loop=None): if not loop: loop = asyncio.get_event_loop() event_source = asyncio.Queue(loop=loop) bridge = threading.Thread(target=_multiprocessing_to_asyncio, args=(sync_event_source, event_source, loop), daemon...
Create and start the WebSocket server.
def list_event_sources(self): path = '/archive/{}/events/sources'.format(self._instance) response = self._client.get_proto(path=path) message = archive_pb2.EventSourceInfo() message.ParseFromString(response.content) sources = getattr(message, 'source') return iter(sources...
Returns the existing event sources. :rtype: ~collections.Iterable[str]
def _compute(self): src_path = self.ctx.src_path if not src_path.exists: return NONE if src_path.is_null: return None try: if self.parse: value = self.parse(src_path) else: value = self._parse(src_path) ...
Processes this fields `src` from `ctx.src`.
async def Check(self, stream): request = await stream.recv_message() checks = self._checks.get(request.service) if checks is None: await stream.send_trailing_metadata(status=Status.NOT_FOUND) elif len(checks) == 0: await stream.send_message(HealthCheckResponse( ...
Implements synchronous periodic checks
def register(self, resource, event, trigger, **kwargs): super(AristaTrunkDriver, self).register(resource, event, trigger, kwargs) registry.subscribe(self.subport_create, resources.SUBPORTS, events.AFTER_CREATE) registry.s...
Called in trunk plugin's AFTER_INIT
def loaders(*specifiers): for specifier in specifiers: if isinstance(specifier, Locality): yield from _LOADERS[specifier] else: yield specifier
Generates loaders in the specified order. Arguments can be `.Locality` instances, producing the loader(s) available for that locality, `str` instances (used as file path templates) or `callable`s. These can be mixed: .. code-block:: python # define a load order using predefined user-local loc...
def dft_task(cls, mol, xc="b3lyp", **kwargs): t = NwTask.from_molecule(mol, theory="dft", **kwargs) t.theory_directives.update({"xc": xc, "mult": t.spin_multiplicity}) return t
A class method for quickly creating DFT tasks with optional cosmo parameter . Args: mol: Input molecule xc: Exchange correlation to use. \\*\\*kwargs: Any of the other kwargs supported by NwTask. Note the theory is always "dft" for a dft task.
def events(self, argv): opts = cmdline(argv, FLAGS_EVENTS) self.foreach(opts.args, lambda job: output(job.events(**opts.kwargs)))
Retrieve events for the specified search jobs.
def _prepare_conn_args(self, kwargs): kwargs['connect_over_uds'] = True kwargs['timeout'] = kwargs.get('timeout', 60) kwargs['cookie'] = kwargs.get('cookie', 'admin') if self._use_remote_connection(kwargs): kwargs['transport'] = kwargs.get('transport', 'https') if...
Set connection arguments for remote or local connection.
def append(self, *nodes: Union[AbstractNode, str]) -> None: node = _to_node_list(nodes) self.appendChild(node)
Append new nodes after last child node.
def query(number, domains, resolver=None): if resolver is None: resolver = dns.resolver.get_default_resolver() for domain in domains: if isinstance(domain, (str, unicode)): domain = dns.name.from_text(domain) qname = dns.e164.from_e164(number, domain) try: ...
Look for NAPTR RRs for the specified number in the specified domains. e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.'])
def match(self, package): if isinstance(package, basestring): from .packages import Package package = Package.parse(package) if self.name != package.name: return False if self.version_constraints and \ package.version not in self.version_constr...
Match ``package`` with the requirement. :param package: Package to test with the requirement. :type package: package expression string or :class:`Package` :returns: ``True`` if ``package`` satisfies the requirement. :rtype: bool
def lookup_object(model, object_id, slug, slug_field): lookup_kwargs = {} if object_id: lookup_kwargs['%s__exact' % model._meta.pk.name] = object_id elif slug and slug_field: lookup_kwargs['%s__exact' % slug_field] = slug else: raise GenericViewError( "Generic view mu...
Return the ``model`` object with the passed ``object_id``. If ``object_id`` is None, then return the object whose ``slug_field`` equals the passed ``slug``. If ``slug`` and ``slug_field`` are not passed, then raise Http404 exception.
def get_peer_resources(self, peer_jid): try: d = dict(self._presences[peer_jid]) d.pop(None, None) return d except KeyError: return {}
Return a dict mapping resources of the given bare `peer_jid` to the presence state last received for that resource. Unavailable presence states are not included. If the bare JID is in a error state (i.e. an error presence stanza has been received), the returned mapping is empty.
def tryload_cache(dpath, fname, cfgstr, verbose=None):
    """Like load_cache, but returns None when the cache cannot be read."""
    result = None
    try:
        result = load_cache(dpath, fname, cfgstr, verbose=verbose)
    except IOError:
        pass
    return result
returns None if cache cannot be loaded
def clean_community_indexes(communityID): communityID = np.array(communityID) cid_shape = communityID.shape if len(cid_shape) > 1: communityID = communityID.flatten() new_communityID = np.zeros(len(communityID)) for i, n in enumerate(np.unique(communityID)): new_communityID[community...
Takes input of community assignments. Returns reindexed community assignment by using smallest numbers possible. Parameters ---------- communityID : array-like list or array of integers. Output from community detection algorithems. Returns ------- new_communityID : array clea...
def clear_key_before(self, key, namespace=None, timestamp=None): block_size = self.config.block_size if namespace is None: namespace = self.config.namespace if timestamp is not None: offset, remainder = divmod(timestamp, block_size) if remainder: ...
Clear all data before `timestamp` for a given key. Note that the timestamp is rounded down to the nearest block boundary
def format_citations(zid, url='https://zenodo.org/', hits=10, tag_prefix='v'): url = ('{url}/api/records/?' 'page=1&' 'size={hits}&' 'q=conceptrecid:"{id}"&' 'sort=-version&' 'all_versions=True'.format(id=zid, url=url, hits=hits)) metadata = requests.get(ur...
Query and format a citations page from Zenodo entries Parameters ---------- zid : `int`, `str` the Zenodo ID of the target record url : `str`, optional the base URL of the Zenodo host, defaults to ``https://zenodo.org`` hits : `int`, optional the maximum number of hits to ...
def needsattached(func): @functools.wraps(func) def wrap(self, *args, **kwargs): if not self.attached: raise PositionError('Not attached to any process.') return func(self, *args, **kwargs) return wrap
Decorator to prevent commands from being used when not attached.
def print_sorted_counter(counter, tab=1):
    """Print all elements of a counter in descending count order.

    Each line is indented with *tab* tab characters and formatted as
    ``<key> - <count>``.

    :param counter: mapping of key -> count (e.g. collections.Counter).
    :param tab: number of leading tab characters per line.
    """
    indent = '\t' * tab
    # print() replaces the Python-2-only print statement, which is a
    # syntax error under Python 3 (used elsewhere in this codebase).
    for key, count in sorted(counter.items(), key=itemgetter(1), reverse=True):
        print("{0}{1} - {2}".format(indent, key, count))
print all elements of a counter in descending order
def get_instance(self, payload): return CertificateInstance(self._version, payload, fleet_sid=self._solution['fleet_sid'], )
Build an instance of CertificateInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateInstance :rtype: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateInstance
def cmd_sync(self, low): kwargs = copy.deepcopy(low) for ignore in ['tgt', 'fun', 'arg', 'timeout', 'tgt_type', 'kwarg']: if ignore in kwargs: del kwargs[ignore] return self.cmd(low['tgt'], low['fun'], low.get('arg', [])...
Execute a salt-ssh call synchronously. .. versionadded:: 2015.5.0 WARNING: Eauth is **NOT** respected .. code-block:: python client.cmd_sync({ 'tgt': 'silver', 'fun': 'test.ping', 'arg': (), 'tgt_type'='glob', ...
def attach_image(field, nested_fields, page, record_keeper=None): if (field in nested_fields) and nested_fields[field]: foreign_image_id = nested_fields[field]["id"] if record_keeper: try: local_image_id = record_keeper.get_local_image( foreign_image_i...
Returns a function that attaches an image to the page if it exists. Currently assumes that images have already been imported and info has been stored in record_keeper
def singleOrPair(obj):
    """Check whether an object is 'Single', 'Pair', or 'Neither'.

    All pairs are singles, so the function really detects whether an
    object is only a single or also a pair.

    Args:
        obj (object): Literally anything.

    Returns:
        str: 'Single', 'Pair', or 'Neither'.
    """
    # Fewer than three entries in the MRO means a direct object subclass.
    if len(list(obj.__class__.__mro__)) <= 2:
        return 'Neither'
    if ancestorJr(obj) is Pair:
        return 'Pair'
    if ancestor(obj) is Single:
        return 'Single'
    return 'Neither'
Check whether an object is single or pair or neither. Of course, all pairs are single, so what the function is really detecting is whether an object is only single or at the same time a pair. Args: obj (object): Literally anything. Returns: str: 'Single', or 'Pair', or 'Neither'
def version(self, context=None): if self.replaces_scope and self.replaces_name: if context: old_opts = context.options.for_scope(self.replaces_scope) if old_opts.get(self.replaces_name) and not old_opts.is_default(self.replaces_name): return old_opts.get(self.replaces_name) els...
Returns the version of the specified binary tool. If replaces_scope and replaces_name are defined, then the caller must pass in a context, otherwise no context should be passed. # TODO: Once we're migrated, get rid of the context arg. :API: public
def apply_single_tag_set(tag_set, selection): def tags_match(server_tags): for key, value in tag_set.items(): if key not in server_tags or server_tags[key] != value: return False return True return selection.with_server_descriptions( [s for s in selection.serv...
All servers matching one tag set. A tag set is a dict. A server matches if its tags are a superset: A server tagged {'a': '1', 'b': '2'} matches the tag set {'a': '1'}. The empty tag set {} matches any server.
def load_profiles(self, overwrite=False): for profile in self.minimum_needs.get_profiles(overwrite): self.profile_combo.addItem(profile) minimum_needs = self.minimum_needs.get_full_needs() self.profile_combo.setCurrentIndex( self.profile_combo.findText(minimum_needs['prof...
Load the profiles into the dropdown list. :param overwrite: If we overwrite existing profiles from the plugin. :type overwrite: bool
def show_detailed_monitoring(name=None, instance_id=None, call=None, quiet=False): if call != 'action': raise SaltCloudSystemExit( 'The show_detailed_monitoring action must be called with -a or --action.' ) location = get_location() if six.text_type(name).startswith('i-') and (le...
Show the details from EC2 regarding cloudwatch detailed monitoring.
def _wait_output(popen, is_slow): proc = Process(popen.pid) try: proc.wait(settings.wait_slow_command if is_slow else settings.wait_command) return True except TimeoutExpired: for child in proc.children(recursive=True): _kill_process(child) _kill...
Returns `True` if we can get output of the command in the `settings.wait_command` time. Command will be killed if it wasn't finished in the time. :type popen: Popen :rtype: bool
def display(self): w, h = (0, 0) for line in self.shell('dumpsys', 'display').splitlines(): m = _DISPLAY_RE.search(line, 0) if not m: continue w = int(m.group('width')) h = int(m.group('height')) o = int(m.group('orientation')) ...
Return device width, height, rotation
def constructRows(self, items): rows = [] for item in items: row = dict((colname, col.extractValue(self, item)) for (colname, col) in self.columns.iteritems()) link = self.linkToItem(item) if link is not None: row[u'__id__'] = li...
Build row objects that are serializable using Athena for sending to the client. @param items: an iterable of objects compatible with my columns' C{extractValue} methods. @return: a list of dictionaries, where each dictionary has a string key for each column name in my list of c...
def get_command_templates(command_tokens, file_tokens=[], path_tokens=[], job_options=[]): files = get_files(file_tokens) paths = get_paths(path_tokens) job_options = get_options(job_options) templates = _get_command_templates(command_tokens, files, paths, job_options) for command_templa...
Given a list of tokens from the grammar, return a list of commands.
def _generate_default_grp_constraints(roles, network_constraints): default_delay = network_constraints.get('default_delay') default_rate = network_constraints.get('default_rate') default_loss = network_constraints.get('default_loss', 0) except_groups = network_constraints.get('except', []) grps = ne...
Generate default symetric grp constraints.
def run(host='0.0.0.0', port=5000, reload=True, debug=True): from werkzeug.serving import run_simple app = bootstrap.get_app() return run_simple( hostname=host, port=port, application=app, use_reloader=reload, use_debugger=debug, )
Run development server
def _highlight_lines(self, tokensource): hls = self.hl_lines for i, (t, value) in enumerate(tokensource): if t != 1: yield t, value if i + 1 in hls: if self.noclasses: style = '' if self.style.highlight_color...
Highlighted the lines specified in the `hl_lines` option by post-processing the token stream coming from `_format_lines`.
def smooth_angle_channels(self, channels): for vertex in self.vertices: for col in vertex.meta['rot_ind']: if col: for k in range(1, channels.shape[0]): diff=channels[k, col]-channels[k-1, col] if abs(diff+360.)<abs(diff...
Remove discontinuities in angle channels so that they don't cause artifacts in algorithms that rely on the smoothness of the functions.
def finalize_sv(orig_vcf, data, items): paired = vcfutils.get_paired(items) if paired: sample_vcf = orig_vcf if paired.tumor_name == dd.get_sample_name(data) else None else: sample_vcf = "%s-%s.vcf.gz" % (utils.splitext_plus(orig_vcf)[0], dd.get_sample_name(data)) sample_vcf = vcfuti...
Finalize structural variants, adding effects and splitting if needed.
def _container_blacklist(self): if self.__container_blacklist is None: self.__container_blacklist = \ set(self.CLOUD_BROWSER_CONTAINER_BLACKLIST or []) return self.__container_blacklist
Container blacklist.
def replace_uuid_w_names(self, resp):
    """Rename UUID column labels in ``resp.df`` to human-readable point names.

    :param resp: query response object exposing ``context`` and a
        pandas-like ``df`` whose columns are UUIDs
        # assumes a Brick/pymortar-style response -- TODO confirm
    :return: the same ``resp``, with ``resp.df`` columns renamed in place.
    """
    # get_point_name presumably runs a "?point" lookup against resp.context
    # and returns a table whose "?point" column maps UUID -> name; verify
    # against that method's implementation.
    col_mapper = self.get_point_name(resp.context)["?point"].to_dict()
    resp.df.rename(columns=col_mapper, inplace=True)
    return resp
Replace the uuid's with names. Parameters ---------- resp : ??? ??? Returns ------- ??? ???
def initialize_page(title, style, script, header=None): page = markup.page(mode="strict_html") page._escape = False page.init(title=title, css=style, script=script, header=header) return page
A function that returns a markup.py page object with the required html header.
def vertex_fingerprints(self):
    """A fingerprint for each vertex.

    The result is invariant under permutation of the vertex indexes;
    symmetrically equivalent vertices get the same fingerprint.
    """
    vertex_strings = [self.get_vertex_string(i) for i in range(self.num_vertices)]
    edge_strings = [self.get_edge_string(i) for i in range(self.num_edges)]
    return self.get_vertex_fingerprints(vertex_strings, edge_strings)
A fingerprint for each vertex The result is invariant under permutation of the vertex indexes. Vertices that are symmetrically equivalent will get the same fingerprint, e.g. the hydrogens in methane would get the same fingerprint.
def first(sequence, message=None):
    """Return the first item in *sequence*.

    :raises ValueError: with *message* (or a default including the
        sequence) when the sequence is empty.
    """
    for item in sequence:
        return item
    raise ValueError(message or ('Sequence is empty: %s' % sequence))
The first item in that sequence If there aren't any, raise a ValueError with that message
def list_suites(suitedir="./testcases/suites", cloud=False): suites = [] suites.extend(TestSuite.get_suite_files(suitedir)) if cloud: names = cloud.get_campaign_names() if names: suites.append("------------------------------------") suites....
Static method for listing suites from both local source and cloud. Uses PrettyTable to generate the table. :param suitedir: Local directory for suites. :param cloud: cloud module :return: PrettyTable object or None if no test cases were found