code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def DualDBSystemCronJob(legacy_name=None, stateful=False): def Decorator(cls): if not legacy_name: raise ValueError("legacy_name has to be provided") if stateful: aff4_base_cls = StatefulSystemCronFlow else: aff4_base_cls = SystemCronFlow if issubclass(cls, cronjobs.SystemCronJobBase...
Decorator that creates AFF4 and RELDB cronjobs from a given mixin.
def network_info(): def extract(host, family): return socket.getaddrinfo(host, None, family)[0][4][0] host = socket.gethostname() response = { 'hostname': host, 'ipv4': None, 'ipv6': None } with suppress(IndexError, socket.gaierror): response['ipv4'] = extract...
Returns hostname, ipv4 and ipv6.
def add_hbar_widget(self, ref, x=1, y=1, length=10):
    """Return the horizontal bar widget registered under *ref*, creating it on first use.

    :param ref: key identifying the widget in ``self.widgets``
    :param x: column position for a newly created widget
    :param y: row position for a newly created widget
    :param length: bar length for a newly created widget
    :return: the (possibly pre-existing) HBarWidget for *ref*
    """
    try:
        return self.widgets[ref]
    except KeyError:
        created = widgets.HBarWidget(screen=self, ref=ref, x=x, y=y, length=length)
        self.widgets[ref] = created
        return created
Add Horizontal Bar Widget
def serialize_iso(attr, **kwargs): if isinstance(attr, str): attr = isodate.parse_datetime(attr) try: if not attr.tzinfo: _LOGGER.warning( "Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() if u...
Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: SerializationError if format invalid.
def dec(self, key, delta=1):
    """Decrement *key* by *delta*; a missing (or falsy) value is treated as 0,
    so a new key ends up at ``-delta``.

    :param key: the key to decrement
    :param delta: the amount to subtract
    """
    previous = self.get(key) or 0
    self.set(key, previous - delta)
Decrements the value of a key by `delta`. If the key does not yet exist it is initialized with `-delta`. For supporting caches this is an atomic operation. :param key: the key to decrement. :param delta: the delta to subtract.
def dumps(obj, *args, **kwargs):
    """Serialize *obj* to a JSON string, always using ``object2dict`` as the
    fallback serializer for objects json cannot handle natively.

    Any caller-supplied ``default`` keyword is deliberately overridden.
    """
    kwargs.update(default=object2dict)
    return json.dumps(obj, *args, **kwargs)
Serialize an object to a string Basic Usage: >>> import simplekit.objson >>> obj = {'name':'wendy'} >>> print simplekit.objson.dumps(obj) :param obj: the object to dump :param args: Optional arguments that :func:`json.dumps` takes. :param kwargs: Keys arguments that :py:func:`json....
def _datasource_cell(args, cell_body): name = args['name'] paths = args['paths'] data_format = (args['format'] or 'CSV').lower() compressed = args['compressed'] or False record = google.datalab.utils.commands.parse_config( cell_body, google.datalab.utils.commands.notebook_environment(), as_dict=False) ...
Implements the BigQuery datasource cell magic for ipython notebooks. The supported syntax is %%bq datasource --name <var> --paths <url> [--format <CSV|JSON>] <schema> Args: args: the optional arguments following '%%bq datasource' cell_body: the datasource's schema in json/yaml
def _ParseCommon2003CachedEntry(self, value_data, cached_entry_offset): data_type_map = self._GetDataTypeMap( 'appcompatcache_cached_entry_2003_common') try: cached_entry = self._ReadStructureFromByteStream( value_data[cached_entry_offset:], cached_entry_offset, data_type_map) except...
Parses the cached entry structure common for Windows 2003, Vista and 7. Args: value_data (bytes): value data. cached_entry_offset (int): offset of the first cached entry data relative to the start of the value data. Returns: appcompatcache_cached_entry_2003_common: cached entry str...
def add_enclosure(self, left_char, right_char): assert len(left_char) == 1, \ "Parameter left_char must be character not string" assert len(right_char) == 1, \ "Parameter right_char must be character not string" self._enclosure.add((left_char, right_char)) self._a...
Add a new enclosure pair of characters. The pair is removed when its presence is detected at the beginning and end of a found URL :param str left_char: left character of enclosure pair - e.g. "(" :param str right_char: right character of enclosure pair - e.g. ")"
def sas_logical_jbod_attachments(self):
    """Return the SAS Logical JBOD Attachments client, building it lazily on
    first access and caching it for later calls.

    Returns:
        SasLogicalJbodAttachments:
    """
    if not self.__sas_logical_jbod_attachments:
        client = SasLogicalJbodAttachments(self.__connection)
        self.__sas_logical_jbod_attachments = client
    return self.__sas_logical_jbod_attachments
Gets the SAS Logical JBOD Attachments client. Returns: SasLogicalJbodAttachments:
def get_cluster( self, project_id, region, cluster_name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): if "get_cluster" not in self._inner_api_calls: self._inner_api_calls[...
Gets the resource representation for a cluster in a project. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.ClusterControllerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' ...
def create_org_smarthost(self, orgid, data):
    """Create a smarthost for organization *orgid* with payload *data* via the API."""
    endpoint = ENDPOINTS['orgsmarthosts']['new']
    return self.api_call(endpoint, dict(orgid=orgid), body=data)
Create an organization smarthost
def jsonify(*args, **kwargs):
    """jsonify with support for MongoDB ObjectId: serializes the arguments as a
    dict using MongoJSONEncoder and wraps them in a JSON Response."""
    payload = json.dumps(dict(*args, **kwargs), cls=MongoJSONEncoder)
    return Response(payload, mimetype='application/json')
jsonify with support for MongoDB ObjectId
def _preloop_hook(self) -> None: self._stop_thread = False self._alerter_thread = threading.Thread(name='alerter', target=self._alerter_thread_func) self._alerter_thread.start()
Start the alerter thread
def _CompareFields(field, other_field):
    """Check two ProtoRPC fields for "equality": same constructor attributes
    and same class, rather than the default identity comparison.

    Args:
      field: a ProtoRPC message field.
      other_field: a ProtoRPC message field.

    Returns:
      bool, whether the fields are considered equal.
    """
    same_attrs = _GetFieldAttributes(field) == _GetFieldAttributes(other_field)
    return same_attrs and field.__class__ == other_field.__class__
Checks if two ProtoRPC fields are "equal". Compares the arguments, rather than the id of the elements (which is the default __eq__ behavior) as well as the class of the fields. Args: field: A ProtoRPC message field to be compared. other_field: A ProtoRPC message field to be compared. Returns: Boo...
def session_hook(exception): safeprint( "The resource you are trying to access requires you to " "re-authenticate with specific identities." ) params = exception.raw_json["authorization_parameters"] message = params.get("session_message") if message: safeprint("message: {}".f...
Expects an exception with an authorization_parameters field in its raw_json
def atlasdb_format_query( query, values ):
    """
    Turn a parameterized query into a string for printing, substituting each
    '?' placeholder with its value (string values are single-quoted).
    Useful for debugging; NOT safe to execute against a database.
    """
    # isinstance (vs `type(val) in [...]`) also accepts str/unicode subclasses.
    return "".join(["%s %s" % (frag, "'%s'" % val if isinstance(val, (str, unicode)) else val)
                    for (frag, val) in zip(query.split("?"), values + ("",))])
Turn a query into a string for printing. Useful for debugging.
def get_unread_message_count_between(parser, token): try: tag_name, arg = token.contents.split(None, 1) except ValueError: raise template.TemplateSyntaxError("%s tag requires arguments" % token.contents.split()[0]) m = re.search(r'(.*?) and (.*?) as (\w+)', arg) if not m: raise t...
Returns the unread message count between two users. Syntax:: {% get_unread_message_count_between [user] and [user] as [var_name] %} Example usage:: {% get_unread_message_count_between funky and wunki as message_count %}
def data(self, data): if self.state == STATE_SOURCE_ID: self.context.audit_record.source_id = int(data) elif self.state == STATE_DATETIME: dt = datetime.datetime.strptime(data, "%Y-%m-%dT%H:%M:%S") self.get_parent_element().datetimestamp = dt elif self.state =...
Called for text between tags
def _tristate_parent(self, item): self.change_state(item, "tristate") parent = self.parent(item) if parent: self._tristate_parent(parent)
Put the box of item in tristate and change the state of the boxes of item's ancestors accordingly.
def get_picture(self, login=None, **kwargs):
    """Fetch a user's picture.

    :param str login: login of the user to look up; a ``login`` keyword
        argument takes precedence, then the positional value, then the
        session's own login.
    :return: raw response content
    """
    target = kwargs.get('login', login or self._login)
    url = PICTURE_URL.format(login=target)
    return self._request_api(url=url).content
Get a user's picture. :param str login: Login of the user to check :return: JSON
def fa2s2b(fastas):
    """Map each sequence name found in *fastas* to the basename (no directory,
    no extension) of the fasta file it came from."""
    s2b = {}
    for fa in fastas:
        basename = fa.rsplit('/', 1)[-1].rsplit('.', 1)[0]
        for seq in parse_fasta(fa):
            name = seq[0].split('>', 1)[1].split()[0]
            s2b[name] = basename
    return s2b
convert fastas to s2b dictionary
def Execute(self, http): self._Execute(http) for key in self.__request_response_handlers: response = self.__request_response_handlers[key].response callback = self.__request_response_handlers[key].handler exception = None if response.status_code >= 300: ...
Execute all the requests as a single batched HTTP request. Args: http: A httplib2.Http object to be used with the request. Returns: None Raises: BatchError if the response is the wrong format.
def set_env(user, name, value=None): lst = list_tab(user) for env in lst['env']: if name == env['name']: if value != env['value']: rm_env(user, name) jret = set_env(user, name, value) if jret == 'new': return 'updated' ...
Set up an environment variable in the crontab. CLI Example: .. code-block:: bash salt '*' cron.set_env root MAILTO user@example.com
def _import_object(self, path, look_for_cls_method): last_nth = 2 if look_for_cls_method else 1 path = path.split('.') module_path = '.'.join(path[:-last_nth]) class_name = path[-last_nth] module = importlib.import_module(module_path) if look_for_cls_method and path[-last...
Imports the module that contains the referenced method. Args: path: python path of class/function look_for_cls_method (bool): If True, treat the last part of path as class method. Returns: Tuple. (class object, class name, method to be called)
def _get_satellite_tile(self, x_tile, y_tile, z_tile): cache_file = "mapscache/{}.{}.{}.jpg".format(z_tile, x_tile, y_tile) if cache_file not in self._tiles: if not os.path.isfile(cache_file): url = _IMAGE_URL.format(z_tile, x_tile, y_tile, _KEY) data = reques...
Load up a single satellite image tile.
def _send_heartbeat_request(self): if self.coordinator_unknown(): e = Errors.GroupCoordinatorNotAvailableError(self.coordinator_id) return Future().failure(e) elif not self._client.ready(self.coordinator_id, metadata_priority=False): e = Errors.NodeNotReadyError(self....
Send a heartbeat request
def __diff_iterable(self, level, parents_ids=frozenset({})): subscriptable = self.__iterables_subscriptable(level.t1, level.t2) if subscriptable: child_relationship_class = SubscriptableIterableRelationship else: child_relationship_class = NonSubscriptableIterableRelation...
Difference of iterables
def contentsMethod(self, contentFilter): allowedroles = ['Manager', 'LabManager', 'Client', 'LabClerk'] pm = getToolByName(self.context, "portal_membership") member = pm.getAuthenticatedMember() roles = member.getRoles() allowed = [a for a in allowedroles if a in roles] r...
ARReport objects associated to the current Analysis request. If the user is not a Manager or LabManager or Client, no items are displayed.
def _analyze_indexed_fields(indexed_fields): result = {} for field_name in indexed_fields: if not isinstance(field_name, basestring): raise TypeError('Field names must be strings; got %r' % (field_name,)) if '.' not in field_name: if field_name in result: raise ValueError('Duplicate fiel...
Internal helper to check a list of indexed fields. Args: indexed_fields: A list of names, possibly dotted names. (A dotted name is a string containing names separated by dots, e.g. 'foo.bar.baz'. An undotted name is a string containing no dots, e.g. 'foo'.) Returns: A dict whose keys are undotted ...
def get_user(self, username="", ext_collections=False, ext_galleries=False): if not username and self.standard_grant_type == "authorization_code": response = self._req('/user/whoami') u = User() u.from_dict(response) else: if not username: ...
Get user profile information :param username: username to lookup profile of :param ext_collections: Include collection folder info :param ext_galleries: Include gallery folder info
def clearLayout(layout):
    """Remove and schedule deletion of every widget in *layout* — e.g. when
    opening a new file and the view must be emptied."""
    while layout.count():
        item = layout.takeAt(0)
        item.widget().deleteLater()
Removes all widgets in the layout. Useful when opening a new file and everything needs to be cleared.
def get_asset(self): if not bool(self._my_map['assetId']): raise errors.IllegalState('asset empty') mgr = self._get_provider_manager('REPOSITORY') if not mgr.supports_asset_lookup(): raise errors.OperationFailed('Repository does not support Asset lookup') lookup_s...
Gets the ``Asset`` corresponding to this content. return: (osid.repository.Asset) - the asset *compliance: mandatory -- This method must be implemented.*
def entry_breadcrumbs(entry):
    """Build the breadcrumb trail for an Entry: year, month, day, then title."""
    date = entry.publication_date
    if is_aware(date):
        # Normalize aware datetimes to local time before building date crumbs.
        date = localtime(date)
    crumbs = [year_crumb(date), month_crumb(date), day_crumb(date)]
    crumbs.append(Crumb(entry.title))
    return crumbs
Breadcrumbs for an Entry.
def get_action_meanings(self):
    """Return the list of action meanings, ordered by ascending action id."""
    return [meaning for _, meaning in sorted(self._action_meanings.items())]
Return a list of actions meanings.
def get_num_shards(num_samples: int, samples_per_shard: int, min_num_shards: int) -> int:
    """
    Returns the number of shards.

    Uses exact integer ceiling division (``-(-a // b)``) instead of
    ``math.ceil(a / b)`` so that sample counts beyond float precision
    (> 2**53) are still computed correctly.

    :param num_samples: Number of training data samples.
    :param samples_per_shard: Samples per shard.
    :param min_num_shards: Minimum number of shards.
    :return: Number of shards.
    """
    needed = -(-num_samples // samples_per_shard)
    return max(needed, min_num_shards)
Returns the number of shards. :param num_samples: Number of training data samples. :param samples_per_shard: Samples per shard. :param min_num_shards: Minimum number of shards. :return: Number of shards.
def get_texts_box(texts, fs):
    """Approximate the bounds of multiple texts at font size *fs* as
    ``(fs, text_len(longest, fs))`` — presumably (height, width); confirm
    against ``text_len``."""
    longest = max(len(t) for t in texts)
    return (fs, text_len(longest, fs))
Approximation of multiple texts bounds
def _read_response(self, may_block=False): res = self._waitfor_set(yubikey_defs.RESP_PENDING_FLAG, may_block)[:7] while True: this = self._read() flags = yubico_util.ord_byte(this[7]) if flags & yubikey_defs.RESP_PENDING_FLAG: seq = flags & 0b00011111 ...
Wait for a response to become available, and read it.
def lookup(self, plain_src_ns): if plain_src_ns in self._ex_namespace_set: return None if not self._regex_map and not self._plain: return Namespace( dest_name=plain_src_ns, source_name=plain_src_ns, include_fields=self._include_fiel...
Given a plain source namespace, return the corresponding Namespace object, or None if it is not included.
def scale_and_crop_with_ranges( im, size, size_range=None, crop=False, upscale=False, zoom=None, target=None, **kwargs): min_width, min_height = size if min_width == 0 or min_height == 0 or not size_range: return scale_and_crop(im, size, crop, upscale, zoom, target, **kwargs) max_width = min_wid...
An easy_thumbnails processor that accepts a `size_range` tuple, which indicates that one or both dimensions can give by a number of pixels in order to minimize cropping.
def lines2file(lines, filename, encoding='utf-8'):
    """Write each item of *lines* to *filename*, one per line, encoded as
    *encoding* (items are expected to already be strings)."""
    with codecs.open(filename, "w", encoding=encoding) as handle:
        for item in lines:
            handle.write(item)
            handle.write("\n")
write json stream, write lines too
def swap_dims(self, dims_dict):
    """Return a new DataArray with dimensions swapped according to ``dims_dict``.

    Parameters
    ----------
    dims_dict : dict-like
        Maps current dimension names to new names; each value must already
        be a coordinate on this array.
    """
    # Round-trip through a temporary Dataset, which implements swap_dims.
    ds = self._to_temp_dataset().swap_dims(dims_dict)
    return self._from_temp_dataset(ds)
Returns a new DataArray with swapped dimensions. Parameters ---------- dims_dict : dict-like Dictionary whose keys are current dimension names and whose values are new names. Each value must already be a coordinate on this array. Returns ----...
def keep_only_sticked_and_selected_tabs(self): if not global_gui_config.get_config_value('KEEP_ONLY_STICKY_STATES_OPEN', True): return page_id = self.view.notebook.get_current_page() if page_id == -1: return page = self.view.notebook.get_nth_page(page_id) ...
Close all tabs, except the currently active one and all sticked ones
def createDataChannel(self, label, maxPacketLifeTime=None, maxRetransmits=None, ordered=True, protocol='', negotiated=False, id=None): if maxPacketLifeTime is not None and maxRetransmits is not None: raise ValueError('Cannot specify both maxPacketLifeTime and maxRetransmits...
Create a data channel with the given label. :rtype: :class:`RTCDataChannel`
def create_module(sym, data_shapes, label_shapes, label_names, gpus=''): if gpus == '': devices = mx.cpu() else: devices = [mx.gpu(int(i)) for i in gpus.split(',')] data_names = [data_shape[0] for data_shape in data_shapes] mod = mx.mod.Module( symbol=sym, data_names=data...
Creates a new MXNet module. Parameters ---------- sym : Symbol An MXNet symbol. input_shape: tuple The shape of the input data in the form of (batch_size, channels, height, width) files: list of strings List of URLs pertaining to files that need to be downloaded in order t...
def visit_slice(self, node): lower = node.lower.accept(self) if node.lower else "" upper = node.upper.accept(self) if node.upper else "" step = node.step.accept(self) if node.step else "" if step: return "%s:%s:%s" % (lower, upper, step) return "%s:%s" % (lower, upper...
return an astroid.Slice node as string
async def get_message(self, ignore_subscribe_messages=False, timeout=0):
    """Return the next message if one is available within *timeout* seconds,
    otherwise None.  *timeout* may be a float."""
    raw = await self.parse_response(block=False, timeout=timeout)
    if not raw:
        return None
    return self.handle_message(raw, ignore_subscribe_messages)
Get the next message if one is available, otherwise None. If timeout is specified, the system will wait for `timeout` seconds before returning. Timeout should be specified as a floating point number.
def revert(self, unchanged_only=False): if self._reverted: raise errors.ChangelistError('This changelist has been reverted') change = self._change if self._change == 0: change = 'default' cmd = ['revert', '-c', str(change)] if unchanged_only: c...
Revert all files in this changelist :param unchanged_only: Only revert unchanged files :type unchanged_only: bool :raises: :class:`.ChangelistError`
def estimate(coll, filter={}, sample=1): total = coll.estimated_document_count() if not filter and sample == 1: return total if sample <= 1: sample *= total pipeline = list(builtins.filter(None, [ {'$sample': {'size': sample}} if sample < total else {}, {'$match': filter}...
Estimate the number of documents in the collection matching the filter. Sample may be a fixed number of documents to sample or a percentage of the total collection size. >>> coll = getfixture('bulky_collection') >>> estimate(coll) 100 >>> query = {"val": {"$gte": 50}} >>> val = estimat...
def variance(self, param):
    """Return the fitted variance of *param* (the diagonal entry of the
    covariance matrix), or None when no covariance matrix is available.

    :param param: ``Parameter`` instance.
    :return: variance of ``param`` or None.
    """
    idx = self.model.params.index(param)
    try:
        return self.covariance_matrix[idx, idx]
    except TypeError:
        # covariance_matrix is None (fit did not produce one).
        return None
Return the variance in a given parameter as found by the fit. :param param: ``Parameter`` Instance. :return: Variance of ``param``.
def _resize(self, init=False): col, row = self._selection_to_col_row(self.selection) if not (self.startPos <= row <= self.startPos + self.list_maxY - 1): while row > self.startPos: self.startPos += 1 while row < self.startPos + self.list_maxY - 1: ...
If the selection is at the end of the list, try to scroll down
def set_hierarchy(self, hierarchy):
    """Set an alternative sonority hierarchy: map each phoneme to the index of
    its tier in *hierarchy* (ordered by decreasing consonantality).  Remember
    to also call set_vowels so nuclei are identified correctly."""
    self.hierarchy = {phoneme: rank
                      for rank, tier in enumerate(hierarchy)
                      for phoneme in tier}
Sets an alternative sonority hierarchy, note that you will also need to specify the vowelset with the set_vowels, in order for the module to correctly identify each nucleus. The order of the phonemes defined is by decreased consonantality Example: >>> s = Syllabifier() ...
def open(self):
    """Open an existing cache graph and register its namespaces.

    Sets ``self.is_open`` on success.  Raises InvalidCacheException on any
    failure; never creates a new cache (``create=False``).
    """
    try:
        self.graph.open(self.cache_uri, create=False)
        self._add_namespaces(self.graph)
        self.is_open = True
    except Exception:
        # Any failure (missing store, corrupt data, ...) is reported uniformly.
        raise InvalidCacheException('The cache is invalid or not created')
Opens an existing cache.
def execute(self, env, args): task_name = args.task_name clone_task = args.clone_task if not env.task.create(task_name, clone_task): raise errors.FocusError(u'Could not create task "{0}"' .format(task_name)) if not args.skip_edit: ...
Creates a new task. `env` Runtime ``Environment`` instance. `args` Arguments object from arg parser.
def _find_elements(self, result, elements): element_mapping = {} result = StringIO.StringIO(result) for _, e in ET.iterparse(result, events=('end',)): if not elements: break if e.tag in elements: element_mapping[e.tag] = e.text elements.remove(e.tag) return element_ma...
Find interesting elements from XML. This function tries to only look for specified elements without parsing the entire XML. The specified elements is better located near the beginning. Args: result: response XML. elements: a set of interesting element tags. Returns: A dict from ...
def hook_alias(self, alias, model_obj=None): try: search_alias = self._alias_hooks[alias] except KeyError: raise AttributeError('Could not find search alias named {}. Is this alias defined in BUNGIESEARCH["ALIASES"]?'.format(alias)) else: if search_alias._appl...
Returns the alias function, if it exists and if it can be applied to this model.
def get_spectra_id(self, fn_id, retention_time=None, scan_nr=None): cursor = self.get_cursor() sql = 'SELECT spectra_id FROM mzml WHERE mzmlfile_id=? ' values = [fn_id] if retention_time is not None: sql = '{0} AND retention_time=?'.format(sql) values.append(reten...
Returns spectra id for spectra filename and retention time
def coverage(self):
    """Get the fraction of this title sequence that is matched by its reads.

    @return: The C{float} fraction of the title sequence matched by its reads.
    """
    # Accumulate every HSP's subject interval, then let ReadIntervals
    # compute the covered fraction of the full subject length.
    intervals = ReadIntervals(self.subjectLength)
    for hsp in self.hsps():
        intervals.add(hsp.subjectStart, hsp.subjectEnd)
    return intervals.coverage()
Get the fraction of this title sequence that is matched by its reads. @return: The C{float} fraction of the title sequence matched by its reads.
def manages(self, cfg_part): logger.debug("Do I (%s/%s) manage: %s, my managed configuration(s): %s", self.type, self.name, cfg_part, self.cfg_managed) if not self.cfg_managed: logger.info("I (%s/%s) do not manage (yet) any configuration!", self.type, self.name) ...
Tell if the satellite is managing this configuration part The managed configuration is formed as a dictionary indexed on the link instance_id: { u'SchedulerLink_1': { u'hash': u'4d08630a3483e1eac7898e7a721bd5d7768c8320', u'push_flavor': u'4d08630a3483e1eac78...
def Scan(self, scan_context, auto_recurse=True, scan_path_spec=None): if not scan_context: raise ValueError('Invalid scan context.') scan_context.updated = False if scan_path_spec: scan_node = scan_context.GetScanNode(scan_path_spec) else: scan_node = scan_context.GetUnscannedScanNode(...
Scans for supported formats. Args: scan_context (SourceScannerContext): source scanner context. auto_recurse (Optional[bool]): True if the scan should automatically recurse as far as possible. scan_path_spec (Optional[PathSpec]): path specification to indicate where the source...
def bargraph(data, max_key_width=30): lines = [] max_length = min(max(len(key) for key in data.keys()), max_key_width) max_val = max(data.values()) max_val_length = max( len(_style_value(val)) for val in data.values()) term_width = get_terminal_size()[0] max_bar_width = term_widt...
Return a bar graph as a string, given a dictionary of data.
def gen500(request, baseURI, project=None): return HttpResponseServerError( render_to_response('plugIt/500.html', { 'context': { 'ebuio_baseUrl': baseURI, 'ebuio_userMode': request.session.get('plugit-standalone-usermode', 'ano'), }, 'proje...
Return a 500 error
def all_hosts(self):
    """Set of all hosts, passives and arbiters known to this server, each
    normalized via ``common.clean_node``."""
    doc = self._doc
    nodes = itertools.chain(doc.get('hosts', []),
                            doc.get('passives', []),
                            doc.get('arbiters', []))
    return set(imap(common.clean_node, nodes))
List of hosts, passives, and arbiters known to this server.
def home_win_percentage(self):
    """Fraction (0.0-1.0, rounded to 3 places) of completed games the home
    team has won; 0.0 when no games have been played."""
    wins = float(self.home_wins)
    games = float(self.home_wins + self.home_losses)
    try:
        return round(wins / games, 3)
    except ZeroDivisionError:
        return 0.0
Returns a ``float`` of the percentage of games the home team has won after the conclusion of the game. Percentage ranges from 0-1.
def send_rally_points(self):
    """Upload all rally points from the rally loader: first set RALLY_TOTAL,
    then send each point in order."""
    count = self.rallyloader.rally_count()
    self.mav_param.mavset(self.master, 'RALLY_TOTAL', count, 3)
    for index in range(count):
        self.send_rally_point(index)
send rally points from rallyloader
def num_throats(self, labels='all', mode='union'):
    r"""Return the number of throats matching *labels*, combined per *mode*.

    Parameters
    ----------
    labels : list of strings, optional
        Throat labels to include in the count (default: all throats).
    mode : string, optional
        How multiple labels are combined (default: 'union').
    """
    throats = self._get_indices(labels=labels, mode=mode, element='throat')
    return sp.shape(throats)[0]
r""" Return the number of throats of the specified labels Parameters ---------- labels : list of strings, optional The throat labels that should be included in the count. If not supplied, all throats are counted. mode : string, optional Speci...
def pull(self, platform=None):
    """Pull the image digest.

    Args:
        platform (str): The platform to pull the image for. Default: ``None``

    Returns:
        (:py:class:`Image`): A reference to the pulled image.
    """
    # Discard any tag embedded in the stored name; we pull by digest (self.id).
    repository, _ = parse_repository_tag(self.image_name)
    return self.collection.pull(repository, tag=self.id, platform=platform)
Pull the image digest. Args: platform (str): The platform to pull the image for. Default: ``None`` Returns: (:py:class:`Image`): A reference to the pulled image.
def run_band_structure(self, paths, with_eigenvectors=False, with_group_velocities=False, is_band_connection=False, path_connections=None, labels=None, ...
Run phonon band structure calculation. Parameters ---------- paths : List of array_like Sets of qpoints that can be passed to phonopy.set_band_structure(). Numbers of qpoints can be different. shape of each array_like : (qpoints, 3) with_eigenvectors ...
def githubWebHookConsumer(self, *args, **kwargs):
    """Consume a GitHub WebHook event and publish it via pulse.

    Thin wrapper that forwards all arguments to the generated API call
    described by ``self.funcinfo["githubWebHookConsumer"]``.

    This method is ``experimental``.
    """
    return self._makeApiCall(self.funcinfo["githubWebHookConsumer"], *args, **kwargs)
Consume GitHub WebHook Capture a GitHub event and publish it via pulse, if it's a push, release or pull request. This method is ``experimental``
def _forgiving_issubclass(derived_class, base_class):
    """Forgiving ``issubclass``: returns False instead of raising when either
    argument is not of class type."""
    both_classes = type(derived_class) is ClassType and type(base_class) is ClassType
    return both_classes and issubclass(derived_class, base_class)
Forgiving version of ``issubclass`` Does not throw any exception when arguments are not of class type
def readTempC(self):
    """Return the thermocouple temperature in degrees Celsius, or NaN when
    the fault bits are set."""
    raw = self._read32()
    # Low three bits are fault flags; any set means the reading is invalid.
    if raw & 0x7:
        return float('NaN')
    # Temperature is in the top 14 bits, 0.25 degC per LSB.
    value = raw >> 18
    if raw & 0x80000000:
        # Sign bit set: apply two's-complement adjustment for negative temps.
        value -= 16384
    return value * 0.25
Return the thermocouple temperature value in degrees celsius.
def transform_y(self, tfms:Optional[Tuple[TfmList,TfmList]]=(None,None), **kwargs): "Set `tfms` to be applied to the ys of the train and validation set." if not tfms: tfms=(None,None) self.train.transform_y(tfms[0], **kwargs) self.valid.transform_y(tfms[1], **kwargs) if self.test...
Set `tfms` to be applied to the ys of the train and validation set.
def lonlat2xyz(lon, lat):
    """Convert geographic lon/lat in degrees to unit-sphere cartesian (x, y, z)."""
    lon_r = xu.deg2rad(lon)
    lat_r = xu.deg2rad(lat)
    cos_lat = xu.cos(lat_r)
    return cos_lat * xu.cos(lon_r), cos_lat * xu.sin(lon_r), xu.sin(lat_r)
Convert lon lat to cartesian.
def _get_value(scikit_value, mode = 'regressor', scaling = 1.0, n_classes = 2, tree_index = 0): if mode == 'regressor': return scikit_value[0] * scaling if n_classes == 2: if len(scikit_value[0]) != 1: value = scikit_value[0][1] * scaling / scikit_value[0].sum() else: ...
Get the right value from the scikit-tree
def re_thresh_csv(path, old_thresh, new_thresh, chan_thresh): from eqcorrscan.core.match_filter import read_detections warnings.warn('Legacy function, please use ' 'eqcorrscan.core.match_filter.Party.rethreshold.') old_detections = read_detections(path) old_thresh = float(old_thresh) ...
Remove detections by changing the threshold. Can only be done to remove detection by increasing threshold, threshold lowering will have no effect. :type path: str :param path: Path to the .csv detection file :type old_thresh: float :param old_thresh: Old threshold MAD multiplier :type new_...
def ReadHuntOutputPluginsStates(self, hunt_id, cursor=None): columns = ", ".join(_HUNT_OUTPUT_PLUGINS_STATES_COLUMNS) query = ("SELECT {columns} FROM hunt_output_plugins_states " "WHERE hunt_id = %s".format(columns=columns)) rows_returned = cursor.execute(query, [db_utils.HuntIDToInt(hunt_id)])...
Reads all hunt output plugins states of a given hunt.
def ConsumeRange(self, start, end): old = self.CurrentRange() if old is None: return if old.start > start: if old.start < end: raise RuntimeError('Block end too high.') return if old.start < start: raise RuntimeError('Block start too high.') if old.end == end: d...
Consumes an entire range, or part thereof. If the finger has no ranges left, or the curent range start is higher than the end of the consumed block, nothing happens. Otherwise, the current range is adjusted for the consumed block, or removed, if the entire block is consumed. For things to work, the con...
def read_response(self): response = self._read_response() frame, data = nsq.unpack_response(response) self.last_response = time.time() if frame not in self._frame_handlers: raise errors.NSQFrameError('unknown frame {}'.format(frame)) frame_handler = self._frame_handle...
Read an individual response from nsqd. :returns: tuple of the frame type and the processed data.
def _toplevel(cls):
    """Return the top level of this closure chain: the direct ClosureModel
    subclass among cls's parents, or cls itself when there is none.

    E.g. with C < B < A < ClosureModel, C._toplevel() returns A.
    """
    candidates = set(ClosureModel.__subclasses__()) & set(cls._meta.get_parent_list())
    for candidate in candidates:
        return candidate
    return cls
Find the top level of the chain we're in. For example, if we have: C inheriting from B inheriting from A inheriting from ClosureModel C._toplevel() will return A.
def findContours(*args, **kwargs): if cv2.__version__.startswith('4'): contours, hierarchy = cv2.findContours(*args, **kwargs) elif cv2.__version__.startswith('3'): _, contours, hierarchy = cv2.findContours(*args, **kwargs) else: raise AssertionError( 'cv2 must be either ...
Wraps cv2.findContours to maintain compatibility between versions 3 and 4 Returns: contours, hierarchy
def create_tag(self, version, params):
    """Create a VCS tag for *version*.

    :param version: tag name/version string
    :param params: extra parameters passed to the tag command
    :raises errors.VCSError: when the tag command exits non-zero
    """
    command = self._command.tag(version, params)
    (code, stdout, stderr) = self._exec(command)
    if code:
        message = 'Can\'t create VCS tag %s. Process exited with code %d and message: %s' % (
            version, code, stderr or stdout)
        raise errors.VCSError(message)
Create VCS tag :param version: :param params: :return:
def generate_keypair(keypair_file): from Crypto.PublicKey import RSA key = RSA.generate(2048) keypair_dir = os.path.dirname(keypair_file) if not os.path.exists(keypair_dir): os.makedirs(keypair_dir) with open(keypair_file, 'wb') as filey: filey.write(key.exportKey('PEM')) return ...
generate_keypair is used by some of the helpers that need a keypair. The function should be used if the client doesn't have the attribute self.key. We generate the key and return it. We use pycryptodome (3.7.2) Parameters ========= keypair_file: fullpath to where to s...
def get_cache(self): if self.no_cache: self.pkg_list = self.list_packages() return if not os.path.exists(self.yolk_dir): os.mkdir(self.yolk_dir) if os.path.exists(self.pkg_cache_file): self.pkg_list = self.query_cached_package_list() else: ...
Get a package name list from disk cache or PyPI
def validate_arrangement_version(self): arrangement_version = self.build_kwargs['arrangement_version'] if arrangement_version is None: return if arrangement_version <= 5: self.log.warning("arrangement_version <= 5 is deprecated and will be removed" ...
Validate if the arrangement_version is supported This is for autorebuilds to fail early otherwise they may failed on workers because of osbs-client validation checks. Method should be called after self.adjust_build_kwargs Shows a warning when version is deprecated :raises Val...
def tabular(client, datasets): from renku.models._tabulate import tabulate click.echo( tabulate( datasets, headers=OrderedDict(( ('short_id', 'id'), ('name', None), ('created', None), ('authors_csv', 'authors'), ...
Format datasets with a tabular output.
def to_text(self, fn:str):
    "Save `self.items` to `fn` in `self.path`."
    lines = [f'{item}\n' for item in self._relative_item_paths()]
    with open(self.path/fn, 'w') as out:
        out.writelines(lines)
Save `self.items` to `fn` in `self.path`.
def label(self):
    """Provide access to the notification label.

    Returns:
        str: The notification label

    The attribute lives in browser chrome, so the Selenium context is
    switched to chrome for the read.
    """
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        return self.root.get_attribute("label")
Provide access to the notification label. Returns: str: The notification label
def putenv(key, value): key = path2fsn(key) value = path2fsn(value) if is_win and PY2: try: set_windows_env_var(key, value) except WindowsError: raise ValueError else: try: os.putenv(key, value) except OSError: raise ValueEr...
Like `os.putenv` but takes unicode under Windows + Python 2 Args: key (pathlike): The env var to get value (pathlike): The value to set Raises: ValueError
def app_token(vault_client, app_id, user_id):
    """Authenticate against the vault app-id backend and return the client
    token; raise AomiCredentials when the response lacks one."""
    resp = vault_client.auth_app_id(app_id, user_id)
    if 'auth' in resp and 'client_token' in resp['auth']:
        return resp['auth']['client_token']
    raise aomi.exceptions.AomiCredentials('invalid apptoken')
Returns a vault token based on the app and user id.
def is_searchable(self):
    """Truthy when this address can be searched by: either raw text is
    present, or the country is valid and any state given is valid too."""
    if self.raw:
        return self.raw
    # Short-circuit: state is only consulted when the country is valid.
    return self.is_valid_country and (not self.state or self.is_valid_state)
A bool value that indicates whether the address is a valid address to search by.
def sigmasq_series(htilde, psd=None, low_frequency_cutoff=None, high_frequency_cutoff=None): htilde = make_frequency_series(htilde) N = (len(htilde)-1) * 2 norm = 4.0 * htilde.delta_f kmin, kmax = get_cutoff_indices(low_frequency_cutoff, high_frequency_cuto...
Return a cumulative sigmasq frequency series. Return a frequency series containing the accumulated power in the input up to that frequency. Parameters ---------- htilde : TimeSeries or FrequencySeries The input vector psd : {None, FrequencySeries}, optional The psd used to weig...
def load(self, config_template, config_file=None): if config_file is None: config_file = config_template config_path = build_config_file_path(config_file) template_path = os.path.join(os.path.dirname(__file__), config_template) self._copy_...
Read the config file if it exists, else read the default config. Creates the user config file if it doesn't exist using the template. :type config_template: str :param config_template: The config template file name. :type config_file: str :param config_file: (Optional) The con...
def delete_event_view(request, id): event = get_object_or_404(Event, id=id) if not request.user.has_admin_permission('events'): raise exceptions.PermissionDenied if request.method == "POST": try: event.delete() messages.success(request, "Successfully deleted event.") ...
Delete event page. You may only delete an event if you were the creator or you are an administrator. Confirmation page if not POST. id: event id
def load(self, env=None):
    """Load the section values of the given environment.

    Falls back to the running-mode environment variable (or the default
    running mode) when *env* is not given.  Logs a warning and returns
    None when the environment is unknown.

    :param env: environment key to load in a coercive manner
    :type env: string
    :rtype: dict
    """
    self._load()
    selected = env or \
        os.environ.get(RUNNING_MODE_ENVKEY, DEFAULT_RUNNING_MODE)
    if selected in self.config:
        return self.config[selected]
    # logging.warn is a deprecated alias for logging.warning.
    logging.warning("Environment '%s' was not found.", selected)
Load a section values of given environment. If nothing to specified, use environmental variable. If unknown environment was specified, warn it on logger. :param env: environment key to load in a coercive manner :type env: string :rtype: dict
def on_click(self, event): button = event["button"] if button == self.button_toggle: self.toggled = True if self.mode == "ip": self.mode = "status" else: self.mode = "ip" elif button == self.button_refresh: self.idle...
Toggle between display modes 'ip' and 'status'
def categorize(func: Union[Callable, Iterable], category: str) -> None:
    """Categorize a function (or every function in an iterable) so the help
    command groups it under *category*.

    :param func: function, or iterable of functions, to categorize
    :param category: category heading to put it under
    """
    targets = func if isinstance(func, Iterable) else (func,)
    for target in targets:
        setattr(target, HELP_CATEGORY, category)
Categorize a function. The help command output will group this function under the specified category heading :param func: function to categorize :param category: category to put it in
def _sign_operation(op): md5 = hashlib.md5() md5.update(op.consumerId.encode('utf-8')) md5.update(b'\x00') md5.update(op.operationName.encode('utf-8')) if op.labels: signing.add_dict_to_hash(md5, encoding.MessageToPyValue(op.labels)) return md5.digest()
Obtains a signature for an operation in a ReportRequest. Args: op (:class:`endpoints_management.gen.servicecontrol_v1_messages.Operation`): an operation used in a `ReportRequest` Returns: string: a unique signature for that operation
def keys(self, section=None):
    """Dict-like keys(): keys of the requested section (falling back to the
    instance's default section), or of the whole config when neither is set."""
    chosen = section or self.section or None
    if chosen:
        return self.config.get(chosen, {}).keys()
    return self.config.keys()
Provide dict like keys method
def add(self, interval, offset): start, stop = self.get_start_stop(interval) if len(self.starts) > 0: if start < self.starts[-1] or offset <= self.offsets[-1][1]: raise ValueError('intervals and offsets must be added in-order') self.offsets[-1][1] = offset ...
The added interval must be overlapping or beyond the last stored interval ie. added in sorted order. :param interval: interval to add :param offset: full virtual offset to add :return:
def always_fail(cls, request) -> [ (200, 'Ok', String), (406, 'Not Acceptable', Void)]: task_id = uuid4().hex.upper()[:5] log.info('Starting always FAILING task {}'.format(task_id)) for i in range(randint(0, MAX_LOOP_DURATION)): yield Respond(406) ...
Perform an always failing task.