code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def getAsWmsDatasetString(self, session): FIRST_VALUE_INDEX = 12 if type(self.raster) != type(None): valueGrassRasterString = self.getAsGrassAsciiGrid(session) values = valueGrassRasterString.split() wmsDatasetString = '' for i in range(FIRST_VALUE_INDEX, ...
Retrieve the WMS Raster as a string in the WMS Dataset format
def delete_tags(self, archive_name, tags):
    """Remove each of *tags* from an archive's tag list, if present.

    Parameters
    ----------
    archive_name : str
        Name of the archive.
    tags : list or tuple of str
        Tags to delete; one occurrence per requested tag is removed.
    """
    remaining = list(self._get_tags(archive_name))
    for unwanted in tags:
        try:
            remaining.remove(unwanted)
        except ValueError:
            # Tag not present; nothing to delete.
            pass
    self._set_tags(archive_name, remaining)
Delete tags from an archive Parameters ---------- archive_name : str Name of archive tags : list or tuple of strings tags to delete from the archive
def add_user(bridge_user):
    """Create the given bridge_user via a POST to the admin endpoint.

    Returns a list of BridgeUser objects (custom fields excluded).
    """
    url = admin_uid_url(None) + ("?%s" % CUSTOM_FIELD)
    body = json.dumps(bridge_user.to_json_post(), separators=(',', ':'))
    resp = post_resource(url, body)
    return _process_json_resp_data(resp, no_custom_fields=True)
Add the bridge_user given Return a list of BridgeUser objects with custom fields
def get(self, path_or_index, default=None):
    """Look up a result entry.

    :param path_or_index: The path (or index) of the result to fetch.
    :param default: Value substituted when the entry is missing.
    :return: ``(error, value)``; when the entry does not exist this is
        ``(err, default)``.
    """
    err, found = self._resolve(path_or_index)
    if err:
        return err, default
    return err, found
Get details about a given result :param path_or_index: The path (or index) of the result to fetch. :param default: If the given result does not exist, return this value instead :return: A tuple of `(error, value)`. If the entry does not exist then `(err, default)` is ret...
def followers_org_count(self):
    """Return the number of followers of the user's organizations."""
    from udata.models import Follow
    total = 0
    for org in self.organizations:
        total += Follow.objects(following=org).count()
    return total
Return the number of followers of user's organizations.
def get(self, sid):
    """Construct a CertificateContext for a certificate.

    :param sid: A string that uniquely identifies the Certificate.
    :rtype: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateContext
    """
    fleet_sid = self._solution['fleet_sid']
    return CertificateContext(self._version, fleet_sid=fleet_sid, sid=sid)
Constructs a CertificateContext :param sid: A string that uniquely identifies the Certificate. :returns: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateContext :rtype: twilio.rest.preview.deployed_devices.fleet.certificate.CertificateContext
def updateSynapses(self, synapses, delta): reached0 = False if delta > 0: for synapse in synapses: self.syns[synapse][2] = newValue = self.syns[synapse][2] + delta if newValue > self.tp.permanenceMax: self.syns[synapse][2] = self.tp.permanenceMax else: for synapse in sy...
Update a set of synapses in the segment. @param tp The owner TP @param synapses List of synapse indices to update @param delta How much to add to each permanence @returns True if synapse reached 0
def guessImageMetadataFromData(img_data): format, width, height = None, None, None img_stream = io.BytesIO(img_data) try: img = PIL.Image.open(img_stream) except IOError: format = imghdr.what(None, h=img_data) format = SUPPORTED_IMG_FORMATS.get(format, None) else: format = im...
Identify an image format and size from its first bytes.
def get_easter_monday(self, year):
    """Return the date of the Monday after Easter for *year*."""
    return self.get_easter_sunday(year) + timedelta(days=1)
Return the date of the monday after easter
def sort_by_efficiency(self, reverse=True):
    """Sort the configurations in place.

    With ``reverse=True`` (the default) the most efficient items come
    first. Returns ``self`` to allow chaining.
    """
    self._confs[:] = sorted(self._confs,
                            key=lambda conf: conf.efficiency,
                            reverse=reverse)
    return self
Sort the configurations in place. items with highest efficiency come first
def on_connection_closed(self, connection, reply_code, reply_text): start_state = self.state self.state = self.STATE_CLOSED if self.on_unavailable: self.on_unavailable(self) self.connection = None self.channel = None if start_state != self.STATE_CLOSING: ...
This method is invoked by pika when the connection to RabbitMQ is closed unexpectedly. Since it is unexpected, we will reconnect to RabbitMQ if it disconnects. :param pika.TornadoConnection connection: Closed connection :param int reply_code: The server provided reply_code if given ...
def _get_selectable(self): cursong = self.loop[self.song][0] if self.dif_song and len(cursong) > 1: s = cursong[0] + cursong[1] else: s = cursong[-1] return s
Used internally to get a group of choosable tracks.
def _get_component_from_result(self, result, lookup): for component in result['address_components']: if lookup['type'] in component['types']: return component.get(lookup['key'], '') return ''
Helper function to get a particular address component from a Google result. Since the address components in results are an array of objects containing a types array, we have to search for a particular component rather than being able to look it up directly. Returns the first match, so this sho...
def set_colors(self, text='black', background='white'):
    """Set the text and background colors of the text area."""
    # The stylesheet selector depends on the underlying widget class.
    widget_class = "QTextEdit" if self._multiline else "QLineEdit"
    style = (widget_class + " {background-color: " + str(background)
             + "; color: " + str(text) + "}")
    self._widget.setStyleSheet(style)
Sets the colors of the text area.
def _extend_object(parent, n, o, otype, fqdn): from inspect import ismodule, isclass pmodule = parent if ismodule(parent) or isclass(parent) else None try: if otype == "methods": setattr(o.__func__, "__acornext__", None) else: setattr(o, "__acornext__", None) ...
Extends the specified object if it needs to be extended. The method attempts to add an attribute to the object; if it fails, a new object is created that inherits all of `o` attributes, but is now a regular object that can have attributes set. Args: parent: has `n` in its `__dict__` attribute. ...
def get_parents_letters(self, goobj):
    """Return the letters of all depth-01 parent GO terms of *goobj*
    (including the term itself, if it is depth-01)."""
    ancestors = set(self.go2parents[goobj.id])
    ancestors.add(goobj.id)
    depth1_terms = ancestors.intersection(self.gos_depth1)
    return [self.goone2ntletter[term].D1 for term in depth1_terms]
Get the letters representing all parent terms which are depth-01 GO terms.
def get_mute(self): params = '<InstanceID>0</InstanceID><Channel>Master</Channel>' res = self.soap_request(URL_CONTROL_DMR, URN_RENDERING_CONTROL, 'GetMute', params) root = ET.fromstring(res) el_mute = root.find('.//CurrentMute') return el_mute.tex...
Return if the TV is muted.
def apply_rules(self, rules, recursive=True): if recursive: new_args = [_apply_rules(arg, rules) for arg in self.args] new_kwargs = { key: _apply_rules(val, rules) for (key, val) in self.kwargs.items()} else: new_args = self.args ...
Rebuild the expression while applying a list of rules The rules are applied against the instantiated expression, and any sub-expressions if `recursive` is True. Rule application is best thought of as a pattern-based substitution. This is different from the *automatic* rules that :meth:`c...
def _pigpio_aio_command(self, cmd, p1, p2,): with (yield from self._lock): data = struct.pack('IIII', cmd, p1, p2, 0) self._loop.sock_sendall(self.s, data) response = yield from self._loop.sock_recv(self.s, 16) _, res = struct.unpack('12sI', response) ...
Runs a pigpio socket command. sl:= command socket and lock. cmd:= the command to be executed. p1:= command parameter 1 (if applicable). p2:= command parameter 2 (if applicable).
def get_repo(self, auth, username, repo_name):
    """Return the repository ``repo_name`` owned by ``username``.

    :param auth.Authentication auth: authentication object
    :param str username: username of the repository owner
    :param str repo_name: name of the repository
    :return: a GogsRepo representation of the repository
    """
    endpoint = "/repos/{u}/{r}".format(u=username, r=repo_name)
    response = self.get(endpoint, auth=auth)
    return GogsRepo.from_json(response.json())
Returns the repository with name ``repo_name`` owned by the user with username ``username``. :param auth.Authentication auth: authentication object :param str username: username of owner of repository :param str repo_name: name of repository :return: a representation of the re...
def sign_data(self, name, hash_input, key_version=None, hash_algorithm="sha2-256", context="", prehashed=False, signature_algorithm="pss", mount_point=DEFAULT_MOUNT_POINT): if hash_algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS: error_msg = 'invalid hash_algorithm ...
Return the cryptographic signature of the given data using the named key and the specified hash algorithm. The key must be of a type that supports signing. Supported methods: POST: /{mount_point}/sign/{name}(/{hash_algorithm}). Produces: 200 application/json :param name: Specifies...
def parameters(self):
    """Return the selection parameters of self as a tuple:
    (block_tl, block_br, rows, cols, cells)."""
    names = ('block_tl', 'block_br', 'rows', 'cols', 'cells')
    return tuple(getattr(self, name) for name in names)
Returns tuple of selection parameters of self (self.block_tl, self.block_br, self.rows, self.cols, self.cells)
def fetch(url, binary, outfile, noprint, rendered): with chrome_context.ChromeContext(binary=binary) as cr: resp = cr.blocking_navigate_and_get_source(url) if rendered: resp['content'] = cr.get_rendered_page_source() resp['binary'] = False resp['mimie'] = 'text/html' if not noprint: if resp['binary'] i...
Fetch a specified URL's content, and output it to the console.
def add_loss(self, loss, name=None, regularization=False, add_summaries=True): _ = name if regularization: self._g.add_to_collection(GraphKeys.REGULARIZATION_LOSSES, loss) tf.add_to_collection(GraphKeys.LOSSES, loss) if add_summaries: self.add_scalar_summary(loss, 'loss') self.add_aver...
Append a loss to the total loss for the network. Args: loss: append this loss operation name: The name for this loss, defaults to loss.op.name regularization: Set to True if this is a regularization loss. add_summaries: Set to True if you want to see scalar and average summary.
def get_product_target_mappings_for_targets(self, targets):
    """Get the product-target associations for the given targets,
    preserving the input order.

    :API: public
    :param targets: The targets to look up products for.
    :returns: The ordered (product, target) tuples.
    """
    return [(product, target)
            for target in targets
            for product in self._products_by_target[target]]
Gets the product-target associations for the given targets, preserving the input order. :API: public :param targets: The targets to lookup products for. :returns: The ordered (product, target) tuples.
def ed25519_private_key_to_string(key):
    """Convert an ed25519 private key to a base64-encoded string.

    Args:
        key (Ed25519PrivateKey): the key to serialize.

    Returns:
        str: the key representation as a str.
    """
    raw = key.private_bytes(
        encoding=serialization.Encoding.Raw,
        format=serialization.PrivateFormat.Raw,
        encryption_algorithm=serialization.NoEncryption(),
    )
    return base64.b64encode(raw, None).decode('utf-8')
Convert an ed25519 private key to a base64-encoded string. Args: key (Ed25519PrivateKey): the key to write to the file. Returns: str: the key representation as a str
def custom_background_code():
    """Background-thread loop: log the current block height every 15s.

    Intended for a daemonized thread, which dies instantly when the
    main thread quits.
    """
    while True:
        chain = Blockchain.Default()
        logger.info("Block %s / %s", str(chain.Height), str(chain.HeaderHeight))
        sleep(15)
Custom code run in a background thread. Prints the current block height. This function is run in a daemonized thread, which means it can be instantly killed at any moment, whenever the main thread quits. If you need more safety, don't use a daemonized thread and handle exiting this thread in another way (...
def build_sequence(self, xs, masks, init, is_left_to_right): states = [] last = init if is_left_to_right: for i, xs_i in enumerate(xs): h = self.build(xs_i, last, masks[i]) states.append(h) last = h else: for i in ra...
Build GRU sequence.
def available_for_entry_point(self, entry_point):
    """Check whether this function may be executed from a request to the
    given entry point."""
    if ALL in (self.entry_point, entry_point):
        return True
    return entry_point in ensure_sequence(self.entry_point)
Check if the current function can be executed from a request to the given entry point
def mean_date(dt_list):
    """Calculate the mean datetime from a list of datetimes.

    :param dt_list: non-empty iterable of datetime objects
    :return: the datetime at the average of the inputs
    :raises ValueError: if dt_list is empty
    """
    dt_list_sort = sorted(dt_list)
    if not dt_list_sort:
        # Fail with a clear error instead of ZeroDivisionError below.
        raise ValueError("mean_date() arg is an empty sequence")
    # Average the offsets relative to the earliest datetime to avoid
    # summing absolute datetimes (which is not defined).
    origin = dt_list_sort[0]
    total = sum((dt - origin for dt in dt_list_sort), timedelta())
    return origin + total / len(dt_list_sort)
Calculate the mean datetime from a datetime list
def correct_transition_matrix(T, reversible=None):
    """Normalize a transition matrix whose rows are almost stochastic.

    Adds mass to each diagonal entry so every row sums to the maximum
    row sum, then rescales, yielding a row-stochastic matrix.

    Parameters
    ----------
    T : (M, M) scipy.sparse matrix
        Matrix to correct.
    reversible : bool, optional
        Reserved for future use.

    Returns
    -------
    (M, M) scipy.sparse matrix
        Row-normalized transition matrix.
    """
    # NOTE: the original body began with a stray bare `r` expression
    # (residue of a stripped r-docstring) that raised NameError; removed.
    row_sums = T.sum(axis=1).A1
    max_sum = np.max(row_sums)
    if max_sum == 0.0:
        # All-zero matrix: avoid division by zero.
        max_sum = 1.0
    return (T + scipy.sparse.diags(-row_sums + max_sum, 0)) / max_sum
r"""Normalize transition matrix Fixes a the row normalization of a transition matrix. To be used with the reversible estimators to fix an almost coverged transition matrix. Parameters ---------- T : (M, M) ndarray matrix to correct reversible : boolean for future use R...
def get_attributes(self):
    """Return the unordered list of attribute names.

    :return: list of strings
    """
    attrs = ['chr', 'start', 'stop']
    if self.strandPos is not None:
        attrs.append('strand')
    if self.otherPos:
        attrs.extend(entry[1] for entry in self.otherPos)
    return attrs
Returns the unordered list of attributes :return: list of strings
def _fetch_messages(self): try: [_, msg] = self.socket.recv_multipart(flags=zmq.NOBLOCK) if Global.CONFIG_MANAGER.tracing_mode: Global.LOGGER.debug("fetched a new message") self.fetched = self.fetched + 1 obj = pickle.loads(msg) self._d...
Get an input message from the socket
def get_tags_of_offer_per_page(self, offer_id, per_page=1000, page=1):
    """Get one page of tags for an offer.

    :param offer_id: the offer id
    :param per_page: how many objects per page (default 1000)
    :param page: which page (default 1)
    :return: list
    """
    query = {'offer_id': offer_id}
    return self._get_resource_per_page(resource=OFFER_TAGS,
                                       per_page=per_page,
                                       page=page,
                                       params=query)
Get tags of offer per page :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :param offer_id: the offer id :return: list
def node_coord_in_direction(tile_id, direction): tile_coord = tile_id_to_coord(tile_id) for node_coord in nodes_touching_tile(tile_id): if tile_node_offset_to_direction(node_coord - tile_coord) == direction: return node_coord raise ValueError('No node found in direction={} at tile_id={}'...
Returns the node coordinate in the given direction at the given tile identifier. :param tile_id: tile identifier, int :param direction: direction, str :return: node coord, int
def insert(self, storagemodel) -> StorageTableModel: modeldefinition = self.getmodeldefinition(storagemodel, True) try: modeldefinition['tableservice'].insert_or_replace_entity(modeldefinition['tablename'], storagemodel.entity()) storagemodel._exists = True except AzureMi...
insert model into storage
def item(p_queue, queue_id, host=None):
    """Construct a path to a queued item."""
    name = valid_name(queue_id)
    if host is None:
        return os.path.join(_path(p_queue, _c.FSQ_QUEUE), name)
    # Host-scoped queues live under the host root for this queue.
    queue_root = _path(host, _c.FSQ_QUEUE, root=hosts(p_queue))
    return os.path.join(queue_root, name)
Construct a path to a queued item
def update(cls, draft_share_invite_api_key_id, status=None, sub_status=None, expiration=None, custom_headers=None): if custom_headers is None: custom_headers = {} api_client = client.ApiClient(cls._get_api_context()) request_map = { cls.FIELD_STATUS: status...
Update a draft share invite. When sending status CANCELLED it is possible to cancel the draft share invite. :type user_id: int :type draft_share_invite_api_key_id: int :param status: The status of the draft share invite. Can be CANCELLED (the user cancels the draft share before ...
def detect(filename, include_confidence=False):
    """Detect the encoding of a file.

    Returns the predicted encoding as a string. If `include_confidence`
    is True, returns a tuple: (str encoding, float confidence).
    """
    # chardet expects raw bytes, so read in binary mode; the context
    # manager guarantees the handle is closed even if detection fails.
    with open(filename, 'rb') as f:
        detection = chardet.detect(f.read())
    encoding = detection.get('encoding')
    confidence = detection.get('confidence')
    if include_confidence:
        return (encoding, confidence)
    return encoding
Detect the encoding of a file. Returns only the predicted current encoding as a string. If `include_confidence` is True, Returns tuple containing: (str encoding, float confidence)
def get_request_filename(request): if 'Content-Disposition' in request.info(): disposition = request.info()['Content-Disposition'] pieces = re.split(r'\s*;\s*', disposition) for piece in pieces: if piece.startswith('filename='): filename = piece[len('filename='):]...
Figure out the filename for an HTTP download.
def _wait_for_new_tasks(self, timeout=0, batch_timeout=0): new_queue_found = False start_time = batch_exit = time.time() while True: if batch_exit > start_time: pubsub_sleep = batch_exit - time.time() else: pubsub_sleep = start_time + timeo...
Check activity channel and wait as necessary. This method is also used to slow down the main processing loop to reduce the effects of rapidly sending Redis commands. This method will exit for any of these conditions: 1. _did_work is True, suggests there could be more work pending ...
def start(self): logger.info('Starting client.') self.dispatcher_greenlets = [] for _, entry in self.config['baits'].items(): for b in clientbase.ClientBase.__subclasses__(): bait_name = b.__name__.lower() if bait_name in entry: bai...
Starts sending client bait to the configured Honeypot.
def query(self, object_class=None, json=None, **kwargs):
    """Query directory data from a Directory Sync Service instance.

    :param object_class: directory object class to query
    :param json: request payload
    :return: the HTTP response object
    """
    endpoint = "/directory-sync-service/v1/{}".format(object_class)
    return self._httpclient.request(
        method="POST",
        url=self.url,
        json=json,
        path=endpoint,
        **kwargs
    )
Query data stored in directory. Retrieves directory data by querying a Directory Sync Service cloud-based instance. The directory data is stored with the Directory Sync Service instance using an agent that is installed in the customer's network.This agent retrieves directory data ...
def visible_line_width(self, position = Point):
    """Return the visible width of the text in the line buffer up to
    *position*.

    Counts the quoted text length, adds 7 extra columns per tab, and one
    extra column per character in the range U+2013..U+FFFD.
    """
    prefix = self[:position]
    extra_char_width = sum(
        1 for c in prefix.line_buffer if 0x2013 <= ord(c) <= 0xFFFD)
    return (len(prefix.quoted_text())
            + prefix.line_buffer.count(u"\t") * 7
            + extra_char_width)
Return the visible width of the text in line buffer up to position.
def schemaNewValidCtxt(self):
    """Create an XML Schemas validation context based on this schema."""
    ret = libxml2mod.xmlSchemaNewValidCtxt(self._o)
    if ret is None:
        raise treeError('xmlSchemaNewValidCtxt() failed')
    ctxt = SchemaValidCtxt(_obj=ret)
    ctxt.schema = self
    return ctxt
Create an XML Schemas validation context based on the given schema.
def _apply_shadow_vars(avg_grads): ps_var_grads = [] for grad, var in avg_grads: assert var.name.startswith('tower'), var.name my_name = '/'.join(var.name.split('/')[1:]) my_name = get_op_tensor_name(my_name)[0] new_v = tf.get_variable(my_name, dtype=var.d...
Create shadow variables on PS, and replace variables in avg_grads by these shadow variables. Args: avg_grads: list of (grad, var) tuples
def get_extended_metadata(self, item_id):
    """Get extended metadata for a media item, such as related items.

    Args:
        item_id (str): The item for which metadata is required.

    Returns:
        ~collections.OrderedDict: The item's extended metadata or None.
    """
    result = self.soap_client.call('getExtendedMetadata', [('id', item_id)])
    return result.get('getExtendedMetadataResult', None)
Get extended metadata for a media item, such as related items. Args: item_id (str): The item for which metadata is required. Returns: ~collections.OrderedDict: The item's extended metadata or None. See also: The Sonos `getExtendedMetadata API <h...
def run_facter(self, key=None): args = [self.facter_path] args.append("--puppet") if self.external_dir is not None: args.append('--external-dir') args.append(self.external_dir) if self.uses_yaml: args.append("--yaml") if key is not None: ...
Run the facter executable with an optional specfic fact. Output is parsed to yaml if available and selected. Puppet facts are always selected. Returns a dictionary if no key is given, and the value if a key is passed.
def dispatch_hook(cls, s=None, *_args, **_kwds): if s is None: return config.conf.raw_layer fb = orb(s[0]) if fb & 0x80 != 0: return HPackIndexedHdr if fb & 0x40 != 0: return HPackLitHdrFldWithIncrIndexing if fb & 0x20 != 0: return ...
dispatch_hook returns the subclass of HPackHeaders that must be used to dissect the string.
def _fixpath(root, base): return os.path.abspath(os.path.normpath(os.path.join(root, base)))
Return absolute, normalized, joined paths
def _structure(self, source_code): def cutter(seq, block_size): for index in range(0, len(seq), block_size): lexem = seq[index:index+block_size] if len(lexem) == block_size: yield self.table_struct[seq[index:index+block_size]] return tuple(...
return structure in ACDP format.
def run(self):
    """Run the pycodestyle checker with stdout captured.

    pycodestyle writes warnings to stdout, so stdout is temporarily
    redirected to a StringIO while the check runs and always restored
    afterwards.
    """
    saved_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        pycodestyle.Checker.check_all(self)
    finally:
        sys.stdout = saved_stdout
Run pycodestyle checker and record warnings.
def rmdir(self, pathobj): stat = self.stat(pathobj) if not stat.is_dir: raise OSError(20, "Not a directory: '%s'" % str(pathobj)) url = str(pathobj) + '/' text, code = self.rest_del(url, session=pathobj.session, verify=pathobj.verify, cert=pathobj.cert) if code not in...
Removes a directory
def instances_get(opts, plugins, url_file_input, out): instances = OrderedDict() preferred_order = ['wordpress', 'joomla', 'drupal'] for cms_name in preferred_order: for plugin in plugins: plugin_name = plugin.__name__.lower() if cms_name == plugin_name: insta...
Creates and returns an ordered dictionary containing instances for all available scanning plugins, sort of ordered by popularity. @param opts: options as returned by self._options. @param plugins: plugins as returned by plugins_util.plugins_base_get. @param url_file_input: boolean value which indicates ...
def stop(self):
    """Stop the periodic runner."""
    self._cease.set()
    # Brief grace period so the worker loop can observe the cease event.
    time.sleep(0.1)
    self._isRunning = False
Stop the periodic runner
def to_dict(self):
    """Convert this Node to a dict representation for passing to the API."""
    fields = ("address", "port", "condition", "type", "id")
    return {name: getattr(self, name) for name in fields}
Convert this Node to a dict representation for passing to the API.
def read_wav(self, filename): wave_input = None try: wave_input = wave.open(filename, 'r') wave_frames = bytearray( wave_input.readframes(wave_input.getnframes())) self.sample_data = [x >> 4 for x in wave_frames] finally: if wave_in...
Read sample data for this sample from a WAV file. :param filename: the file from which to read
def _find_rtd_version(): vstr = 'latest' try: import ginga from bs4 import BeautifulSoup except ImportError: return vstr if not minversion(ginga, '2.6.0'): return vstr url = 'https://readthedocs.org/projects/ginga/downloads/' with urllib.request.urlopen(url) as r:...
Find closest RTD doc version.
def _check_match(self, name, version_string) -> bool: if not name or not version_string: return False try: version = Version(version_string) except InvalidVersion: logger.debug(f"Package {name}=={version_string} has an invalid version") return Fals...
Check if the package name and version matches against a blacklisted package version specifier. Parameters ========== name: str Package name version: str Package version Returns ======= bool: True if it matches, False ...
def cancel_offer(self, offer_id):
    """Cancel an offer.

    :param offer_id: the offer id
    :return: Response
    """
    return self._create_put_request(resource=OFFERS,
                                    billomat_id=offer_id,
                                    command=CANCEL)
Cancelles an offer :param offer_id: the offer id :return Response
def _schedule(self, action: Callable, seconds: int=0) -> int: self.aid += 1 if seconds > 0: nxt = time.perf_counter() + seconds if nxt < self.aqNextCheck: self.aqNextCheck = nxt logger.trace("{} scheduling action {} with id {} to run in {} " ...
Schedule an action to be executed after `seconds` seconds. :param action: a callable to be scheduled :param seconds: the time in seconds after which the action must be executed
def addFeatureSet(self, featureSet):
    """Add the specified featureSet to this dataset, indexing it by
    both its id and its local name."""
    set_id = featureSet.getId()
    self._featureSetIdMap[set_id] = featureSet
    self._featureSetIds.append(set_id)
    local_name = featureSet.getLocalId()
    self._featureSetNameMap[local_name] = featureSet
Adds the specified featureSet to this dataset.
def _set_route(self, ip_dest, next_hop, **kwargs): commands = self._build_commands(ip_dest, next_hop, **kwargs) delete = kwargs.get('delete', False) default = kwargs.get('default', False) if delete: commands = "no " + commands else: if default: ...
Configure a static route Args: ip_dest (string): The ip address of the destination in the form of A.B.C.D/E next_hop (string): The next hop interface or ip address **kwargs['next_hop_ip'] (string): The next hop address on destination interface...
def createRtiFromFileName(fileName): cls, rtiRegItem = detectRtiFromFileName(fileName) if cls is None: logger.warn("Unable to import plugin {}: {}" .format(rtiRegItem.fullName, rtiRegItem.exception)) rti = UnknownFileRti.createFromFileName(fileName) rti.setException(r...
Determines the type of RepoTreeItem to use given a file name and creates it. Uses a DirectoryRti for directories and an UnknownFileRti if the file extension doesn't match one of the registered RTI extensions.
def eigenvalues_rev(T, k, ncv=None, mu=None): r if mu is None: mu = stationary_distribution(T) if np.any(mu <= 0): raise ValueError('Cannot symmetrize transition matrix') smu = np.sqrt(mu) D = diags(smu, 0) Dinv = diags(1.0/smu, 0) S = (D.dot(T)).dot(Dinv) evals = scipy.s...
r"""Compute the eigenvalues of a reversible, sparse transition matrix. Parameters ---------- T : (M, M) scipy.sparse matrix Transition matrix k : int Number of eigenvalues to compute. ncv : int, optional The number of Lanczos vectors generated, `ncv` must be greater than k; ...
def torrent_from_url(self, url, cache=True, prefetch=False):
    """Create a Torrent object from a given URL.

    When caching is enabled and the URL was seen before, the cached
    Torrent is returned. Otherwise a new one is built (optionally
    prefetching its info page) and stored in the cache when requested.
    """
    caching = self._use_cache(cache)
    if caching and url in self._torrent_cache:
        return self._torrent_cache[url]
    torrent = Torrent(url, cache, prefetch)
    if cache:
        self._torrent_cache[url] = torrent
    return torrent
Create a Torrent object from a given URL. If the cache option is set, check to see if we already have a Torrent object representing it. If prefetch is set, automatically query the torrent's info page to fill in the torrent object. (If prefetch is false, then the torrent page will be que...
def validate_matrix(self, data): is_grid_search = ( data.get('grid_search') is not None or (data.get('grid_search') is None and data.get('random_search') is None and data.get('hyperband') is None and data.get('bo') is None) ) is_bo =...
Validates matrix data and creates the config objects
def outputDeflections(self): try: self.wOutFile if self.Verbose: print("Output filename provided.") except: try: self.wOutFile = self.configGet("string", "output", "DeflectionOut", optional=True) except: if self.Debug: print("No output filename provided:...
Outputs a grid of deflections if an output directory is defined in the configuration file If the filename given in the configuration file ends in ".npy", then a binary numpy grid will be exported. Otherwise, an ASCII grid will be exported.
def removeComments(element): global _num_bytes_saved_in_comments num = 0 if isinstance(element, xml.dom.minidom.Comment): _num_bytes_saved_in_comments += len(element.data) element.parentNode.removeChild(element) num += 1 else: for subelement in element.childNodes[:]: ...
Removes comments from the element and its children.
def calibration(self): self.calibration_cache_path() if self.job().is_dax(): self.add_var_opt('glob-calibration-data','') cache_filename=self.get_calibration() pat = re.compile(r'(file://.*)') f = open(cache_filename, 'r') lines = f.readlines() for line in lines: m = ...
Set the path to the calibration cache file for the given IFO. During S2 the Hanford 2km IFO had two calibration epochs, so if the start time is during S2, we use the correct cache file.
def _ensure_image_registry(self, image): image_with_registry = image.copy() if self.parent_registry: if image.registry and image.registry != self.parent_registry: error = ( "Registry specified in dockerfile image doesn't match configured one. " ...
If plugin configured with a parent registry, ensure the image uses it
def get_next_revision(self, session_id, revision, delta):
    """Determine the next revision for a session.

    When the client is already up to date, blocks until the next
    revision becomes available.

    :param int session_id: session identifier
    :param int revision: client revision number
    :param int delta: client delta
    """
    session = self.sessions[session_id]
    session.state = State.connected
    if delta == revision:
        # Client is current: remember its revision and wait for news.
        session.revision = max(session.revision, revision)
        self.next_revision_available.wait()
    return self.revision
Determine the next revision number for a given session id, revision and delta. In case the client is up-to-date, this method will block until the next revision is available. :param int session_id: Session identifier :param int revision: Client revision number :param int...
def _compute_derivatives(self): derivatives = [] for i, (timestamp, value) in enumerate(self.time_series_items): if i > 0: pre_item = self.time_series_items[i - 1] pre_timestamp = pre_item[0] pre_value = pre_item[1] td = timesta...
Compute derivatives of the time series.
def get_current_word(self, completion=False):
    """Return the word at the cursor position, or None."""
    result = self.get_current_word_and_position(completion)
    return None if result is None else result[0]
Return current word, i.e. word at cursor position
def iloc(cls, dataset, index): rows, cols = index scalar = False if isinstance(cols, slice): cols = [d.name for d in dataset.dimensions()][cols] elif np.isscalar(cols): scalar = np.isscalar(rows) cols = [dataset.get_dimension(cols).name] else: ...
Dask does not support iloc, therefore iloc will execute the call graph and lose the laziness of the operation.
def validate(self, ticket=None, raise_=False):
    """Validate this receipt via ReceiptQuerySet's method of the same name.

    :param AuthTicket ticket: ticket to use; if None, one is loaded or
        created automatically.
    :param bool raise_: when True, raise ValidationError on failure
        instead of only returning the error list.
    """
    errors = Receipt.objects.filter(pk=self.pk).validate(ticket)
    self.refresh_from_db()
    if raise_ and errors:
        raise exceptions.ValidationError(errors[0])
    return errors
Validates this receipt. This is a shortcut to :class:`~.ReceiptQuerySet`'s method of the same name. Calling this validates only this instance. :param AuthTicket ticket: Use this ticket. If None, one will be loaded or created automatically. :param bool raise_: If True, an ex...
def filtered_attrs(module, *, modules=False, private=False, dunder=False, common=False): attr_names = set() for name, value in module.__dict__.items(): if not common and name in STANDARD_MODULE_ATTRS: continue if name.startswith('_'): if name.endswith('...
Return a collection of attributes on 'module'. If 'modules' is false then module instances are excluded. If 'private' is false then attributes starting with, but not ending in, '_' will be excluded. With 'dunder' set to false then attributes starting and ending with '_' are left out. The 'common' argum...
def _bse_cli_get_versions(args): name = args.basis.lower() metadata = api.get_metadata(args.data_dir) if not name in metadata: raise KeyError( "Basis set {} does not exist. For a complete list of basis sets, use the 'list-basis-sets' command".format( name)) version_da...
Handles the get-versions subcommand
def timestamp(self, timestamp):
    """Return a deep copy of this record with the custom timestamp set."""
    duplicate = copy.deepcopy(self)
    duplicate._timestamp = timestamp
    return duplicate
Allows for custom timestamps to be saved with the record.
def validate_dispatcher(args): nni_config = Config(get_config_filename(args)).get_config('experimentConfig') if nni_config.get('tuner') and nni_config['tuner'].get('builtinTunerName'): dispatcher_name = nni_config['tuner']['builtinTunerName'] elif nni_config.get('advisor') and nni_config['advisor']....
validate if the dispatcher of the experiment supports importing data
def clear_all_events(self):
    """Clear all event queues and their cached events.

    The lock is held as a context manager so it is always released,
    even if clearing raises (the bare acquire/release pair could leak
    the lock on exception).
    """
    with self.lock:
        self.event_dict.clear()
Clear all event queues and their cached events.
def get_tag_cloud(context, steps=6, min_count=None, template='zinnia/tags/tag_cloud.html'): tags = Tag.objects.usage_for_queryset( Entry.published.all(), counts=True, min_count=min_count) return {'template': template, 'tags': calculate_cloud(tags, steps), ...
Return a cloud of published tags.
def should_see_id_in_seconds(self, element_id, timeout):
    """Assert an element with the given ``id`` is visible within
    *timeout* seconds."""
    def check_element():
        selector = ElementSelector(
            world.browser,
            'id("%s")' % element_id,
            filter_displayed=True,
        )
        assert selector, "Expected element with given id."
    wait_for(check_element)(timeout=int(timeout))
Assert an element with the given ``id`` is visible within n seconds.
def show_guiref(self):
    """Show the qtconsole GUI reference in the help pane."""
    from qtconsole.usage import gui_reference
    help_plugin = self.main.help
    help_plugin.show_rich_text(gui_reference, collapse=True)
Show qtconsole help
def loadNetworkbyID(self, id, callback=None, errback=None):
    """Load an existing Network by ID into a high-level Network object.

    :param int id: id of an existing Network
    """
    from ns1.ipam import Network
    network = Network(self.config, id=id)
    return network.load(callback=callback, errback=errback)
Load an existing Network by ID into a high level Network object :param int id: id of an existing Network
def retry(self, retries, task_f, check_f=bool, wait_f=None): for attempt in range(retries): ret = task_f() if check_f(ret): return ret if attempt < retries - 1 and wait_f is not None: wait_f(attempt) raise RetryException("Giving up afte...
Try a function up to n times. Raise an exception if it does not pass in time :param retries int: The number of times to retry :param task_f func: The function to be run and observed :param func()bool check_f: a function to check if task_f is complete :param func()bool wait_f: a ...
def map_or(self, callback: Callable[[T], U], default: A) -> Union[U, A]:
    """Apply ``callback`` to the contained value, or return ``default``.

    Args:
        callback: The callback to apply to the contained value.
        default: The default value.

    Returns:
        The ``callback`` result if the contained value is ``Some``,
        otherwise ``default``.
    """
    if self._is_some:
        return callback(self._val)
    return default
Applies the ``callback`` to the contained value or returns ``default``. Args: callback: The callback to apply to the contained value. default: The default value. Returns: The ``callback`` result if the contained value is ``Some``, otherwise ``default``. ...
def display(self):
    """Print the results of this profiling: raw timings first, then
    per-command statistics."""
    self.pretty(self._timings, 'Raw Redis Commands')
    print()
    for name, timings in self._commands.items():
        self.pretty(timings, 'Qless "%s" Command' % name)
        print()
Print the results of this profiling
def license_cleanup(text): if not text: return None text = text.rsplit(':', 1)[-1] replacements = [ 'licenses', 'license', 'licences', 'licence', 'software', ',', ] for replacement in replacements: text = text.replace(replacement, '') ...
Tidy up a license string e.g. "::OSI:: mit software license" -> "MIT"
def _deserialize(self, value, attr, data): token_builder = URLSafeTimedSerializer( current_app.config['SECRET_KEY'], salt=data['verb'], ) result = token_builder.loads(value, max_age=current_app.config[ 'OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME']) result[...
Deserialize a resumption token value back into its payload dict.
def get_label(self, indices=None): if indices is None: indices = list(range(0, self.get_sample_size())) elif isinstance(indices, collections.Iterable): indices = sorted(list(set(indices))) else: indices = [indices] if len(indices) == 0: ret...
Returns the label pair indices requested by the user @ In, indices, a list of non-negative integers specifying the row indices to return @ Out, a list of integer 2-tuples specifying the minimum and maximum index of the specified rows.
def get_pyserial_version(self): pyserial_version = pkg_resources.require("pyserial")[0].version version = 3.0 match = self.re_float.search(pyserial_version) if match: try: version = float(match.group(0)) except ValueError: version =...
! Retrieve pyserial module version @return Returns float with pyserial module number
def copy_layer(source, target): out_feature = QgsFeature() target.startEditing() request = QgsFeatureRequest() aggregation_layer = False if source.keywords.get('layer_purpose') == 'aggregation': try: use_selected_only = source.use_selected_features_only except AttributeEr...
Copy a vector layer to another one. :param source: The vector layer to copy. :type source: QgsVectorLayer :param target: The destination. :type source: QgsVectorLayer
def size_filter(labeled_grid, min_size): out_grid = np.zeros(labeled_grid.shape, dtype=int) slices = find_objects(labeled_grid) j = 1 for i, s in enumerate(slices): box = labeled_grid[s] size = np.count_nonzero(box.ravel() == (i + 1)) if size >= min_si...
Remove labeled objects that do not meet size threshold criteria. Args: labeled_grid: 2D output from label method. min_size: minimum size of object in pixels. Returns: labeled grid with smaller objects removed.
def scan_used_functions(example_file, gallery_conf): example_code_obj = identify_names(example_file) if example_code_obj: codeobj_fname = example_file[:-3] + '_codeobj.pickle.new' with open(codeobj_fname, 'wb') as fid: pickle.dump(example_code_obj, fid, pickle.HIGHEST_PROTOCOL) ...
save variables so we can later add links to the documentation
def typewrite(message, interval=0.0, pause=None, _pause=True):
    """Press and release each character (or key name) in ``message`` in order.

    ``message`` may be a string of characters or a list of key-name strings.
    Keys are pressed sequentially and never held down, so this cannot be used
    for modifier combinations.

    :param message: characters or key names to type
    :param interval: seconds to sleep between key presses
    :param pause: explicit pause passed to ``_autoPause``
    :param _pause: whether the automatic pause applies
    """
    delay = float(interval)
    _failSafeCheck()
    for key in message:
        # Multi-character entries are key names; normalize to lowercase.
        if len(key) > 1:
            key = key.lower()
        press(key, _pause=False)
        time.sleep(delay)
        _failSafeCheck()
    _autoPause(pause, _pause)
Performs a keyboard key press down, followed by a release, for each of the characters in message. The message argument can also be list of strings, in which case any valid keyboard name can be used. Since this performs a sequence of keyboard presses and does not hold down keys, it cannot be used t...
def _get_app_module(self):
    """Return an injector module binding this app and its config as singletons.

    :return: configuration callback
    :rtype: Callable
    """
    app = self

    def configure(binder):
        binder.bind(ServiceApplication, to=app, scope=singleton)
        binder.bind(Config, to=app.config, scope=singleton)

    return configure
Returns a module which binds the current app and configuration. :return: configuration callback :rtype: Callable
def get(self, key):
    """Return the list of values associated with ``key``.

    ``None`` if this map does not contain the key.

    **Warning: This method uses hashCode and equals of the binary form of
    the key, not the actual implementations defined in the key's class.**
    """
    check_not_none(key, "key can't be None")
    serialized_key = self._to_data(key)
    return self._encode_invoke_on_key(
        multi_map_get_codec,
        serialized_key,
        key=serialized_key,
        thread_id=thread_id(),
    )
Returns the list of values associated with the key. ``None`` if this map does not contain this key. **Warning: This method uses hashCode and equals of the binary form of the key, not the actual implementations of hashCode and equals defined in the key's class.** **Warning-2: Th...
def SampleSum(dists, n):
    """Draws a sample of sums from a list of distributions.

    dists: sequence of Pmf or Cdf objects
    n: sample size

    returns: new Pmf of sums
    """
    sums = (RandomSum(dists) for _ in xrange(n))
    return MakePmfFromList(sums)
Draws a sample of sums from a list of distributions. dists: sequence of Pmf or Cdf objects n: sample size returns: new Pmf of sums
def source(self, value):
    """Set the source of the message.

    :type value: tuple
    :param value: (ip, port)
    :raise AttributeError: if value is not an (ip, port) tuple
    """
    is_pair = isinstance(value, tuple) and len(value) == 2
    if not is_pair:
        raise AttributeError
    self._source = value
Set the source of the message. :type value: tuple :param value: (ip, port) :raise AttributeError: if value is not an (ip, port) tuple.