code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def queryEx(self, viewcls, *args, **kwargs): kwargs['itercls'] = viewcls o = super(AsyncBucket, self).query(*args, **kwargs) if not self.connected: self.connect().addCallback(lambda x: o.start()) else: o.start() return o
Query a view, with the ``viewcls`` instance receiving events of the query as they arrive. :param type viewcls: A class (derived from :class:`AsyncViewBase`) to instantiate Other arguments are passed to the standard `query` method. This functions exactly like the :meth:`~couc...
def import_name(mod_name): try: mod_obj_old = sys.modules[mod_name] except KeyError: mod_obj_old = None if mod_obj_old is not None: return mod_obj_old __import__(mod_name) mod_obj = sys.modules[mod_name] return mod_obj
Import a module by module name. @param mod_name: module name.
def login(self, username, *, token=None): self._username = username self._oauth(username, token=token) return self.is_authenticated
Log in to Google Music. Parameters: username (str, Optional): Your Google Music username. Used for keeping stored OAuth tokens for multiple accounts separate. device_id (str, Optional): A mobile device ID or music manager uploader ID. Default: MAC address is used. token (dict, Optional): An OAuth to...
def move(self, target, pos=None): if self.outline != target.outline: raise IntegrityError('Elements must be from the same outline!') tree_manipulation.send( sender=self.__class__, instance=self, action='move', target_node_type=None, ...
An override of the treebeard api in order to send a signal in advance.
def createmergerequest(self, project_id, sourcebranch, targetbranch, title, target_project_id=None, assignee_id=None): data = { 'source_branch': sourcebranch, 'target_branch': targetbranch, 'title': title, 'assignee_id': assignee_id, ...
Create a new merge request. :param project_id: ID of the project originating the merge request :param sourcebranch: name of the branch to merge from :param targetbranch: name of the branch to merge to :param title: Title of the merge request :param assignee_id: Assignee user ID ...
def fopenat_rw(base_fd, path): return os.fdopen(openat(base_fd, path, os.O_RDWR), 'rb+')
Does openat read-write, then does fdopen to get a file object
def db_create(name, character_set=None, collate=None, **connection_args): if db_exists(name, **connection_args): log.info('DB \'%s\' already exists', name) return False dbc = _connect(**connection_args) if dbc is None: return False cur = dbc.cursor() s_name = quote_identifier...
Adds a databases to the MySQL server. name The name of the database to manage character_set The character set, if left empty the MySQL default will be used collate The collation, if left empty the MySQL default will be used CLI Example: .. code-block:: bash salt...
def from_tree(cls, repo, *treeish, **kwargs): if len(treeish) == 0 or len(treeish) > 3: raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish)) arg_list = [] if len(treeish) > 1: arg_list.append("--reset") arg_list.append("--aggressiv...
Merge the given treeish revisions into a new index which is returned. The original index will remain unaltered :param repo: The repository treeish are located in. :param treeish: One, two or three Tree Objects, Commits or 40 byte hexshas. The result changes ...
def create(self, Name, Subject, HtmlBody=None, TextBody=None, Alias=None): assert TextBody or HtmlBody, "Provide either email TextBody or HtmlBody or both" data = {"Name": Name, "Subject": Subject, "HtmlBody": HtmlBody, "TextBody": TextBody, "Alias": Alias} return self._init_instance(self.call("...
Creates a template. :param Name: Name of template :param Subject: The content to use for the Subject when this template is used to send email. :param HtmlBody: The content to use for the HtmlBody when this template is used to send email. :param TextBody: The content to use for the HtmlB...
def _process_change(self, server_description): td_old = self._description if self._publish_server: old_server_description = td_old._server_descriptions[ server_description.address] self._events.put(( self._listeners.publish_server_description_chang...
Process a new ServerDescription on an opened topology. Hold the lock when calling this.
def stop(self): yield from self._stop_ubridge() if self._nvram_watcher: self._nvram_watcher.close() self._nvram_watcher = None if self._telnet_server: self._telnet_server.close() self._telnet_server = None if self.is_running(): ...
Stops the IOU process.
def mul(name, num, minimum=0, maximum=0, ref=None): return calc( name=name, num=num, oper='mul', minimum=minimum, maximum=maximum, ref=ref )
Multiplies together the ``num`` most recent values. Requires a list. USAGE: .. code-block:: yaml foo: calc.mul: - name: myregentry - num: 5
def get_activity_comments(self, activity_id, markdown=False, limit=None): result_fetcher = functools.partial(self.protocol.get, '/activities/{id}/comments', id=activity_id, markdown=int(markdown)) return BatchedResultsIterator(entity=model.ActivityComment, ...
Gets the comments for an activity. http://strava.github.io/api/v3/comments/#list :param activity_id: The activity for which to fetch comments. :type activity_id: int :param markdown: Whether to include markdown in comments (default is false/filterout). :type markdown: bool ...
def destroy_list(self, list_id): return List(tweepy_list_to_json(self._client.destroy_list(list_id=list_id)))
Destroy a list :param list_id: list ID number :return: The destroyed list object :rtype: :class:`~responsebot.models.List`
def set_stripe_api_version(version=None, validate=True): version = version or get_stripe_api_version() if validate: valid = validate_stripe_api_version(version) if not valid: raise ValueError("Bad stripe API version: {}".format(version)) stripe.api_version = version
Set the desired API version to use for Stripe requests. :param version: The version to set for the Stripe API. :type version: ``str`` :param validate: If True validate the value for the specified version). :type validate: ``bool``
def which(path, jail=None, chroot=None, root=None, origin=False, quiet=False): opts = '' if quiet: opts += 'q' if origin: opts += 'o' cmd = _pkg(jail, chroot, root) cmd.append('which') if opts: cmd.append('-' + opts) cmd.append(path) return __salt__['cmd.run']( ...
Displays which package installed a specific file CLI Example: .. code-block:: bash salt '*' pkg.which <file name> jail Perform the check in the specified jail CLI Example: .. code-block:: bash salt '*' pkg.which <file name> jail=<jail name or id> chroo...
def set_shutter_level(self, level=0.0): data = {"channelIndex": 1, "deviceId": self.id, "shutterLevel": level} return self._restCall("device/control/setShutterLevel", body=json.dumps(data))
sets the shutter level Args: level(float): the new level of the shutter. 0.0 = open, 1.0 = closed Returns: the result of the _restCall
def fq_merge(R1, R2): c = itertools.cycle([1, 2, 3, 4]) for r1, r2 in zip(R1, R2): n = next(c) if n == 1: pair = [[], []] pair[0].append(r1.strip()) pair[1].append(r2.strip()) if n == 4: yield pair
merge separate fastq files
def cut_from_chain(sciobj_model): if _is_head(sciobj_model): old_pid = sciobj_model.obsoletes.did _cut_head_from_chain(sciobj_model) elif _is_tail(sciobj_model): old_pid = sciobj_model.obsoleted_by.did _cut_tail_from_chain(sciobj_model) else: old_pid = sciobj_model.ob...
Remove an object from a revision chain. The object can be at any location in the chain, including the head or tail. Preconditions: - The object with the pid is verified to exist and to be a member of an revision chain. E.g., with: d1_gmn.app.views.asserts.is_existing_object(pid) d1_gmn.app.vi...
def getTriples(pointing): sql="SELECT id FROM triples t join triple_members m ON t.id=m.triple" sql+=" join bucket.exposure e on e.expnum=m.expnum " sql+=" WHERE pointing=%s group by id order by e.expnum " cfeps.execute(sql, ( pointing, ) ) return(cfeps.fetchall())
Get all triples of a specified pointing ID. Defaults is to return a complete list triples.
def _from_python_type(self, obj, field, pytype): json_schema = { 'title': field.attribute or field.name, } for key, val in TYPE_MAP[pytype].items(): json_schema[key] = val if field.dump_only: json_schema['readonly'] = True if field.default is n...
Get schema definition from python type.
def load_model(model_name, epoch_num, data_shapes, label_shapes, label_names, gpus=''): sym, arg_params, aux_params = mx.model.load_checkpoint(model_name, epoch_num) mod = create_module(sym, data_shapes, label_shapes, label_names, gpus) mod.set_params( arg_params=arg_params, aux_params=aux_p...
Returns a module loaded with the provided model. Parameters ---------- model_name: str Prefix of the MXNet model name as stored on the local directory. epoch_num : int Epoch number of model we would like to load. input_shape: tuple The shape of the input data in the form o...
def prttex_summary_cnts_all(self, prt=sys.stdout): cnts = self.get_cnts_levels_depths_recs(set(self.obo.values())) self._prttex_summary_cnts(prt, cnts)
Print LaTeX format summary of level and depth counts for all active GO Terms.
def createUsageReport(self, reportname, queries, metadata, since="LAST_DAY", fromValue=None, toValue=None, aggregationInterval=None ...
Creates a new usage report. A usage report is created by submitting a JSON representation of the usage report to this operation. Inputs: reportname - the unique name of the report since - the time duration of the report. The supported values are: LAST_DAY, LAST_WEEK,...
def wsgi(self, environ, start_response): request = Request(environ) ctx = Context(request) try: try: response = self(request, ctx) ctx._run_callbacks('finalize', (request, response)) response = response.conditional_to(request) ...
Implements the mapper's WSGI interface.
def save_boolean_setting(self, key, check_box): set_setting(key, check_box.isChecked(), qsettings=self.settings)
Save boolean setting according to check_box state. :param key: Key to retrieve setting value. :type key: str :param check_box: Check box to show and set the setting. :type check_box: PyQt5.QtWidgets.QCheckBox.QCheckBox
def do_create_tool_item(self): proxy = SpinToolItem(*self._args_for_toolitem) self.connect_proxy(proxy) return proxy
This is called by the UIManager when it is time to instantiate the proxy
def set_locs(self, locs): 'Sets the locations of the ticks' _check_implicitly_registered() self.locs = locs (vmin, vmax) = vi = tuple(self.axis.get_view_interval()) if vi != self.plot_obj.view_interval: self.plot_obj.date_axis_info = None self.plot_obj.view_in...
Sets the locations of the ticks
def location(args): fastafile = args.fastafile pwmfile = args.pwmfile lwidth = args.width if not lwidth: f = Fasta(fastafile) lwidth = len(f.items()[0][1]) f = None jobs = [] motifs = pwmfile_to_motifs(pwmfile) ids = [motif.id for motif in motifs] if args.ids: ...
Creates histrogram of motif location. Parameters ---------- args : argparse object Command line arguments.
def getFileObjects(self): files = {'project-file': self, 'mapping-table-file': self.mapTableFile, 'channel-input-file': self.channelInputFile, 'precipitation-file': self.precipFile, 'storm-pipe-network-file': self.stormPipeNetworkFile, ...
Retrieve a dictionary of file objects. This is a utility method that can be used to programmatically access the GsshaPy file objects. Use this method in conjunction with the getFileKeys method to access only files that have been read into the database. Returns: dict: Dictionary wit...
def convert_to_match_query(ir_blocks): output_block = ir_blocks[-1] if not isinstance(output_block, ConstructResult): raise AssertionError(u'Expected last IR block to be ConstructResult, found: ' u'{} {}'.format(output_block, ir_blocks)) ir_except_output = ir_blocks[:-1]...
Convert the list of IR blocks into a MatchQuery object, for easier manipulation.
def AddPorts(self,ports): for port in ports: if 'port_to' in port: self.ports.append(Port(self,port['protocol'],port['port'],port['port_to'])) else: self.ports.append(Port(self,port['protocol'],port['port'])) return(self.Update())
Create one or more port access policies. Include a list of dicts with protocol, port, and port_to (optional - for range) keys. >>> clc.v2.Server("WA1BTDIX01").PublicIPs().public_ips[0] .AddPorts([{'protocol': 'TCP', 'port': '80' }, {'protocol': 'UDP', 'port': '10000', 'port_to': '15000'}]).Wait...
def remove_all_cts_records_by(file_name, crypto_idfp): db = XonoticDB.load_path(file_name) db.remove_all_cts_records_by(crypto_idfp) db.save(file_name)
Remove all cts records set by player with CRYPTO_IDFP
def CreateNetworkConnectivityTauDEMTree(network_connectivity_tree_file, out_csv_file): stream_id_array = [] next_down_id_array = [] with open_csv(network_connectivity_tree_file, "r") as csvfile: for row in csvfile: split_row = row.split() ...
Creates Network Connectivity input CSV file for RAPID based on the TauDEM network connectivity tree file
def get_region_for_chip(x, y, level=3): shift = 6 - 2*level bit = ((x >> shift) & 3) + 4*((y >> shift) & 3) mask = 0xffff ^ ((4 << shift) - 1) nx = x & mask ny = y & mask region = (nx << 24) | (ny << 16) | (level << 16) | (1 << bit) return region
Get the region word for the given chip co-ordinates. Parameters ---------- x : int x co-ordinate y : int y co-ordinate level : int Level of region to build. 0 is the most coarse and 3 is the finest. When 3 is used the specified region will ONLY select the given chip,...
def _run(self): for node in self.node.relatives: launch_node_task(node) for node in self.node.relatives: self.wait_and_join(node.task) if self.node.parent: while not self.node.parent.task.siblings_permission: time.sleep(self._polling_time) ...
Run the task respecting dependencies
def print_item_callback(item): print('&listen [{}, {}={}]'.format( item.get('cmd', ''), item.get('id', ''), item.get('data', '')))
Print an item callback, used by &listen.
def crypto_box_keypair(): pk = ffi.new("unsigned char[]", crypto_box_PUBLICKEYBYTES) sk = ffi.new("unsigned char[]", crypto_box_SECRETKEYBYTES) rc = lib.crypto_box_keypair(pk, sk) ensure(rc == 0, 'Unexpected library error', raising=exc.RuntimeError) return ( ffi.buffer(...
Returns a randomly generated public and secret key. :rtype: (bytes(public_key), bytes(secret_key))
def _get_property_values_with_defaults(self, classname, property_values): final_values = self.get_default_property_values(classname) final_values.update(property_values) return final_values
Return the property values for the class, with default values applied where needed.
def _sanity_check_registered_locations_parent_locations(query_metadata_table): for location, location_info in query_metadata_table.registered_locations: if (location != query_metadata_table.root_location and not query_metadata_table.root_location.is_revisited_at(location)): if lo...
Assert that all registered locations' parent locations are also registered.
def log(msg, level=0): red = '\033[91m' endc = '\033[0m' cfg = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'stdout': { 'format': '[%(levelname)s]: %(asctime)s - %(message)s', 'datefmt': '%x %X' }, ...
Logs a message to the console, with optional level paramater Args: - msg (str): message to send to console - level (int): log level; 0 for info, 1 for error (default = 0)
def _cutout(x, n_holes:uniform_int=1, length:uniform_int=40): "Cut out `n_holes` number of square holes of size `length` in image at random locations." h,w = x.shape[1:] for n in range(n_holes): h_y = np.random.randint(0, h) h_x = np.random.randint(0, w) y1 = int(np.clip(h_y - length...
Cut out `n_holes` number of square holes of size `length` in image at random locations.
def describe(self): return OrderedDict([ (name, field.describe()) for name, field in self.fields.items() ])
Describe all serialized fields. It returns dictionary of all fields description defined for this serializer using their own ``describe()`` methods with respect to order in which they are defined as class attributes. Returns: OrderedDict: serializer description
def endpoint_create(service, publicurl=None, internalurl=None, adminurl=None, region=None, profile=None, url=None, interface=None, **connection_args): kstone = auth(profile, **connection_args) keystone_service = service_get(name=service, profile=profile, **...
Create an endpoint for an Openstack service CLI Examples: .. code-block:: bash salt 'v2' keystone.endpoint_create nova 'http://public/url' 'http://internal/url' 'http://adminurl/url' region salt 'v3' keystone.endpoint_create nova url='http://public/url' interface='public' region='RegionOne'
def ScanForVolumeSystem(self, source_path_spec): if source_path_spec.type_indicator == definitions.TYPE_INDICATOR_VSHADOW: return None if source_path_spec.IsVolumeSystemRoot(): return source_path_spec if source_path_spec.type_indicator == ( definitions.TYPE_INDICATOR_APFS_CONTAINER): ...
Scans the path specification for a supported volume system format. Args: source_path_spec (PathSpec): source path specification. Returns: PathSpec: volume system path specification or None if no supported volume system type was found. Raises: BackEndError: if the source cannot...
def AddMethod(self, interface, name, in_sig, out_sig, code): if not interface: interface = self.interface n_args = len(dbus.Signature(in_sig)) method = lambda self, *args, **kwargs: DBusMockObject.mock_method( self, interface, name, in_sig, *args, **kwargs) dbus_m...
Add a method to this object interface: D-Bus interface to add this to. For convenience you can specify '' here to add the method to the object's main interface (as specified on construction). name: Name of the method in_sig: Signature of input arguments; fo...
def decrypt_seal(self, data: bytes) -> bytes: curve25519_public_key = libnacl.crypto_sign_ed25519_pk_to_curve25519(self.vk) curve25519_secret_key = libnacl.crypto_sign_ed25519_sk_to_curve25519(self.sk) return libnacl.crypto_box_seal_open(data, curve25519_public_key, curve25519_secret_key)
Decrypt bytes data with a curve25519 version of the ed25519 key pair :param data: Encrypted data :return:
def get_context_arguments(self): cargs = {} for context in self.__context_stack: cargs.update(context.context_arguments) return cargs
Return a dictionary containing the current context arguments.
def _get_toc_reference(app, node, toc, docname): if isinstance(node, nodes.section) and isinstance(node.parent, nodes.document): ref_id = docname toc_reference = _find_toc_node(toc, ref_id, nodes.section) elif isinstance(node, nodes.section): ref_id = node.attributes["ids"][0] to...
Logic that understands maps a specific node to it's part of the toctree. It takes a specific incoming ``node``, and returns the actual TOC Tree node that is said reference.
def shutdown(self): inputQueue = self.inputQueue self.inputQueue = None for i in range(self.numWorkers): inputQueue.put(None) for thread in self.workerThreads: thread.join() BatchSystemSupport.workerCleanup(self.workerCleanupInfo)
Cleanly terminate worker threads. Add sentinels to inputQueue equal to maxThreads. Join all worker threads.
def _calf(self, spec): self.prepare(spec) self.compile(spec) self.assemble(spec) self.link(spec) self.finalize(spec)
The main call, assuming the base spec is prepared. Also, no advices will be triggered.
def reorient_wf(name='ReorientWorkflow'): workflow = pe.Workflow(name=name) inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode') outputnode = pe.Node(niu.IdentityInterface( fields=['out_file']), name='outputnode') deoblique = pe.Node(afni.Refit...
A workflow to reorient images to 'RPI' orientation
def convert(self): if self.downloaded is False: raise serror("Track not downloaded, can't convert file..") filetype = magic.from_file(self.filepath, mime=True) if filetype == "audio/mpeg": print("File is already in mp3 format. Skipping convert.") return False ...
Convert file in mp3 format.
def readFromFile(self, filename): s = dict(np.load(filename)) try: self.coeffs = s['coeffs'][()] except KeyError: self.coeffs = s try: self.opts = s['opts'][()] except KeyError: pass return self.coeffs
read the distortion coeffs from file
def dollars_to_math(source): r s = "\n".join(source) if s.find("$") == -1: return global _data _data = {} def repl(matchobj): global _data s = matchobj.group(0) t = "___XXX_REPL_%d___" % len(_data) _data[t] = s return t s = re.sub(r"({[^{}$]*\$...
r""" Replace dollar signs with backticks. More precisely, do a regular expression search. Replace a plain dollar sign ($) by a backtick (`). Replace an escaped dollar sign (\$) by a dollar sign ($). Don't change a dollar sign preceded or followed by a backtick (`$ or $`), because of strings like...
def _merge_map(key, values, partial): proto = kv_pb.KeyValues() proto.set_key(key) proto.value_list().extend(values) yield proto.Encode()
A map function used in merge phase. Stores (key, values) into KeyValues proto and yields its serialization. Args: key: values key. values: values themselves. partial: True if more values for this key will follow. False otherwise. Yields: The proto.
def get_distributed_seismicity_source_nodes(source): source_nodes = [] source_nodes.append( Node("magScaleRel", text=source.magnitude_scaling_relationship.__class__.__name__)) source_nodes.append( Node("ruptAspectRatio", text=source.rupture_aspect_ratio)) source_nodes.append...
Returns list of nodes of attributes common to all distributed seismicity source classes :param source: Seismic source as instance of :class: `openquake.hazardlib.source.area.AreaSource` or :class: `openquake.hazardlib.source.point.PointSource` :returns: List of instances of ...
def classname(self): cls = javabridge.call(self.jobject, "getClass", "()Ljava/lang/Class;") return javabridge.call(cls, "getName", "()Ljava/lang/String;")
Returns the Java classname in dot-notation. :return: the Java classname :rtype: str
def find_module(self, name): defmodule = lib.EnvFindDefmodule(self._env, name.encode()) if defmodule == ffi.NULL: raise LookupError("Module '%s' not found" % name) return Module(self._env, defmodule)
Find the Module by its name.
def StrikeDip(n, e, u): r2d = 180 / np.pi if u < 0: n = -n e = -e u = -u strike = np.arctan2(e, n) * r2d strike = strike - 90 while strike >= 360: strike = strike - 360 while strike < 0: strike = strike + 360 x = np.sqrt(np.power(n, 2) + np.power(e, 2)...
Finds strike and dip of plane given normal vector having components n, e, and u. Adapted from MATLAB script `bb.m <http://www.ceri.memphis.edu/people/olboyd/Software/Software.html>`_ written by Andy Michael and Oliver Boyd.
def synergy_to_datetime(time_qualifier, timeperiod): if time_qualifier == QUALIFIER_HOURLY: date_format = SYNERGY_HOURLY_PATTERN elif time_qualifier == QUALIFIER_DAILY: date_format = SYNERGY_DAILY_PATTERN elif time_qualifier == QUALIFIER_MONTHLY: date_format = SYNERGY_MONTHLY_PATTERN...
method receives timeperiod in Synergy format YYYYMMDDHH and convert it to UTC _naive_ datetime
def verify_login(user, password=None, **connection_args): connection_args['connection_user'] = user connection_args['connection_pass'] = password dbc = _connect(**connection_args) if dbc is None: if 'mysql.error' in __context__: del __context__['mysql.error'] return False ...
Attempt to login using the provided credentials. If successful, return true. Otherwise, return False. CLI Example: .. code-block:: bash salt '*' mysql.verify_login root password
def state_counts(gamma, T, out=None): return np.sum(gamma[0:T], axis=0, out=out)
Sum the probabilities of being in state i to time t Parameters ---------- gamma : ndarray((T,N), dtype = float), optional, default = None gamma[t,i] is the probabilty at time t to be in state i ! T : int number of time steps Returns ------- count : numpy.array shape (N) ...
def moments(self): moment1 = statstools.calc_mean_time(self.delays, self.coefs) moment2 = statstools.calc_mean_time_deviation( self.delays, self.coefs, moment1) return numpy.array([moment1, moment2])
The first two time delay weighted statistical moments of the MA coefficients.
def is_tagged(required_tags, has_tags): if not required_tags and not has_tags: return True elif not required_tags: return False found_tags = [] for tag in required_tags: if tag in has_tags: found_tags.append(tag) return len(found_tags) == len(required_tags)
Checks if tags match
def dist(self, other): dx = self.x - other.x dy = self.y - other.y return math.sqrt(dx**2 + dy**2)
Distance to some other point.
def _check_not_empty(string): string = string.strip() if len(string) == 0: message = 'The string should not be empty' raise pp.ParseException(message)
Checks that the string is not empty. If it is empty an exception is raised, stopping the validation. This is used for compulsory alphanumeric fields. :param string: the field value
def activate(lang=None): if lang is None: lang = locale.getlocale()[0] tr = gettext.translation("argparse", os.path.join(locpath, "locale"), [lang], fallback=True) argparse._ = tr.gettext argparse.ngettext = tr.ngettext
Activate a translation for lang. If lang is None, then the language of locale.getdefaultlocale() is used. If the translation file does not exist, the original messages will be used.
def getOutputName(self,name): val = self.outputNames[name] if self.inmemory: val = self.virtualOutputs[val] return val
Return the name of the file or PyFITS object associated with that name, depending on the setting of self.inmemory.
def delete_policy(self, pol_id): if pol_id not in self.policies: LOG.error("Invalid policy %s", pol_id) return del self.policies[pol_id] self.policy_cnt -= 1
Deletes the policy from the local dictionary.
def _get_system(model_folder): model_description_file = os.path.join(model_folder, "info.yml") if not os.path.isfile(model_description_file): logging.error("You are probably not in the folder of a model, because " "%s is not a file. (-m argument)", model_descr...
Return the preprocessing description, the feature description and the model description.
def elapsed(self): dt = 0 for ss in self.starts_and_stops[:-1]: dt += (ss['stop'] - ss['start']).total_seconds() ss = self.starts_and_stops[-1] if ss['stop']: dt += (ss['stop'] - ss['start']).total_seconds() else: dt += (doublethink.utcnow() - ...
Returns elapsed crawl time as a float in seconds. This metric includes all the time that a site was in active rotation, including any time it spent waiting for its turn to be brozzled. In contrast `Site.active_brozzling_time` only counts time when a brozzler worker claimed the site and...
def smartread(path): with open(path, "rb") as f: content = f.read() result = chardet.detect(content) return content.decode(result["encoding"])
Read text from file, automatically detect encoding. ``chardet`` required.
def powerDown(self, powerup, interface=None): if interface is None: for interface, priority in powerup._getPowerupInterfaces(): self.powerDown(powerup, interface) else: for cable in self.store.query(_PowerupConnector, AND(...
Remove a powerup. If no interface is specified, and the type of the object being installed has a "powerupInterfaces" attribute (containing either a sequence of interfaces, or a sequence of (interface, priority) tuples), the target will be powered down with this object on those i...
def remove_file_data(file_id, silent=True): try: f = FileInstance.get(file_id) if not f.writable: return f.delete() db.session.commit() f.storage().delete() except IntegrityError: if not silent: raise
Remove file instance and associated data. :param file_id: The :class:`invenio_files_rest.models.FileInstance` ID. :param silent: It stops propagation of a possible arised IntegrityError exception. (Default: ``True``) :raises sqlalchemy.exc.IntegrityError: Raised if the database removal goes ...
def validate_properties(self): for name, property_type in self.property_types.items(): value = getattr(self, name) if property_type.supports_intrinsics and self._is_intrinsic_function(value): continue if value is None: if property_type.required...
Validates that the required properties for this Resource have been populated, and that all properties have valid values. :returns: True if all properties are valid :rtype: bool :raises TypeError: if any properties are invalid
def shape(self): if self._shape is None: self._populate_from_rasterio_object(read_image=False) return self._shape
Raster shape.
def getInfoMutator(self): if self._infoMutator: return self._infoMutator infoItems = [] for sourceDescriptor in self.sources: if sourceDescriptor.layerName is not None: continue loc = Location(sourceDescriptor.location) sourceFont =...
Returns a info mutator
def read_login(collector, image, **kwargs): docker_api = collector.configuration["harpoon"].docker_api collector.configuration["authentication"].login(docker_api, image, is_pushing=False, global_docker=True)
Login to a docker registry with read permissions
def get_provider_links(self): if not bool(self._my_map['providerLinkIds']): raise errors.IllegalState('no providerLinkIds') mgr = self._get_provider_manager('RESOURCE') if not mgr.supports_resource_lookup(): raise errors.OperationFailed('Resource does not support Resource...
Gets the ``Resources`` representing the source of this asset in order from the most recent provider to the originating source. return: (osid.resource.ResourceList) - the provider chain raise: OperationFailed - unable to complete request *compliance: mandatory -- This method must be implemented...
def match_serializers(self, serializers, default_media_type): return self._match_serializers_by_query_arg(serializers) or self.\ _match_serializers_by_accept_headers(serializers, default_media_type)
Choose serializer for a given request based on query arg or headers. Checks if query arg `format` (by default) is present and tries to match the serializer based on the arg value, by resolving the mimetype mapped to the arg value. Otherwise, chooses the serializer by retrieving the best...
def _get_flux_bounds(self, r_id, model, flux_limits, equation): if r_id not in flux_limits or flux_limits[r_id][0] is None: if equation.direction == Direction.Forward: lower = 0 else: lower = -model.default_flux_limit else: lower = flux...
Read reaction's limits to set up strings for limits in the output file.
def canonicalize(parsed_op): assert 'op' in parsed_op assert len(parsed_op['op']) == 2 if parsed_op['op'][1] == TRANSFER_KEEP_DATA: parsed_op['keep_data'] = True elif parsed_op['op'][1] == TRANSFER_REMOVE_DATA: parsed_op['keep_data'] = False else: raise ValueError("Invalid op...
Get the "canonical form" of this operation, putting it into a form where it can be serialized to form a consensus hash. This method is meant to preserve compatibility across blockstackd releases. For NAME_TRANSFER, this means: * add 'keep_data' flag
def delete_cookie(self, key, **kwargs): kwargs['max_age'] = -1 kwargs['expires'] = 0 self.set_cookie(key, '', **kwargs)
Delete a cookie. Be sure to use the same `domain` and `path` parameters as used to create the cookie.
def compose(self, sources, client=None): client = self._require_client(client) query_params = {} if self.user_project is not None: query_params["userProject"] = self.user_project request = { "sourceObjects": [{"name": source.name} for source in sources], ...
Concatenate source blobs into this one. If :attr:`user_project` is set on the bucket, bills the API request to that project. :type sources: list of :class:`Blob` :param sources: blobs whose contents will be composed into this blob. :type client: :class:`~google.cloud.storage.c...
def filter_records(self, records): for record in records: try: filtered = self.filter_record(record) assert (filtered) if filtered.seq == record.seq: self.passed_unchanged += 1 else: self.passed_c...
Apply the filter to records
def fixPoint(self, plotterPoint, canvasPoint): 'adjust visibleBox.xymin so that canvasPoint is plotted at plotterPoint' self.visibleBox.xmin = canvasPoint.x - self.canvasW(plotterPoint.x-self.plotviewBox.xmin) self.visibleBox.ymin = canvasPoint.y - self.canvasH(plotterPoint.y-self.plotviewBox.ym...
adjust visibleBox.xymin so that canvasPoint is plotted at plotterPoint
def _skip_frame(self):
    """Skip one frame without parsing its atom records.

    Reads past the frame header and all atom lines, validating that the
    frame's atom count matches ``self.num_atoms`` when that is already
    known.

    Raises:
        ValueError: if the frame's atom count differs from the count
            seen in earlier frames.
    """
    # The first header line is discarded unparsed; the second carries
    # the atom count for this frame.
    self._get_line()
    frame_atoms = int(self._get_line())
    if self.num_atoms is not None and self.num_atoms != frame_atoms:
        raise ValueError("The number of atoms must be the same over the entire file.")
    # Skip the atom lines plus one extra trailing line — presumably a
    # per-frame footer/comment line; TODO confirm against the format
    # this reader targets.
    for _ in range(frame_atoms + 1):
        self._get_line()
Skip one frame
def _parameterize_string(raw): parts = [] s_index = 0 for match in _PARAMETER_PATTERN.finditer(raw): parts.append(raw[s_index:match.start()]) parts.append({u"Ref": match.group(1)}) s_index = match.end() if not parts: return GenericHelperFn(raw) parts.append(raw[s_inde...
Substitute placeholders in a string using CloudFormation references Args: raw (`str`): String to be processed. Byte strings are not supported; decode them before passing them to this function. Returns: `str` | :class:`troposphere.GenericHelperFn`: An expression with placeho...
def getresponse(self): if self.__response and self.__response.isclosed(): self.__response = None if self.__state != _CS_REQ_SENT or self.__response: raise ResponseNotReady(self.__state) if self.debuglevel > 0: response = self.response_class(self.sock, self.deb...
Get the response from the server. If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class class variable. If a request has not been sent or if a previous response has not been handled, ResponseNotR...
def default_channel_ops(nqubits):
    """Generate the tomographic pre- and post-rotations of any number of
    qubits as qutip operators.

    :param int nqubits: The number of qubits to perform tomography on.
    :return: Qutip object corresponding to the tomographic rotation.
    :rtype: Qobj
    """
    # Take every nqubits-fold combination of the single-qubit tomography
    # gates and lift it to the joint Hilbert space via the tensor product.
    single_qubit_gates = TOMOGRAPHY_GATES.values()
    for gate_tuple in cartesian_product(single_qubit_gates, repeat=nqubits):
        yield qt.tensor(*gate_tuple)
Generate the tomographic pre- and post-rotations of any number of qubits as qutip operators. :param int nqubits: The number of qubits to perform tomography on. :return: Qutip object corresponding to the tomographic rotation. :rtype: Qobj
def deserialize_basic(self, attr, data_type): if isinstance(attr, ET.Element): attr = attr.text if not attr: if data_type == "str": return '' else: return None if data_type == 'bool': if attr in [...
Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as valid bool values. :param str attr: response string to be deserialized. :param str data_type: deserialization data...
def unwrap(self): if self.algorithm == 'rsa': return self['private_key'].parsed if self.algorithm == 'dsa': params = self['private_key_algorithm']['parameters'] return DSAPrivateKey({ 'version': 0, 'p': params['p'], 'q':...
Unwraps the private key into an RSAPrivateKey, DSAPrivateKey or ECPrivateKey object :return: An RSAPrivateKey, DSAPrivateKey or ECPrivateKey object
def GetAPFSVolumeByPathSpec(self, path_spec):
    """Retrieves an APFS volume for a path specification.

    Args:
        path_spec (PathSpec): path specification.

    Returns:
        pyfsapfs.volume: an APFS volume or None if not available.
    """
    volume_index = apfs_helper.APFSContainerPathSpecGetVolumeIndex(path_spec)
    if volume_index is not None:
        return self._fsapfs_container.get_volume(volume_index)
    # No volume index could be derived from the path specification.
    return None
Retrieves an APFS volume for a path specification. Args: path_spec (PathSpec): path specification. Returns: pyfsapfs.volume: an APFS volume or None if not available.
def selection_r(acquisition_function, samples_y_aggregation, x_bounds, x_types, regressor_gp, num_starting_points=100, minimize_constraints_fun=None): minimize_starting_points = [lib_data.rand(x_bounds, x_types) \ ...
Select R value
def _get_vlanid(self, context):
    """Returns vlan_id associated with a bound VLAN segment.

    Returns None when there is no bottom-bound segment or the segment
    does not pass ``self.check_segment``.
    """
    bound_segment = context.bottom_bound_segment
    if not bound_segment or not self.check_segment(bound_segment):
        return None
    return bound_segment.get(api.SEGMENTATION_ID)
Returns vlan_id associated with a bound VLAN segment.
def get(self, id=None, name=None):
    """Get a task queue.

    Exactly one of ``id`` or ``name`` must be given.

    Args:
        id (int, optional): The id of the task queue to get.
        name (str, optional): The name of the task queue to get.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`: the matching
            task queue.

    Raises:
        ValueError: if both or neither of ``id`` and ``name`` are set.
    """
    # (a is None) == (b is None) is the negation of XOR: it is true
    # exactly when zero or two identifiers were supplied.
    if (id is None) == (name is None):
        raise ValueError("Either id or name must be set (but not both!)")
    if id is None:
        # Look the queue up by name; assumes names are unique, so the
        # first match is the one.
        return self.list(filters={"name": name})[0]
    return super(TaskQueueManager, self).get(id=id)
Get a task queue. Either the id xor the name of the task type must be specified. Args: id (int, optional): The id of the task type to get. name (str, optional): The name of the task type to get. Returns: :class:`saltant.models.task_queue.TaskQueue`: ...
def __walk_rec(self, top, rec):
    """Yield each subdirectory of top; symlinks are not followed.

    If rec is false, only top itself is yielded.

    @param top: root directory.
    @type top: string
    @param rec: recursive flag.
    @type rec: bool
    @return: path of one subdirectory.
    @rtype: string
    """
    # Descend only when recursion was requested and top is a real
    # (non-symlinked) directory; otherwise emit top alone.
    can_descend = rec and not os.path.islink(top) and os.path.isdir(top)
    if not can_descend:
        yield top
        return
    for dirpath, _dirnames, _filenames in os.walk(top):
        yield dirpath
Yields each subdirectory of top, doesn't follow symlinks. If rec is false, only yield top. @param top: root directory. @type top: string @param rec: recursive flag. @type rec: bool @return: path of one subdirectory. @rtype: string
def intSize(self, obj): if obj < 0: return 8 elif obj <= 0xFF: return 1 elif obj <= 0xFFFF: return 2 elif obj <= 0xFFFFFFFF: return 4 elif obj <= 0x7FFFFFFFFFFFFFFF: return 8 elif obj <= 0xffffffffffffffff: ...
Returns the number of bytes necessary to store the given integer.
def _final_frame_length(header, final_frame_bytes):
    """Calculates the length of a final ciphertext frame, given a complete
    header and the number of bytes of ciphertext in the final frame.

    :param header: Complete message header object
    :type header: aws_encryption_sdk.structures.MessageHeader
    :param int final_frame_bytes: Bytes of ciphertext in the final frame
    :rtype: int
    """
    # Three fixed 4-byte fields plus the algorithm-dependent IV and auth
    # tag lengths and the ciphertext itself. Per the AWS Encryption SDK
    # message format, the 4-byte fields are presumably the final-frame
    # marker, the sequence number, and the encrypted-content length —
    # confirm against the serialization code.
    return (
        4
        + 4
        + header.algorithm.iv_len
        + 4
        + final_frame_bytes
        + header.algorithm.auth_len
    )
Calculates the length of a final ciphertext frame, given a complete header and the number of bytes of ciphertext in the final frame. :param header: Complete message header object :type header: aws_encryption_sdk.structures.MessageHeader :param int final_frame_bytes: Bytes of ciphertext in the final fra...