code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def read_energy_bounds(hdu): nebins = len(hdu.data) ebin_edges = np.ndarray((nebins + 1)) try: ebin_edges[0:-1] = np.log10(hdu.data.field("E_MIN")) - 3. ebin_edges[-1] = np.log10(hdu.data.field("E_MAX")[-1]) - 3. except KeyError: ebin_edges[0:-1] = np.log10(hdu.data.field("energy...
Reads and returns the energy bin edges from a FITs HDU
def impact_check_range(func): @wraps(func) def impact_wrapper(*args,**kwargs): if isinstance(args[1],numpy.ndarray): out= numpy.zeros(len(args[1])) goodIndx= (args[1] < args[0]._deltaAngleTrackImpact)*(args[1] > 0.) out[goodIndx]= func(args[0],args[1][goodIndx]) ...
Decorator to check the range of interpolated kicks
def get_image_size(self, image): if image['size'] is None: args = settings.THUMBNAIL_VIPSHEADER.split(' ') args.append(image['source']) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) p.wait() m = size_re.match(str(p.stdout.r...
Returns the image width and height as a tuple
def get_as_parameters_with_default(self, key, default_value):
    """
    Converts map element into a Parameters object or returns the default
    value if conversion is not possible.

    :param key: a key of the element to get.
    :param default_value: the default value.
    :return: Parameters value of the element or the default value if
        conversion is not supported.
    """
    result = self.get_as_nullable_parameters(key)
    # Identity check against None instead of `!=`: a Parameters object with
    # a custom __eq__ must not accidentally compare equal to None.
    return result if result is not None else default_value
Converts map element into an Parameters or returns default value if conversion is not possible. :param key: a key of element to get. :param default_value: the default value :return: Parameters value of the element or default value if conversion is not supported.
def remove_listener(self, listener):
    """Remove the given listener from the wrapped client.

    :param listener: A listener previously passed to :meth:`add_listener`.
    """
    # Look up (and forget) the internal listener that was registered on the
    # caller's behalf, then detach it from the underlying client.
    wrapped = self._internal_listeners.pop(listener)
    return self._client.remove_listener(wrapped)
Remove the given listener from the wrapped client. :param listener: A listener previously passed to :meth:`add_listener`.
def get_narrow_url(self, instance): text = instance[0] request = self.context["request"] query_params = request.GET.copy() page_query_param = self.get_paginate_by_param() if page_query_param and page_query_param in query_params: del query_params[page_query_param] ...
Return a link suitable for narrowing on the current item.
def also_restrict_to(self, restriction):
    """
    Works like restrict_to but offers an additional restriction.
    Playbooks use this to implement serial behavior.

    :param restriction: a single host/group name or a list of them.
    """
    # Normalize a single value into a one-element list; isinstance instead
    # of an exact type comparison lets list subclasses pass through.
    if not isinstance(restriction, list):
        restriction = [restriction]
    self._also_restriction = restriction
Works like restrict_to but offers an additional restriction. Playbooks use this to implement serial behavior.
def _convert_json(obj): if isinstance(obj, dict): return {_convert_json(key): _convert_json(val) for (key, val) in six.iteritems(obj)} elif isinstance(obj, list) and len(obj) == 2: first = obj[0] second = obj[1] if first == 'set' and isinstance(second, list): ...
Converts from the JSON output provided by ovs-vsctl into a usable Python object tree. In particular, sets and maps are converted from lists to actual sets or maps. Args: obj: Object that shall be recursively converted. Returns: Converted version of object.
def ex_varassign(name, expr):
    """Assign an expression into a single variable.

    The expression may either be an `ast.expr` object or a value to be
    used as a literal.
    """
    # Wrap plain values as literal AST nodes before building the Assign.
    value = expr if isinstance(expr, ast.expr) else ex_literal(expr)
    return ast.Assign([ex_lvalue(name)], value)
Assign an expression into a single variable. The expression may either be an `ast.expr` object or a value to be used as a literal.
def _interpret_ltude(value, name, psuffix, nsuffix): if not isinstance(value, str): return Angle(degrees=_unsexagesimalize(value)) value = value.strip().upper() if value.endswith(psuffix): sign = +1.0 elif value.endswith(nsuffix): sign = -1.0 else: raise ValueError('y...
Interpret a string, float, or tuple as a latitude or longitude angle. `value` - The string to interpret. `name` - 'latitude' or 'longitude', for use in exception messages. `positive` - The string that indicates a positive angle ('N' or 'E'). `negative` - The string that indicates a negative angle ('S' ...
def aroon_up(data, period): catch_errors.check_for_period_error(data, period) period = int(period) a_up = [((period - list(reversed(data[idx+1-period:idx+1])).index(np.max(data[idx+1-period:idx+1]))) / float(period)) * 100 for idx in range(period-1, len(data))] a_up = fill_for_no...
Aroon Up. Formula: AROONUP = (((PERIOD) - (PERIODS since PERIOD high)) / (PERIOD)) * 100
def _persist_metadata(self): serializable_data = self.get_serializable() try: self._try_persist_metadata(serializable_data) except TypeError: cleaned_data = Script._remove_non_serializable_store_entries(serializable_data["store"]) self._try_persist_metadata(cl...
Write all script meta-data, including the persistent script Store. The Store instance might contain arbitrary user data, like function objects, OpenCL contexts, or whatever other non-serializable objects, both as keys or values. Try to serialize the data, and if it fails, fall back to checking t...
def get_transport_target(cls, instance, timeout, retries): if "ip_address" not in instance: raise Exception("An IP address needs to be specified") ip_address = instance["ip_address"] port = int(instance.get("port", 161)) return hlapi.UdpTransportTarget((ip_address, port), tim...
Generate a Transport target object based on the instance's configuration
def parse_datetime(record: str) -> Optional[datetime]:
    """Parse a datetime string into a python datetime object.

    The format is chosen by length: YYYYMMDD, YYYYMMDDHHMM or
    YYYYMMDDHHMMSS. Surrounding whitespace is ignored.

    :param record: the timestamp string.
    :return: the parsed ``datetime``, or ``None`` for an empty record.
    :raises KeyError: if the stripped record has an unsupported length.
    :raises ValueError: if the digits do not form a valid date/time.
    """
    format_strings = {8: '%Y%m%d', 12: '%Y%m%d%H%M', 14: '%Y%m%d%H%M%S'}
    # Strip once up front so a whitespace-only record is treated as empty
    # instead of failing the length lookup below with a KeyError.
    record = record.strip()
    if record == '':
        return None
    return datetime.strptime(record, format_strings[len(record)])
Parse a datetime string into a python datetime object
def _start_lst_proc(self, listener_type, listener_opts): log.debug('Starting the listener process for %s', listener_type) listener = NapalmLogsListenerProc(self.opts, self.address, ...
Start the listener process.
def edit_matching_entry(program, arguments):
    """Edit the matching entry.

    Selects the entry matching ``arguments`` and opens it for editing via
    ``pass edit``.
    """
    match = program.select_entry(*arguments)
    match.context.execute("pass", "edit", match.name)
Edit the matching entry.
def _generate_manager(manager_config): if 'class' not in manager_config: raise ValueError( 'Manager not fully specified. Give ' '"class:manager_name", e.g. "class:MongoDBManager".') mgr_class_name = manager_config['class'] if mgr_class_name.lower()[:5]...
Generate a manager from a manager_config dictionary Parameters ---------- manager_config : dict Configuration with keys class, args, and kwargs used to generate a new datafs.manager object Returns ------- manager : object datafs.man...
def get_paths(self, theme, icon_size): _size_str = "x".join(map(str, icon_size)) theme_path = get_program_path() + "share" + os.sep + "icons" + os.sep icon_path = theme_path + theme + os.sep + _size_str + os.sep action_path = icon_path + "actions" + os.sep toggle_path = icon_path...
Returns tuple of theme, icon, action and toggle paths
def _handle_stderr_event(self, fd, events): assert fd == self.fd_stderr if events & self.ioloop.READ: if not self.headers_sent: payload = self.process.stderr.read() data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_da...
Eventhandler for stderr
def set_published_date(self):
    """Parse the published date from the soup and store it on the instance.

    Stores ``None`` when no ``<pubdate>`` element is present.
    """
    try:
        # find() yields None when the tag is missing, so `.string` raises
        # AttributeError and we record no date.
        published = self.soup.find('pubdate').string
    except AttributeError:
        published = None
    self.published_date = published
Parses published date and set value
def bluemix(cls, vcap_services, instance_name=None, service_name=None, **kwargs): service_name = service_name or 'cloudantNoSQLDB' try: service = CloudFoundryService(vcap_services, instance_name=instance_name, ...
Create a Cloudant session using a VCAP_SERVICES environment variable. :param vcap_services: VCAP_SERVICES environment variable :type vcap_services: dict or str :param str instance_name: Optional Bluemix instance name. Only required if multiple Cloudant instances are available. ...
def get_all_query_traces(self, max_wait_per=None, query_cl=ConsistencyLevel.LOCAL_ONE):
    """Fetches and returns the query traces for all query pages, if tracing
    was enabled.

    See note in :meth:`~.get_query_trace` regarding possible exceptions.
    """
    traces = self._query_traces
    if not traces:
        return []
    return [
        self._get_query_trace(index, max_wait_per, query_cl)
        for index in range(len(traces))
    ]
Fetches and returns the query traces for all query pages, if tracing was enabled. See note in :meth:`~.get_query_trace` regarding possible exceptions.
def intercept(actions: dict={}): for action in actions.values(): if type(action) is not returns and type(action) is not raises: raise InterceptorError('Actions must be declared as `returns` or `raises`') def decorated(f): def wrapped(*args, **kargs): try: ...
Decorates a function and handles any exceptions that may rise. Args: actions: A dictionary ``<exception type>: <action>``. Available actions\ are :class:`raises` and :class:`returns`. Returns: Any value declared using a :class:`returns` action. Raises: AnyException: if...
def clear_copyright(self):
    """Removes the copyright.

    raise: NoAccess - ``Metadata.isRequired()`` is ``true`` or
        ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*
    """
    metadata = self.get_copyright_metadata()
    if metadata.is_read_only() or metadata.is_required():
        raise errors.NoAccess()
    # Reset to a copy of the default so later edits don't mutate it.
    self._my_map['copyright'] = dict(self._copyright_default)
Removes the copyright. raise: NoAccess - ``Metadata.isRequired()`` is ``true`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
def _evaluate_dimension_fields(self) -> bool: for _, item in self._dimension_fields.items(): item.run_evaluate() if item.eval_error: return False return True
Evaluates the dimension fields. Returns False if any of the fields could not be evaluated.
def findLowest(self, symorders): _range = range(len(symorders)) stableSymorders = map(None, symorders, _range) stableSymorders.sort() lowest = None for index in _range: if stableSymorders[index][0] == lowest: return stableSymorders[index-1][1] ...
Find the position of the first lowest tie in a symorder or -1 if there are no ties
def vae(x, z_size, name=None): with tf.variable_scope(name, default_name="vae"): mu = tf.layers.dense(x, z_size, name="mu") log_sigma = tf.layers.dense(x, z_size, name="log_sigma") shape = common_layers.shape_list(x) epsilon = tf.random_normal([shape[0], shape[1], 1, z_size]) z = mu + tf.exp(log_s...
Simple variational autoencoder without discretization. Args: x: Input to the discretization bottleneck. z_size: Number of bits, where discrete codes range from 1 to 2**z_size. name: Name for the bottleneck scope. Returns: Embedding function, latent, loss, mu and log_sigma.
def inv(self):
    """In place invert.

    Uses d(1/v)/dv = -1/v**2 and the second-derivative chain rule,
    both expressed in terms of the already-inverted value.
    """
    self.v = 1 / self.v
    inv_sq = self.v ** 2
    # Second derivative first: it needs the *original* first derivative,
    # which the branch below overwrites in place.
    if self.deriv > 1:
        self.dd[:] = inv_sq * (2 * self.v * np.outer(self.d, self.d) - self.dd)
    if self.deriv > 0:
        self.d[:] = -inv_sq * self.d[:]
In place invert
def get_subgraph_by_edge_filter(graph, edge_predicates: Optional[EdgePredicates] = None):
    """Induce a sub-graph on all edges that pass the given filters.

    :param pybel.BELGraph graph: A BEL graph
    :param edge_predicates: An edge predicate or list of edge predicates
    :return: A BEL sub-graph induced over the edges passing the given filters
    :rtype: pybel.BELGraph
    """
    # Start from an empty copy carrying the graph's metadata, then populate
    # it with every edge accepted by the predicates.
    result = graph.fresh_copy()
    expand_by_edge_filter(graph, result, edge_predicates=edge_predicates)
    return result
Induce a sub-graph on all edges that pass the given filters. :param pybel.BELGraph graph: A BEL graph :param edge_predicates: An edge predicate or list of edge predicates :return: A BEL sub-graph induced over the edges passing the given filters :rtype: pybel.BELGraph
def readAltWCS(fobj, ext, wcskey=' ', verbose=False): if isinstance(fobj, str): fobj = fits.open(fobj, memmap=False) hdr = altwcs._getheader(fobj, ext) try: original_logging_level = log.level log.setLevel(logutil.logging.WARNING) nwcs = pywcs.WCS(hdr, fobj=fobj, key=wcskey) ...
Reads in alternate primary WCS from specified extension. Parameters ---------- fobj : str, `astropy.io.fits.HDUList` fits filename or fits file object containing alternate/primary WCS(s) to be converted wcskey : str [" ",A-Z] alternate/primary WCS key that will be replac...
def _any_pandas_objects(terms): return any(isinstance(term.value, pd.core.generic.PandasObject) for term in terms)
Check a sequence of terms for instances of PandasObject.
def _get_batch_representative(items, key): if isinstance(items, dict): return items, items else: vals = set([]) out = [] for data in items: if key in data: vals.add(data[key]) out.append(data) if len(vals) != 1: rais...
Retrieve a representative data item from a batch. Handles standard bcbio cases (a single data item) and CWL cases with batches that have a consistent variant file.
def _is_valid_function(module_name, function): try: functions = __salt__['sys.list_functions'](module_name) except salt.exceptions.SaltException: functions = ["unable to look up functions"] return "{0}.{1}".format(module_name, function) in functions
Determine if a function is valid for a module
def _get_driver(self): ComputeEngine = get_driver(Provider.GCE) return ComputeEngine( self.service_account_email, self.service_account_file, project=self.service_account_project )
Get authenticated GCE driver.
def family_coff(self): if not self._ptr: raise BfdException("BFD not initialized") return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FAMILY_COFF)
Return the family_coff attribute of the BFD file being processed.
def check_padding_around_mutation(given_padding, epitope_lengths): min_required_padding = max(epitope_lengths) - 1 if not given_padding: return min_required_padding else: require_integer(given_padding, "Padding around mutation") if given_padding < min_required_padding: ra...
If the user doesn't provide any padding around the mutation we need to at least include enough of the surrounding non-mutated residues to construct candidate epitopes of the specified lengths.
def extract(path_to_hex, output_path=None): with open(path_to_hex, 'r') as hex_file: python_script = extract_script(hex_file.read()) if output_path: with open(output_path, 'w') as output_file: output_file.write(python_script) else: print(python_script)
Given a path_to_hex file this function will attempt to extract the embedded script from it and save it either to output_path or stdout
def remove_cycle_mrkr(self): window_start = self.parent.value('window_start') try: self.annot.remove_cycle_mrkr(window_start) except KeyError: msg = ('The start of the window does not correspond to any cycle ' 'marker in sleep scoring file') ...
Remove cycle marker.
def set_type(spec, obj_type): if spec is None: raise ValueError('Spec cannot be None') if TemplateFields.generation not in spec: spec[TemplateFields.generation] = {} spec[TemplateFields.generation][TemplateFields.commkey] = \ Gen.CLIENT if (obj_type & (int(1) ...
Updates the type integer in the created UO specification. Type has to already have generation flags set correctly. The generation field is set accordingly. :param spec: :param obj_type: :return:
def load_from_file(filename): if os.path.isdir(filename): logger.error("Err: File '%s' is a directory", filename) return None if not os.path.isfile(filename): logger.error("Err: File '%s' does not exist", filename) return None try: with open(filename, 'r') as sourcefi...
Load a list of filenames from an external text file.
def convert(self, value, view):
    """Check that the value is an integer. Floats are truncated via int().

    Non-numeric values are reported through ``self.fail``.
    """
    if isinstance(value, int):
        return value
    if isinstance(value, float):
        return int(value)
    self.fail(u'must be a number', view, True)
Check that the value is an integer. Floats are rounded.
def build_full_toctree(builder, docname, prune, collapse): env = builder.env doctree = env.get_doctree(env.config.master_doc) toctrees = [] for toctreenode in doctree.traverse(addnodes.toctree): toctree = env.resolve_toctree(docname, builder, toctreenode, co...
Return a single toctree starting from docname containing all sub-document doctrees.
def rename(self, name): if name: rename1, rename2 = callbacks.add( b'rename', self.change_name, False) self.dispatch_command(b'/bin/echo "' + rename1 + b'""' + rename2 + b'"' + name + b'\n') else: self.change_name(self...
Send to the remote shell, its new name to be shell expanded
def expand_dataset(X, y_proba, factor=10, random_state=None, extra_arrays=None): rng = check_random_state(random_state) extra_arrays = extra_arrays or [] n_classes = y_proba.shape[1] classes = np.arange(n_classes, dtype=int) for el in zip(X, y_proba, *extra_arrays): x, probs = el[0:2] ...
Convert a dataset with float multiclass probabilities to a dataset with indicator probabilities by duplicating X rows and sampling true labels.
def schema(self): if not self._schema: try: self._load_info() self._schema = _schema.Schema(self._info['schema']['fields']) except KeyError: raise Exception('Unexpected table response: missing schema') return self._schema
Retrieves the schema of the table. Returns: A Schema object containing a list of schema fields and associated metadata. Raises Exception if the request could not be executed or the response was malformed.
def patch_sys_version():
    """Remove Continuum copyright statement to avoid parsing errors in IDLE.

    Keeps only the first and last '|'-separated segments of ``sys.version``.
    """
    if '|' not in sys.version:
        return
    segments = sys.version.split('|')
    sys.version = '{} {}'.format(segments[0].strip(), segments[-1].strip())
Remove Continuum copyright statement to avoid parsing errors in IDLE
def is_installed(self, name: str) -> bool: assert name is not None try: self.__docker.images.get(name) return True except docker.errors.ImageNotFound: return False
Indicates a given Docker image is installed on this server. Parameters: name: the name of the Docker image. Returns: `True` if installed; `False` if not.
def _get_by_index(self, index):
    """Returns a (volume, disk) tuple for the specified index.

    Exactly one element of the tuple is populated, depending on whether the
    parser resolves the index to a Disk or a volume.
    """
    found = self.parser.get_by_index(index)
    if isinstance(found, Disk):
        return None, found
    return found, None
Returns a volume,disk tuple for the specified index
def start(self):
    """Starts the watchdog timer.

    The timer thread is marked daemonic so it never blocks interpreter exit.
    """
    timer = Timer(self.time, self.handler)
    timer.daemon = True
    self._timer = timer
    self._timer.start()
Starts the watchdog timer.
def redirect_to(request, url, permanent=True, query_string=False, **kwargs): r args = request.META.get('QUERY_STRING', '') if url is not None: if kwargs: url = url % kwargs if args and query_string: url = "%s?%s" % (url, args) klass = (permanent and HttpRespon...
r""" Redirect to a given URL. The given url may contain dict-style string formatting, which will be interpolated against the params in the URL. For example, to redirect from ``/foo/<id>/`` to ``/bar/<id>/``, you could use the following URLconf:: urlpatterns = patterns('', (r'^foo/...
def update_content_encoding(self, data: Any) -> None: if not data: return enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() if enc: if self.compress: raise ValueError( 'compress can not be set ' 'if Content-E...
Set request content encoding.
def find_op_code_sequence(pattern: list, instruction_list: list) -> Generator: for i in range(0, len(instruction_list) - len(pattern) + 1): if is_sequence_match(pattern, instruction_list, i): yield i
Returns all indices in instruction_list that point to instruction sequences following a pattern. :param pattern: The pattern to look for, e.g. [["PUSH1", "PUSH2"], ["EQ"]] where ["PUSH1", "EQ"] satisfies pattern :param instruction_list: List of instructions to look in :return: Indices to the instructio...
def detailed_tokens(tokenizer, text): node = tokenizer.parseToNode(text) node = node.next words = [] while node.posid != 0: surface = node.surface base = surface parts = node.feature.split(",") pos = ",".join(parts[0:4]) if len(parts) > 7: base = parts...
Format Mecab output into a nice data structure, based on Janome.
def SamplingRoundAddedEventHandler(instance, event): if instance.portal_type != "SamplingRound": print("How does this happen: type is %s should be SamplingRound" % instance.portal_type) return renameAfterCreation(instance) num_art = len(instance.ar_templates) destination_url = instance.a...
Event fired when BikaSetup object gets modified. Since Sampling Round is a dexterity object we have to change the ID by "hand" Then we have to redirect the user to the ar add form
def forward(self, inputs, label, begin_state, sampled_values): encoded = self.embedding(inputs) length = inputs.shape[0] batch_size = inputs.shape[1] encoded, out_states = self.encoder.unroll(length, encoded, begin_state, layout='TNC', me...
Defines the forward computation. Parameters ----------- inputs : NDArray input tensor with shape `(sequence_length, batch_size)` when `layout` is "TNC". begin_state : list initial recurrent state tensor with length equals to num_layers*2. ...
def network_undefine(name, **kwargs): conn = __get_conn(**kwargs) try: net = conn.networkLookupByName(name) return not bool(net.undefine()) finally: conn.close()
Remove a defined virtual network. This does not stop the virtual network. :param name: virtual network name :param connection: libvirt connection URI, overriding defaults :param username: username to connect with, overriding defaults :param password: password to connect with, overriding defaults ....
def get_path(self): md5_hash = hashlib.md5(self.task_id.encode()).hexdigest() logger.debug('Hash %s corresponds to task %s', md5_hash, self.task_id) return os.path.join(self.temp_dir, str(self.unique.value), md5_hash)
Returns a temporary file path based on a MD5 hash generated with the task's name and its arguments
def queue_command(self, command): if self._running: QtCore.QCoreApplication.postEvent( self, ActionEvent(command), QtCore.Qt.LowEventPriority) else: self._incoming.append(command)
Put a command on the queue to be called in the component's thread. :param callable command: the method to be invoked, e.g. :py:meth:`~Component.new_frame_event`.
def _read_with_mask(raster, masked): if masked is None: mask_flags = raster.mask_flag_enums per_dataset_mask = all([rasterio.enums.MaskFlags.per_dataset in flags for flags in mask_flags]) masked = per_dataset_mask return masked
Returns whether we should read from rasterio using a masked array.
def parse_date(date: str, hour_threshold: int = 200): date = date.strip('Z') if len(date) == 4: date += '00' if not (len(date) == 6 and date.isdigit()): return now = datetime.utcnow() guess = now.replace(day=int(date[0:2]), hour=int(date[2:4]) % 24, ...
Parses a report timestamp in ddhhZ or ddhhmmZ format This function assumes the given timestamp is within the hour threshold from current date
def _unicode_sub_super(string, mapping, max_len=None): string = str(string) if string.startswith('(') and string.endswith(')'): len_string = len(string) - 2 else: len_string = len(string) if max_len is not None: if len_string > max_len: raise KeyError("max_len exceede...
Try to render a subscript or superscript string in unicode, fall back on ascii if this is not possible
def function_to_serializable_representation(fn): if type(fn) not in (FunctionType, BuiltinFunctionType): raise ValueError( "Can't serialize %s : %s, must be globally defined function" % ( fn, type(fn),)) if hasattr(fn, "__closure__") and fn.__closure__ is not None: ra...
Converts a Python function into a serializable representation. Does not currently work for methods or functions with closure data.
def weighting(self, landscape=None): if landscape is not None: if len(landscape) > 0: maxy = np.max(landscape[:, 1]) else: maxy = 1 def linear(interval): d = interval[1] return (1 / maxy) * d if landscape is not None else d...
Define a weighting function, for stability results to hold, the function must be 0 at y=0.
def from_name(cls, name):
    """Retrieve webacc id associated to a webacc name.

    Returns ``None`` when no webacc carries that name.
    """
    # Build a name -> id lookup from the (large-page) listing, then resolve.
    listing = cls.list({'items_per_page': 500})
    ids_by_name = {entry['name']: entry['id'] for entry in listing}
    return ids_by_name.get(name)
Retrieve webacc id associated to a webacc name.
def set_config(self, config): if self.config is None: self.config = { } self.config.update(config_to_api_list(config))
Set the service configuration. @param config: A dictionary of config key/value
def tnet_to_nx(df, t=None): if t is not None: df = get_network_when(df, t=t) if 'weight' in df.columns: nxobj = nx.from_pandas_edgelist( df, source='i', target='j', edge_attr='weight') else: nxobj = nx.from_pandas_edgelist(df, source='i', target='j') return nxobj
Creates undirected networkx object
def get_substances(identifier, namespace='sid', as_dataframe=False, **kwargs): results = get_json(identifier, namespace, 'substance', **kwargs) substances = [Substance(r) for r in results['PC_Substances']] if results else [] if as_dataframe: return substances_to_frame(substances) return substanc...
Retrieve the specified substance records from PubChem. :param identifier: The substance identifier to use as a search query. :param namespace: (optional) The identifier type, one of sid, name or sourceid/<source name>. :param as_dataframe: (optional) Automatically extract the :class:`~pubchempy.Substance` ...
def _ConvertAttributeContainerToDict(cls, attribute_container): if not isinstance( attribute_container, containers_interface.AttributeContainer): raise TypeError('{0:s} is not an attribute container type.'.format( type(attribute_container))) container_type = getattr(attribute_container, ...
Converts an attribute container object into a JSON dictionary. The resulting dictionary of the JSON serialized objects consists of: { '__type__': 'AttributeContainer' '__container_type__': ... ... } Here '__type__' indicates the object base type. In this case 'AttributeCont...
def burst_range(psd, snr=8, energy=1e-2, fmin=100, fmax=500): freqs = psd.frequencies.value if not fmin: fmin = psd.f0 if not fmax: fmax = psd.span[1] condition = (freqs >= fmin) & (freqs < fmax) integrand = burst_range_spectrum( psd[condition], snr=snr, energy=energy) ** 3 ...
Calculate the integrated GRB-like GW burst range from a strain PSD Parameters ---------- psd : `~gwpy.frequencyseries.FrequencySeries` the instrumental power-spectral-density data snr : `float`, optional the signal-to-noise ratio for which to calculate range, default: ``8`` ...
def int_dp_g(arr, dp): return integrate(arr, to_pascal(dp, is_dp=True), vert_coord_name(dp)) / GRAV_EARTH
Mass weighted integral.
def add_nodes(self, coors, node_low_or_high=None): last = self.lastnode if type(coors) is nm.ndarray: if len(coors.shape) == 1: coors = coors.reshape((1, coors.size)) nadd = coors.shape[0] idx = slice(last, last + nadd) else: nadd =...
Add new nodes at the end of the list.
def dropout_mask(x:Tensor, sz:Collection[int], p:float):
    "Return a dropout mask of the same type as `x`, size `sz`, with probability `p` to cancel an element."
    # Sample keep/drop decisions, then rescale survivors by 1/keep so the
    # expected value of the masked tensor is unchanged (inverted dropout).
    keep = 1 - p
    return x.new(*sz).bernoulli_(keep).div_(keep)
Return a dropout mask of the same type as `x`, size `sz`, with probability `p` to cancel an element.
def declare_config_variable(self, name, config_id, type_name, default=None, convert=None): config = ConfigDescriptor(config_id, type_name, default, name=name, python_type=convert) self._config_variables[config_id] = config
Declare a config variable that this emulated tile accepts. The default value (if passed) may be specified as either a `bytes` object or a python int or list of ints. If an int or list of ints is passed, it is converted to binary. Otherwise, the raw binary data is used. Passin...
def attr_to_path(node): def get_intrinsic_path(modules, attr): if isinstance(attr, ast.Name): return modules[demangle(attr.id)], (demangle(attr.id),) elif isinstance(attr, ast.Attribute): module, path = get_intrinsic_path(modules, attr.value) return module[attr.at...
Compute path and final object for an attribute node
def get_authorizations_by_ids(self, authorization_ids): collection = JSONClientValidated('authorization', collection='Authorization', runtime=self._runtime) object_id_list = [] for i in authorization_ids: ...
Gets an ``AuthorizationList`` corresponding to the given ``IdList``. In plenary mode, the returned list contains all of the authorizations specified in the ``Id`` list, in the order of the list, including duplicates, or an error results if an ``Id`` in the supplied list is not found or ...
def has_scope(context=None): if not booted(context): return False _sd_version = version(context) if _sd_version is None: return False return _sd_version >= 205
Scopes were introduced in systemd 205, this function returns a boolean which is true when the minion is systemd-booted and running systemd>=205.
def read(self, document, iface, *args, **kwargs): try: document = IReadableDocument(document) mime_type = document.mime_type reader = self.lookup_reader(mime_type, iface) if not reader: msg = ("No adapter found to read object %s from %s document" ...
Returns a Deferred that fire the read object.
def AgregarReceptor(self, cuit, iibb, nro_socio, nro_fet, **kwargs):
    "Add a receiver to the liquidation request."
    self.solicitud['receptor'] = dict(
        cuit=cuit,
        iibb=iibb,
        nroSocio=nro_socio,
        nroFET=nro_fet,
    )
    return True
Agrego un receptor a la liq.
def strip_HETATMs(self, only_strip_these_chains = []): if only_strip_these_chains: self.lines = [l for l in self.lines if not(l.startswith('HETATM')) or l[21] not in only_strip_these_chains] else: self.lines = [l for l in self.lines if not(l.startswith('HETATM'))] self._u...
Throw away all HETATM lines. If only_strip_these_chains is specified then only strip HETATMs lines for those chains.
def contact_methods(self, **kwargs):
    """Get all contact methods for this user.

    Keyword arguments are forwarded as query parameters.
    """
    endpoint = '{0}/{1}/contact_methods'.format(self.endpoint, self['id'])
    response = self.request('GET', endpoint=endpoint, query_params=kwargs)
    return response['contact_methods']
Get all contact methods for this user.
def transform(self, X=None, y=None): zoom_x, zoom_y= self.zoom self.params = (zoom_x, zoom_y) zoom_matrix = np.array([[zoom_x, 0, 0], [0, zoom_y, 0]]) self.tx.set_parameters(zoom_matrix) if self.lazy or X is None: return self.tx ...
Transform an image using an Affine transform with the given zoom parameters. Return the transform if X=None. Arguments --------- X : ANTsImage Image to transform y : ANTsImage (optional) Another image to transform Returns ------- ...
def issequence(arg):
    """Return True if `arg` acts as a list and does not look like a string.

    :param arg: any object.
    :return: True when `arg` supports indexing or iteration and is not a str.
    """
    # On Python 3, `str` covers both six.string_types and six.text_type,
    # so the third-party six dependency is unnecessary here.
    if isinstance(arg, str):
        return False
    return hasattr(arg, '__getitem__') or hasattr(arg, '__iter__')
Return True if `arg` acts as a list and does not look like a string.
def put_name(self, type_, id_, name): cachefile = self.filename(type_, id_) dirname = os.path.dirname(cachefile) try: os.makedirs(dirname) except OSError as e: if e.errno != errno.EEXIST: raise with open(cachefile, 'w') as f: f....
Write a cached name to disk. :param type_: str, "user" or "tag" :param id_: int, eg. 123456 :returns: None
def get_ip6_address(interface_name, expand=False): address = _get_address(interface_name, IP6_PATTERN) if address and expand: return ':'.join(_expand_groups(address)) return address
Extracts the IPv6 address for a particular interface from `ifconfig`. :param interface_name: Name of the network interface (e.g. ``eth0``). :type interface_name: unicode :param expand: If set to ``True``, an abbreviated address is expanded to the full address. :type expand: bool :return: IPv6 addre...
def close(self): if self._socket is not None and self._conn is not None: message_input = UnityMessage() message_input.header.status = 400 self._communicator_send(message_input.SerializeToString()) if self._socket is not None: self._socket.close() ...
Sends a shutdown signal to the unity environment, and closes the socket connection.
def get_remote_file(self, remote_path, local_path): sftp_client = self.transport.open_sftp_client() LOG.debug('Get the remote file. ' 'Source=%(src)s. Target=%(target)s.' % {'src': remote_path, 'target': local_path}) try: sftp_client.get(remote_pat...
Fetch remote File. :param remote_path: remote path :param local_path: local path
def GetAll(alias=None,location=None,session=None): if not alias: alias = clc.v2.Account.GetAlias(session=session) policies = [] policy_resp = clc.v2.API.Call('GET','antiAffinityPolicies/%s' % alias,{},session=session) for k in policy_resp: r_val = policy_resp[k] for r in r_val: if r.get('location'): ...
Gets a list of anti-affinity policies within a given account. https://t3n.zendesk.com/entries/44657214-Get-Anti-Affinity-Policies >>> clc.v2.AntiAffinity.GetAll() [<clc.APIv2.anti_affinity.AntiAffinity object at 0x10c65e910>, <clc.APIv2.anti_affinity.AntiAffinity object at 0x10c65ec90>]
def generateCertificate(self, alias, commonName, organizationalUnit, city, state, country, keyalg="RSA", keysize=1024, sigalg="SHA256withRSA", validity=90 ...
Use this operation to create a self-signed certificate or as a starting point for getting a production-ready CA-signed certificate. The portal will generate a certificate for you and store it in its keystore.
def _fetch_dimensions(self, dataset): yield Dimension(u"school") yield Dimension(u"year", datatype="year") yield Dimension(u"semester", datatype="academic_term", dialect="swedish") yield Dimension(u"municipality", ...
Iterate through semesters, counties and municipalities.
def _handle_config(self, data): self.room.config.update(data) self.conn.enqueue_data("config", data)
Handle initial config push and config changes
def pkg_supports(feature, pkg_version, pkg_feat_dict): from pkg_resources import parse_requirements feature = str(feature) pkg_version = str(pkg_version) supp_versions = pkg_feat_dict.get(feature, None) if supp_versions is None: return False if is_string(supp_versions): supp_vers...
Return bool indicating whether a package supports ``feature``. Parameters ---------- feature : str Name of a potential feature of a package. pkg_version : str Version of the package that should be checked for presence of the feature. pkg_feat_dict : dict Specificatio...
def configured_class(cls):
    """Returns the currently configured class.

    NOTE(review): ``base.__impl_class`` relies on Python name mangling --
    inside the defining class it resolves to ``_Configurable__impl_class``,
    the same key that is checked explicitly via ``base.__dict__`` below.
    Confirm this method lives in a class named ``Configurable``.
    """
    base = cls.configurable_base()
    # Lazily install the default implementation the first time it is needed.
    if base.__dict__.get('_Configurable__impl_class') is None:
        base.__impl_class = cls.configurable_default()
    return base.__impl_class
Returns the currently configured class.
def _start_srv_proc(self, started_os_proc): log.debug('Starting the server process') server = NapalmLogsServerProc(self.opts, self.config_dict, started_os_proc, buffe...
Start the server process.
def progress_patch(self, _=False): from .progress import ShellProgressView self.cli_ctx.progress_controller.init_progress(ShellProgressView()) return self.cli_ctx.progress_controller
forces to use the Shell Progress
def _threaded_copy_data(instream, outstream):
    """Copy data from one stream to another in a separate thread.

    Wraps ``_copy_data()`` in a :class:`threading.Thread`.

    :type instream: :class:`io.BytesIO` or :class:`io.StringIO`
    :param instream: A byte stream to read from.
    :param file outstream: The file descriptor of a tmpfile to write to.
    :return: the started daemon :class:`threading.Thread`.
    """
    copy_thread = threading.Thread(target=_copy_data,
                                   args=(instream, outstream))
    # The `daemon` attribute replaces Thread.setDaemon(), which is
    # deprecated since Python 3.10.
    copy_thread.daemon = True
    log.debug('%r, %r, %r', copy_thread, instream, outstream)
    copy_thread.start()
    return copy_thread
Copy data from one stream to another in a separate thread. Wraps ``_copy_data()`` in a :class:`threading.Thread`. :type instream: :class:`io.BytesIO` or :class:`io.StringIO` :param instream: A byte stream to read from. :param file outstream: The file descriptor of a tmpfile to write to.
def _get_params(self): return np.hstack((self.varianceU,self.varianceY, self.lengthscaleU,self.lengthscaleY))
return the value of the parameters.
def handle_onchain_secretreveal( initiator_state: InitiatorTransferState, state_change: ContractReceiveSecretReveal, channel_state: NettingChannelState, pseudo_random_generator: random.Random, ) -> TransitionResult[InitiatorTransferState]: iteration: TransitionResult[InitiatorTransfe...
When a secret is revealed on-chain all nodes learn the secret. This check the on-chain secret corresponds to the one used by the initiator, and if valid a new balance proof is sent to the next hop with the current lock removed from the merkle tree and the transferred amount updated.
def stop(self): self.logger.info('Stopping client fuzzer') self._target_control_thread.stop() self.target.signal_mutated() super(ClientFuzzer, self).stop()
Stop the fuzzing session
def get_feed_renderer(engines, name):
    """From engine name, load the engine path and return the renderer class.

    Raise 'FeedparserError' if any loading error.
    """
    # Guard clause: unknown names fail loudly before any import is attempted.
    if name not in engines:
        raise FeedparserError("Given feed name '{}' does not exists in 'settings.FEED_RENDER_ENGINES'".format(name))
    return safe_import_module(engines[name])
From engine name, load the engine path and return the renderer class Raise 'FeedparserError' if any loading error
def remove_files(self): file_list = ["molecule.svg","lig.pdb","HIS.pdb","PHE.pdb","TRP.pdb","TYR.pdb","lig.mol","test.xtc"] for residue in self.topol_data.dict_of_plotted_res.keys(): file_list.append(residue[1]+residue[2]+".svg") for f in file_list: if os.path.isfile(f)==...
Removes intermediate files.