code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def data_complete(datadir, sitedir, get_container_name): if any(not path.isdir(sitedir + x) for x in ('/files', '/run', '/solr')): return False if docker.is_boot2docker(): return all(docker.inspect_container(get_container_name(x)) for x in ('pgdata', 'venv')) retu...
Return True if the directories and containers we're expecting are present in datadir, sitedir and containers
def get_extents(self):
    """Return the extents of the recording surface.

    :returns:
        A ``(x, y, width, height)`` tuple of floats,
        or :obj:`None` if the surface is unbounded.

    *New in cairo 1.12*
    """
    rect = ffi.new('cairo_rectangle_t *')
    bounded = cairo.cairo_recording_surface_get_extents(self._pointer, rect)
    if not bounded:
        return None
    return (rect.x, rect.y, rect.width, rect.height)
Return the extents of the recording-surface. :returns: A ``(x, y, width, height)`` tuple of floats, or :obj:`None` if the surface is unbounded. *New in cairo 1.12*
def certify_tuple(value, certifier=None, min_len=None, max_len=None, required=True, schema=None):
    """Validate a tuple, optionally checking it against a schema.

    The schema should be a tuple of certifier functions, each called
    with the corresponding element of the input.

    :param value: the tuple to validate
    :param certifier: certifier applied to every element
    :param min_len: minimum allowed length
    :param max_len: maximum allowed length
    :param required: whether the value may be absent
    :param schema: per-position certifiers
    """
    certify_iterable(
        value=value,
        types=(tuple,),
        certifier=certifier,
        min_len=min_len,
        max_len=max_len,
        schema=schema,
        required=required,
    )
Validates a tuple, checking it against an optional schema. The schema should be a list of expected values replaced by functions which will be called to with the corresponding value in the input. A simple example: >>> certifier = certify_tuple(schema=( ... certify_key(kind='Model'), ...
def get_boundaries(self, filter_type, value): assert filter_type in self.handled_suffixes start = '-' end = '+' exclude = None if filter_type in (None, 'eq'): start = u'[%s%s' % (value, self.separator) end = start.encode('utf-8') + b'\xff' elif fil...
Compute the boundaries to pass to zrangebylex depending of the filter type The third return value, ``exclude`` is ``None`` except for the filters `lt` and `gt` because we cannot explicitly exclude it when querying the sorted-set For the parameters, see BaseRangeIndex.store No...
def get_impala_queries(self, start_time, end_time, filter_str="", limit=100, offset=0): params = { 'from': start_time.isoformat(), 'to': end_time.isoformat(), 'filter': filter_str, 'limit': limit, 'offset': offset, } return self._get("impalaQueries", ApiImpalaQueryR...
Returns a list of queries that satisfy the filter @type start_time: datetime.datetime. Note that the datetime must either be time zone aware or specified in the server time zone. See the python datetime documentation for more details about python's ...
def unweave( target, advices=None, pointcut=None, ctx=None, depth=1, public=False, ): if advices is not None: if isroutine(advices): advices = [advices] if pointcut is None or callable(pointcut): pass elif isinstance(pointcut, string_types): pointcut = _namematcher(po...
Unweave advices on target with input pointcut. :param callable target: target from where checking pointcut and weaving advices. :param pointcut: condition for weaving advices on joinpointe. The condition depends on its type. :type pointcut: - NoneType: advices are weaved on target....
def decorate(*reversed_views):
    """Compose view wrappers without nested calls.

    Instead of ``json_api_call(etag(<hash_fn>)(<view_fn>))`` you can
    write ``decorate(json_api_call, etag(<hash_fn>), <view_fn>)``.

    The last positional argument is the innermost view; each preceding
    argument wraps everything to its right.
    """
    *wrappers, view = reversed_views
    for wrapper in reversed(wrappers):
        view = wrapper(view)
    return view
provide a syntax decorating views without nested calls. instead of: json_api_call(etag(<hash_fn>)(<view_fn>))) you can write: decorate(json_api_call, etag(<hash_fn>), <view_fn>)
def _open(self, skip=0): usb_device = self._get_usb_device(skip) if usb_device: usb_conf = usb_device.configurations[0] self._usb_int = usb_conf.interfaces[0][0] else: raise YubiKeyUSBHIDError('No USB YubiKey found') try: self._usb_handle =...
Perform HID initialization
def update_readme_for_modules(modules): readme = parse_readme() module_docstrings = core_module_docstrings() if modules == ["__all__"]: modules = core_module_docstrings().keys() for module in modules: if module in module_docstrings: print_stderr("Updating README.md for module...
Update README.md updating the sections for the module names listed.
async def get_bluetooth_settings(self) -> List[Setting]:
    """Get bluetooth settings."""
    raw = await self.services["avContent"]["getBluetoothSettings"]({})
    settings = [Setting.make(**entry) for entry in raw]
    return settings
Get bluetooth settings.
def run_model(t_output_every, output_dir=None, m=None, force_resume=True, **iterate_args):
    """Convenience function: build a :class:`Runner` and iterate it.

    Parameters
    ----------
    m: Model
        Model to run.
    iterate_args:
        Arguments passed through to :meth:`Runner.iterate`.
    Others: see :class:`Runner`.

    Returns
    -------
    r: Runner
    """
    model_runner = runner.Runner(output_dir, m, force_resume)
    print(model_runner)
    model_runner.iterate(t_output_every=t_output_every, **iterate_args)
    return model_runner
Convenience function to combine making a Runner object, and running it for some time. Parameters ---------- m: Model Model to run. iterate_args: Arguments to pass to :meth:`Runner.iterate`. Others: see :class:`Runner`. Returns ------- r: Runner runne...
def paste(location): copyData = settings.getDataFile() if not location: location = "." try: data = pickle.load(open(copyData, "rb")) speech.speak("Pasting " + data["copyLocation"] + " to current directory.") except: speech.fail("It doesn't look like you've copied anything yet.") speech.fail("Type 'hallie ...
paste a file or directory that has been previously copied
def redis_from_url(url): import redis url = url or "" parsed_url = urlparse(url) if parsed_url.scheme != "redis": return None kwargs = {} match = PASS_HOST_PORT.match(parsed_url.netloc) if match.group('password') is not None: kwargs['password'] = match.group('password') i...
Converts a redis URL used by celery into a `redis.Redis` object.
def entries(self):
    """Provide access to entry management methods for this content type.

    API reference:
    https://www.contentful.com/developers/docs/references/content-management-api/#/reference/entries

    :return: :class:`ContentTypeEntriesProxy` object.
    """
    proxy = ContentTypeEntriesProxy(
        self._client,
        self.space.id,
        self._environment_id,
        self.id,
    )
    return proxy
Provides access to entry management methods for the given content type. API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/entries :return: :class:`ContentTypeEntriesProxy <contentful_management.content_type_entries_proxy.ContentTypeEntriesProxy>` o...
def forward(self, inputs, lengths): x = self.embedder(inputs) x = self.dropout(x) x = pack_padded_sequence(x, lengths.cpu().numpy(), batch_first=self.batch_first) x, _ = self.rnn_layers[0](x) x, _ = pad_packed_sequence(x, batch_first=self.batch_fi...
Execute the encoder. :param inputs: tensor with indices from the vocabulary :param lengths: vector with sequence lengths (excluding padding) returns: tensor with encoded sequences
def part(self, *args, **kwargs):
    """Retrieve a single KE-chain part.

    Uses the same interface as the :func:`parts` method but returns
    exactly one :class:`models.Part` instance. Extra ``keyword=value``
    arguments are added to the request parameters.

    :raises NotFoundError: when no part fits the criteria
    :raises MultipleFoundError: when more than one part fits the criteria
    """
    matches = self.parts(*args, **kwargs)
    if not matches:
        raise NotFoundError("No part fits criteria")
    if len(matches) > 1:
        raise MultipleFoundError("Multiple parts fit criteria")
    return matches[0]
Retrieve single KE-chain part. Uses the same interface as the :func:`parts` method but returns only a single pykechain :class:`models.Part` instance. If additional `keyword=value` arguments are provided, these are added to the request parameters. Please refer to the documentation of th...
def addPort(n: LNode, intf: Interface): d = PortTypeFromDir(intf._direction) ext_p = LayoutExternalPort( n, name=intf._name, direction=d, node2lnode=n._node2lnode) ext_p.originObj = originObjOfPort(intf) n.children.append(ext_p) addPortToLNode(ext_p, intf, reverseDirection=True) return e...
Add LayoutExternalPort for interface
def main():
    """Run the environment-variable editor dialog (the Windows registry
    variant on ``nt``, the plain environment dialog elsewhere)."""
    from spyder.utils.qthelpers import qapplication
    app = qapplication()
    dialog = WinUserEnvDialog() if os.name == 'nt' else EnvDialog()
    dialog.show()
    app.exec_()
Run Windows environment variable editor
def footprints_from_address(address, distance, footprint_type='building', retain_invalid=False):
    """Get footprints within some distance north, south, east, and west
    of an address.

    Parameters
    ----------
    address : string
        the address to geocode to a lat-long point
    distance : numeric
        distance in meters
    footprint_type : string
        type of footprint to be downloaded (OSM tag key)
    retain_invalid : bool
        whether to keep invalid geometries
    """
    center = geocode(query=address)
    return footprints_from_point(
        center,
        distance,
        footprint_type=footprint_type,
        retain_invalid=retain_invalid,
    )
Get footprints within some distance north, south, east, and west of an address. Parameters ---------- address : string the address to geocode to a lat-long point distance : numeric distance in meters footprint_type : string type of footprint to be downloaded. OSM tag key...
def list_repos(self, envs=[], query='/repositories/'): juicer.utils.Log.log_debug( "List Repos In: %s", ", ".join(envs)) repo_lists = {} for env in envs: repo_lists[env] = [] for env in envs: _r = self.connectors[env].get(query) if _r.s...
List repositories in specified environments
def get_dweets_for(thing_name, key=None, session=None):
    """Read all the dweets for a dweeter.

    :param thing_name: name of the thing whose dweets to read
    :param key: optional access key for a locked thing
    :param session: optional HTTP session to reuse for the request
    """
    params = {'key': key} if key is not None else None
    # BUG FIX: the session argument was previously hard-coded to
    # ``session=None``, silently discarding any caller-supplied session.
    return _request('get', '/get/dweets/for/{0}'.format(thing_name),
                    params=params, session=session)
Read all the dweets for a dweeter
def map(self, func, *columns): if not columns: return map(func, self.rows) else: values = (self.values(column) for column in columns) result = [map(func, v) for v in values] if len(columns) == 1: return result[0] else: ...
Map a function to rows, or to given columns
def set_elapsed_time(self, client): related_clients = self.get_related_clients(client) for cl in related_clients: if cl.timer is not None: client.create_time_label() client.t0 = cl.t0 client.timer.timeout.connect(client.show_time) ...
Set elapsed time for slave clients.
def get_welcome_response(): session_attributes = {} card_title = "Welcome" speech_output = "Welcome to the Alexa Skills Kit sample. " \ "Please tell me your favorite color by saying, " \ "my favorite color is red" reprompt_text = "Please tell me your favorite colo...
If we wanted to initialize the session to have some attributes we could add those here
def extract_ids(text, extractors):
    """Use `extractors` to extract citation identifiers from a text.

    :Parameters:
        text : str
            The text to process
        extractors : `list`(`extractor`)
            A list of extractors to apply to the text

    :Returns:
        `iterable` -- a generator of extracted identifiers
    """
    for extractor in extractors:
        # 'identifier' rather than 'id', to avoid shadowing the builtin.
        for identifier in extractor.extract(text):
            yield identifier
Uses `extractors` to extract citation identifiers from a text. :Parameters: text : str The text to process extractors : `list`(`extractor`) A list of extractors to apply to the text :Returns: `iterable` -- a generator of extracted identifiers
def render_template(template_name, template_getter=get_app_template): def wrapper(func): template = template_getter(template_name) def _wraped(self, request, context, *args, **kwargs): res = func(self, request, context, *args, **kwargs) if isinstance(res, dict): ...
Decorator to specify which template to use for wrapped views. It returns the string rendered by the specified template, using the dictionary returned from the wrapped view as the template context. If the returned value is not a dictionary, it does nothing and just returns the result.
def load(self, rule_type, quiet = False): if self.filename and os.path.exists(self.filename): try: with open(self.filename, 'rt') as f: ruleset = json.load(f) self.about = ruleset['about'] if 'about' in ruleset else '' self....
Open a JSON file definiting a ruleset and load it into a Ruleset object :param quiet: :return:
def set_widgets(self): if self.parent.aggregation_layer: aggr = self.parent.aggregation_layer.name() else: aggr = self.tr('no aggregation') html = self.tr('Please ensure the following information ' 'is correct and press Run.') html += '<br/>...
Set widgets on the Summary tab.
def save(self):
    """Persist the current hosts and stats to the state file as JSON."""
    state = {
        "hosts": self.hosts,
        "stats": self.stats,
    }
    with open(self.state_file, "w") as fh:
        json.dump(state, fh)
Saves the state to the state file
def add(self, email):
    """Add a collaborator.

    Args:
        email (str): Collaborator email address.
    """
    if email in self._collaborators:
        return
    self._collaborators[email] = ShareRequestValue.Add
    self._dirty = True
Add a collaborator. Args: str : Collaborator email address.
def _message(self, request_cls, destination=None, message_id=0, consent=None, extensions=None, sign=False, sign_prepare=False, nsprefix=None, sign_alg=None, digest_alg=None, **kwargs): if not message_id: message_id = sid() for key, val in self.message_args(m...
Some parameters appear in all requests so simplify by doing it in one place :param request_cls: The specific request type :param destination: The recipient :param message_id: A message identifier :param consent: Whether the principal have given her consent :param extensi...
def create_page(slug, post_data):
    """Create the wiki page identified by ``slug``.

    Returns False when a page with that slug already exists or the
    title (stripped) is shorter than two characters.
    """
    logger.info('Call create Page')
    existing = MWiki.get_by_uid(slug)
    if existing:
        return False
    if len(post_data['title'].strip()) < 2:
        return False
    return MWiki.__create_rec(slug, '2', post_data=post_data)
The page would be created with slug.
def acquire(self): start_time = time.time() while True: try: self.fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR) break except (OSError,) as e: if e.errno != errno.EEXIST: ...
Acquire the lock, if possible. If the lock is in use, it check again every `delay` seconds. It does this until it either gets the lock or exceeds `timeout` number of seconds, in which case it throws an exception.
def _terminate(self):
    """Shut the agent down gently, removing the descriptor and
    notifying partners (the notification is handled inside
    ``_terminate_procedure``).

    :returns: whatever ``_terminate_procedure`` returns.
    """
    def generate_body():
        # Chain the shutdown steps; addBoth runs the step regardless of
        # whether the previous one succeeded or failed.
        d = defer.succeed(None)
        # drop_param presumably discards the prior result before calling
        # shutdown_agent — TODO confirm against the defer helpers.
        d.addBoth(defer.drop_param, self.agent.shutdown_agent)
        d.addBoth(lambda _: self.delete_document(self._descriptor))
        return d
    return self._terminate_procedure(generate_body)
Shutdown agent gently removing the descriptor and notifying partners.
def replace_in_files(search, replace, depth=0, paths=None, confirm=True): if paths==None: paths = _s.dialogs.MultipleFiles('DIS AND DAT|*.*') if paths == []: return for path in paths: lines = read_lines(path) if depth: N=min(len(lines),depth) else: N=len(lines) fo...
Does a line-by-line search and replace, but only up to the "depth" line.
def cache_property(key, empty, type):
    """Return a new property object for a cache header.

    Useful if you want to add support for a cache extension in a
    subclass; delegates to the ``_get/_set/_del_cache_value`` accessors.
    """
    def getter(self):
        return self._get_cache_value(key, empty, type)

    def setter(self, value):
        self._set_cache_value(key, value, type)

    def deleter(self):
        self._del_cache_value(key)

    return property(getter, setter, deleter, 'accessor for %r' % key)
Return a new property object for a cache header. Useful if you want to add support for a cache extension in a subclass.
def send_email(self, msg, tag=None):
    """Send an e-mail before completing the shutdown.

    Returns 0 on success, -2 when sending failed (the traceback is
    appended to ``self.exceptions``).
    """
    try:
        return self._send_email(msg, tag)
    except Exception:
        # Previously a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt; catch only genuine errors.
        self.exceptions.append(straceback())
        return -2
Send an e-mail before completing the shutdown. Returns 0 if success.
def clean_readme(fname):
    """Clean up README.rst for proper PyPI formatting: drop Sphinx-only
    directive lines and rewrite ``:role:`target``` references as plain
    literals."""
    role_ref = re.compile(r':\w+:`([^`]+?)( <[^<>]+>)?`')
    skipped_prefixes = ('.. currentmodule', '.. toctree')
    cleaned = []
    with codecs.open(fname, 'r', 'utf-8') as fh:
        for line in fh:
            if line.startswith(skipped_prefixes):
                continue
            cleaned.append(role_ref.sub(r'``\1``', line))
    return ''.join(cleaned)
Cleanup README.rst for proper PyPI formatting.
def appendAnchor(self, name=None, position=None, color=None, anchor=None): identifier = None if anchor is not None: anchor = normalizers.normalizeAnchor(anchor) if name is None: name = anchor.name if position is None: position = anchor....
Append an anchor to this glyph. >>> anchor = glyph.appendAnchor("top", (10, 20)) This will return a :class:`BaseAnchor` object representing the new anchor in the glyph. ``name`` indicated the name to be assigned to the anchor. It must be a :ref:`type-string` or ``None``. ``...
def _removepkg(self, package): try: subprocess.call("removepkg {0} {1}".format(self.flag, package), shell=True) if os.path.isfile(self.dep_path + package): os.remove(self.dep_path + package) except subprocess.CalledProcessError as er: ...
removepkg Slackware command
def kill(self, id, signal=signal.SIGTERM):
    """Kill a job with given id

    :WARNING: beware of what u kill, if u killed redis for example
        core0 or coreX won't be reachable

    :param id: job id to kill
    :param signal: signal to deliver (defaults to SIGTERM)
    """
    args = {'id': id, 'signal': int(signal)}
    self._kill_chk.check(args)
    return self._client.json('job.kill', args)
Kill a job with given id :WARNING: beware of what u kill, if u killed redis for example core0 or coreX won't be reachable :param id: job id to kill
def get_all_nodes(self, addr, is_syscall=None, anyaddr=False): results = [ ] for cfg_node in self.graph.nodes(): if cfg_node.addr == addr or (anyaddr and cfg_node.size is not None and cfg_node.addr <= addr < (c...
Get all CFGNodes whose address is the specified one. :param addr: Address of the node :param is_syscall: True returns the syscall node, False returns the normal CFGNode, None returns both :return: all CFGNodes
def get_remote_chassis_id_mac(self, tlv_data):
    """Return the Remote Chassis ID MAC from the TLV, or None when the
    TLV does not have the expected format."""
    ok, parsed_val = self._check_common_tlv_format(
        tlv_data, "MAC:", "Chassis ID TLV")
    if not ok:
        return None
    first_line = parsed_val[1].split('\n')[0]
    return first_line.strip()
Returns Remote Chassis ID MAC from the TLV.
def to_dict(self):
    """Return a dict representing the ChemicalEntity that can be read
    back using from_dict."""
    merged = merge_dicts(self.__attributes__, self.__relations__,
                         self.__fields__)
    result = {name: attr.value for name, attr in merged.items()}
    result['maps'] = {name: m.value for name, m in self.maps.items()}
    return result
Return a dict representing the ChemicalEntity that can be read back using from_dict.
def all_props(self):
    """Return a dictionary with the values of all children, and place
    holders for all of the section arguments; ``arg_props`` overlaid
    with ``props``.
    """
    # Copy first: the previous implementation called .update() directly
    # on the object returned by arg_props, mutating it in place whenever
    # arg_props exposes internal state rather than a fresh dict.
    combined = dict(self.arg_props)
    combined.update(self.props)
    return combined
Return a dictionary with the values of all children, and place holders for all of the section arguments. It combines props and arg_props
def is_transition_matrix(T, tol=1e-12):
    r"""Check if the given matrix is a transition matrix.

    Parameters
    ----------
    T : (M, M) ndarray or scipy.sparse matrix
        Matrix to check
    tol : float (optional)
        Floating point tolerance to check with

    Returns
    -------
    is_transition_matrix : bool
        True, if T is a valid transition matrix
    """
    # The flattened source carried a stray 'r' (the raw-docstring prefix
    # residue); restored as the r""" docstring above.
    T = _types.ensure_ndarray_or_sparse(T, ndim=2, uniform=True, kind='numeric')
    if _issparse(T):
        return sparse.assessment.is_transition_matrix(T, tol)
    return dense.assessment.is_transition_matrix(T, tol)
r"""Check if the given matrix is a transition matrix. Parameters ---------- T : (M, M) ndarray or scipy.sparse matrix Matrix to check tol : float (optional) Floating point tolerance to check with Returns ------- is_transition_matrix : bool True, if T is a valid tran...
def _error_is_decreasing(self, last_error):
    """Return ``(is_decreasing, current_error)`` where *is_decreasing*
    tells whether the freshly computed error is below *last_error*."""
    current_error = self._compute_error()
    return current_error < last_error, current_error
True if current error is less than last_error.
def transform(self, trajs_tuple, y=None):
    """Featurize several trajectories.

    Parameters
    ----------
    trajs_tuple : tuple of list-likes
        Parallel sequences that are zipped element-wise before being
        passed to ``partial_transform``.

    Returns
    -------
    features : list, one entry per zipped element.
    """
    zipped = zip(*trajs_tuple)
    return [self.partial_transform(item) for item in zipped]
Featurize a several trajectories. Parameters ---------- traj_list : list(mdtraj.Trajectory) Trajectories to be featurized. Returns ------- features : list(np.ndarray), length = len(traj_list) The featurized trajectories. features[i] is the featu...
def is_merge_origin(self):
    """True if cell is top-left in merged cell range."""
    spans_columns = self.gridSpan > 1 and not self.vMerge
    spans_rows = self.rowSpan > 1 and not self.hMerge
    return spans_columns or spans_rows
True if cell is top-left in merged cell range.
def __wrap_with_tuple(self) -> tuple:
    """Parse every remaining nested bencode element and return them as
    a tuple.

    Advances ``self.idx`` via ``__parse`` until the whole of
    ``self.data`` is consumed.
    """
    # 'elements' replaces the ambiguous single-letter name 'l'
    # (PEP 8 / flake8 E741).
    elements = []
    length = len(self.data)
    while self.idx < length:
        elements.append(self.__parse())
    return tuple(elements)
Returns a tuple of all nested bencode elements.
def get_feature_subset(self, subset_idx): subset_idx = np.asarray(subset_idx) if not (max(subset_idx) < self.__num_features) and (min(subset_idx) >= 0): raise UnboundLocalError('indices out of range for the dataset. ' 'Max index: {} Min index : 0'.format( ...
Returns the subset of features indexed numerically. Parameters ---------- subset_idx : list, ndarray List of indices to features to be returned Returns ------- MLDataset : MLDataset with subset of features requested. Raises -----...
def LengthMeters(self):
    """Return length of this polyline in meters, summed over consecutive
    point pairs. Requires at least one point."""
    assert len(self._points) > 0
    total = 0
    for current, following in zip(self._points, self._points[1:]):
        total += current.GetDistanceMeters(following)
    return total
Return length of this polyline in meters.
def dedent_block_string_value(raw_string: str) -> str: lines = raw_string.splitlines() common_indent = None for line in lines[1:]: indent = leading_whitespace(line) if indent < len(line) and (common_indent is None or indent < common_indent): common_indent = indent if comm...
Produce the value of a block string from its parsed raw value. Similar to CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc. This implements the GraphQL spec's BlockStringValue() static algorithm.
def _gate_name(self, gate):
    """Return the string representation of the gate.

    Prefers ``gate.tex_str()``; falls back to ``str(gate)`` when that
    is not available.
    """
    try:
        return gate.tex_str()
    except AttributeError:
        return str(gate)
Return the string representation of the gate. Tries to use gate.tex_str and, if that is not available, uses str(gate) instead. :param string gate: Gate object of which to get the name / LaTeX representation. :return: LaTeX gate name. :rtype: string
def _add_to_dict(t, container, name, value):
    """Add *value* under *name* in *container*, raising when an item
    with that key already exists.

    :param t: human-readable kind used in the duplicate-key message
    """
    if name in container:
        raise Exception("%s '%s' already exists" % (t, name))
    container[name] = value
Adds an item to a dictionary, or raises an exception if an item with the specified key already exists in the dictionary.
def run(self, executable: Executable, memory_map: Dict[str, List[Union[int, float]]] = None) -> np.ndarray: self.qam.load(executable) if memory_map: for region_name, values_list in memory_map.items(): for offset, value in enumerate(values_list): ...
Run a quil executable. If the executable contains declared parameters, then a memory map must be provided, which defines the runtime values of these parameters. :param executable: The program to run. You are responsible for compiling this first. :param memory_map: The mapping of declared parame...
def repeat(self, repeats, *args, **kwargs):
    """Repeat elements of an array.

    See Also
    --------
    numpy.ndarray.repeat
    """
    nv.validate_repeat(args, kwargs)
    repeated = self._data.repeat(repeats)
    return type(self)(repeated.view('i8'), dtype=self.dtype)
Repeat elements of an array. See Also -------- numpy.ndarray.repeat
def identifier_md5(self):
    """Return an MD5 of the identifier (scaled to int64 for a stable
    byte representation)."""
    as_int64 = (self.identifier * 1e4).astype(np.int64)
    return util.md5_object(as_int64.tostring(order='C'))
Return an MD5 of the identifier
def parse_host(entity, default_port=DEFAULT_PORT): host = entity port = default_port if entity[0] == '[': host, port = parse_ipv6_literal_host(entity, default_port) elif entity.endswith(".sock"): return entity, default_port elif entity.find(':') != -1: if entity.count(':') > ...
Validates a host string Returns a 2-tuple of host followed by port where port is default_port if it wasn't specified in the string. :Parameters: - `entity`: A host or host:port string where host could be a hostname or IP address. - `default_port`: The port number to use...
def execute_request(conn, classname, max_open, max_pull): start = ElapsedTimer() result = conn.OpenEnumerateInstances(classname, MaxObjectCount=max_open) print('open rtn eos=%s context=%s, count=%s time=%s ms' % (result.eos, result.context, len(result.insta...
Enumerate instances defined by the function's classname argument using the OpenEnumerateInstances and PullInstancesWithPath. * classname - Classname for the enumeration. * max_open - defines the maximum number of instances for the server to return for the open *max_p...
def use(wcspkg, raise_err=True): global coord_types, wcs_configured, WCS if wcspkg not in common.custom_wcs: modname = 'wcs_%s' % (wcspkg) path = os.path.join(wcs_home, '%s.py' % (modname)) try: my_import(modname, path) except ImportError: return False ...
Choose WCS package.
def normalize(self) -> 'State':
    """Return a copy of this state scaled to unit norm."""
    scale = bk.ccast(bk.sqrt(self.norm()))
    normalized_tensor = self.tensor / scale
    return State(normalized_tensor, self.qubits, self._memory)
Normalize the state
def base64url_decode(input):
    """Helper method to base64url_decode a value.

    Args:
        input (bytes or str): A base64url_encoded value to decode.
            Missing ``=`` padding is restored before decoding.

    Returns:
        bytes: the decoded payload.
    """
    if isinstance(input, str):
        # Previously str input crashed on the bytes padding concat;
        # base64url data is pure ASCII, so encode and proceed.
        input = input.encode('ascii')
    rem = len(input) % 4
    if rem > 0:
        input += b'=' * (4 - rem)
    return base64.urlsafe_b64decode(input)
Helper method to base64url_decode a string. Args: input (str): A base64url_encoded string to decode.
def get_coord_line_number(self, coord):
    """Return the one-indexed line number stored for *coord*
    (a two-element key into the nested ``_coords`` mapping), or None
    when the coordinate is unknown."""
    outer = self._coords
    if coord[0] not in outer:
        return None
    inner = outer[coord[0]]
    if coord[1] not in inner:
        return None
    return inner[coord[1]]
return the one-indexed line number given the coordinates
def serialize(self, value, **kwargs): if types.Type.is_type(self.attr_type): try: value = self.accessor.get(value, **kwargs) except (AttributeError, KeyError): if not hasattr(self, "default") and self.required: raise val...
Serialize the attribute of the input data. Gets the attribute value with accessor and converts it using the type serialization. Schema will place this serialized value into corresponding compartment of the HAL structure with the name of the attribute as a key. :param value: Val...
def log_transform(rates): transformed = [] for key in ['missense', 'nonsense', 'splice_lof', 'splice_region', 'synonymous']: try: value = math.log10(rates[key]) except ValueError: value = "NA" except KeyError: continue transformed.a...
Log10-transform each rate value; values that cannot be log-transformed (zero or negative) become "NA", and missing keys are skipped.
def stayOpen(self):
    """Re-show the dialog (restoring its saved geometry) unless the
    user asked to close it."""
    if self._wantToClose:
        return
    self.show()
    self.setGeometry(self._geometry)
optional dialog restore
def get_val_by_text(root, search):
    """From MeasYaps XML root find the next sibling of the node whose
    text matches *search*.

    MeasYaps alternates ``<value>Key</value><value>Value</value>``
    nodes; *search* is the Key and the node holding its Value is
    returned (None when the key is absent or last).

    Arguments:
        root (Element): root node to scan in document order
        search: text content identifying the key node
    """
    nodes = root.iter()
    for node in nodes:
        if node.text == search:
            # The very next node in document order carries the value.
            return next(nodes, None)
    return None
From MeasYaps XML root find next sibling of node matching 'search'. MeasYaps looks like: <value>Key</value> <value>Value</value> Thus 'search' is the Key and we want to find the node that has the Value. We return the node containing the desired Value. Arguments: root (Element) r...
def group_citation_edges(edges: Iterable[EdgeTuple]) -> Iterable[Tuple[str, Iterable[EdgeTuple]]]:
    """Return an iterator over pairs of citation values and their
    corresponding edge iterators.

    NOTE(review): itertools.groupby only groups *consecutive* items, so
    ``edges`` is presumably pre-sorted by ``_citation_sort_key`` —
    confirm at call sites.
    """
    return itt.groupby(edges, key=_citation_sort_key)
Return an iterator over pairs of citation values and their corresponding edge iterators.
def weld_filter(array, weld_type, bool_array): obj_id, weld_obj = create_weld_object(array) bool_obj_id = get_weld_obj_id(weld_obj, bool_array) weld_template = weld_obj.weld_code = weld_template.format(array=obj_id, bool_array=bool_obj_id, ...
Returns a new array only with the elements with a corresponding True in bool_array. Parameters ---------- array : numpy.ndarray or WeldObject Input data. weld_type : WeldType Type of the elements in the input array. bool_array : numpy.ndarray or WeldObject Array of bool with...
def mark_running(self):
    """Move the service to the Running state.

    Raises if the service is not currently in the Paused state (the
    check is delegated to ``_set_state``).
    """
    with self._lock:
        # Transition PAUSED -> RUNNING atomically under the state lock;
        # argument order (new_state, required_current_state) per the
        # documented contract above.
        self._set_state(self._RUNNING, self._PAUSED)
Moves the service to the Running state. Raises if the service is not currently in the Paused state.
def readline(self, size=-1): if self.closed: raise ValueError("I/O operation on closed file") pos = self.buffer.find(b"\n") + 1 if pos == 0: while True: buf = self.fileobj.read(self.blocksize) self.buffer += buf if not buf o...
Read one entire line from the file. If size is present and non-negative, return a string with at most that size, which may be an incomplete line.
def runContainer(image, **kwargs): container = None try: container = client.containers.run(image, **kwargs) if "name" in kwargs.keys(): print("Container", kwargs["name"], "is now running.") except ContainerError as exc: eprint("Failed to run container") raise exc ...
Run a docker container using a given image; passing keyword arguments documented to be accepted by docker's client.containers.run function No extra side effects. Handles and reraises ContainerError, ImageNotFound, and APIError exceptions.
def get_instance(self, payload):
    """Build an instance of WorkerChannelInstance

    :param dict payload: Payload response from the API

    :rtype: twilio.rest.taskrouter.v1.workspace.worker.worker_channel.WorkerChannelInstance
    """
    solution = self._solution
    return WorkerChannelInstance(
        self._version,
        payload,
        workspace_sid=solution['workspace_sid'],
        worker_sid=solution['worker_sid'],
    )
Build an instance of WorkerChannelInstance :param dict payload: Payload response from the API :returns: twilio.rest.taskrouter.v1.workspace.worker.worker_channel.WorkerChannelInstance :rtype: twilio.rest.taskrouter.v1.workspace.worker.worker_channel.WorkerChannelInstance
def load(cls, path, name): filepath = aux.joinpath(path, name + '.proteindb') with zipfile.ZipFile(filepath, 'r', allowZip64=True) as containerZip: proteinsString = io.TextIOWrapper(containerZip.open('proteins'), encoding='utf-8' ...
Imports the specified ``proteindb`` file from the hard disk. :param path: filedirectory of the ``proteindb`` file :param name: filename without the file extension ".proteindb" .. note:: this generates rather large files, which actually take longer to import than to newly generate. ...
def tangent_lineation_plot(ax, strikes, dips, rakes): rake_x, rake_y = mplstereonet.rake(strikes, dips, rakes) mag = np.hypot(rake_x, rake_y) u, v = -rake_x / mag, -rake_y / mag pole_x, pole_y = mplstereonet.pole(strikes, dips) arrows = ax.quiver(pole_x, pole_y, u, v, width=1, headwidth=4, units='do...
Makes a tangent lineation plot for normal faults with the given strikes, dips, and rakes.
def directed_tripartition(seq): for a, b, c in directed_tripartition_indices(len(seq)): yield (tuple(seq[i] for i in a), tuple(seq[j] for j in b), tuple(seq[k] for k in c))
Generator over all directed tripartitions of a sequence. Args: seq (Iterable): a sequence. Yields: tuple[tuple]: A tripartition of ``seq``. Example: >>> seq = (2, 5) >>> list(directed_tripartition(seq)) # doctest: +NORMALIZE_WHITESPACE [((2, 5), (), ()), ...
def create_domain(provider, context, **kwargs): session = get_session(provider.region) client = session.client("route53") domain = kwargs.get("domain") if not domain: logger.error("domain argument or BaseDomain variable not provided.") return False zone_id = create_route53_zone(clien...
Create a domain within route53. Args: provider (:class:`stacker.providers.base.BaseProvider`): provider instance context (:class:`stacker.context.Context`): context instance Returns: boolean for whether or not the hook succeeded.
def decode_cert(cert): ret_dict = {} subject_xname = X509_get_subject_name(cert.value) ret_dict["subject"] = _create_tuple_for_X509_NAME(subject_xname) notAfter = X509_get_notAfter(cert.value) ret_dict["notAfter"] = ASN1_TIME_print(notAfter) peer_alt_names = _get_peer_alt_names(cert) if peer...
Convert an X509 certificate into a Python dictionary This function converts the given X509 certificate into a Python dictionary in the manner established by the Python standard library's ssl module.
def run_suite(case, config, summary): m = _load_case_module(case, config) result = m.run(case, config) summary[case] = _summarize_result(m, result) _print_summary(m, case, summary) if result['Type'] == 'Book': for name, page in six.iteritems(result['Data']): functions.create_page...
Run the full suite of validation tests
def _dscl(cmd, ctype='create'): if __grains__['osrelease_info'] < (10, 8): source, noderoot = '.', '' else: source, noderoot = 'localhost', '/Local/Default' if noderoot: cmd[0] = noderoot + cmd[0] return __salt__['cmd.run_all']( ['dscl', source, '-' + ctype] + cmd, ...
Run a dscl -create command
def parse_netloc(scheme, netloc): auth, _netloc = netloc.split('@') sender, token = auth.split(':') if ':' in _netloc: domain, port = _netloc.split(':') port = int(port) else: domain = _netloc if scheme == 'https': port = 443 else: port = 8...
Parse netloc string.
def inflate(deflated_vector, size=5555):
    """Inflate a deflated (sparse, JSON-encoded) vector into a dense
    numpy array.

    :param deflated_vector: JSON string of the form
        ``{"indices": {"<idx>": <value>, ...}}``
    :param size: length of the dense output vector; defaults to 5555,
        the previously hard-coded dimension (kept for compatibility)
    :returns: np.ndarray of shape ``(size,)``
    """
    sparse = json.loads(deflated_vector)
    dense = np.zeros(size)
    for index, value in sparse['indices'].items():
        dense[int(index)] = value
    return dense
Given a deflated vector, inflate it into a np array and return it
def check_git():
    """Check that the ``git`` command is available on PATH.

    Raises:
        RuntimeError: when git cannot be found or executed.
    """
    try:
        subprocess.check_call(
            ["git", "--version"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
    except (OSError, subprocess.CalledProcessError):
        # Previously a bare ``except:`` which also masked
        # KeyboardInterrupt/SystemExit; catch only launch/exit failures.
        raise RuntimeError("Please make sure git is installed and on your path.")
Check if git command is available.
def _factorize_from_iterables(iterables):
    """A higher-level wrapper over `_factorize_from_iterable`.

    *This is an internal function*

    Parameters
    ----------
    iterables : list-like of list-likes

    Returns
    -------
    codes_list : list of ndarrays
    categories_list : list of Indexes

    Notes
    -----
    See `_factorize_from_iterable` for more info.
    """
    if len(iterables) == 0:
        # Return empty codes/categories rather than zipping nothing.
        return [[], []]
    # lzip(*...) transposes [(codes, cats), ...] into two parallel lists.
    return map(list, lzip(*[_factorize_from_iterable(it) for it in iterables]))
A higher-level wrapper over `_factorize_from_iterable`. *This is an internal function* Parameters ---------- iterables : list-like of list-likes Returns ------- codes_list : list of ndarrays categories_list : list of Indexes Notes ----- See `_factorize_from_iterable` for ...
def sleep(self, unique_id, delay, configs=None):
    """Pause the named process, wait *delay* seconds, then resume it.

    :Parameter unique_id: the name of the process
    :Parameter delay: delay time in seconds
    :Parameter configs: optional configuration forwarded to pause/resume
    """
    self.pause(unique_id, configs)
    try:
        time.sleep(delay)
    finally:
        # Resume even if the wait is interrupted (e.g. KeyboardInterrupt),
        # so the process is never left paused by accident.
        self.resume(unique_id, configs)
Pauses the process for the specified delay and then resumes it :Parameter unique_id: the name of the process :Parameter delay: delay time in seconds
def ext_publish(self, instance, loop, *args, **kwargs):
    """If 'external_signaller' is defined, call its publish method to
    notify external event systems; otherwise do nothing.

    This is for internal usage only, but it's documented because it's
    part of the interface with external notification systems.
    """
    signaller = self.external_signaller
    if signaller is None:
        return None
    return signaller.publish_signal(self, instance, loop, args, kwargs)
If 'external_signaller' is defined, calls its publish method to notify external event systems. This is for internal usage only, but it's documented because it's part of the interface with external notification systems.
def load_suite_from_stdin(self):
    """Load a test suite with test lines from the TAP stream on STDIN.

    :returns: A ``unittest.TestSuite`` instance
    """
    suite = unittest.TestSuite()
    stream_rules = Rules("stream", suite)
    stdin_lines = self._parser.parse_stdin()
    return self._load_lines("stream", stdin_lines, suite, stream_rules)
Load a test suite with test lines from the TAP stream on STDIN. :returns: A ``unittest.TestSuite`` instance
def GetEntries(self, parser_mediator, match=None, **unused_kwargs): if 'RememberedNetworks' not in match: return for wifi in match['RememberedNetworks']: ssid = wifi.get('SSIDString', 'UNKNOWN_SSID') security_type = wifi.get('SecurityType', 'UNKNOWN_SECURITY_TYPE') event_data = plist_eve...
Extracts relevant Airport entries. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
def main():
    """Execute the i18n subcommand named by ``sys.argv[1]``.

    Returns:
        Output of the given command, or ``error_message()`` when the
        command is missing or not valid.
    """
    if len(sys.argv) < 2:
        return error_message()
    command = sys.argv[1]
    try:
        module = importlib.import_module('i18n.%s' % command)
        module.main.args = sys.argv[2:]
    except (ImportError, AttributeError):
        return error_message()
    return module.main()
Executes the given command. Returns error_message if command is not valid. Returns: Output of the given command or error message if command is not valid.
def abort (aggregate): while True: try: aggregate.abort() aggregate.finish() aggregate.end_log_output(interrupt=True) break except KeyboardInterrupt: log.warn(LOG_CHECK, _("user abort; force shutdown")) aggregate.end_log_output(...
Helper function to ensure a clean shutdown.
def ned2geodetic(n: float, e: float, d: float, lat0: float, lon0: float, h0: float, ell: Ellipsoid = None, deg: bool = True) -> Tuple[float, float, float]:
    """Convert local North, East, Down coordinates to geodetic
    latitude, longitude, altitude relative to the reference point
    (lat0, lon0, h0).

    :param n: North NED coordinate (meters)
    :param e: East NED coordinate (meters)
    :param d: Down NED coordinate (meters)
    :param ell: reference ellipsoid (library default when None)
    :param deg: angles in degrees when True
    """
    # NED -> ENU (swap n/e, negate d), then ENU -> ECEF -> geodetic.
    ecef_x, ecef_y, ecef_z = enu2ecef(e, n, -d, lat0, lon0, h0, ell, deg=deg)
    return ecef2geodetic(ecef_x, ecef_y, ecef_z, ell, deg=deg)
Converts North, East, Down to target latitude, longitude, altitude Parameters ---------- n : float or numpy.ndarray of float North NED coordinate (meters) e : float or numpy.ndarray of float East NED coordinate (meters) d : float or numpy.ndarray of float Down NED coordinat...
def configure(self, sbi_config: str): config_dict = json.loads(sbi_config) self.debug_stream('SBI configuration:\n%s', json.dumps(config_dict, indent=2)) try: sbi = Subarray(self.get_name()).configure_sbi(config_dict) except jsonschema.exceptions.Val...
Configure an SBI for this subarray. Args: sbi_config (str): SBI configuration JSON Returns: str,
def get_marshmallow_schema_name(self, plugin, schema):
    """Return the registered name for *schema*, registering it with the
    spec first when it is not yet known."""
    refs = plugin.openapi.refs
    if schema in refs:
        return refs[schema]
    plugin.spec.definition(schema.__name__, schema=schema)
    return schema.__name__
Get the schema name. If the schema doesn't exist, create it.
def listen(self): import select while self.connected: r, w, e = select.select((self.ws.sock, ), (), ()) if r: self.on_message() elif e: self.subscriber.on_sock_error(e) self.disconnect()
Set up a quick connection. Returns on disconnect. After calling `connect()`, this waits for messages from the server using `select`, and notifies the subscriber of any events.
def update_index(self, name, value):
    """Change the definition of a KV Store index.

    :param name: name of index to change
    :type name: ``string``
    :param value: new index definition
    :type value: ``dict`` or ``string``

    :return: Result of POST request
    """
    # 'basestring' only exists on Python 2 (NameError on Python 3);
    # use 'str'. Non-string definitions are JSON-encoded.
    kwargs = {
        'index.' + name: value if isinstance(value, str) else json.dumps(value)
    }
    return self.post(**kwargs)
Changes the definition of a KV Store index. :param name: name of index to change :type name: ``string`` :param value: new index definition :type value: ``dict`` or ``string`` :return: Result of POST request
def Sample(self, tasks_status): sample_time = time.time() sample = '{0:f}\t{1:d}\t{2:d}\t{3:d}\t{4:d}\t{5:d}\n'.format( sample_time, tasks_status.number_of_queued_tasks, tasks_status.number_of_tasks_processing, tasks_status.number_of_tasks_pending_merge, tasks_status.number_of_ab...
Takes a sample of the status of queued tasks for profiling. Args: tasks_status (TasksStatus): status information about tasks.
def load(config_path: str): if os.path.splitext(config_path)[1] in ('.yaml', '.yml'): _ = load_yaml_configuration(config_path, translator=PipelineTranslator()) elif os.path.splitext(config_path)[1] == '.py': _ = load_python_configuration(config_path) else: raise ValueError('Unknown c...
Load a configuration and keep it alive for the given context :param config_path: path to a configuration file
def _deserialize(x, elementType, compress, relicReadBinFunc):
    """Deserialize the bytes-like value *x* into a new *elementType*
    instance using the given relic ``*_read_bin`` function and the
    *compress* flag.

    This is the shared implementation behind deserializing G1, G2 and
    Gt elements.

    :param x: serialized element bytes
    :param elementType: ctypes type to instantiate and fill
    :param compress: truthy when *x* holds the compressed encoding
    :param relicReadBinFunc: relic ``read_bin`` C function to invoke
    :returns: the populated *elementType* instance
    """
    # Copy the Python bytes into a C unsigned-char array.
    b = (c_ubyte*len(x))(*bytearray(x))
    flag = c_int(compress)
    result = elementType()
    # The relic function fills 'result' in place from the buffer.
    relicReadBinFunc(byref(result), byref(b), len(x), flag)
    return result
Deserializes a bytearray @x, into an @element of the correct type, using the a relic read_bin function and the specified @compressed flag. This is the underlying implementation for deserialize G1, G2, and Gt.
def _reset(self):
    """Called when the filter has been changed: restart the tracker and
    drop any bound services that no longer match the new filter.
    """
    with self._lock:
        # Restart so new service events are evaluated against the
        # updated requirement filter.
        self.stop()
        self.start()
        for svc_ref in self.get_bindings():
            if not self.requirement.filter.matches(
                svc_ref.get_properties()
            ):
                # Unbind services that the new filter rejects.
                self.on_service_departure(svc_ref)
Called when the filter has been changed