code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_hops(self, start, end=None, forward=True):
    """Return the BFS traversal (hop distances) centred on *start* as a list.

    :param start: start node
    :param end: optional end node at which to stop
    :param forward: True for forward BFS, False for backward BFS
    """
    # both branches were identical except for the flag value -- pass it through
    return list(self._iterbfs(start=start, end=end, forward=forward))
Computes the hop distance to all nodes centered around a specified node. First order neighbours are at hop 1, their neighbours are at hop 2 etc. Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value of the forward parameter. If the distance between all neighbouring nodes is 1 t...
def _include_exclude(file_path, include=None, exclude=None): if exclude is not None and exclude: for pattern in exclude: if file_path.match(pattern): return False if include is not None and include: for pattern in include: if file_path.match(pattern): ...
Check if file matches one of include filters and not in exclude filter. :param file_path: Path to the file. :param include: Tuple containing patterns to which include from result. :param exclude: Tuple containing patterns to which exclude from result.
def newCDataBlock(self, content, len):
    """Create a new node containing a CDATA block; raises treeError on failure."""
    # NOTE(review): parameter name shadows builtin len(); kept -- this is a
    # generated libxml2 binding and callers may pass it by keyword
    ret = libxml2mod.xmlNewCDataBlock(self._o, content, len)
    if ret is None:
        raise treeError('xmlNewCDataBlock() failed')
    __tmp = xmlNode(_obj=ret)
    return __tmp
Creation of a new node containing a CDATA block.
def base62_encode(cls, num): alphabet = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" if num == 0: return alphabet[0] arr = [] base = len(alphabet) while num: rem = num % base num = num // base arr.append(alphabet...
Encode a number in base 62. `num`: The number to encode. The alphabet used for encoding is fixed (0-9, a-z, A-Z). Adapted from: http://stackoverflow.com/a/1119769/1144479
def image_id_from_k8s(): token_path = "/var/run/secrets/kubernetes.io/serviceaccount/token" if os.path.exists(token_path): k8s_server = "https://{}:{}/api/v1/namespaces/default/pods/{}".format( os.getenv("KUBERNETES_SERVICE_HOST"), os.getenv( "KUBERNETES_PORT_443_TCP_PORT"), ...
Pings the k8s metadata service for the image id
def nocomment(astr, com='!'):
    """Strip comments from *astr*: on each line, drop everything from the
    first occurrence of *com* onward (like a '#' comment in Python).

    :param astr: input text
    :param com: comment marker string
    :return: stripped text, lines rejoined with newlines
    """
    # split(com, 1)[0] is the prefix before the marker (or the whole line)
    return '\n'.join(line.split(com, 1)[0] for line in astr.splitlines())
Works like a comment in Python: removes any text after the first occurrence of the marker `com` on each line.
def resize_bytes(fobj, old_size, new_size, offset): if new_size < old_size: delete_size = old_size - new_size delete_at = offset + new_size delete_bytes(fobj, delete_size, delete_at) elif new_size > old_size: insert_size = new_size - old_size insert_at = offset + old_size...
Resize an area in a file adding and deleting at the end of it. Does nothing if no resizing is needed. Args: fobj (fileobj) old_size (int): The area starting at offset new_size (int): The new size of the area offset (int): The start of the area Raises: IOError
def upload(self, response, file): response = response.json() if not response.get('upload_url'): raise ValueError('Bad API response. No upload_url.') if not response.get('upload_params'): raise ValueError('Bad API response. No upload_params.') kwargs = response.get...
Upload the file. :param response: The response from the upload request. :type response: dict :param file: A file handler pointing to the file to upload. :returns: True if the file uploaded successfully, False otherwise, \ and the JSON response from the API. :rtype: t...
def sub(self, key):
    """Return a new Vyper instance for the sub-tree stored at *key*,
    or None when the value there is not a dict."""
    data = self.get(key)
    if not isinstance(data, dict):
        return None
    child = Vyper()
    child._config = data
    return child
Returns new Vyper instance representing a sub tree of this instance.
def _read_opt_lio(self, code, *, desc): _type = self._read_opt_type(code) _size = self._read_unpack(1) _llen = self._read_unpack(1) _line = self._read_fileng(_llen) opt = dict( desc=desc, type=_type, length=_size + 2, lid_len=_llen,...
Read HOPOPT Line-Identification option. Structure of HOPOPT Line-Identification option [RFC 6788]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-...
def without_edge(self, edge: Edge) -> 'BipartiteGraph[TLeft, TRight, TEdgeValue]':
    """Return a copy of this bipartite graph with *edge* removed."""
    surviving = ((other, value) for other, value in self._edges.items() if edge != other)
    return BipartiteGraph(surviving)
Returns a copy of this bipartite graph with the given edge removed.
def setValues(self, rows, *values):
    """Set the column value for each row in *rows*, cycling through *values*."""
    for row, val in zip(rows, itertools.cycle(values)):
        self.setValueSafe(row, val)
    self.recalc()
    return status('set %d cells to %d values' % (len(rows), len(values)))
Set our column value for given list of rows to `value`.
def lookup_prefix(self, prefix, timestamp=timestamp_now): prefix = prefix.strip().upper() if self._lookuptype == "clublogxml" or self._lookuptype == "countryfile": return self._check_data_for_date(prefix, timestamp, self._prefixes, self._prefixes_index) elif self._lookuptype == "redi...
Returns lookup data of a Prefix Args: prefix (string): Prefix of a Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: dict: Dictionary containing the country specific data of the Prefix Raises: KeyE...
def query_target(target_chembl_id):
    """Query the ChEMBL API for a single target by its ChEMBL id.

    :param target_chembl_id: str ChEMBL target id
    :return: dict parsed from the JSON response, unique for the target
    """
    payload = {'query': 'target',
               'params': {'target_chembl_id': target_chembl_id, 'limit': 1}}
    response = send_query(payload)
    return response['targets'][0]
Query ChEMBL API target by id Parameters ---------- target_chembl_id : str Returns ------- target : dict dict parsed from json that is unique for the target
def sadd(self, key, *values): if len(values) == 0: raise ResponseError("wrong number of arguments for 'sadd' command") redis_set = self._get_set(key, 'SADD', create=True) before_count = len(redis_set) redis_set.update(map(self._encode, values)) after_count = len(redis...
Emulate sadd.
def visualize_model(X, y, estimator, path, **kwargs): y = LabelEncoder().fit_transform(y) model = Pipeline([ ('one_hot_encoder', OneHotEncoder()), ('estimator', estimator) ]) _, ax = plt.subplots() visualizer = ClassificationReport( model, classes=['edible', 'poisonous'], ...
Test various estimators.
def add(self, data_source, module, package=None):
    """Add *data_source* to the model.

    :param data_source: name of the data source to add (str)
    :param module: module in which the data source class resides
    :param package: optional package for relative imports
    """
    super(Data, self).add(data_source, module, package)
    # only register once; re-adding an existing source is a no-op here
    # NOTE(review): reconstructed from collapsed source -- confirm the
    # objects assignment really belongs inside this guard
    if data_source not in self.layer:
        self.layer[data_source] = {'module': module, 'package': package}
        self.objects[data_source] = None
Add data_source to model. Tries to import module, then looks for data source class definition. :param data_source: Name of data source to add. :type data_source: str :param module: Module in which data source resides. Can be absolute or relative. See :func:`importlib.import_...
def validate_signature(self, filename):
    """Return True if a valid GPG signature (<filename>.sig) is present for *filename*."""
    if not GPG_PRESENT:
        return False
    sigfilename = filename + '.sig'
    # opening the file serves as an existence/readability probe (EAFP)
    try:
        with open(sigfilename):
            pass
    except IOError:
        return False
    return verify(sigfilename, filename)
Returns True if a valid signature is present for filename
def get_parent_path(index=2):
    """Return the caller's parent directory path.

    Falls back to the parent of the current working directory when the
    caller path cannot be determined (e.g. CLI through stdin).
    """
    try:
        base = _caller_path(index)
    except RuntimeError:
        base = os.getcwd()
    return os.path.abspath(os.path.join(base, os.pardir))
Get the caller's parent path to sys.path If the caller is a CLI through stdin, the parent of the current working directory is used
def _assertIndex(self, index):
    """Raise TypeError unless *index* is exactly an int, or IndexError
    unless 0 <= index < self.nelems."""
    # exact type check (not isinstance) also rejects bool and numpy integers
    if type(index) is not int:
        raise TypeError('list indices must be integers')
    if not 0 <= index < self.nelems:
        raise IndexError('list index out of range')
Raise TypeError or IndexError if index is not an integer or out of range for the number of elements in this array, respectively.
def get_pattern_additional_cycles(self, patternnumber):
    """Get the number of additional cycles for a given pattern.

    Args:
        patternnumber (int): 0-7
    Returns:
        int: the number of additional cycles.
    """
    _checkPatternNumber(patternnumber)
    address = _calculateRegisterAddress('cycles', patternnumber)
    return self.read_register(address)
Get the number of additional cycles for a given pattern. Args: patternnumber (integer): 0-7 Returns: The number of additional cycles (int).
def cardinal(self, to):
    """Return the number of internal (non-external) dependencies of this
    module whose target is in *to*.

    Args:
        to (Package/Module): the target node(s).
    Returns:
        int: number of dependencies.
    """
    return sum(1 for dep in self.dependencies
               if not dep.external and dep.target in to)
Return the number of dependencies of this module to the given node. Args: to (Package/Module): the target node. Returns: int: number of dependencies.
def _restore_group(self, group_id):
    """Return group metadata (dict) for *group_id*, or None when not found."""
    meta = self.TaskSetModel._default_manager.restore_taskset(group_id)
    return meta.to_dict() if meta else None
Get group metadata for a group by id.
def get_all_hosted_routers(self, context):
    """Remote call: fetch sync data for all routers scheduled to a hosting device.

    :param context: session context
    """
    cctxt = self.client.prepare()
    return cctxt.call(context, 'cfg_sync_all_hosted_routers', host=self.host)
Make a remote process call to retrieve the sync data for routers that have been scheduled to a hosting device. :param context: session context
def _db(self):
    """Lazily create and cache the database client for storage access.

    :returns: storage client (BaseStorage)
    """
    client = getattr(self, "_db_client", None)
    if client is None:
        client = get_db_client()
        self._db_client = client
    return client
Database client for accessing storage. :returns: :class:`livebridge.storages.base.BaseStorage`
def _check_available(name): _status = _systemctl_status(name) sd_version = salt.utils.systemd.version(__context__) if sd_version is not None and sd_version >= 231: return 0 <= _status['retcode'] < 4 out = _status['stdout'].lower() if 'could not be found' in out: return False for ...
Returns boolean telling whether or not the named service is available
def get_help_text(self): txt = str('\n') for name, info in self.commands.items(): command_txt = "\t{0: <22} {1}\n".format(name, info['description']) if info['attributes']: command_txt = ''.join([command_txt, "\t Attributes:\n"]) for attrname, attrd...
Returns the help output in plain text format.
def MetatagDistinctValuesGet(self, metatag_name, namespace = None): ns = "default" if namespace is None else namespace if self.__SenseApiCall__("/metatag_name/{0}/distinct_values.json", "GET", parameters = {'namespace': ns}): return True else: self.__error__ = "api c...
Find the distinct value of a metatag name in a certain namespace @param metatag_name (string) - Name of the metatag for which to find the distinct values @param namespace (string) - Namespace in which to find the distinct values @return (bool) - Boolean...
def check_platforms(platforms):
    """Return True if every entry in *platforms* is a valid platform code.

    ``all()`` is True on an empty iterable, so the previous explicit
    length guard was redundant and has been removed.
    """
    return all(platform in PLATFORM_IDS for platform in platforms)
Checks if the platforms have a valid platform code
def encipher(self,string): string = self.remove_punctuation(string) ret = '' for (i,c) in enumerate(string): if i<len(self.key): offset = self.a2i(self.key[i]) else: offset = self.a2i(string[i-len(self.key)]) ret += self.i2a(self.a2i(c)+offset) re...
Encipher string using Autokey cipher according to initialised key. Punctuation and whitespace are removed from the input. Example:: ciphertext = Autokey('HELLO').encipher(plaintext) :param string: The string to encipher. :returns: The enciphered string.
def get_requested_quarter_data(self, zero_qtr_data, zeroth_quarter_idx, stacked_last_per_qtr, num_announcements, dates): zero_qtr_data_id...
Selects the requested data for each date. Parameters ---------- zero_qtr_data : pd.DataFrame The 'time zero' data for each calendar date per sid. zeroth_quarter_idx : pd.Index An index of calendar dates, sid, and normalized quarters, for only the rows...
def parse_startup_message(self):
    """Parse an OMAPI startup message; the generator yields an OmapiStartupMessage.

    >>> d = b"\\0\\0\\0\\x64\\0\\0\\0\\x18"
    >>> next(InBuffer(d).parse_startup_message()).validate()
    """
    # two net32 ints are parsed in sequence, then mapped into the message object
    return parse_map(lambda args: OmapiStartupMessage(*args),
                     parse_chain(self.parse_net32int, lambda _: self.parse_net32int()))
results in an OmapiStartupMessage >>> d = b"\\0\\0\\0\\x64\\0\\0\\0\\x18" >>> next(InBuffer(d).parse_startup_message()).validate()
def cal_frame_according_boundaries(left, right, top, bottom, parent_size, gaphas_editor=True, group=True): margin = cal_margin(parent_size) if group: rel_pos = max(left - margin, 0), max(top - margin, 0) size = (min(right - left + 2 * margin, parent_size[0] - rel_pos[0]), min(bot...
Generate margin and relative position and size handed boundary parameter and parent size
def create_permissions_from_tuples(model, codename_tpls): if codename_tpls: model_cls = django_apps.get_model(model) content_type = ContentType.objects.get_for_model(model_cls) for codename_tpl in codename_tpls: app_label, codename, name = get_from_codename_tuple( ...
Creates custom permissions on model "model".
async def apply_command(self, cmd): if cmd: if cmd.prehook: await cmd.prehook(ui=self, dbm=self.dbman, cmd=cmd) try: if asyncio.iscoroutinefunction(cmd.apply): await cmd.apply(self) else: cmd.apply(se...
applies a command This calls the pre and post hooks attached to the command, as well as :meth:`cmd.apply`. :param cmd: an applicable command :type cmd: :class:`~alot.commands.Command`
def add_record(post_id, catalog_id, order=0): rec = MPost2Catalog.__get_by_info(post_id, catalog_id) if rec: entry = TabPost2Tag.update( order=order, par_id=rec.tag_id[:2] + '00', ).where(TabPost2Tag.uid == rec.uid) entry.execute() ...
Create the record of post 2 tag, and update the count in g_tag.
def register_piece(self, from_address, to_address, hash, password, min_confirmations=6, sync=False, ownership=True): file_hash, file_hash_metadata = hash path, from_address = from_address verb = Spoolverb() unsigned_tx = self.simple_spool_transaction(from_address, ...
Register a piece Args: from_address (Tuple[str]): Federation address. All register transactions originate from the the Federation wallet to_address (str): Address registering the edition hash (Tuple[str]): Hash of the piece. (file_hash, file_hash_metadata) ...
def debug(*args):
    """Write each argument as a 'D:'-prefixed debug line to stderr (Maltego console)."""
    for item in args:
        click.echo('D:%s' % str(item), err=True)
Send debug messages to the Maltego console.
def amount(self, amount):
    """Set the amount of this Money, in the smallest denomination of the
    currency (e.g. cents for USD).

    :param amount: int amount to set
    :raises ValueError: if amount is None or negative
    """
    if amount is None:
        raise ValueError("Invalid value for `amount`, must not be `None`")
    if amount < 0:
        raise ValueError("Invalid value for `amount`, must be a value greater than or equal to `0`")
    self._amount = amount
Sets the amount of this Money. The amount of money, in the smallest denomination of the currency indicated by `currency`. For example, when `currency` is `USD`, `amount` is in cents. :param amount: The amount of this Money. :type: int
def item_options(self, **kwargs):
    """Handle an item OPTIONS request.

    Singular routes are special-cased: singular views may accept POST
    despite being registered as item routes.
    """
    actions = self._item_actions.copy()
    if self._resource.is_singular:
        actions['create'] = ('POST',)
    return self._set_options_headers(self._get_handled_methods(actions))
Handle collection OPTIONS request. Singular route requests are handled a bit differently because singular views may handle POST requests despite being registered as item routes.
def prepare(cls):
    """Prepare the NApp for upload by rendering the openAPI skeleton, then exit."""
    if not cls._ask_openapi():
        return
    napp_path = Path()
    tpl_path = SKEL_PATH / 'napp-structure/username/napp'
    OpenAPI(napp_path, tpl_path).render_template()
    print('Please, update your openapi.yml file.')
    sys.exit()
Prepare NApp to be uploaded by creating openAPI skeleton.
def get_diff_endpoints_from_commit_range(repo, commit_range): if not commit_range: raise ValueError('commit_range cannot be empty') result = re_find(COMMIT_RANGE_REGEX, commit_range) if not result: raise ValueError( 'Expected diff str of the form \'a..b\' or \'a...b\' (got {})' ...
Get endpoints of a diff given a commit range The resulting endpoints can be diffed directly:: a, b = get_diff_endpoints_from_commit_range(repo, commit_range) a.diff(b) For details on specifying git diffs, see ``git diff --help``. For details on specifying revisions, see ``git help revisio...
def authenticate(self, user, password):
    """Authenticate *user* by comparing its stored password hash.

    NOTE(review): uses `assert` for validation -- stripped under `python -O`,
    and callers appear to rely on AssertionError; left unchanged, but an
    explicit exception would be more robust.
    """
    assert user['password_hash'] == '_'.join((password, 'hash'))
    self.logger.debug('User %s has been successfully authenticated', user['uid'])
Authenticate user.
def add_arguments(self, parser): subparsers = parser.add_subparsers(help='sub-command help', dest='command') add_parser = partial(_add_subparser, subparsers, parser) add_parser('list', help="list concurrency triggers") add_parser('drop', help="d...
Entry point for subclassed commands to add custom arguments.
def toggle_concatenate(self): if not (self.chunk['epoch'].isChecked() and self.lock_to_staging.get_value()): for i,j in zip([self.idx_chan, self.idx_cycle, self.idx_stage, self.idx_evt_type], [self.cat['chan'], self.cat['cycle'], ...
Enable and disable concatenation options.
def convert_machine_list_time_val(text: str) -> datetime.datetime: text = text[:14] if len(text) != 14: raise ValueError('Time value not 14 chars') year = int(text[0:4]) month = int(text[4:6]) day = int(text[6:8]) hour = int(text[8:10]) minute = int(text[10:12]) second = int(text...
Convert RFC 3659 time-val to datetime objects.
def _zoom_rows(self, zoom): self.grid.SetDefaultRowSize(self.grid.std_row_size * zoom, resizeExistingRows=True) self.grid.SetRowLabelSize(self.grid.row_label_size * zoom) for row, tab in self.code_array.row_heights: if tab == self.grid.current_tabl...
Zooms grid rows
def bulk_copy(self, ids):
    """Bulk copy a set of devices.

    :param ids: list of device IDs (int)
    :return: list of Device
    """
    return self.service.bulk_copy(self.base, self.RESOURCE, ids, DeviceSchema())
Bulk copy a set of devices. :param ids: Int list of device IDs. :return: :class:`devices.Device <devices.Device>` list
def configure(self, options, conf):
    """Configure the plugin (enabled by default); store config and close timing."""
    self.conf = conf
    self.when = options.browser_closer_when
Configure plugin. Plugin is enabled by default.
def _make(c): ann = defaultdict(list) for pos in c['ann']: for db in pos: ann[db] += list(pos[db]) logger.debug(ann) valid = [l for l in c['valid']] ann_list = [", ".join(list(set(ann[feature]))) for feature in ann if feature in valid] return valid, ann_list
create html from template, adding figure, annotation and sequences counts
def get_objects(self):
    """Return the list of objects passed as this parameter (rope static object inference)."""
    return rope.base.oi.soi.get_passed_objects(
        self.pyfunction, self.index)
Returns the list of objects passed as this parameter
def edit(filename, identifier, data):
    """Update entry *identifier* in the BibTeX file *filename*.

    :param filename: BibTeX file to edit
    :param identifier: id of the entry to update
    :param data: parsed BibTeX database whose first entry replaces the target
    """
    with open(filename, 'r') as source:
        database = bibtexparser.load(source)
    database.entries_dict[identifier] = data.entries[0]
    write(filename, database)
Update an entry in a BibTeX file. :param filename: The name of the BibTeX file to edit. :param identifier: The id of the entry to update, in the BibTeX file. :param data: A dict associating fields and updated values. Fields present \ in the BibTeX file but not in this dict will be kept as is.
def pretty_print(self, indent=0):
    """Render this node as indented text without tags.

    NOTE(review): Python 2 only (`basestring`); `tab` is assigned but never
    used. Reconstructed from collapsed source -- confirm the final newline
    belongs outside the else-branch.
    """
    s = tab = ' '*indent
    s += '%s: ' %self.tag
    if isinstance(self.value, basestring):
        s += self.value
    else:
        s += '\n'
        for e in self.value:
            s += e.pretty_print(indent+4)
    s += '\n'
    return s
Print the document without tags using indentation
def basic_auth(self, username, password):
    """Set Basic Auth credentials on this Session; no-op if either value is falsy.

    :param str username: GitHub username
    :param str password: GitHub password
    """
    if username and password:
        self.auth = (username, password)
        # a stale token header would override basic auth -- drop it
        self.headers.pop('Authorization', None)
Set the Basic Auth credentials on this Session. :param str username: Your GitHub username :param str password: Your GitHub password
def searchEnterpriseGroups(self, searchFilter="", maxCount=100): params = { "f" : "json", "filter" : searchFilter, "maxCount" : maxCount } url = self._url + "/groups/searchEnterpriseGroups" return self._post(url=url, param_dic...
This operation searches groups in the configured enterprise group store. You can narrow down the search using the search filter parameter. Parameters: searchFilter - text value to narrow the search down maxCount - maximum number of records to return
def validation_statuses(self, area_uuid):
    """Return per-state validation counts for all files in an upload area.

    :param str area_uuid: RFC4122-compliant upload-area id
    :return: dict mapping validation state to file count
    """
    endpoint = "/area/{uuid}/validations".format(uuid=area_uuid)
    return self._make_request('get', endpoint).json()
Get count of validation statuses for all files in upload_area :param str area_uuid: A RFC4122-compliant ID for the upload area :return: a dict with key for each state and value being the count of files in that state :rtype: dict :raises UploadApiException: if information could not be ob...
def get_callproc_signature(self, name, param_types):
    """Build a procedure signature from *name* and *param_types*.

    :param name: procedure name
    :param param_types: strings, or (name, db_type) 2-tuples
    :return: the procedure's signature string
    """
    if isinstance(param_types[0], (list, tuple)):
        placeholders = [self.sql_writer.to_placeholder(*spec) for spec in param_types]
    else:
        placeholders = [self.sql_writer.to_placeholder(None, spec) for spec in param_types]
    return name + self.sql_writer.to_tuple(placeholders)
Returns a procedure's signature from the name and list of types. :name: the name of the procedure :params: can be either strings, or 2-tuples. 2-tuples must be of the form (name, db_type). :return: the procedure's signature
def diff(x, lag=1, differences=1): if any(v < 1 for v in (lag, differences)): raise ValueError('lag and differences must be positive (> 0) integers') x = check_array(x, ensure_2d=False, dtype=np.float32) fun = _diff_vector if x.ndim == 1 else _diff_matrix res = x for i in range(differences):...
Difference an array. A python implementation of the R ``diff`` function [1]. This computes lag differences from an array given a ``lag`` and ``differencing`` term. If ``x`` is a vector of length :math:`n`, ``lag=1`` and ``differences=1``, then the computed result is equal to the successive differences...
def parse_macro_params(token): try: bits = token.split_contents() tag_name, macro_name, values = bits[0], bits[1], bits[2:] except IndexError: raise template.TemplateSyntaxError( "{0} tag requires at least one argument (macro name)".format( token.contents.spli...
Common parsing logic for both use_macro and macro_block
def get_local_user(): import getpass username = None try: username = getpass.getuser() except KeyError: pass except ImportError: if win32: import win32api import win32security import win32profile username = win32api.GetUserName(...
Return the local executing username, or ``None`` if one can't be found. .. versionadded:: 2.0
def visit_ImportFrom(self, node): if node.level: raise PythranSyntaxError("Relative import not supported", node) if not node.module: raise PythranSyntaxError("import from without module", node) module = node.module current_module = MODULES for path in modu...
Check validity of imported functions. Check: - no level specific value are provided. - a module is provided - module/submodule exists in MODULES - imported function exists in the given module/submodule
def _gen_keys_from_multicol_key(key_multicol, n_keys):
    """Generate single-column key names ('<key>001of00N', ...) from a multicolumn key."""
    template = '{}{:03}of{:03}'
    return [template.format(key_multicol, idx + 1, n_keys) for idx in range(n_keys)]
Generates single-column keys from multicolumn key.
def _set_categories(self, categories, fastpath=False): if fastpath: new_dtype = CategoricalDtype._from_fastpath(categories, self.ordered) else: new_dtype = CategoricalDtype(categories, ordered=self.ordered) if (not f...
Sets new categories inplace Parameters ---------- fastpath : bool, default False Don't perform validation of the categories for uniqueness or nulls Examples -------- >>> c = pd.Categorical(['a', 'b']) >>> c [a, b] Categories (2, object...
def current_time(self) -> datetime:
    """Combine the server-reported start date and time into one datetime."""
    start = self.obj.SBRes.SBReq.StartT
    date_part = datetime.strptime(start.get("date"), "%Y%m%d")
    time_part = datetime.strptime(start.get("time"), "%H:%M")
    return datetime.combine(date_part.date(), time_part.time())
Extract current time.
def pdf_doc_info(instance): for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'file'): try: did = obj['extensions']['pdf-ext']['document_info_dict'] except KeyError: continue for elem in did: ...
Ensure the keys of the 'document_info_dict' property of the pdf-ext extension of file objects are only valid PDF Document Information Dictionary Keys.
def update_source(self, **kwargs): callback = kwargs.pop('callback', self._callback) ip_addr = ip_interface(unicode(kwargs.pop('neighbor'))) config = self._update_source_xml(neighbor=ip_addr, int_type=kwargs.pop('int_type'), ...
Set BGP update source property for a neighbor. This method currently only supports loopback interfaces. Args: vrf (str): The VRF for this BGP process. rbridge_id (str): The rbridge ID of the device on which BGP will be configured in a VCS fabric. nei...
def set_wsgi_params(self, module=None, callable_name=None, env_strategy=None): module = module or '' if '/' in module: self._set('wsgi-file', module, condition=module) else: self._set('wsgi', module, condition=module) self._set('callable', callable_name) s...
Set wsgi related parameters. :param str|unicode module: * load .wsgi file as the Python application * load a WSGI module as the application. .. note:: The module (sans ``.py``) must be importable, ie. be in ``PYTHONPATH``. Examples: * mypackage....
def _exec_cleanup(self, cursor, fd): LOGGER.debug('Closing cursor and cleaning %s', fd) try: cursor.close() except (psycopg2.Error, psycopg2.Warning) as error: LOGGER.debug('Error closing the cursor: %s', error) self._cleanup_fd(fd) if self._cleanup_callba...
Close the cursor, remove any references to the fd in internal state and remove the fd from the ioloop. :param psycopg2.extensions.cursor cursor: The cursor to close :param int fd: The connection file descriptor
def _on_connect(self, sequence, topic, message): try: slug = None parts = topic.split('/') slug = parts[-3] uuid = self._extract_device_uuid(slug) except Exception: self._logger.exception("Error parsing slug from connection request (slug=%s, to...
Process a request to connect to an IOTile device A connection message triggers an attempt to connect to a device, any error checking is done by the DeviceManager that is actually managing the devices. A disconnection message is checked to make sure its key matches what we excep...
def create_build_configuration_set_raw(**kwargs):
    """Create a new BuildConfigurationSet; return its content, or None on failure."""
    config_set = _create_build_config_set_object(**kwargs)
    response = utils.checked_api_call(pnc_api.build_group_configs, 'create_new', body=config_set)
    return response.content if response else None
Create a new BuildConfigurationSet.
def _extract_methods(self): service = self._service all_urls = set() urls_with_options = set() if not service.http: return for rule in service.http.rules: http_method, url = _detect_pattern_option(rule) if not url or not http_method or not rule...
Obtains the methods used in the service.
def send(self, text, thread_ts=None):
    """Send a reply via the RTM API (formatted messages are not supported
    for bot integrations)."""
    self._client.rtm_send_message(self._body['channel'], text, thread_ts=thread_ts)
Send a reply using the RTM API (this function doesn't support formatted messages when using a bot integration)
def valid_header_waiting(self): if len(self.buffer) < 4: self.logger.debug("Buffer does not yet contain full header") result = False else: result = True result = result and self.buffer[0] == velbus.START_BYTE if not result: self...
Check if a valid header is waiting in buffer
def transaction(self, compare, success=None, failure=None): compare = [c.build_message() for c in compare] success_ops = self._ops_to_requests(success) failure_ops = self._ops_to_requests(failure) transaction_request = etcdrpc.TxnRequest(compare=compare, ...
Perform a transaction. Example usage: .. code-block:: python etcd.transaction( compare=[ etcd.transactions.value('/doot/testing') == 'doot', etcd.transactions.version('/doot/testing') > 0, ], success=[...
def go_offline(self, comment=None):
    """Execute a Go-Offline operation on this node.

    :param str comment: optional comment to audit
    :raises NodeCommandFailed: offline not available
    :return: None
    """
    self.make_request(
        NodeCommandFailed,
        method='update',
        resource='go_offline',
        params={'comment': comment})
Executes a Go-Offline operation on the specified node :param str comment: optional comment to audit :raises NodeCommandFailed: offline not available :return: None
def _parse(reactor, directory, pemdir, *args, **kwargs): def colon_join(items): return ':'.join([item.replace(':', '\\:') for item in items]) sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()]) pem_path = FilePath(pemdir).asTextMode() acme_key = load_or_create_client_key(p...
Parse a txacme endpoint description. :param reactor: The Twisted reactor. :param directory: ``twisted.python.url.URL`` for the ACME directory to use for issuing certs. :param str pemdir: The path to the certificate directory to use.
def send_post(config, urlpath, post_data): server = config.get("Server", "url") logger.debug("Sending executor payload to " + server) post_data = urlencode(post_data) post_data = post_data.encode("utf-8", errors="ignore") url = server + urlpath try: urlopen(url, post_data) except Exc...
Send POST data to an OpenSubmit server url path, according to the configuration.
def insort_event_right(self, event, lo=0, hi=None): if lo < 0: raise ValueError('lo must be non-negative') if hi is None: hi = len(self.queue) while lo < hi: mid = (lo + hi) // 2 if event[0] < self.queue[mid][0]: hi = mid else: lo = mid + 1 self.queue.insert...
Insert event in queue, and keep it sorted assuming queue is sorted. If event is already in queue, insert it to the right of the rightmost event (to keep FIFO order). Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. Args: event: a (time in sec since u...
def fullqualname_py2(obj): if type(obj).__name__ == 'builtin_function_or_method': return _fullqualname_builtin_py2(obj) elif type(obj).__name__ == 'function': return obj.__module__ + '.' + obj.__name__ elif type(obj).__name__ in ['member_descriptor', 'method_descriptor', ...
Fully qualified name for objects in Python 2.
def run(self):
    """Parse the script file and return the populated Model.

    :rtype: Model
    """
    model = Model(self.fP, self.doc)
    # walking the document fills the model via visitor callbacks
    self.doc.walkabout(model)
    return model
Parse the script file. :rtype: :py:class:`~turberfield.dialogue.model.Model`
def int(self, *args):
    """Return a random int; defaults to the range [-sys.maxint, sys.maxint].

    NOTE(review): Python 2 only (`sys.maxint`). The method name shadows the
    builtin within the class namespace -- kept, it is the public API.
    """
    return self.random.randint(*self._arg_defaults(args, [-sys.maxint, sys.maxint], int))
Returns a random int between -sys.maxint and sys.maxint INT %{INT} -> '1245123' %{INT:10} -> '10000000' %{INT:10,20} -> '19'
def _check_subnet(self, name): subnets = self._vpc_connection.get_all_subnets( filters={'vpcId': self._vpc_id}) matching_subnets = [ subnet for subnet in subnets if name in [subnet.tags.get('Name'), subnet.id] ] if len(matchin...
Checks if the subnet exists. :param str name: name of the subnet :return: str - subnet id of the subnet :raises: `SubnetError` if group does not exist
def get_and_subtract(self, delta):
    """Subtract *delta* from the counter and return the previous value."""
    return self._invoke_internal(pn_counter_add_codec, delta=-delta, get_before_update=True)
Subtracts the given value from the current value and returns the previous value. :raises NoDataMemberInClusterError: if the cluster does not contain any data members. :raises UnsupportedOperationError: if the cluster version is less than 3.10. :raises ConsistencyLostError: if the session guaran...
def prepare_content_length(self, body): if body is not None: length = super_len(body) if length: self.headers['Content-Length'] = builtin_str(length) elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: self.headers['...
Prepare Content-Length header based on request method and body
def _calculate_new_overlap(stride, traj_len, skip):
    """Return an overlap (i.e. a skip for the next trajectory) so two
    fragments appear contiguous under the given *stride*.

    Equivalent to: stride * ceil((traj_len - skip) / stride) - traj_len + skip
    """
    overlap = stride * ((traj_len - skip - 1) // stride + 1) - traj_len + skip
    return overlap
Given two trajectories T_1 and T_2, this function calculates for the first trajectory an overlap, i.e., a skip parameter for T_2 such that the trajectory fragments T_1 and T_2 appear as one under the given stride. Idea for deriving the formula: It is K = ((traj_len - skip - 1) // stride + 1) =...
def _get_elevation(self, location): url = self._elevation_query_base % (location.latitude, location.longitude) if self.api_key != "": url += "&key=%s" % self.api_key data = self._read_from_url(url) response = json.loads(data) if response["status"] == "OK": ...
Query the elevation information with the latitude and longitude of the specified `location`.
def readerForFd(fd, URL, encoding, options):
    """Create an xmlTextReader for XML read from a file descriptor.

    *options* is a combination of xmlParserOption flags. The descriptor is
    NOT closed when the reader is closed or reset.
    """
    ret = libxml2mod.xmlReaderForFd(fd, URL, encoding, options)
    if ret is None:
        raise treeError('xmlReaderForFd() failed')
    return xmlTextReader(_obj=ret)
Create an xmltextReader for an XML from a file descriptor. The parsing flags @options are a combination of xmlParserOption. NOTE that the file descriptor will not be closed when the reader is closed or reset.
def parse_pagination(headers): links = { link.rel: parse_qs(link.href).get("page", None) for link in link_header.parse(headers.get("Link", "")).links } return _Navigation( links.get("previous", [None])[0], links.get("next", [None])[0], links.get("last", [None])[0], ...
Parses headers to create a pagination objects :param headers: HTTP Headers :type headers: dict :return: Navigation object for pagination :rtype: _Navigation
def collect_consequences(self):
    """Recursively collect the set of reference keys that would be dropped
    if this relation were dropped via "drop ... cascade".

    :return: set of all affected keys, including this relation's own key
    """
    result = {self.key()}
    for relation in self.referenced_by.values():
        result |= relation.collect_consequences()
    return result
Recursively collect a set of _ReferenceKeys that would consequentially get dropped if this were dropped via "drop ... cascade". :return Set[_ReferenceKey]: All the relations that would be dropped
def _cmd_down(self): revision = self._get_revision() if not self._rev: self._log(0, "downgrading current revision") else: self._log(0, "downgrading to revision %s" % revision) for rev in reversed(self._revisions[int(revision) - 1:]): sql_files = glob.g...
Downgrade to a revision
def instant_articles(self, **kwargs):
    """QuerySet of published content approved for instant articles
    (configured via FeatureType.instant_article)."""
    ordered = self.search(**kwargs).sort('-last_modified', '-published')
    return ordered.filter(InstantArticle())
QuerySet including all published content approved for instant articles. Instant articles are configured via FeatureType. FeatureType.instant_article = True.
def parse_substring(allele, pred, max_len=None):
    """Split *allele* into (prefix, rest) where prefix is the longest run of
    leading characters (up to *max_len*) for which *pred* is True."""
    limit = len(allele) if max_len is None else min(max_len, len(allele))
    matched = []
    idx = 0
    while idx < limit and pred(allele[idx]):
        matched.append(allele[idx])
        idx += 1
    return "".join(matched), allele[idx:]
Extract substring of letters for which predicate is True
def set_cache_expiry(response):
    """Apply the configured default max-age when the response has none set."""
    cache_conf = config.cache
    if response.cache_control.max_age is None and 'CACHE_DEFAULT_TIMEOUT' in cache_conf:
        response.cache_control.max_age = cache_conf['CACHE_DEFAULT_TIMEOUT']
    return response
Set the cache control headers
def contains_any(self, other):
    """Return truthy if any of *other*'s flag bits are set here.

    The equality check makes the empty flag set (value 0) contain itself.
    """
    mine, theirs = self.value, other.value
    return mine == theirs or mine & theirs
Check if any flags are set. (OsuMod.Hidden | OsuMod.HardRock) in flags # Check if either hidden or hardrock are enabled. OsuMod.keyMod in flags # Check if any keymod is enabled.
def md5_for_file(f, block_size=2 ** 20):
    """Generate an MD5 hash for a possibly large file by reading it in chunks.

    *f* may be an open binary file object or a file path; a path is opened
    and the function recurses with the file object.
    """
    # removed dead local: `md5 = hashlib.md5()` was created but never used --
    # hashing is delegated entirely to md5_for_stream
    try:
        f.seek(0)
        return md5_for_stream(f, block_size=block_size)
    except AttributeError:
        # no seek(): treat *f* as a file path
        file_name = f
        with open(file_name, 'rb') as f:
            return md5_for_file(f, block_size)
Generate an MD5 hash for a possibly large file by breaking it into chunks.
def runExperiment( self, e ): space = self.parameterSpace() if len(space) > 0: nb = self.notebook() ps = self._mixup(space) try: self.open() view = self._client.load_balanced_view() jobs = [] for p in ps:...
Run the experiment across the parameter space in parallel using all the engines in the cluster. This method returns immediately. The experiments are run asynchronously, with the points in the parameter space being explored randomly so that intermediate retrievals of results are more rep...
def get_project_build(account_project):
    """Return the JSON details of the latest Appveyor build for *account_project*."""
    url = make_url("/projects/{account_project}", account_project=account_project)
    return requests.get(url, headers=make_auth_headers()).json()
Get the details of the latest Appveyor build.
def do_local(self, host="localhost", port=8000):
    """Connect to a local DynamoDB instance; 'local off' disables it.

    > local
    > local host=localhost port=8001
    > local off
    """
    port = int(port)
    self._local_endpoint = None if host == "off" else (host, port)
    self.onecmd("use %s" % self.engine.region)
Connect to a local DynamoDB instance. Use 'local off' to disable. > local > local host=localhost port=8001 > local off
def from_vhost(cls, vhost):
    """Return the paas instance id associated with *vhost*, or None."""
    hosts = Vhost().list()
    paas_by_name = {host['name']: host['paas_id'] for host in hosts}
    return paas_by_name.get(vhost)
Retrieve paas instance id associated to a vhost.
def _get_all_timers(self, dataframe): s = dataframe['custom_timers'].apply(json.loads) s.index = dataframe['epoch'] for index, value in s.iteritems(): if not value: continue for key, value in six.iteritems(value): self._timers_values[key].a...
Get all timers and set them in the _timers_values property :param pandas.DataFrame dataframe: the main dataframe with row results