code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_config_object(): global _DEFAULT_CONFIG_WRAPPER if _DEFAULT_CONFIG_WRAPPER is not None: return _DEFAULT_CONFIG_WRAPPER with _DEFAULT_CONFIG_WRAPPER_LOCK: if _DEFAULT_CONFIG_WRAPPER is not None: return _DEFAULT_CONFIG_WRAPPER _DEFAULT_CONFIG_WRAPPER = ConfigWrapper...
Thread-safe accessor for the immutable default ConfigWrapper object
def create_cas_validate_url(cas_url, cas_route, service, ticket, renew=None):
    """Build a CAS service-validate URL.

    :param cas_url: base URL of the CAS server (e.g. http://sso.pdx.edu)
    :param cas_route: route of the validate endpoint (e.g. /cas/serviceValidate)
    :param service: the service URL being validated
    :param ticket: the CAS ticket string (e.g. 'ST-58274-...-cas')
    :param renew: optional renew flag passed through to the CAS server
    """
    query_pairs = (
        ('service', service),
        ('ticket', ticket),
        ('renew', renew),
    )
    return create_url(cas_url, cas_route, *query_pairs)
Create a CAS validate URL. Keyword arguments: cas_url -- The url to the CAS (ex. http://sso.pdx.edu) cas_route -- The route where the CAS lives on server (ex. /cas/serviceValidate) service -- (ex. http://localhost:5000/login) ticket -- (ex. 'ST-58274-x839euFek492ou832Eena7ee-cas') renew -- "tr...
def _request_token(self, force=False): if self.login_data is None: raise RuntimeError("Don't have a token to refresh") if not force: if not self._requires_refresh_token(): return True headers = { "Accept": "application/json", 'Autho...
Request a new auth token
def reraise_if_any(failures, cause_cls_finder=None):
    """Re-raise exception(s) captured in *failures*, if any.

    An empty list/tuple/iterator is a no-op (returns None). A single
    ``Failure`` is re-raised directly; more than one is wrapped in a
    :class:`~.WrappedFailure` and raised.
    """
    if not isinstance(failures, (list, tuple)):
        failures = list(failures)
    count = len(failures)
    if count == 1:
        failures[0].reraise(cause_cls_finder=cause_cls_finder)
    elif count > 1:
        raise WrappedFailure(failures)
Re-raise exceptions if argument is not empty. If argument is empty list/tuple/iterator, this method returns None. If argument is converted into a list with a single ``Failure`` object in it, that failure is reraised. Else, a :class:`~.WrappedFailure` exception is raised with the failure...
def _clear_policy(self, lambda_name): try: policy_response = self.lambda_client.get_policy( FunctionName=lambda_name ) if policy_response['ResponseMetadata']['HTTPStatusCode'] == 200: statement = json.loads(policy_response['Policy'])['Statement...
Remove obsolete policy statements to prevent policy from bloating over the limit after repeated updates.
def session_rollback(self, session): if not hasattr(session, 'meepo_unique_id'): self.logger.debug("skipped - session_rollback") return self.logger.debug("%s - after_rollback" % session.meepo_unique_id) signal("session_rollback").send(session) self._session_del(se...
Send session_rollback signal in sqlalchemy ``after_rollback``. This marks the failure of session so the session may enter commit phase.
def norm(x, mu, sigma=1.0):
    """Evaluate the normal PDF with mean *mu* and std *sigma* at *x*."""
    distribution = stats.norm(loc=mu, scale=sigma)
    return distribution.pdf(x)
Scipy norm function
def disconnect_sync(self, conn_id): done = threading.Event() result = {} def disconnect_done(conn_id, adapter_id, status, reason): result['success'] = status result['failure_reason'] = reason done.set() self.disconnect_async(conn_id, disconnect_done) ...
Synchronously disconnect from a connected device Args: conn_id (int): A unique identifier that will refer to this connection Returns: dict: A dictionary with two elements 'success': a bool with the result of the connection attempt 'failure_reason...
def get_byte_array(integer):
    """Return the minimal-length big-endian bytes encoding *integer*.

    Zero encodes to an empty byte string (zero bytes are needed).
    """
    nbytes = (integer.bit_length() + 7) // 8
    return integer.to_bytes(nbytes, byteorder='big', signed=False)
Return the variable length bytes corresponding to the given int
def get_book_info(cursor, real_dict_cursor, book_id, book_version, page_id, page_version): book_ident_hash = join_ident_hash(book_id, book_version) page_ident_hash = join_ident_hash(page_id, page_version) tree = get_tree(book_ident_hash, cursor) if not tree or page_ident_hash not in fl...
Return information about a given book. Return the book's title, id, shortId, authors and revised date. Raise HTTPNotFound if the page is not in the book.
def use_theme(theme):
    """Make *theme* the current theme.

    Two themes are included: light_theme and dark_theme. If a scene is
    active, it is restyled immediately.
    """
    global current
    current = theme
    import scene
    active_scene = scene.current
    if active_scene is not None:
        active_scene.stylize()
Make the given theme current. There are two included themes: light_theme, dark_theme.
def search_certificate(self, hash):
    """Look up a specific certificate by its hash via the Censys API.

    :param hash: certificate hash
    :type hash: str
    :return: dict with the certificate details
    """
    client = CensysCertificates(
        api_id=self.__uid, api_secret=self.__api_key)
    return client.view(hash)
Searches for a specific certificate using its hash :param hash: certificate hash :type hash: str :return: dict
def _extra_compile_time_classpath(self): def extra_compile_classpath_iter(): for conf in self._confs: for jar in self.extra_compile_time_classpath_elements(): yield (conf, jar) return list(extra_compile_classpath_iter())
Compute any extra compile-time-only classpath elements.
async def _watchdog(self, timeout):
    """Sleep for *timeout* seconds, then fire the watchdog.

    Cancels the watchdog task and invokes the registered callback.
    """
    # FIX: drop the `loop=` keyword — it was deprecated in Python 3.8
    # and removed in 3.10; asyncio.sleep uses the running loop.
    await asyncio.sleep(timeout)
    _LOGGER.debug("Watchdog triggered!")
    await self.cancel_watchdog()
    await self._watchdog_cb()
Trigger and cancel the watchdog after timeout. Call callback.
def is_file(dirname):
    """argparse type-checker: return *dirname* if it is an existing file.

    :raises argparse.ArgumentTypeError: when the path is not a file.
    """
    if os.path.isfile(dirname):
        return dirname
    raise argparse.ArgumentTypeError(
        "{0} is not an existing file".format(dirname))
Checks if a path is an actual file that exists
def pid(name):
    """Return the PID of container *name*.

    CLI Example:

    .. code-block:: bash

        salt myminion nspawn.pid arch1
    """
    try:
        raw = info(name).get('PID')
        return int(raw)
    except (TypeError, ValueError) as exc:
        msg = 'Unable to get PID for container \'{0}\': {1}'.format(name, exc)
        raise CommandExecutionError(msg)
Returns the PID of a container name Container name CLI Example: .. code-block:: bash salt myminion nspawn.pid arch1
def get_scope_path(self, scope_separator="::"): if self.parent_scope is None: return "" elif isinstance(self.parent_scope, Root): return "" else: parent_path = self.parent_scope.get_scope_path(scope_separator) if parent_path: return...
Generate a string that represents this component's declaration namespace scope. Parameters ---------- scope_separator: str Override the separator between namespace scopes
def modify_column_if_table_exists(self, tablename: str, fieldname: str, newdef: str) -> Optional[int]: if not self.table_exists(tablename): return None sql = "ALTER TABLE {t} MOD...
Alters a column's definition without renaming it.
def clean_resource_json(resource_json): for a in ('parent_docname', 'parent', 'template', 'repr', 'series'): if a in resource_json: del resource_json[a] props = resource_json['props'] for prop in ( 'acquireds', 'style', 'in_nav', 'nav_title', 'weight', 'auto_excer...
The catalog wants to be smaller, let's drop some stuff
def _validate_timeout(seconds: float): val = int(seconds * 1000) assert 60000 <= val <= 4294967294, "Bad value: {}".format(val) return val
Creates an int from 60000 to 4294967294 that represents a valid millisecond wireless LAN timeout
def union(self, other, renorm=True): for d in range(1, min(self.maxdepth, other.maxdepth)+1): self.add_pixels(other.pixeldict[d], d) if self.maxdepth < other.maxdepth: for d in range(self.maxdepth+1, other.maxdepth+1): for p in other.pixeldict[d]: ...
Add another Region by performing union on their pixlists. Parameters ---------- other : :class:`AegeanTools.regions.Region` The region to be combined. renorm : bool Perform renormalisation after the operation? Default = True.
def runGetContinuousSet(self, id_):
    """Run a getContinuousSet request for the specified ID."""
    compound_id = datamodel.ContinuousSetCompoundId.parse(id_)
    repository = self.getDataRepository()
    dataset = repository.getDataset(compound_id.dataset_id)
    continuous_set = dataset.getContinuousSet(id_)
    return self.runGetRequest(continuous_set)
Runs a getContinuousSet request for the specified ID.
def _produce_return(self, cursor): results = cursor.fetchall() if self._row_formatter is not None: return (self._row_formatter(r, cursor) for r in results) return results
Get the rows from the cursor and apply the row formatter. :return: sequence of rows, or a generator if a row formatter has to be applied
def upload_file_boto(fname, remote_fname, mditems=None): r_fname = objectstore.parse_remote(remote_fname) conn = objectstore.connect(remote_fname) bucket = conn.lookup(r_fname.bucket) if not bucket: bucket = conn.create_bucket(r_fname.bucket, location=objectstore.get_region(remote_fname)) ke...
Upload a file using boto instead of external tools.
def create(): if not all(map(os.path.isdir, ARGS.directory)): exit('Error: One or more of the specified directories does not exist.') with sqlite3.connect(ARGS.database) as connection: connection.text_factory = str cursor = connection.cursor() cursor.execute('DROP TABLE IF EXISTS...
Create a new database with information about the films in the specified directory or directories.
def _row_resized(self, row, old_height, new_height): self.dataTable.setRowHeight(row, new_height) self._update_layout()
Update the row height.
def clean_structure(self, out_suffix='_clean', outdir=None, force_rerun=False, remove_atom_alt=True, keep_atom_alt_id='A',remove_atom_hydrogen=True, add_atom_occ=True, remove_res_hetero=True, keep_chemicals=None, keep_res_only=None, add_chain_id_i...
Clean the structure file associated with this structure, and save it as a new file. Returns the file path. Args: out_suffix (str): Suffix to append to original filename outdir (str): Path to output directory force_rerun (bool): If structure should be re-cleaned if a clean fi...
def fire_ret_load(self, load): if load.get('retcode') and load.get('fun'): if isinstance(load['fun'], list): if isinstance(load['retcode'], list): multifunc_ordered = True else: multifunc_ordered = False for fun_...
Fire events based on information in the return load
def output_file_name(self):
    """Name of the file where this plugin's output should be written."""
    path = self.source_urn.Path().lstrip("/")
    safe_path = re.sub(r":|/", "_", path)
    return "results_%s%s" % (safe_path, self.output_file_extension)
Name of the file where plugin's output should be written to.
def addRow(self, *row): row = [ str(item) for item in row ] len_row = [ len(item) for item in row ] width = self.__width len_old = len(width) len_new = len(row) known = min(len_old, len_new) missing = len_new - len_old if missing > 0: w...
Add a row to the table. All items are converted to strings. @type row: tuple @keyword row: Each argument is a cell in the table.
def patch(): from twisted.application.service import Service old_startService = Service.startService old_stopService = Service.stopService def startService(self): assert not self.running, "%r already running" % (self,) return old_startService(self) def stopService(self): asse...
Patch startService and stopService so that they check the previous state first. (used for debugging only)
def transform_sources(self, sources, with_string=False): modules = {} updater = partial( self.replace_source, modules=modules, prefix='string_') for filename in sources: updated = update_func_body(sources[filename], updater) sources[filename] = EXTERN_AND_SEG ...
Get the definitions of needed strings and functions after replacement.
def get_url(self, *paths, **params): path_stack = self._attribute_stack[:] if paths: path_stack.extend(paths) u = self._stack_collapser(path_stack) url = self._url_template % { "domain": self._api_url, "generated_url" : u, } if self._pa...
Returns the URL for this request. :param paths: Additional URL path parts to add to the request :param params: Additional query parameters to add to the request
def update_contributions(sender, instance, action, model, pk_set, **kwargs):
    """Create a contribution for each author added to an article.

    Only acts on the m2m 'pre_add' action; all other actions are ignored.
    """
    if action != 'pre_add':
        return
    for author in model.objects.filter(pk__in=pk_set):
        update_content_contributions(instance, author)
Creates a contribution for each author added to an article.
def _registerHandler(self, handler): self._logger.addHandler(handler) self._handlers.append(handler)
Registers a handler. :param handler: A handler object.
def get_line_break_property(value, is_bytes=False): obj = unidata.ascii_line_break if is_bytes else unidata.unicode_line_break if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['linebreak'].get(negated, negated) else: value = unidata.unicode_alias['lin...
Get `LINE BREAK` property.
def dump_wcxf(self, C_out, scale_out, fmt='yaml', stream=None, **kwargs):
    """Serialize Wilson coefficients `C_out` at `scale_out` in WCxf format.

    If `stream` is given the result is written there; `fmt` defaults to
    'yaml' and may also be 'json'.
    """
    wcxf_obj = self.get_wcxf(C_out, scale_out)
    return wcxf_obj.dump(fmt=fmt, stream=stream, **kwargs)
Return a string representation of the Wilson coefficients `C_out` in WCxf format. If `stream` is specified, export it to a file. `fmt` defaults to `yaml`, but can also be `json`. Note that the Wilson coefficients are rotated into the Warsaw basis as defined in WCxf, i.e. to the basis wh...
def get_random(self): import random Statement = self.get_model('statement') session = self.Session() count = self.count() if count < 1: raise self.EmptyDatabaseException() random_index = random.randrange(0, count) random_statement = session.query(State...
Returns a random statement from the database.
def get_partial_contenthandler(element): from ligo.lw.ligolw import PartialLIGOLWContentHandler from ligo.lw.table import Table if issubclass(element, Table): def _element_filter(name, attrs): return element.CheckProperties(name, attrs) else: def _element_filter(name, _): ...
Build a `PartialLIGOLWContentHandler` to read only this element Parameters ---------- element : `type`, subclass of :class:`~ligo.lw.ligolw.Element` the element class to be read, Returns ------- contenthandler : `type` a subclass of :class:`~ligo.lw.ligolw.PartialLIGOLWContentH...
def components(accountable, project_key): components = accountable.project_components(project_key) headers = sorted(['id', 'name', 'self']) rows = [[v for k, v in sorted(component.items()) if k in headers] for component in components] rows.insert(0, headers) print_table(SingleTable(rows)...
Returns a list of all a project's components.
def getfile(data_name, path): data_source = get_data_object(data_name, use_data_config=False) if not data_source: if 'output' in data_name: floyd_logger.info("Note: You cannot clone the output of a running job. You need to wait for it to finish.") sys.exit() url = "{}/api/v1/reso...
Download a specific file from a dataset.
def find_types(self, site=None, match=r'^(?!lastfile|spectro|\.).*'):
    """Return the list of known data types (FFL basenames).

    :param site: if given, restrict to types recorded for this site.
    :param match: regex filter applied to type names; None disables it.
    """
    self._find_paths()
    types = [tag for (site_, tag) in self.paths if site in (None, site_)]
    if match is None:
        return types
    pattern = re.compile(match)
    return [t for t in types if pattern.search(t)]
Return the list of known data types. This is just the basename of each FFL file found in the FFL directory (minus the ``.ffl`` extension)
def reset(self):
    """Point the cursor back at the start of the root GameTree, self.game."""
    self.gametree = self.game
    self.nodenum = self.index = 0
    self.stack = []
    self.node = self.gametree[0]
    self._setChildren()
    self._setFlags()
Set 'Cursor' to point to the start of the root 'GameTree', 'self.game'.
def namespace(self, namespace, to=None): fields = get_apphook_field_names(self.model) if not fields: raise ValueError( ugettext( 'Can\'t find any relation to an ApphookConfig model in {0}' ).format(self.model.__name__) ) ...
Filter by namespace. Try to guess which field to use in lookup. Accept 'to' argument if you need to specify.
def askForFolder(parent, msg = None): msg = msg or 'Select folder' caller = _callerName().split(".") name = "/".join([LAST_PATH, caller[-1]]) namespace = caller[0] path = pluginSetting(name, namespace) folder = QtWidgets.QFileDialog.getExistingDirectory(parent, msg, path) if folder: ...
Asks for a folder, opening the corresponding dialog with the last path that was selected when this same function was invoked from the calling method :param parent: The parent window :param msg: The message to use for the dialog title
def unmarshall_value(self, value):
    """Unmarshall a Crash object previously stored in the database.

    NOTE(review): this is Python 2 code — ``str.decode('hex')`` does not
    exist on Python 3 strings; confirm the target interpreter before
    porting or restyling.

    @type value: str
    @param value: serialized object read from the database.
    @rtype: L{Crash}
    @return: converted object.
    """
    value = str(value)
    if self.escapeValues:
        # hex-decode only when values were escaped on write
        value = value.decode('hex')
    if self.compressValues:
        value = zlib.decompress(value)
    # SECURITY NOTE(review): pickle.loads on database content executes
    # arbitrary code if the database is untrusted — confirm trust model.
    value = pickle.loads(value)
    return value
Unmarshalls a Crash object read from the database. @type value: str @param value: Object to convert. @rtype: L{Crash} @return: Converted object.
def probe(self, axis: str, distance: float) -> Dict[str, float]:
    """Probe *axis* over *distance* and return the updated position dict."""
    driver = self._smoothie_driver
    return driver.probe_axis(axis, distance)
Run a probe and return the new position dict
def ensure_size( self, size = None ): if size is None: size = self.size_constraint while sys.getsizeof(self) > size: element_frequencies = collections.Counter(self) infrequent_element = element_frequencies.most_common()[-1:][0][0] s...
This function removes the least frequent elements until the size constraint is met.
def files(self):
    """Return the set of all files across every fileset in the chroot."""
    return {
        f
        for label in self.filesets
        for f in self.filesets[label]
    }
Get all files in the chroot.
def encode_datetime(o):
    """Encode a datetime.datetime as an ECMA-262 compliant string.

    Microseconds are truncated to milliseconds, and a +00:00 UTC offset
    is rendered as 'Z'.
    """
    text = o.isoformat()
    if o.microsecond:
        # keep only milliseconds: drop the last three microsecond digits
        text = text[:23] + text[26:]
    if text.endswith('+00:00'):
        text = text[:-6] + 'Z'
    return text
Encodes a Python datetime.datetime object as an ECMA-262 compliant datetime string.
def image(self):
    """An |Image| giving access to the embedded picture's properties/bytes.

    :raises ValueError: when the shape has no embedded image.
    """
    slide_part = self.part
    rId = self._element.blip_rId
    if rId is None:
        raise ValueError('no embedded image')
    return slide_part.get_image(rId)
An |Image| object providing access to the properties and bytes of the image in this picture shape.
def estimate_bg(self, fit_offset="mean", fit_profile="tilt", border_px=0, from_mask=None, ret_mask=False): self.set_bg(bg=None, key="fit") bgimage, mask = bg_estimate.estimate(data=self.image, fit_offset=fit_offset, ...
Estimate image background Parameters ---------- fit_profile: str The type of background profile to fit: - "offset": offset only - "poly2o": 2D 2nd order polynomial with mixed terms - "tilt": 2D linear tilt with offset (default) fit_offset...
def create_metadata(self, **params):
    """Add metadata to a media element (e.g. image alt descriptions).

    Docs: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-metadata-create
    """
    payload = json.dumps(params)
    url = "https://upload.twitter.com/1.1/media/metadata/create.json"
    return self.post(url, params=payload)
Adds metadata to a media element, such as image descriptions for visually impaired. Docs: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-metadata-create
def get_last_scene_time(self, refresh=False):
    """Return the cached LastSceneTime value.

    Refreshes the value from Vera first when *refresh* is True; a
    refresh is only needed if subscriptions are not in use.
    """
    if refresh:
        self.refresh_complex_value('LastSceneTime')
    return self.get_complex_value('LastSceneTime')
Get last scene time. Refresh data from Vera if refresh is True, otherwise use local cache. Refresh is only needed if you're not using subscriptions.
def _check_email_changed(cls, username, email): ret = cls.exec_request('user/{}'.format(username), 'get', raise_for_status=True) return ret['email'] != email
Compares email to one set on SeAT
def histogram(self, bmus=None):
    """Return a 2D histogram (nrows x ncols) of best-matching units.

    :param bmus: (i, j) BMU index pairs; defaults to the trained
        self._bmus (asserts the SOM has been trained).
    :returns: the computed 2D histogram as a numpy array.
    """
    if bmus is None:
        assert self._bmus is not None, 'not trained'
        bmus = self._bmus
    counts = np.zeros((self._som.nrows, self._som.ncols))
    for row, col in bmus:
        counts[row, col] += 1
    return counts
\ Return a 2D histogram of bmus. :param bmus: the best-match units indexes for underlying data. :type bmus: :class:`numpy.ndarray` :returns: the computed 2D histogram of bmus. :rtype: :class:`numpy.ndarray`
def getReliableListeners(self):
    """Yield each listener that has been added to this batch processor."""
    query = self.store.query(
        _ReliableListener, _ReliableListener.processor == self)
    for reliable in query:
        yield reliable.listener
Return an iterable of the listeners which have been added to this batch processor.
def _raw_aspera_metadata(self, bucket): response = self._client.get_bucket_aspera(Bucket=bucket) aspera_access_key = response['AccessKey']['Id'] aspera_secret_key = response['AccessKey']['Secret'] ats_endpoint = response['ATSEndpoint'] return aspera_access_key, aspera_secret_key,...
get the Aspera connection details on Aspera enabled buckets
def get(self, query, sort, page, size): urlkwargs = { 'q': query, 'sort': sort, 'size': size, } communities = Community.filter_communities(query, sort) page = communities.paginate(page, size) links = default_links_pagination_factory(page, urlkw...
Get a list of all the communities. .. http:get:: /communities/(string:id) Returns a JSON list with all the communities. **Request**: .. sourcecode:: http GET /communities HTTP/1.1 Accept: application/json Content-Type: applicat...
def on_click(self, button, **kwargs): actions = ['leftclick', 'middleclick', 'rightclick', 'upscroll', 'downscroll'] try: action = actions[button - 1] except (TypeError, IndexError): self.__log_button_event(button, None, None, "Other button") ...
Maps a click event with its associated callback. Currently implemented events are: ============ ================ ========= Event Callback setting Button ID ============ ================ ========= Left click on_leftclick 1 Middle click on_middleclic...
def _match_registers(self, query): if query in self._status_registers: register = self._status_registers[query] response = register.value logger.debug('Found response in status register: %s', repr(response)) register.clear() re...
Tries to match in status registers :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
def get_perm_codename(perm, fail_silently=True):
    """Extract the codename from an 'app_label.codename' permission string.

    If no dot is present the string is returned unchanged, unless
    *fail_silently* is False, in which case the IndexError propagates.

    Examples
    --------
    >>> get_perm_codename('app_label.codename_model')
    'codename_model'
    >>> get_perm_codename('codename')
    'codename'
    """
    try:
        return perm.split('.', 1)[1]
    except IndexError:
        if not fail_silently:
            raise
        return perm
Get permission codename from permission-string. Examples -------- >>> get_perm_codename('app_label.codename_model') 'codename_model' >>> get_perm_codename('app_label.codename') 'codename' >>> get_perm_codename('codename_model') 'codename_model' >>> get_perm_codename('codename') ...
def add_item(self, item): if not(isinstance(item.name, basestring) and isinstance(item.description, basestring)): raise TypeError("Name and description should be strings, are of type {} and {}" .format(type(item.name), type(item.description))) if not(isinstance(it...
Add single command line flag Arguments: name (:obj:`str`): Name of flag used in command line flag_type (:py:class:`snap_plugin.v1.plugin.FlagType`): Indication if flag should store value or is simple bool flag description (:obj:`str`): Flag description used i...
def _cache_key_select_sample_type(method, self, allow_blank=True, multiselect=False, style=None):
    """Cache key for select_sample_type.

    The key changes when the shared timer ticks or when any of the
    widget options change, forcing recomputation.
    """
    return (update_timer(), allow_blank, multiselect, style)
This function returns the key used to decide if method select_sample_type has to be recomputed
def ddns(self, domain_id, record_id, sub_domain, record_line, value): record = self.info(domain_id, record_id) if record.sub_domain == sub_domain and \ record.record_line == record_line and \ record.value == value: return self._api.do_post('Record.Ddns', domain_...
Update record's value dynamically If the ``value`` is different from the record's current value, then perform a dynamic record update. Otherwise, nothing will be done. :param str domain_id: Domain ID :param str record_id: Record ID :param str sub_domain: Sub domain of domain (e...
def write(filename, mesh, fmt_version, write_binary=True): try: writer = _writers[fmt_version] except KeyError: try: writer = _writers[fmt_version.split(".")[0]] except KeyError: raise ValueError( "Need mesh format in {} (got {})".format( ...
Writes a Gmsh msh file.
def delete_files_in_folder(fldr):
    """Delete every file matching '*.*' directly inside folder *fldr*."""
    pattern = fldr + os.sep + '*.*'
    for path in glob.glob(pattern):
        delete_file(path, True)
delete all files in folder 'fldr'
def check_syntax(string): args = ["ecpg", "-o", "-", "-"] with open(os.devnull, "w") as devnull: try: proc = subprocess.Popen(args, shell=False, stdout=devnull, stdin=subprocess.PIPE, ...
Check syntax of a string of PostgreSQL-dialect SQL
def get_index_mappings(self, index):
    """Collect .kibana field mappings for every doc_type in *index*.

    Returns None as soon as any doc_type has no mapping.
    """
    fields = []
    for key in index:
        mapping = self.get_doc_type_mappings(index[key])
        if mapping is None:
            return None
        fields.extend(mapping)
    return fields
Converts all index's doc_types to .kibana
def load(cls, path): data = json.load(open(path)) weights = data['weights'] weights = np.asarray(weights, dtype=np.float64) s = cls(data['map_dimensions'], data['params']['lr']['orig'], data['data_dimensionality'], influence=data['params'][...
Load a SOM from a JSON file saved with this package.. Parameters ---------- path : str The path to the JSON file. Returns ------- s : cls A som of the specified class.
def _http_request(url, request_timeout=None): _auth(url) try: request_timeout = __salt__['config.option']('solr.request_timeout') kwargs = {} if request_timeout is None else {'timeout': request_timeout} data = salt.utils.json.load(_urlopen(url, **kwargs)) return _get_return_dict(...
PRIVATE METHOD Uses salt.utils.json.load to fetch the JSON results from the solr API. url : str a complete URL that can be passed to urllib.open request_timeout : int (None) The number of seconds before the timeout should fail. Leave blank/None to use the default. __opts__['solr.req...
def create_polynoms(): fname = pr.resource_filename('pyciss', 'data/soliton_prediction_parameters.csv') res_df = pd.read_csv(fname) polys = {} for resorder, row in zip('65 54 43 21'.split(), range(4)): p = poly1d([res_df.loc[row, 'Slope (km/yr)'], res_df.loc[row, 'In...
Create and return poly1d objects. Uses the parameters from Morgan to create poly1d objects for calculations.
def implemented_methods(cls):
    """Return (and memoize) a mapping of HTTP op name -> callback."""
    if cls.__implemented_methods:
        return cls.__implemented_methods
    mapping = {}
    for method in cls.callbacks:
        for op in getattr(method, 'swagger_ops'):
            mapping[op] = method
    cls.__implemented_methods = mapping
    return cls.__implemented_methods
Return a mapping of implemented HTTP methods vs. their callbacks.
def challenge_auth(username, password, challenge, lower, digest='sha256'): def hdig(x): return fdigest(x).hexdigest() fdigest = get_digest(digest) luser = lower(username) tpass = password[:10].encode("ascii") hvalue = hdig("{0}:{1}".format(luser, hdig(tpass)).encode("ascii")) bhvalue = h...
Calculates quakenet's challenge auth hash .. code-block:: python >>> challenge_auth("mooking", "0000000000", ... "12345678901234567890123456789012", str.lower, "md5") '2ed1a1f1d2cd5487d2e18f27213286b9'
def add_primary_text(self, item_url, primary_text): c = self.conn.cursor() c.execute("DELETE FROM primary_texts WHERE item_url=?", (str(item_url),)) self.conn.commit() c.execute("INSERT INTO primary_texts VALUES (?, ?, ?)", (str(item_url), primary_...
Add the given primary text to the cache database, updating the existing record if the primary text is already present :type item_url: String or Item :param item_url: the URL of the corresponding item, or an Item object :type primary_text: String :param primary_text: the item's p...
def _copy_scratch_to_state(args: Dict[str, Any]):
    """Copy the scratch shard into the state shard for *args*."""
    source = _scratch_shard(args)
    destination = _state_shard(args)
    np.copyto(destination, source)
Copies scratch shards to state shards.
def parsePositionFile(filename):
    """Parse an Android GPS logger CSV file into a list of row dicts.

    Each row gains a 'strtime' key holding the parsed 'time' column
    formatted as e.g. '01 Jan 2020, 12:00 UTC'.
    """
    rows = []
    # NOTE(review): binary mode with csv is a Python 2 idiom — on
    # Python 3 csv requires text mode; confirm target interpreter.
    with open(filename, "rb") as handle:
        for record in csv.DictReader(handle):
            parsed = dateparser.parse(record['time'])
            record['strtime'] = parsed.strftime("%d %b %Y, %H:%M UTC")
            rows.append(record)
    return rows
Parses Android GPS logger csv file and returns list of dictionaries
def excise(self, ngrams, replacement):
    """Return this text's token content with each n-gram in *ngrams*
    replaced by *replacement*, longest n-grams first.

    :param ngrams: n-grams to be replaced
    :type ngrams: list of str
    :param replacement: replacement text
    :type replacement: str
    """
    content = self.get_token_content()
    # FIX: sort a copy (sorted) rather than ngrams.sort(), so the
    # caller's list is not reordered as a side effect.
    for ngram in sorted(ngrams, key=len, reverse=True):
        content = content.replace(ngram, replacement)
    return content
Returns the token content of this text with every occurrence of each n-gram in `ngrams` replaced with `replacement`. The replacing is performed on each n-gram by descending order of length. :param ngrams: n-grams to be replaced :type ngrams: `list` of `str` :param repla...
def pack(self):
    """Pack the service code for transmission; returns a 2-byte string."""
    number, attribute = self.number, self.attribute
    code = ((number & 0x3ff) << 6) | (attribute & 0x3f)
    return pack("<H", code)
Pack the service code for transmission. Returns a 2 byte string.
def postIncidents(self, name, message, status, visible, **kwargs):
    """Create a new incident.

    :param name: name of the incident
    :param message: a message (supporting Markdown) to explain more
    :param status: status of the incident
    :param visible: whether the incident is publicly visible
    Additional keyword arguments (e.g. component_id) are passed through.
    """
    kwargs.update(name=name, message=message, status=status, visible=visible)
    return self.__postRequest('/incidents', kwargs)
Create a new incident. :param name: Name of the incident :param message: A message (supporting Markdown) to explain more. :param status: Status of the incident. :param visible: Whether the incident is publicly visible. :param component_id: (optional) Component to update. ...
def _get_disksize_MiB(iLOIP, cred): result = _parse_mibs(iLOIP, cred) disksize = {} for uuid in sorted(result): for key in result[uuid]: if key.find('PhyDrvSize') >= 0: disksize[uuid] = dict() for suffix in sorted(result[uuid][key]): si...
Reads the dictionary of parsed MIBs and gets the disk size. :param iLOIP: IP address of the server on which SNMP discovery has to be executed. :param snmp_credentials in a dictionary having following mandatory keys. auth_user: SNMP user auth_protocol: Auth Pro...
def process_request(self, request, client_address): self.collect_children() pid = os.fork() if pid: if self.active_children is None: self.active_children = [] self.active_children.append(pid) self.close_request(request) return ...
Fork a new subprocess to process the request.
def getVersion():
    """Print the epochs code version as 'version.release.increment'."""
    version_string = '.'.join(
        str(part)
        for part in (CDFepoch.version, CDFepoch.release, CDFepoch.increment))
    print('epochs version:', version_string)
Shows the code version.
def guess_lexer_using_filename(file_name, text): lexer, accuracy = None, None try: lexer = custom_pygments_guess_lexer_for_filename(file_name, text) except SkipHeartbeat as ex: raise SkipHeartbeat(u(ex)) except: log.traceback(logging.DEBUG) if lexer is not None: try: ...
Guess lexer for given text, limited to lexers for this file's extension. Returns a tuple of (lexer, accuracy).
def get_sequence(self, chrom, start, end, strand=None): if not self.index_dir: print("Index dir is not defined!") sys.exit() fasta_file = self.fasta_file[chrom] index_file = self.index_file[chrom] line_size = self.line_size[chrom] total_size = self.size[ch...
Retrieve a sequence
def to_json(self):
    """Serialize to JSON, wrapped under the schema title.

    The salesking schema stores the object type in the schema's
    'title'; internal attributes are not serialized.
    """
    payload = json.dumps(self)
    return u'{"%s":%s}' % (self.schema['title'], payload)
put the object to json and remove the internal stuff salesking schema stores the type in the title
def inxsearch(self, r, g, b):
    """Return the index of the colormap entry nearest to (r, g, b).

    Nearest is by squared Euclidean distance over the first three
    colormap columns; components are expected in 0..255.
    """
    deltas = self.colormap[:, :3] - np.array([r, g, b])
    squared_distances = (deltas * deltas).sum(1)
    return np.argmin(squared_distances)
Search for BGR values 0..255 and return colour index
def get(self, pid, record, **kwargs):
    """Get a record.

    Checks the record's ETag and If-Modified-Since headers, then builds
    the HTTP response with the help of the link factory.

    :param pid: persistent identifier resolved from the URL.
    :param record: the resolved record.
    """
    etag = str(record.revision_id)
    self.check_etag(etag)
    self.check_if_modified_since(record.updated, etag=etag)
    return self.make_response(
        pid, record, links_factory=self.links_factory)
Get a record. Permissions: ``read_permission_factory`` Procedure description: #. The record is resolved reading the pid value from the url. #. The ETag and If-Modifed-Since is checked. #. The HTTP response is built with the help of the link factory. :param pid: Pers...
def _has_manual_kern_feature(font): return any(f for f in font.features if f.name == "kern" and not f.automatic)
Return true if the GSFont contains a manually written 'kern' feature.
def create_or_get_keypair(self, nova, keypair_name="testkey"): try: _keypair = nova.keypairs.get(keypair_name) self.log.debug('Keypair ({}) already exists, ' 'using it.'.format(keypair_name)) return _keypair except Exception: sel...
Create a new keypair, or return pointer if it already exists.
def _open(file, mode='copyonwrite'):
    """Open a FITS file, falling back to _open_fix when the header
    fails verification; pads are stripped from every HDU."""
    import pyfits
    try:
        hdu = pyfits.open(file, mode)
    except (ValueError, pyfits.VerifyError, pyfits.FITS_SevereError):
        import sys
        hdu = _open_fix(file)
    for f in hdu:
        strip_pad(f)
    return hdu
Opens a FITS format file and calls _open_fix if header doesn't verify correctly.
def _get_decision_trees_bulk(self, payload, valid_indices, invalid_indices, invalid_dts): valid_dts = self._create_and_send_json_bulk([payload[i] for i in valid_indices], "{}/bulk/decision_tree".format(self._base_url), "...
Tool for the function get_decision_trees_bulk. :param list payload: contains the informations necessary for getting the trees. Its form is the same than for the function. get_decision_trees_bulk. :param list valid_indices: list of the indices of the valid agent id. :param list invalid_indices: list...
def nl_socket_modify_err_cb(sk, kind, func, arg):
    """Modify the error callback handler associated with the socket.

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L649

    Positional arguments:
    sk -- Netlink socket (nl_sock class instance).
    kind -- kind of callback (integer).
    func -- callback function.
    arg -- argument to be passed to the callback function.
    """
    rc = nl_cb_err(sk.s_cb, kind, func, arg)
    return int(rc)
Modify the error callback handler associated with the socket. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L649 Positional arguments: sk -- Netlink socket (nl_sock class instance). kind -- kind of callback (integer). func -- callback function. arg -- argument to be passed to ...
def _get_keywords(self, location, keywords): if 'xml' in keywords: keywords.pop('xml') self.xml = True else: keywords['file_type'] = 'json' if 'id' in keywords: if location != 'series': location = location.rstrip('s') ke...
Format GET request's parameters from keywords.
def _remove_exts(self,string): if string.lower().endswith(('.png','.gif','.jpg','.bmp','.jpeg','.ppm','.datauri')): format = string[string.rfind('.') +1 :len(string)] if format.lower() == 'jpg': format = 'jpeg' self.format = format string = str...
Sets the string, to create the Robohash
def deallocate_network_ipv4(self, id_network_ipv4): if not is_valid_int_param(id_network_ipv4): raise InvalidParameterError( u'The identifier of NetworkIPv4 is invalid or was not informed.') url = 'network/ipv4/' + str(id_network_ipv4) + '/deallocate/' code, xml = sel...
Deallocate all relationships between NetworkIPv4. :param id_network_ipv4: ID for NetworkIPv4 :return: Nothing :raise InvalidParameterError: Invalid ID for NetworkIPv4. :raise NetworkIPv4NotFoundError: NetworkIPv4 not found. :raise DataBaseError: Networkapi failed to access the...
async def run_asgi(self): try: result = await self.app(self.scope, self.asgi_receive, self.asgi_send) except BaseException as exc: self.closed_event.set() msg = "Exception in ASGI application\n" self.logger.error(msg, exc_info=exc) if not self....
Wrapper around the ASGI callable, handling exceptions and unexpected termination states.
def info(gandi):
    """Display information about the hosting account and return it."""
    output_keys = ['handle', 'credit', 'prepaid']
    account = gandi.account.all()
    prepaid = gandi.contact.balance().get('prepaid', {})
    account['prepaid_info'] = prepaid
    output_account(gandi, account, output_keys)
    return account
Display information about hosting account.
def package_version(package_name: str) -> typing.Optional[str]:
    """Return the installed version of *package_name*, or None if not found.

    Uses importlib.metadata, the supported stdlib replacement for the
    deprecated pkg_resources API.
    """
    from importlib import metadata
    try:
        return metadata.version(package_name)
    except metadata.PackageNotFoundError:
        return None
Returns package version as a string, or None if it couldn't be found.
def multiply_encrypted_to_plaintext(public, encrypted, plaintext, output): log("Loading public key") publickeydata = json.load(public) pub = load_public_key(publickeydata) log("Loading encrypted number") enc = load_encrypted_number(encrypted, pub) log("Loading unencrypted number") num = floa...
Multiply encrypted num with unencrypted num. Requires a PUBLIC key file, a number ENCRYPTED with that public key also as a file, and the PLAINTEXT number to multiply. Creates a new encrypted number.