code
stringlengths
59
4.4k
docstring
stringlengths
5
7.69k
def save_intraday(data: pd.DataFrame, ticker: str, dt, typ='TRADE'): cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d') logger = logs.get_logger(save_intraday, level='debug') info = f'{ticker} / {cur_dt} / {typ}' data_file = hist_file(ticker=ticker, dt=dt, typ=typ) if not data_file: return if data.e...
Check whether data is done for the day and save Args: data: data ticker: ticker dt: date typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK] Examples: >>> os.environ['BBG_ROOT'] = 'xbbg/tests/data' >>> sample = pd.read_parquet('xbbg/tests/data/aapl.pa...
def fetch_and_filter_tags(self):
    """Fetch every tag, keep only those passing the filter, then fetch their dates."""
    tags = self.fetcher.get_all_tags()
    self.all_tags = tags
    self.filtered_tags = self.get_filtered_tags(tags)
    self.fetch_tags_dates()
Fetch and filter tags, fetch dates and sort them in time order.
def __load_jams_schema(): schema_file = os.path.join(SCHEMA_DIR, 'jams_schema.json') jams_schema = None with open(resource_filename(__name__, schema_file), mode='r') as fdesc: jams_schema = json.load(fdesc) if jams_schema is None: raise JamsError('Unable to load JAMS schema') return ...
Load the schema file from the package.
def MoveToCenter(self) -> bool: if self.IsTopLevel(): rect = self.BoundingRectangle screenWidth, screenHeight = GetScreenSize() x, y = (screenWidth - rect.width()) // 2, (screenHeight - rect.height()) // 2 if x < 0: x = 0 if y < 0: y = 0 re...
Move window to screen center.
def delete_tenant_quota(context, tenant_id):
    """Delete the quota entries for a given tenant_id.

    After deletion this tenant falls back to the default quota values
    defined in the configuration.
    """
    context.session.query(Quota).filter_by(tenant_id=tenant_id).delete()
Delete the quota entries for a given tenant_id. After deletion, this tenant will use the default quota values from conf.
def transform(self, mode=None):
    """Set the current transform mode and return it.

    :param mode: CENTER or CORNER; when falsy, the mode is left unchanged.
    """
    canvas = self._canvas
    if mode:
        canvas.mode = mode
    return canvas.mode
Set the current transform mode. :param mode: CENTER or CORNER
def make_clean_figure(figsize, remove_tooltips=False, remove_keybindings=False): tooltip = mpl.rcParams['toolbar'] if remove_tooltips: mpl.rcParams['toolbar'] = 'None' fig = pl.figure(figsize=figsize) mpl.rcParams['toolbar'] = tooltip if remove_keybindings: fig.canvas.mpl_disconnect(...
Makes a `matplotlib.pyplot.Figure` without tooltips or keybindings Parameters ---------- figsize : tuple Figsize as passed to `matplotlib.pyplot.figure` remove_tooltips, remove_keybindings : bool Set to True to remove the tooltips bar or any key bindings, respectively. Default i...
def _get_valid_formats(): if NO_SOX: return [] so = subprocess.check_output(['sox', '-h']) if type(so) is not str: so = str(so, encoding='UTF-8') so = so.split('\n') idx = [i for i in range(len(so)) if 'AUDIO FILE FORMATS:' in so[i]][0] formats = so[idx].split(' ')[3:] return...
Calls SoX help for a lists of audio formats available with the current install of SoX. Returns: -------- formats : list List of audio file extensions that SoX can process.
def contains(self, clr): if not isinstance(clr, Color): return False if not isinstance(clr, _list): clr = [clr] for clr in clr: if clr.is_grey and not self.grayscale: return (self.black.contains(clr) or \ self.white.cont...
Returns True if the given color is part of this color range. Check whether each h, s, b, a component of the color falls within the defined range for that component. If the given color is grayscale, checks against the definitions for black and white.
def append(self, item):
    """Append *item* to the underlying list.

    Raises AssertionError for objects with a ``dict`` base type, since
    appending is only meaningful for the ``list`` base type.
    """
    kind = self.meta_type
    if kind == 'dict':
        raise AssertionError('Cannot append to object of `dict` base type!')
    if kind == 'list':
        self._list.append(item)
Append to object, if object is list.
def CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error): line = clean_lines.elided[linenum] match = Match(r'^(.*[^ ({>]){', line) if match: leading_text = match.group(1) (endline, endlinenum, endpos) = CloseExpression( clean_lines, linenum, len(match.group(1))) trailing_te...
Checks for horizontal spacing near commas. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. nesting_state: A NestingState instance which maintains information about the current stack o...
def as_unicode(self):
    """Return the JID as a Unicode string, caching this instance by result.

    :return: JID as Unicode string.
    """
    result = self.domain
    if self.local:
        result = self.local + u'@' + result
    if self.resource:
        result = result + u'/' + self.resource
    # dict.has_key() only exists on Python 2; `in` works on both 2 and 3.
    if result not in JID.cache:
        JID.cache[result] = self
    return result
Unicode string JID representation. :return: JID as Unicode string.
def _insert_metadata(cursor, model, publisher, message): params = model.metadata.copy() params['publisher'] = publisher params['publication_message'] = message params['_portal_type'] = _model_to_portaltype(model) params['summary'] = str(cnxepub.DocumentSummaryFormatter(model)) for person_field i...
Insert a module with the given ``metadata``.
def _validate_search_query(self, returning_query): start_index = returning_query.from_index or 0 size = returning_query.size or 0 if start_index < 0: raise CitrinationClientError( "start_index cannot be negative. Please enter a value greater than or equal to zero") ...
Checks to see that the query will not exceed the max query depth :param returning_query: The PIF system or Dataset query to execute. :type returning_query: :class:`PifSystemReturningQuery` or :class: `DatasetReturningQuery`
def ControlFromPoint2(x: int, y: int) -> Control:
    """Get a native window handle at point (x, y) and wrap it via IUIAutomation.ElementFromHandle.

    Return a `Control` subclass.
    """
    handle = WindowFromPoint(x, y)
    element = _AutomationClient.instance().IUIAutomation.ElementFromHandle(handle)
    return Control.CreateControlFromElement(element)
Get a native handle from point x,y and call IUIAutomation.ElementFromHandle. Return `Control` subclass.
def fmt(lbaf=3):
    """Format the NVMe device, returning the return code of `nvme format`.

    :param lbaf: LBA format index passed as ``-l`` to ``nvme format``
    :return: 1 when the environment is invalid, otherwise the command's return code
    """
    if env():
        # Fixed copy-paste error: the message previously named "cij.nvme.exists".
        cij.err("cij.nvme.fmt: Invalid NVMe ENV.")
        return 1

    nvme = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)

    cmd = ["nvme", "format", nvme["DEV_PATH"], "-l", str(lbaf)]
    rcode, _, _ = cij.ssh.command(cmd, shell=True)

    return rcode
Do format for NVMe device
def toBigInt(byteArray):
    """Convert a little-endian byte sequence to a non-negative integer.

    :param byteArray: bytes (or iterable of ints 0-255) in little-endian order
    :return: the decoded integer
    """
    # int.from_bytes replaces the manual struct-based reassembly loop.
    return int.from_bytes(bytes(byteArray), byteorder='little')
Convert the byte array to a BigInteger
def serialize_groups(self, groups):
    """Flatten the serialized rules of every group into a single payload list."""
    serialized = []
    for grp in groups:
        serialized += self.serialize_rules(grp.rules)
    return serialized
Creates a payload for the redis server. The rule schema is the following: REDIS KEY - port_device_id.port_mac_address/sg REDIS VALUE - A JSON dump of the following: port_mac_address must be lower-cased and stripped of non-alphanumeric characters {"id": "<arbitrary uuid...
def custom_showtraceback(
        self,
        exc_tuple=None,
        filename=None,
        tb_offset=None,
        exception_only=False,
        running_compiled_code=False,
):
    """Monkey-patch replacement for IPython InteractiveShell's showtraceback.

    https://stackoverflow.com/questions/1261668/cannot-override-sys-excepthook
    """
    # The caller's `exception_only` is deliberately ignored and forced to True.
    self.default_showtraceback(
        exc_tuple,
        filename,
        tb_offset,
        exception_only=True,
        running_compiled_code=running_compiled_code,
    )
Custom showtraceback for monkey-patching IPython's InteractiveShell https://stackoverflow.com/questions/1261668/cannot-override-sys-excepthook
def GetTopLevelControl(self) -> 'Control': handle = self.NativeWindowHandle if handle: topHandle = GetAncestor(handle, GAFlag.Root) if topHandle: if topHandle == handle: return self else: return ControlFromHa...
Get the top level control in which the current control lies. If the current control is top level, return self. If the current control is the root control, return None. Return `PaneControl` or `WindowControl` or None.
def handle_package_has_file_helper(self, pkg_file): nodes = list(self.graph.triples((None, self.spdx_namespace.fileName, Literal(pkg_file.name)))) if len(nodes) == 1: return nodes[0][0] else: raise InvalidDocumentError('handle_package_has_file_helper could not' + ...
Return node representing pkg_file pkg_file should be instance of spdx.file.
def get_wildcard(self):
    """Return the wildcard-bits notation of the netmask."""
    return _convert(
        self._ip,
        notation=NM_WILDCARD,
        inotation=IP_DOT,
        _check=False,
        _isnm=self._isnm,
    )
Return the wildcard bits notation of the netmask.
def upload_backend(index='dev', user=None):
    """Build the backend and upload it to the remote devpi server at the given index."""
    get_vars()
    use_devpi(index=index)
    # The Makefile in ../application performs the actual build and upload.
    with fab.lcd('../application'):
        fab.local('make upload')
Build the backend and upload it to the remote server at the given index
def refresh(self, token, timeout): assert token in self._dict, "Lock must exist" assert timeout == -1 or timeout > 0 if timeout < 0 or timeout > LockStorageDict.LOCK_TIME_OUT_MAX: timeout = LockStorageDict.LOCK_TIME_OUT_MAX self._lock.acquire_write() try: ...
Modify an existing lock's timeout. token: Valid lock token. timeout: Suggested lifetime in seconds (-1 for infinite). The real expiration time may be shorter than requested! Returns: Lock dictionary. Raises ValueError, if token is inva...
def analyze(fname=False,save=True,show=None): if fname and os.path.exists(fname.replace(".abf",".rst")): print("SKIPPING DUE TO RST FILE") return swhlab.plotting.core.IMAGE_SAVE=save if show is None: if cm.isIpython(): swhlab.plotting.core.IMAGE_SHOW=True else: ...
given a filename or ABF object, try to analyze it.
def from_entity(entity, self_user_id): user_id = UserID(chat_id=entity.id.chat_id, gaia_id=entity.id.gaia_id) return User(user_id, entity.properties.display_name, entity.properties.first_name, entity.properties.photo_url, ...
Construct user from ``Entity`` message. Args: entity: ``Entity`` message. self_user_id (~hangups.user.UserID or None): The ID of the current user. If ``None``, assume ``entity`` is the current user. Returns: :class:`~hangups.user.User` object.
def write_sbml_model(cobra_model, filename, f_replace=F_REPLACE, **kwargs): doc = _model_to_sbml(cobra_model, f_replace=f_replace, **kwargs) if isinstance(filename, string_types): libsbml.writeSBMLToFile(doc, filename) elif hasattr(filename, "write"): sbml_str = libsbml.writeSBMLToString(doc...
Writes cobra model to filename. The created model is SBML level 3 version 1 (L1V3) with fbc package v2 (fbc-v2). If the given filename ends with the suffix ".gz" (for example, "myfile.xml.gz"), libSBML assumes the caller wants the file to be written compressed in gzip format. Similarly, if the giv...
def get_filtered_normalized_events(self): user_image = google_v2_operations.get_action_image(self._op, _ACTION_USER_COMMAND) need_ok = google_v2_operations.is_success(self._op) events = {} for event in google_v2_operations.get_events(self._op): ...
Filter the granular v2 events down to events of interest. Filter through the large number of granular events returned by the pipelines API, and extract only those that are interesting to a user. This is implemented by filtering out events which are known to be uninteresting (i.e. the default actions ru...
def platform_config_dir(): if LINUX: dpath_ = os.environ.get('XDG_CONFIG_HOME', '~/.config') elif DARWIN: dpath_ = '~/Library/Application Support' elif WIN32: dpath_ = os.environ.get('APPDATA', '~/AppData/Roaming') else: raise NotImplementedError('Unknown Platform %r' %...
Returns a directory which should be writable for any application. This should be used for persistent configuration files. Returns: PathLike : path to the config dir used by the current operating system
def _GetNativeEolStyle(platform=sys.platform): _NATIVE_EOL_STYLE_MAP = { 'win32' : EOL_STYLE_WINDOWS, 'linux2' : EOL_STYLE_UNIX, 'linux' : EOL_STYLE_UNIX, 'darwin' : EOL_STYLE_MAC, } result = _NATIVE_EOL_STYLE_MAP.get(platform) if result is None: from ._exceptions...
Internal function that determines EOL_STYLE_NATIVE constant with the proper value for the current platform.
def get_socket(self, sessid=''): socket = self.sockets.get(sessid) if sessid and not socket: return None if socket is None: socket = Socket(self, self.config) self.sockets[socket.sessid] = socket else: socket.incr_hits() return sock...
Return an existing or new client Socket.
def item(proto_dataset_uri, input_file, relpath_in_dataset): proto_dataset = dtoolcore.ProtoDataSet.from_uri( proto_dataset_uri, config_path=CONFIG_PATH) if relpath_in_dataset == "": relpath_in_dataset = os.path.basename(input_file) proto_dataset.put_item(input_file, relpath_in_datas...
Add a file to the proto dataset.
def _in_gce_environment():
    """Detect if the code is running in the Compute Engine environment.

    Returns:
        True if running in the GCE environment, False otherwise.
    """
    env = SETTINGS.env_name
    if env is not None:
        return env == 'GCE_PRODUCTION'

    if NO_GCE_CHECK != 'True' and _detect_gce_environment():
        SETTINGS.env_name = 'GCE_PRODUCTION'
        return True
    return False
Detect if the code is running in the Compute Engine environment. Returns: True if running in the GCE environment, False otherwise.
def index(self, i, length=None): if self.begin <= i <= self.end: index = i - self.BEGIN - self.offset if length is None: length = self.full_range() else: length = min(length, self.full_range()) if 0 <= index < length: ...
Return an integer index or None
def clippedObj(obj, maxElementSize=64): if hasattr(obj, '_asdict'): obj = obj._asdict() if isinstance(obj, dict): objOut = dict() for key,val in obj.iteritems(): objOut[key] = clippedObj(val) elif hasattr(obj, '__iter__'): objOut = [] for val in obj: objOut.append(clippedObj(val)) ...
Return a clipped version of obj suitable for printing, This is useful when generating log messages by printing data structures, but don't want the message to be too long. If passed in a dict, list, or namedtuple, each element of the structure's string representation will be limited to 'maxElementSize' characte...
def sorted(list, cmp=None, reversed=False):
    """Return a sorted copy of the list.

    :param list: iterable to copy and sort (parameter names kept for callers)
    :param cmp: optional old-style comparison function returning
        negative/zero/positive
    :param reversed: when True, return the copy in descending order
    """
    copy = [x for x in list]
    if cmp is None:
        copy.sort()
    else:
        # list.sort(cmp) was removed in Python 3; adapt via cmp_to_key
        # (available since Python 2.7, so this stays backward compatible).
        import functools
        copy.sort(key=functools.cmp_to_key(cmp))
    if reversed:
        copy.reverse()
    return copy
Returns a sorted copy of the list.
def reposition(self, frame_no):
    """Reposition marker bodies to a specific frame of data.

    Parameters
    ----------
    frame_no : int
        Frame index; each marker body receives the stored world-coordinate
        position and linear velocity for that frame.
    """
    for label, chan in self.channels.items():
        marker = self.bodies[label]
        marker.position = self.positions[frame_no, chan]
        marker.linear_velocity = self.velocities[frame_no, chan]
Reposition markers to a specific frame of data. Parameters ---------- frame_no : int The frame of data where we should reposition marker bodies. Markers will be positioned in the appropriate places in world coordinates. In addition, linear velocities of the m...
def draw_on_image(self, image, color=(0, 255, 0), alpha=1.0, size=1, copy=True, raise_if_out_of_image=False, thickness=None): image = np.copy(image) if copy else image for bb in self.bounding_boxes: image = bb.draw_on_image( image, color=...
Draw all bounding boxes onto a given image. Parameters ---------- image : (H,W,3) ndarray The image onto which to draw the bounding boxes. This image should usually have the same shape as set in BoundingBoxesOnImage.shape. color : int or list of int ...
def list(self, key_name=None, max_suggestions=100, cutoff=0.5, locked_only=False, key_type=None): self._assert_valid_stash() key_list = [k for k in self._storage.list() if k['name'] != 'stored_passphrase' and ...
Return a list of all keys.
def refresh_maps(self): for robot in self.robots: resp2 = ( requests.get(urljoin(self.ENDPOINT, 'users/me/robots/{}/maps'.format(robot.serial)), headers=self._headers)) resp2.raise_for_status() self._maps.update({robot.serial: resp...
Get information about maps of the robots. :return:
def lint_file(in_file, out_file=None):
    """Strip surrounding whitespace from every line of a file.

    :param file in_file: A readable file or file-like
    :param file out_file: A writable file or file-like (defaults to stdout)
    """
    for raw_line in in_file:
        print(raw_line.strip(), file=out_file)
Helps remove extraneous whitespace from the lines of a file :param file in_file: A readable file or file-like :param file out_file: A writable file or file-like
def codestr2rst(codestr, lang='python'):
    """Return a reStructuredText code block wrapping *codestr*."""
    header = "\n.. code-block:: {0}\n\n".format(lang)
    return header + indent(codestr, ' ' * 4)
Return reStructuredText code block from code string
def send(self, command, timeout=5):
    """Send a command line to the server.

    :param string command: command to send
    :param timeout: seconds to wait for the socket to become writable
    """
    logger.info(u'Sending %s' % command)
    _, ready, _ = select.select([], [self.sock], [], timeout)
    if not ready:
        raise SendTimeoutError()
    ready[0].sendall(command + '\n')
Send a command to the server :param string command: command to send
async def get_entity_by_id(self, get_entity_by_id_request):
    """Return one or more user entities.

    Searching by phone number only finds entities whose phone number is in
    your contacts (and not always even then), and can't be used to find
    Google Voice contacts.
    """
    response = hangouts_pb2.GetEntityByIdResponse()
    await self._pb_request(
        'contacts/getentitybyid', get_entity_by_id_request, response)
    return response
Return one or more user entities. Searching by phone number only finds entities when their phone number is in your contacts (and not always even then), and can't be used to find Google Voice contacts.
def next(self, data):
    """Derive fresh output bytes from *data* and the internally stored key.

    The KDF is fed the current key plus *data*; the first 32 derived bytes
    become the new internal key and the remaining bytes are returned.
    """
    self.__length += 1
    derived = self.__kdf.calculate(self.__key, data, 64)
    new_key, output = derived[:32], derived[32:]
    self.__key = new_key
    return output
Derive a new set of internal and output data from given input data and the data stored internally. Use the key derivation function to derive new data. The kdf gets supplied with the current key and the data passed to this method. :param data: A bytes-like object encoding the data to pa...
def auth_finish(self, _unused):
    """Handle success of the legacy authentication."""
    self.lock.acquire()
    try:
        self.__logger.debug("Authenticated")
        self.authenticated = True
        self.state_change("authorized", self.my_jid)
        self._post_auth()
    finally:
        # Always release, even if a state-change handler raises.
        self.lock.release()
Handle success of the legacy authentication.
def convert_gemm(params, w_name, scope_name, inputs, layers, weights, names): print('Converting Linear ...') if names == 'short': tf_name = 'FC' + random_string(6) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) bias_name = '{0}.bias'.form...
Convert Linear. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras laye...
def find_duplicates(items, k=2, key=None): duplicates = defaultdict(list) if key is None: for count, item in enumerate(items): duplicates[item].append(count) else: for count, item in enumerate(items): duplicates[key(item)].append(count) for key in list(duplicates....
Find all duplicate items in a list. Search for all items that appear more than `k` times and return a mapping from each (k)-duplicate item to the positions it appeared in. Args: items (Iterable): hashable items possibly containing duplicates k (int): only return items that appear at least ...
def parse_line(line, document=None): result = re.match(line_pattern, line) if result: _, lineno, offset, severity, msg = result.groups() lineno = int(lineno or 1) offset = int(offset or 0) errno = 2 if severity == 'error': errno = 1 diag = { ...
Return a language-server diagnostic from a line of the Mypy error report; optionally, use the whole document to provide more context on it.
def detail(self, *args, **kwargs): prefix = kwargs.pop("prefix", default_prefix) kwargs["votes"] = list(set(kwargs["votes"])) return OrderedDict( [ ("memo_key", PublicKey(kwargs["memo_key"], prefix=prefix)), ("voting_account", ObjectId(kwargs["voting_a...
This is an example of how to sort votes prior to using them in the Object.
def update_collaboration(self): for field in record_get_field_instances(self.record, '710'): subs = field_get_subfield_instances(field) for idx, (key, value) in enumerate(subs[:]): if key == '5': subs.pop(idx) elif value.startswith('CER...
710 Collaboration.
def _get_result_msg_and_payload(result, stream): groups = _GDB_MI_RESULT_RE.match(result).groups() token = int(groups[0]) if groups[0] != "" else None message = groups[1] if groups[2] is None: payload = None else: stream.advance_past_chars([","]) payload = _parse_dict(stream)...
Get result message and payload dict
def login(email=None, password=None, api_key=None, application='Default', url=None, verify_ssl_certificate=True): try: input_ = raw_input except NameError: input_ = input if url is None: url = input_('Server URL: ') url = url.rstrip('/') if session.communicator is N...
Do the legwork of logging into the Midas Server instance, storing the API key and token. :param email: (optional) Email address to login with. If not set, the console will be prompted. :type email: None | string :param password: (optional) User password to login with. If not set and no ...
def delete(self, wait=True):
    """Delete this droplet.

    Parameters
    ----------
    wait: bool, default True
        Whether to block until the pending action is completed
    """
    response = self.parent.delete(self.id)
    if wait:
        self.wait()
    return response
Delete this droplet Parameters ---------- wait: bool, default True Whether to block until the pending action is completed
def create(self, list_id, data):
    """Add a new segment to the list."""
    url = self._build_path(list_id, 'segments')
    return self._mc_client._post(url=url, data=data)
adds a new segment to the list.
def upgrade(safe=True):
    """Upgrade all packages via the system package manager.

    :param safe: when True run ``upgrade``; otherwise ``dist-upgrade``.
    """
    cmd = 'upgrade' if safe else 'dist-upgrade'
    # Explicit arguments instead of the fragile `% locals()` idiom, which
    # silently depends on local variable names.
    run_as_root("%s --assume-yes %s" % (MANAGER, cmd), pty=False)
Upgrade all packages.
def http_exception_error_handler(exception):
    """Handle an HTTP exception.

    :param werkzeug.exceptions.HTTPException exception: Raised exception

    A response is returned, as formatted by the :py:func:`response` function.
    """
    exc_type = type(exception)
    assert issubclass(exc_type, HTTPException), exc_type
    assert hasattr(exception, "code")
    assert hasattr(exception, "description")
    return response(exception.code, exception.description)
Handle HTTP exception :param werkzeug.exceptions.HTTPException exception: Raised exception A response is returned, as formatted by the :py:func:`response` function.
def id_to_word(self, word_id):
    """Return the word string for an integer word id.

    Out-of-range ids — including negative ones, which previously indexed
    silently from the end of the vocabulary — map to the <unk> token.
    """
    if 0 <= word_id < len(self.reverse_vocab):
        return self.reverse_vocab[word_id]
    return self.reverse_vocab[self.unk_id]
Returns the word string of an integer word id.
def new_knitting_pattern_set_loader(specification=None):
    """Create a loader for a knitting pattern set.

    :param specification: a :class:`specification
      <knittingpattern.ParsingSpecification.ParsingSpecification>` for the
      knitting pattern set; defaults to a fresh :class:`DefaultSpecification`.
    """
    # Build the default lazily: a default-argument instance would be created
    # once at import time and shared by every call.
    if specification is None:
        specification = DefaultSpecification()
    parser = specification.new_parser(specification)
    loader = specification.new_loader(parser.knitting_pattern_set)
    return loader
Create a loader for a knitting pattern set. :param specification: a :class:`specification <knittingpattern.ParsingSpecification.ParsingSpecification>` for the knitting pattern set, default :class:`DefaultSpecification`
def currency(self):
    """Return the currency of the current subscription, or "" when unavailable."""
    try:
        home = self.info["viewer"]["home"]
        subscription = home["currentSubscription"]
        return subscription["priceInfo"]["current"]["currency"]
    except (KeyError, TypeError, IndexError):
        _LOGGER.error("Could not find currency.")
        return ""
Return the currency.
def run(self, cmd, *args): if self.manager is None: raise Exception("Fatal internal error: Missing repository manager") if cmd not in dir(self.manager): raise Exception("Fatal internal error: Invalid command {} being run".format(cmd)) func = getattr(self.manager, cmd) ...
Run a specific command using the manager
def copy_file(src, dest):
    """Copy ``src`` to ``dest``, creating ``dest``'s directory tree if needed.

    :type src: str|unicode
    :type dest: str|unicode
    """
    dir_path = os.path.dirname(dest)
    # Guard against a bare filename (dirname == ''), and let makedirs
    # tolerate an existing directory instead of racing an exists() check.
    if dir_path:
        os.makedirs(dir_path, exist_ok=True)
    shutil.copy2(src, dest)
Copy file helper method :type src: str|unicode :type dest: str|unicode
def _call_api(self, verb, url, **request_kwargs): api = 'https://api.github.com{}'.format(url) auth_headers = {'Authorization': 'token {}'.format(self.api_token)} headers = {**auth_headers, **request_kwargs.pop('headers', {})} return getattr(requests, verb)(api, headers=headers, **reques...
Perform a github API call Args: verb (str): Can be "post", "put", or "get" url (str): The base URL with a leading slash for Github API (v3) auth (str or HTTPBasicAuth): A Github API token or a HTTPBasicAuth object
def error_message(self, message, fh=None, prefix="[error]:", suffix="..."):
    """Print an error-type message; colored red when written to ``sys.stderr``.

    :param str message: message to print
    :param file fh: file handle, defaults to ``sys.stderr``
    :param str prefix: message prefix, default ``[error]:``
    :param str suffix: message suffix, default ``...``
    """
    text = prefix + message + suffix
    target = fh or sys.stderr
    if target is sys.stderr:
        termcolor.cprint(text, color="red")
    else:
        target.write(text)
print error type message if file handle is `sys.stderr`, print color message :param str message: message to print :param file fh: file handle, default is `sys.stdout` :param str prefix: message prefix,default is `[error]` :param str suffix: message suffix ,default is '...' ...
def gather(obj):
    """Retrieve objects that have been distributed, making them local again.

    Objects exposing ``__distob_gather__`` gather themselves; sequences
    (but not strings) are gathered element-wise; anything else is
    returned unchanged.
    """
    if hasattr(obj, '__distob_gather__'):
        return obj.__distob_gather__()
    # collections.Sequence was removed in Python 3.10; collections.abc
    # has been the correct home since Python 3.3.
    if (isinstance(obj, collections.abc.Sequence) and
            not isinstance(obj, string_types)):
        return [gather(subobj) for subobj in obj]
    return obj
Retrieve objects that have been distributed, making them local again
def get_chat_id(self, message):
    """Return the sender's user ID for a "private" chat, the chat ID otherwise.

    Telegram chat type can be "private", "group", "supergroup" or "channel".
    """
    chat = message.chat
    return message.user.id if chat.type == 'private' else chat.id
Telegram chat type can be either "private", "group", "supergroup" or "channel". Return user ID if it is of type "private", chat ID otherwise.
def publish(self, registry=None): if (registry is None) or (registry == registry_access.Registry_Base_URL): if 'private' in self.description and self.description['private']: return "this %s is private and cannot be published" % (self.description_filename.split('.')[0]) upload...
Publish to the appropriate registry, return a description of any errors that occurred, or None if successful. No VCS tagging is performed.
def get_resources(self, names=None, stores=None, workspaces=None): stores = self.get_stores( names = stores, workspaces = workspaces ) resources = [] for s in stores: try: resources.extend(s.get_resources()) except FailedReq...
Resources include feature stores, coverage stores and WMS stores, however does not include layer groups. names, stores and workspaces can be provided as a comma delimited strings or as arrays, and are used for filtering. Will always return an array.
def plot_3(data, ss, *args): if len(data) <= 1: warnings.warn("Only one datapoint. Could not compute t-SNE embedding.") return None scores = np.array([d['mean_test_score'] for d in data]) warped = np.array([ss.point_to_unit(d['parameters']) for d in data]) X = TSNE(n_components=2).fit_tr...
t-SNE embedding of the parameters, colored by score
def set_concluded_license(self, doc, lic): if self.has_package(doc) and self.has_file(doc): if not self.file_conc_lics_set: self.file_conc_lics_set = True if validations.validate_lics_conc(lic): self.file(doc).conc_lics = lic re...
Raises OrderError if no package or file defined. Raises CardinalityError if already set. Raises SPDXValueError if malformed.
def get_argument_starttime(self):
    """Return the starttime request argument.

    Raises Exception when the argument is missing.
    """
    try:
        return self.get_argument(constants.PARAM_STARTTIME)
    except tornado.web.MissingArgumentError as e:
        raise Exception(e.log_message)
Helper function to get starttime argument. Raises exception if argument is missing. Returns the starttime argument.
def render_to_message(self, extra_context=None, *args, **kwargs): message = super(TemplatedHTMLEmailMessageView, self)\ .render_to_message(extra_context, *args, **kwargs) if extra_context is None: extra_context = {} context = self.get_context_data(**extra_context) ...
Renders and returns an unsent message with the given context. Any extra keyword arguments passed will be passed through as keyword arguments to the message constructor. :param extra_context: Any additional context to use when rendering templated content. :type extra_context...
def _load_credentials_file(credentials_file): try: credentials_file.seek(0) data = json.load(credentials_file) except Exception: logger.warning( 'Credentials file could not be loaded, will ignore and ' 'overwrite.') return {} if data.get('file_version'...
Load credentials from the given file handle. The file is expected to be in this format: { "file_version": 2, "credentials": { "key": "base64 encoded json representation of credentials." } } This function will warn and return empty credential...
def padnames(names):
    """Pad names to a common width for loci output.

    Returns an array of padded names plus the matching "//" separator pad.
    """
    width = max(len(name) for name in names)
    pad = 5
    padded = [name.ljust(width + pad) for name in names]
    snppad = "//" + " " * (width - 2 + pad)
    return np.array(padded), snppad
pads names for loci output
def stop(self):
    """Stop the resolver threads by queueing one ``None`` sentinel per thread."""
    with self.lock:
        for _ in self.threads:
            self.queue.put(None)
Stop the resolver threads.
def sys_rename(self, oldnamep, newnamep):
    """Rename the file named at pointer `oldnamep` to the name at `newnamep`.

    :param int oldnamep: pointer to oldname
    :param int newnamep: pointer to newname
    :return: 0 on success, -errno on failure
    """
    oldname = self.current.read_string(oldnamep)
    newname = self.current.read_string(newnamep)
    try:
        os.rename(oldname, newname)
    except OSError as e:
        return -e.errno
    return 0
Rename filename `oldnamep` to `newnamep`. :param int oldnamep: pointer to oldname :param int newnamep: pointer to newname
def remove_members(self, to_remove):
    """Remove objects from the group.

    Parameters
    ----------
    to_remove : list
        A list of cobra objects to remove from the group
    """
    # A lone string or id-bearing object was probably meant to be a list.
    if isinstance(to_remove, string_types) or hasattr(to_remove, "id"):
        warn("need to pass in a list")
        to_remove = [to_remove]
    self._members.difference_update(to_remove)
Remove objects from the group. Parameters ---------- to_remove : list A list of cobra objects to remove from the group
def create_input_peptides_files( peptides, max_peptides_per_file=None, group_by_length=False): if group_by_length: peptide_lengths = {len(p) for p in peptides} peptide_groups = {l: [] for l in peptide_lengths} for p in peptides: peptide_groups[len(p)].appe...
Creates one or more files containing one peptide per line, returns names of files.
def unlink_f(path):
    """Unlink *path* but do not complain if the file does not exist."""
    try:
        os.unlink(path)
    except OSError as exc:
        if exc.errno == errno.ENOENT:
            return
        raise
Unlink path but do not complain if file does not exist.
def draw_lines_heatmap_array(self, image_shape, alpha=1.0, size=1, antialiased=True, raise_if_out_of_image=False): assert len(image_shape) == 2 or ( len(image_shape) == 3 and image_shape[-1] == 1), ( "Expected (H,W) or (H,...
Draw the line segments of the line string as a heatmap array. Parameters ---------- image_shape : tuple of int The shape of the image onto which to draw the line mask. alpha : float, optional Opacity of the line string. Higher values denote a more visible ...
def import_pyfile(filepath, mod_name=None): import sys if sys.version_info.major == 3: import importlib.machinery loader = importlib.machinery.SourceFileLoader('', filepath) mod = loader.load_module(mod_name) else: import imp mod = imp.load_source(mod_name, filepath) ...
Imports the contents of filepath as a Python module. :param filepath: string :param mod_name: string Name of the module when imported :return: module Imported module
def labels(self):
    """Return the marker label names sorted by their channel index."""
    channel_of = self.channels.__getitem__
    return sorted(self.channels, key=channel_of)
Return the names of our marker labels in canonical order.
def get_devpi_url(ctx): cmd = 'devpi use --urls' lines = ctx.run(cmd, hide='out', echo=False).stdout.splitlines() for line in lines: try: line, base_url = line.split(':', 1) except ValueError: notify.warning('Ignoring "{}"!'.format(line)) else: if ...
Get currently used 'devpi' base URL.
async def get_oauth_verifier(oauth_token): url = "https://api.twitter.com/oauth/authorize?oauth_token=" + oauth_token try: browser = webbrowser.open(url) await asyncio.sleep(2) if not browser: raise RuntimeError except RuntimeError: print("could not open a browser...
Open authorize page in a browser, print the url if it didn't work Arguments --------- oauth_token : str The oauth token received in :func:`get_oauth_token` Returns ------- str The PIN entered by the user
def layers(self):
    """Render the layer entries suitable for mapbox-gl ``map.addLayer()`` calls."""
    return [self._layer_def(style) for style in self.styles]
Renders the list of layers to add to the map. Returns: layers (list): list of layer entries suitable for use in mapbox-gl 'map.addLayer()' call
def objHasUnsavedChanges(self):
    """objHasUnsavedChanges - Check if the held object has unsaved changes, cascading."""
    if self.obj:
        return self.obj.hasUnsavedChanges(cascadeObjects=True)
    return False
objHasUnsavedChanges - Check if any object has unsaved changes, cascading.
def parse_list_line_windows(self, b): line = b.decode(encoding=self.encoding).rstrip("\r\n") date_time_end = line.index("M") date_time_str = line[:date_time_end + 1].strip().split(" ") date_time_str = " ".join([x for x in date_time_str if len(x) > 0]) line = line[date_time_end + ...
Parsing Microsoft Windows `dir` output :param b: response line :type b: :py:class:`bytes` or :py:class:`str` :return: (path, info) :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`)
def get_incorrect_names_by_namespace(graph: BELGraph, namespace: str) -> Set[str]:
    """Return the set of all incorrect names from the given namespace in the graph.

    :return: The set of all incorrect names from the given namespace in the graph
    """
    name_warnings = (MissingNamespaceNameWarning, MissingNamespaceRegexWarning)
    return {
        exc.name
        for _, exc, _ in graph.warnings
        if isinstance(exc, name_warnings) and exc.namespace == namespace
    }
Return the set of all incorrect names from the given namespace in the graph. :return: The set of all incorrect names from the given namespace in the graph
def _sample_with_priority(self, p): parent = 0 while True: left = 2 * parent + 1 if left >= len(self._memory): return parent left_p = self._memory[left] if left < self._capacity - 1 \ else (self._memory[left].priority or 0) ...
Sample random element with priority greater than p.
def get_fsapi_endpoint(self):
    """Parse the fsapi endpoint from the device URL."""
    endpoint = yield from self.__session.get(
        self.fsapi_device_url, timeout=self.timeout)
    text = yield from endpoint.text(encoding='utf-8')
    doc = objectify.fromstring(text)
    return doc.webfsapi.text
Parse the fsapi endpoint from the device url.
def fit_transform(self, X, y=None, **params):
    """Fit the wrapped transformer on the stacked points and transform them.

    Parameters
    ----------
    X : :class:`Features` or list of bag feature arrays
        Data to train on and transform.
    any other keyword argument :
        Passed on as keyword arguments to the transformer's ``fit_transform()``.
    """
    feats = as_features(X, stack=True)
    transformed = self.transformer.fit_transform(feats.stacked_features, y, **params)
    return self._gather_outputs(feats, transformed)
Fit and transform the stacked points. Parameters ---------- X : :class:`Features` or list of bag feature arrays Data to train on and transform. any other keyword argument : Passed on as keyword arguments to the transformer's ``transform()``. Returns ...
def to_representation(self, instance): request = self.context['request'] enterprise_customer = instance.enterprise_customer representation = super(EnterpriseCustomerCatalogDetailSerializer, self).to_representation(instance) paginated_content = instance.get_paginated_content(request.GET) ...
Serialize the EnterpriseCustomerCatalog object. Arguments: instance (EnterpriseCustomerCatalog): The EnterpriseCustomerCatalog to serialize. Returns: dict: The EnterpriseCustomerCatalog converted to a dict.
def encode(self, word, max_length=8): word = ''.join( char for char in word.lower() if char in self._initial_phones ) if not word: word = '÷' values = [self._initial_phones[word[0]]] values += [self._trailing_phones[char] for char in word[1:]] shif...
Return the eudex phonetic hash of a word. Parameters ---------- word : str The word to transform max_length : int The length in bits of the code returned (default 8) Returns ------- int The eudex hash Examples ...
def create_archive(directory, filename, config={}, ignore_predicate=None, ignored_files=['.git', '.svn']): with zipfile.ZipFile(filename, 'w', compression=zipfile.ZIP_DEFLATED) as zip_file: root_len = len(os.path.abspath(directory)) out("Creating archive: " + str(filename)) for root, dirs, f...
Creates an archive from a directory and returns the file that was created.
def all_childnodes_to_nifti1img(h5group): child_nodes = [] def append_parent_if_dataset(name, obj): if isinstance(obj, h5py.Dataset): if name.split('/')[-1] == 'data': child_nodes.append(obj.parent) vols = [] h5group.visititems(append_parent_if_dataset) for c in c...
Returns in a list all images found under h5group. Parameters ---------- h5group: h5py.Group HDF group Returns ------- list of nifti1Image
def t_t_isopen(self, t):
    r'"|\''
    # The raw string above is NOT documentation: PLY uses a token function's
    # docstring as its regex, here matching a single " or ' character.
    # Push the lexer into the matching interpolated-string state.
    if t.value[0] == '"':
        t.lexer.push_state('istringquotes')
    elif t.value[0] == '\'':
        t.lexer.push_state('istringapostrophe')
    return t
r'"|\
def expandvars_dict(settings):
    """Expand all environment variables in a settings dictionary.

    :param settings: mapping of setting name to string value
    :return: new dict with ``os.path.expandvars`` applied to every value
    """
    # dict.iteritems() only exists on Python 2; .items() works on 2 and 3.
    return {
        key: os.path.expandvars(value)
        for key, value in settings.items()
    }
Expands all environment variables in a settings dictionary.
def check_pre_requirements(pre_requirements): pre_requirements = set(pre_requirements or []) pre_requirements.add('virtualenv') for requirement in pre_requirements: if not which(requirement): print_error('Requirement {0!r} is not found in system'. format(requireme...
Check all necessary system requirements to exist. :param pre_requirements: Sequence of pre-requirements to check by running ``where <pre_requirement>`` on Windows and ``which ...`` elsewhere.
def get_images_bytesize_match(self, images): cnt = 0 max_bytes_size = 15728640 good_images = [] for image in images: if cnt > 30: return good_images src = self.parser.getAttribute(image, attr='src') src = self.build_image_path(src) ...
\ Loop through all the images and find the ones whose byte size is large enough to make them candidates.
def _parse_revision_date(self): r doc_datetime = None if not self.is_draft: date_command = LatexCommand( 'date', {'name': 'content', 'required': True, 'bracket': '{'}) try: parsed = next(date_command.parse(self._tex)) ...
r"""Parse the ``\date`` command, falling back to getting the most recent Git commit date and the current datetime. Result is available from the `revision_datetime` attribute.