code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _normalize_value_ms(cls, value): value = round(value / 1000) * 1000 sorted_units = sorted(cls.UNITS_IN_MILLISECONDS.items(), key=lambda x: x[1], reverse=True) for unit, unit_in_ms in sorted_units: unit_value = value / unit_in_ms if unit_v...
Normalize a value in ms to the largest unit possible without decimal places. Note that this ignores fractions of a second and always returns a value _at least_ in seconds. :return: the normalized value and unit name :rtype: Tuple[Union[int, float], str]
def alerts(self):
    """Query for the alerts attached to this incident."""
    # Alerts live under /<endpoint>/<id>/alerts for this incident.
    alerts_endpoint = '/'.join((self.endpoint, self.id, 'alerts'))
    return self.alertFactory.find(
        endpoint=alerts_endpoint,
        api_key=self.api_key,
    )
Query for alerts attached to this incident.
def _format_default(client, value):
    """Format a default value, resolving ``File`` defaults to paths
    relative to the current directory."""
    if not isinstance(value, File):
        return value
    resolved = (client.workflow_path / value.path).resolve()
    return os.path.relpath(str(resolved))
Format default values.
def fft(a, n=None, axis=-1, norm=None):
    """Compute the one-dimensional discrete Fourier Transform via MKL.

    Mirrors ``numpy.fft.fft``: when ``norm`` requests unitary scaling,
    the result is divided by the square root of the transform length
    along ``axis``.
    """
    result = mkl_fft.fft(a, n, axis)
    if _unitary(norm):
        result *= 1 / sqrt(result.shape[axis])
    return result
Compute the one-dimensional discrete Fourier Transform. This function computes the one-dimensional *n*-point discrete Fourier Transform (DFT) with the efficient Fast Fourier Transform (FFT) algorithm [CT]. Parameters ---------- a : array_like Input array, can be complex. n : int, o...
def sign(self, msg, key):
    """Create a signature over a message as defined in RFC 7515 using a
    symmetric key.

    :param msg: The message bytes to sign.
    :param key: The symmetric key.
    :return: The HMAC signature bytes.
    """
    mac = hmac.HMAC(key, self.algorithm(), default_backend())
    mac.update(msg)
    return mac.finalize()
Create a signature over a message as defined in RFC7515 using a symmetric key :param msg: The message :param key: The key :return: A signature
def default_start(): (config, daemon, pidfile, startup, fork) = parsearg() if config is None: if os.path.isfile('/etc/vlcp.conf'): config = '/etc/vlcp.conf' else: print('/etc/vlcp.conf is not found; start without configurations.') elif not config: config = Non...
Use `sys.argv` for starting parameters. This is the entry-point of `vlcp-start`
def add_context_menu_items(self, items, replace_items=False): for label, action in items: assert isinstance(label, basestring) assert isinstance(action, basestring) if replace_items: self._context_menu_items = [] self._context_menu_items.extend(items) ...
Adds context menu items. If replace_items is True all previous context menu items will be removed.
def _assert_can_do_op(self, value):
    """Check that ``value`` is valid for a scalar op; raise TypeError otherwise."""
    if is_scalar(value):
        return
    raise TypeError(
        "'value' must be a scalar, passed: {0}".format(type(value).__name__))
Check value is valid for scalar op.
def pwgen(length=None): if length is None: length = random.choice(range(35, 45)) alphanumeric_chars = [ l for l in (string.ascii_letters + string.digits) if l not in 'l0QD1vAEIOUaeiou'] random_generator = random.SystemRandom() random_chars = [ random_generator.choice(alph...
Generate a random password.
def _unsubscribe_myself(self):
    """Unsubscribe this base station from all events."""
    return self._session.query(
        UNSUBSCRIBE_ENDPOINT, method='GET', raw=True, stream=False)
Unsubscribe this base station for all events.
def make_frequency_series(vec): if isinstance(vec, FrequencySeries): return vec if isinstance(vec, TimeSeries): N = len(vec) n = N/2+1 delta_f = 1.0 / N / vec.delta_t vectilde = FrequencySeries(zeros(n, dtype=complex_same_precision_as(vec)), ...
Return a frequency series of the input vector. If the input is a frequency series it is returned, else if the input vector is a real time series it is fourier transformed and returned as a frequency series. Parameters ---------- vector : TimeSeries or FrequencySeries Returns ------- ...
def create_args(line, namespace): args = [] for arg in shlex.split(line): if not arg: continue if arg[0] == '$': var_name = arg[1:] if var_name in namespace: args.append((namespace[var_name])) else: raise Exception('Undefined variable referenced i...
Expand any meta-variable references in the argument list.
def open(self, filename, mode='r', **kwargs):
    """Open the file on the backend and return a file-like object.

    :param str filename: the storage root-relative filename
    :param str mode: the open mode (``(r|w)b?``)
    :raises FileNotFound: if trying to read a file that does not exist
    """
    reading = 'r' in mode
    if reading and not self.backend.exists(filename):
        raise FileNotFound(filename)
    return self.backend.open(filename, mode, **kwargs)
Open the file and return a file-like object. :param str filename: The storage root-relative filename :param str mode: The open mode (``(r|w)b?``) :raises FileNotFound: If trying to read a file that does not exists
def _get_queue_for_the_action(self, action): mod = getattr(action, 'module_type', 'fork') queues = list(self.q_by_mod[mod].items()) if not queues: return (0, None) self.rr_qid = (self.rr_qid + 1) % len(queues) (worker_id, queue) = queues[self.rr_qid] return (w...
Find action queue for the action depending on the module. The id is found with action modulo on action id :param a: the action that need action queue to be assigned :type action: object :return: worker id and queue. (0, None) if no queue for the module_type :rtype: tuple
def search_associations_go( subject_category=None, object_category=None, relation=None, subject=None, **kwargs): go_golr_url = "http://golr.geneontology.org/solr/" go_solr = pysolr.Solr(go_golr_url, timeout=5) go_solr.get_session().headers['User-Agent'] = get_user_age...
Perform association search using Monarch golr
def print_token(self, token_node_index):
    """Return the string representation of a token node."""
    node = self.nodes[token_node_index]
    assert isinstance(node, TokenNode), "The given node is not a token node."
    # The token's text is the slice of the document between onset and offset.
    return self.text[node.onset:node.offset]
returns the string representation of a token.
def regexer_for_targets(targets):
    """Yield each target file paired with the regexer registered for its
    file extension."""
    for target in targets:
        _, extension = os.path.splitext(target)
        yield target, config.regexers[extension]
Pairs up target files with their correct regex
def write(self, equities=None, futures=None, exchanges=None, root_symbols=None, equity_supplementary_mappings=None, chunk_size=DEFAULT_CHUNK_SIZE): if exchanges is None: exchange_names = [ df['exchang...
Write asset metadata to a sqlite database. Parameters ---------- equities : pd.DataFrame, optional The equity metadata. The columns for this dataframe are: symbol : str The ticker symbol for this equity. asset_name : str ...
def namedb_is_name_zonefile_hash(cur, name, zonefile_hash): select_query = 'SELECT COUNT(value_hash) FROM history WHERE history_id = ? AND value_hash = ?' select_args = (name,zonefile_hash) rows = namedb_query_execute(cur, select_query, select_args) count = None for r in rows: count = r['COU...
Determine if a zone file hash was sent by a name. Return True if so, false if not
def decode_mysql_literal(text): if MYSQL_NULL_PATTERN.match(text): return None if MYSQL_BOOLEAN_PATTERN.match(text): return text.lower() == "true" if MYSQL_FLOAT_PATTERN.match(text): return float(text) if MYSQL_INT_PATTERN.match(text): return int(text) if MYSQL_STRING...
Attempts to decode given MySQL literal into Python value. :param text: Value to be decoded, as MySQL literal. :type text: str :return: Python version of the given MySQL literal. :rtype: any
def importPreflibFile(self, fileName): elecFileObj = open(fileName, 'r') self.candMap, rankMaps, wmgMapsCounts, self.numVoters = prefpy_io.read_election_file(elecFileObj) elecFileObj.close() self.numCands = len(self.candMap.keys()) self.preferences = [] for i in range(0, ...
Imports a preflib format file that contains all the information of a Profile. This function will completely override all members of the current Profile object. Currently, we assume that in an election where incomplete ordering are allowed, if a voter ranks only one candidate, then the voter di...
def get_locale_choices(locale_dir): file_name_s = os.listdir(locale_dir) choice_s = [] for file_name in file_name_s: if file_name.endswith(I18n.TT_FILE_EXT_STXT): file_name_noext, _ = os.path.splitext(file_name) if file_name_noext: ...
Get a list of locale file names in the given locale dir.
def __prepare_info_from_dicomdir_file(self, writedicomdirfile=True): createdcmdir = True dicomdirfile = os.path.join(self.dirpath, self.dicomdir_filename) ftype = 'pickle' if os.path.exists(dicomdirfile): try: dcmdirplus = misc.obj_from_file(dicomdirfile, ftyp...
Check if the dicomdir file exists and load it, or create it. dcmdir = get_dir(dirpath) dcmdir: list with filenames, SeriesNumber and SliceLocation
def tilt_residual(params, data, mask):
    """Return flattened lmfit tilt residuals: ``data`` minus the tilt model,
    restricted to ``mask``."""
    background = tilt_model(params, shape=data.shape)
    residual = (data - background)[mask]
    return residual.flatten()
lmfit tilt residuals
def _try_to_compute_deterministic_class_id(cls, depth=5): class_id = pickle.dumps(cls) for _ in range(depth): new_class_id = pickle.dumps(pickle.loads(class_id)) if new_class_id == class_id: return hashlib.sha1(new_class_id).digest() class_id = new_class_id logger.warning...
Attempt to produce a deterministic class ID for a given class. The goal here is for the class ID to be the same when this is run on different worker processes. Pickling, loading, and pickling again seems to produce more consistent results than simply pickling. This is a bit crazy and could cause proble...
def _get_float(data, position, dummy0, dummy1, dummy2):
    """Decode a BSON double at ``position`` into a Python float.

    :return: ``(value, end_position)`` where end is 8 bytes past position.
    """
    end = position + 8
    value = _UNPACK_FLOAT(data[position:end])[0]
    return value, end
Decode a BSON double to python float.
def apply_boundary_conditions(self, **kwargs): polarval = kwargs[self._polar_angle] azval = kwargs[self._azimuthal_angle] polarval = self._polardist._domain.apply_conditions(polarval) azval = self._azimuthaldist._domain.apply_conditions(azval) polarval = self._bounds[self._polar_...
Maps the given values to be within the domain of the azimuthal and polar angles, before applying any other boundary conditions. Parameters ---------- \**kwargs : The keyword args must include values for both the azimuthal and polar angle, using the names they wer...
def by_median_home_value(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.median_home_value.name, ascending=False, ...
Search zipcode information by median home value.
def pivot_wavelength(self):
    """Get the bandpass' pivot wavelength.

    Unlike calc_pivot_wavelength(), this uses the registry's cached value
    when one is available, registering freshly computed values.
    """
    cached = self.registry._pivot_wavelengths.get((self.telescope, self.band))
    if cached is not None:
        return cached
    computed = self.calc_pivot_wavelength()
    self.registry.register_pivot_wavelength(self.telescope, self.band, computed)
    return computed
Get the bandpass' pivot wavelength. Unlike calc_pivot_wavelength(), this function will use a cached value if available.
def get_last(self):
    """Get the migrations belonging to the last batch.

    :rtype: list
    """
    last_batch = self.get_last_batch_number()
    query = self.table().where('batch', last_batch)
    return query.order_by('migration', 'desc').get()
Get the last migration batch. :rtype: list
def add(self, document_data, document_id=None): if document_id is None: parent_path, expected_prefix = self._parent_info() document_pb = document_pb2.Document() created_document_pb = self._client._firestore_api.create_document( parent_path, col...
Create a document in the Firestore database with the provided data. Args: document_data (dict): Property names and values to use for creating the document. document_id (Optional[str]): The document identifier within the current collection. If not provided...
def query_by_student(self, student_id, end_time=None, start_time=None): path = {} data = {} params = {} path["student_id"] = student_id if start_time is not None: params["start_time"] = start_time if end_time is not None: params["end_time"]...
Query by student. List grade change events for a given student.
def retrieve_customer(self, handle, with_additional_data=False):
    """Retrieve information about an existing customer."""
    request_body = E.retrieveCustomerRequest(
        E.handle(handle),
        E.withAdditionalData(int(with_additional_data)),
    )
    response = self.request(request_body)
    return response.as_model(Customer)
Retrieve information of an existing customer.
def clear_list_value(self, value):
    """Clean a list value, dropping falsy members when ``clean_empty`` is set.

    An empty (or fully-cleaned) list collapses to ``self.empty_value``.
    """
    if not value:
        return self.empty_value
    cleaned = [item for item in value if item] if self.clean_empty else value
    return cleaned or self.empty_value
Clean the argument value to eliminate None or Falsy values if needed.
def _wire_events(self): self._device.on_open += self._on_open self._device.on_close += self._on_close self._device.on_read += self._on_read self._device.on_write += self._on_write self._zonetracker.on_fault += self._on_zone_fault self._zonetracker.on_restore += self._on_z...
Wires up the internal device events.
def reward_bonus(self, assignment_id, amount, reason):
    """Reward the Turker for a specified assignment with a bonus.

    MTurk service failures are logged rather than propagated.
    """
    try:
        return self.mturkservice.grant_bonus(assignment_id, amount, reason)
    except MTurkServiceException as ex:
        logger.exception(str(ex))
Reward the Turker for a specified assignment with a bonus.
def inspect_select_calculation(self): try: node = self.ctx.cif_select self.ctx.cif = node.outputs.cif except exceptions.NotExistent: self.report('aborting: CifSelectCalculation<{}> did not return the required cif output'.format(node.uuid)) return self.exit...
Inspect the result of the CifSelectCalculation, verifying that it produced a CifData output node.
def from_chords(self, chords, duration=1): tun = self.get_tuning() def add_chord(chord, duration): if type(chord) == list: for c in chord: add_chord(c, duration * 2) else: chord = NoteContainer().from_chord(chord) ...
Add chords to the Track. The given chords should be a list of shorthand strings or list of list of shorthand strings, etc. Each sublist divides the value by 2. If a tuning is set, chords will be expanded so they have a proper fingering. Example: >>> t = Track(...
def extract_coverage(self, container: Container) -> FileLineSet:
    """Extract a report of the lines executed in ``container`` since the
    last time a coverage report was extracted."""
    url = 'containers/{}/read-coverage'.format(container.uid)
    response = self.__api.post(url)
    if response.status_code == 200:
        return FileLineSet.from_dict(response.json())
    # Non-200: delegate to the API's error handling (raises).
    self.__api.handle_erroneous_response(response)
Extracts a report of the lines that have been executed since the last time that a coverage report was extracted.
def size_r_img_inches(width, height):
    """Compute the (width, height) for an R image for display in IPython.

    Width is pinned to ``R_IMAGE_SIZE``; height is scaled to preserve the
    input aspect ratio, rounded to two decimal places.
    """
    aspect = float(height) / width
    return R_IMAGE_SIZE, round(aspect * R_IMAGE_SIZE, 2)
Compute the width and height for an R image for display in IPython Neight width nor height can be null but should be integer pixel values > 0. Returns a tuple of (width, height) that should be used by ggsave in R to produce an appropriately sized jpeg/png/pdf image with the right aspect ratio. The re...
def package_releases(self, project_name):
    """Retrieve the versions of ``project_name`` from PyPI.

    Returns:
        list: of string versions.
    Raises:
        PyPIClientError: wrapping any underlying failure.
    """
    try:
        return self._connection.package_releases(project_name)
    except Exception as err:
        raise PyPIClientError(err)
Retrieve the versions from PyPI by ``project_name``. Args: project_name (str): The name of the project we wish to retrieve the versions of. Returns: list: Of string versions.
def get_remote_port_id_local(self, tlv_data):
    """Return the Remote Port ID "Local" value from the TLV, or None when
    the TLV does not match the expected format."""
    ok, parsed = self._check_common_tlv_format(
        tlv_data, "Local:", "Port ID TLV")
    if not ok:
        return None
    first_line = parsed[1].split('\n')[0]
    return first_line.strip()
Returns Remote Port ID Local from the TLV.
def _generate_badges(self): daycount = self._stats.downloads_per_day day = self._generate_badge('Downloads', '%d/day' % daycount) self._badges['per-day'] = day weekcount = self._stats.downloads_per_week if weekcount is None: return week = self._generate_badge(...
Generate download badges. Append them to ``self._badges``.
def _init_add_goid_alt(self): goid_alts = set() go2cnt_add = {} aspect_counts = self.aspect_counts gocnts = self.gocnts go2obj = self.go2obj for go_id, cnt in gocnts.items(): goobj = go2obj[go_id] assert cnt, "NO TERM COUNTS FOR {GO}".format(GO=goo...
Add alternate GO IDs to term counts.
def on_resolve(target, func, *args, **kwargs):
    """Register ``func`` as a resolution hook for ``target``."""
    return _register_hook(ON_RESOLVE, target, func, *args, **kwargs)
Register a resolution hook.
def toggleDrawingSensitive(self, drawing=True):
    """In the middle of drawing, toggling between modes should be disabled.

    When drawing is cancelled in beginner mode, restore editing state.
    """
    self.actions.editMode.setEnabled(not drawing)
    if drawing or not self.beginner():
        return
    print('Cancel creation.')
    self.canvas.setEditing(True)
    self.canvas.restoreCursor()
    self.actions.create.setEnabled(True)
In the middle of drawing, toggling between modes should be disabled.
def angle2vecs(vec1, vec2): dot = np.dot(vec1, vec2) vec1_modulus = np.sqrt(np.multiply(vec1, vec1).sum()) vec2_modulus = np.sqrt(np.multiply(vec2, vec2).sum()) if (vec1_modulus * vec2_modulus) == 0: cos_angle = 1 else: cos_angle = dot / (vec1_modulus * vec2_modulus) return math.degrees(...
angle between two vectors
def _update_prx(self): qx = scipy.ones(N_CODON, dtype='float') for j in range(3): for w in range(N_NT): qx[CODON_NT[j][w]] *= self.phi[w] frx = self.pi_codon**self.beta self.prx = frx * qx with scipy.errstate(divide='raise', under='raise', over='raise'...
Update `prx` from `phi`, `pi_codon`, and `beta`.
def fmt_transition(t):
    """Format a |Transition| as ``Transition(cause -> effect)``."""
    cause = fmt_mechanism(t.cause_indices, t.node_labels)
    effect = fmt_mechanism(t.effect_indices, t.node_labels)
    return "Transition({} {} {})".format(cause, ARROW_RIGHT, effect)
Format a |Transition|.
def delete(self):
    """Delete this service.

    :raises APIError: if the delete was not successful.
    """
    url = self._client._build_url('service', service_id=self.id)
    response = self._client._request('DELETE', url)
    if response.status_code != requests.codes.no_content:
        raise APIError(
            "Could not delete service: {} with id {}".format(self.name, self.id))
Delete this service. :raises APIError: if the delete was not successful.
def update(self, **args):
    """Update this Clip via the Kippt API.

    Parameters:
    - args  Dictionary of fields to update; accepted fields are listed at
      https://github.com/kippt/api-documentation/blob/master/objects/clip.md
    """
    payload = json.dumps(args)
    response = requests.put(
        "https://kippt.com/api/clips/%s" % (self.id),
        headers=self.kippt.header,
        data=payload)
    return (response.json())
Updates a Clip. Parameters: - args Dictionary of other fields Accepted fields can be found here: https://github.com/kippt/api-documentation/blob/master/objects/clip.md
def _collapse_header(self, header): out = [] for i, h in enumerate(header): if h.startswith(self._col_quals): out[-1].append(i) else: out.append([i]) return out
Combine header columns into related groups.
def set(self, code): if self.update: self.vertices_substitution_dict, self.edges_substitution_dict, self.match_info\ = self.match.get_variables_substitution_dictionaries(self.g, self.matching_graph) try: self.matching_graph = self.__apply_code_to_graph(code, self....
Executes the code and apply it to the self.g :param code: the LISP code to execute :return: True/False, depending on the result of the LISP code
def delete_session(self, ticket):
    """Delete the session record associated with a service ticket."""
    assert isinstance(self.session_storage_adapter, CASSessionAdapter)
    logging.debug('[CAS] Deleting session for ticket {}'.format(ticket))
    self.session_storage_adapter.delete(ticket)
Delete a session record associated with a service ticket.
def display_arr(screen, arr, video_size, transpose):
    """Display a single gameplay frame on the pygame screen.

    Args:
        screen (pygame.Surface): surface to write frames to
        arr (np.ndarray): a single frame of gameplay video
        video_size (tuple): size to render the frame as
        transpose (bool): whether to transpose the frame before display
    """
    if transpose:
        surface = pygame.surfarray.make_surface(arr.swapaxes(0, 1))
    else:
        surface = arr
    scaled = pygame.transform.scale(surface, video_size)
    screen.blit(scaled, (0, 0))
Display an image to the pygame screen. Args: screen (pygame.Surface): the pygame surface to write frames to arr (np.ndarray): numpy array representing a single frame of gameplay video_size (tuple): the size to render the frame as transpose (bool): whether to transpose the frame befo...
def _caveat_v1_to_dict(c): serialized = {} if len(c.caveat_id) > 0: serialized['cid'] = c.caveat_id if c.verification_key_id: serialized['vid'] = utils.raw_urlsafe_b64encode( c.verification_key_id).decode('utf-8') if c.location: serialized['cl'] = c.location retur...
Return a caveat as a dictionary for export as the JSON macaroon v1 format.
def _sumDiceRolls(self, rollList):
    """Convert a dice-roll structure to a single integer result, recording
    any RollList encountered in ``self.rolls``."""
    if not isinstance(rollList, RollList):
        return rollList
    self.rolls.append(rollList)
    return rollList.sum()
convert from dice roll structure to a single integer result
def _peek(self, *types): tok = self._scanner.token(self._pos, types) return tok[2]
Returns the token type for lookahead; if there are any args then the list of args is the set of token types to allow
def remove_from_tor(self, protocol):
    """Remove this hidden service from Tor.

    Returns a Deferred which fires with None on success.
    """
    # Strip the trailing '.onion' (6 chars) from the hostname.
    onion_address = self.hostname[:-6]
    r = yield protocol.queue_command('DEL_ONION %s' % onion_address)
    if r.strip() != 'OK':
        raise RuntimeError('Failed to remove hidden service: "%s".' % r)
Returns a Deferred which fires with None
def remove_autosave_file(self, fileinfo): filename = fileinfo.filename if filename not in self.name_mapping: return autosave_filename = self.name_mapping[filename] try: os.remove(autosave_filename) except EnvironmentError as error: action = (_(...
Remove autosave file for specified file. This function also updates `self.autosave_mapping` and clears the `changed_since_autosave` flag.
def show_instance(name, call=None): if call != 'action': raise SaltCloudException( 'The show_instance action must be called with -a or --action.' ) node_id = get_linode_id_from_name(name) node_data = get_linode(kwargs={'linode_id': node_id}) ips = get_ips(node_id) state =...
Displays details about a particular Linode VM. Either a name or a linode_id must be provided. .. versionadded:: 2015.8.0 name The name of the VM for which to display details. CLI Example: .. code-block:: bash salt-cloud -a show_instance vm_name .. note:: The ``imag...
def is_in_current_deployment(server, extra_prefix=""):
    """Check whether an existing server takes part in the current deployment,
    judged by its name prefix."""
    prefix = '-'.join([DEFAULT_PREFIX, extra_prefix])
    return re.match(r"^%s" % prefix, server.name) is not None
Check whether an existing server in the system takes part in the current deployment
def object_factory(api, api_version, kind): resource_list = api.resource_list(api_version) resource = next((resource for resource in resource_list["resources"] if resource["kind"] == kind), None) base = NamespacedAPIObject if resource["namespaced"] else APIObject return type(kind, (base,), { "ve...
Dynamically builds a Python class for the given Kubernetes object in an API. For example: api = pykube.HTTPClient(...) NetworkPolicy = pykube.object_factory(api, "networking.k8s.io/v1", "NetworkPolicy") This enables construction of any Kubernetes object kind without explicit support from ...
def remove_diagonal(S): if not isspmatrix_csr(S): raise TypeError('expected csr_matrix') if S.shape[0] != S.shape[1]: raise ValueError('expected square matrix, shape=%s' % (S.shape,)) S = coo_matrix(S) mask = S.row != S.col S.row = S.row[mask] S.col = S.col[mask] S.data = S.d...
Remove the diagonal of the matrix S. Parameters ---------- S : csr_matrix Square matrix Returns ------- S : csr_matrix Strength matrix with the diagonal removed Notes ----- This is needed by all the splitting routines which operate on matrix graphs with an assu...
def size(self): import tensorflow as tf if self._size is None: self._size = 0 options = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.GZIP) for tfexample_file in self.files: self._size += sum(1 for x in tf.python_io.tf_record_iterator(tfexa...
The number of instances in the data. If the underlying data source changes, it may be outdated.
def insert(self, index, item):
    """See list.insert; additionally notifies observers of the effective
    insertion position (clamped to the list's bounds)."""
    super(ObservableList, self).insert(index, item)
    size = len(self)
    if index >= size:
        index = size - 1
    elif index < 0:
        index = max(index + size - 1, 0)
    self._notify_add_at(index)
See list.insert.
def expectation(self, operator: Union[PauliTerm, PauliSum]):
    """Compute the expectation value of an operator.

    :param operator: The operator (a bare PauliTerm is promoted to a PauliSum).
    :return: The operator's expectation value.
    """
    if not isinstance(operator, PauliSum):
        operator = PauliSum([operator])
    return sum(
        _term_expectation(self.wf, term, n_qubits=self.n_qubits)
        for term in operator)
Compute the expectation of an operator. :param operator: The operator :return: The operator's expectation value
def pp(i, base=1024):
    """Pretty-print the integer ``i`` as a human-readable size representation.

    :param i: size in base units (bytes when ``base`` is 1024)
    :param base: units per scale step
    :return: formatted string such as ``" 500 B"`` or ``"   2.00 KB"``
    """
    # BUG FIX: the original scale table jumped from 'TB' straight to 'EB',
    # mislabelling petabyte-range values; insert 'PB' in its proper place.
    scales = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB']
    degree = 0
    pattern = "%4d %s"
    # Clamp at the largest known scale to avoid an IndexError on huge values.
    while i > base and degree < len(scales) - 1:
        pattern = "%7.2f %s"
        i = i / float(base)
        degree += 1
    return pattern % (i, scales[degree])
Pretty-print the integer `i` as a human-readable size representation.
def _original_images(self, **kwargs): def test(image): if not image.original: return False for filter, value in kwargs.items(): if getattr(image, filter) != value: return False return True if Session.object_session(s...
A list of the original images. :returns: A list of the original images. :rtype: :class:`typing.Sequence`\ [:class:`Image`]
def base64_b64decode(instr):
    """Decode a base64-encoded string using the "modern" Python interface.

    Returns unicode when the payload decodes cleanly, raw bytes otherwise.
    """
    decoded = base64.b64decode(salt.utils.stringutils.to_bytes(instr))
    encoding = 'utf8' if salt.utils.platform.is_windows() else None
    try:
        return salt.utils.stringutils.to_unicode(decoded, encoding=encoding)
    except UnicodeDecodeError:
        return decoded
Decode a base64-encoded string using the "modern" Python interface.
async def get_analog_map(self): current_time = time.time() if self.query_reply_data.get( PrivateConstants.ANALOG_MAPPING_RESPONSE) is None: await self._send_sysex(PrivateConstants.ANALOG_MAPPING_QUERY) while self.query_reply_data.get( PrivateCo...
This method requests a Firmata analog map query and returns the results. :returns: An analog map response or None if a timeout occurs
def list_documents(self, limit=None): limit_str = '' if limit: try: limit_str = 'LIMIT {}'.format(int(limit)) except (TypeError, ValueError): pass query = ('SELECT identifier FROM identifier_index ' + limit_str) for row in self.back...
Generates vids of all indexed identifiers. Args: limit (int, optional): If not empty, the maximum number of results to return Generates: str: vid of the document.
def reset(self): "Initialises all needed variables to default values" self.metadata = {} self.items = [] self.spine = [] self.guide = [] self.pages = [] self.toc = [] self.bindings = [] self.IDENTIFIER_ID = 'id' self.FOLDER_NAME = 'EPUB' ...
Initialises all needed variables to default values
def previous(self, day_of_week=None): if day_of_week is None: day_of_week = self.day_of_week if day_of_week < SUNDAY or day_of_week > SATURDAY: raise ValueError("Invalid day of week") dt = self.subtract(days=1) while dt.day_of_week != day_of_week: dt =...
Modify to the previous occurrence of a given day of the week. If no day_of_week is provided, modify to the previous occurrence of the current day of the week. Use the supplied consts to indicate the desired day_of_week, ex. pendulum.MONDAY. :param day_of_week: The previous day of week ...
def AddMapping(self, filename, new_mapping):
    """Add an entry to the list of known filenames.

    Args:
      filename: the filename whose mapping is being added.
      new_mapping: a dict containing all fields in _REQUIRED_MAPPING_FIELDS.

    Raises:
      problems.InvalidMapping: if a required mapping field is missing.
      problems.DuplicateMapping: if the filename already exists in the mapping.
    """
    for required in self._REQUIRED_MAPPING_FIELDS:
        if required not in new_mapping:
            raise problems.InvalidMapping(required)
    if filename in self.GetKnownFilenames():
        raise problems.DuplicateMapping(filename)
    self._file_mapping[filename] = new_mapping
Adds an entry to the list of known filenames. Args: filename: The filename whose mapping is being added. new_mapping: A dictionary with the mapping to add. Must contain all fields in _REQUIRED_MAPPING_FIELDS. Raises: DuplicateMapping if the filename already exists in the map...
def format_file_path(filepath): try: is_windows_network_mount = WINDOWS_NETWORK_MOUNT_PATTERN.match(filepath) filepath = os.path.realpath(os.path.abspath(filepath)) filepath = re.sub(BACKSLASH_REPLACE_PATTERN, '/', filepath) is_windows_drive = WINDOWS_DRIVE_PATTERN.match(filepath) ...
Formats a path as absolute and with the correct platform separator.
def _validate_auth(self, path, obj, _): errs = [] if obj.type == 'apiKey': if not obj.passAs: errs.append('need "passAs" for apiKey') if not obj.keyname: errs.append('need "keyname" for apiKey') elif obj.type == 'oauth2': if not...
Validate the apiKey and oauth2 authorization requirements.
def concretize_load_idx(self, idx, strategies=None): if isinstance(idx, int): return [idx] elif not self.state.solver.symbolic(idx): return [self.state.solver.eval(idx)] strategies = self.load_strategies if strategies is None else strategies return self._apply_con...
Concretizes a load index. :param idx: An expression for the index. :param strategies: A list of concretization strategies (to override the default). :param min_idx: Minimum value for a concretized index (inclusive). :param max_idx: Maximum value for a co...
def update_ff(self, ff, mol2=False, force_ff_assign=False): aff = False if force_ff_assign: aff = True elif 'assigned_ff' not in self.tags: aff = True elif not self.tags['assigned_ff']: aff = True if aff: self.assign_force_field(ff,...
Manages assigning the force field parameters. The aim of this method is to avoid unnecessary assignment of the force field. Parameters ---------- ff: BuffForceField The force field to be used for scoring. mol2: bool, optional If true, mol2 style ...
def Fierz_to_Bern_chrom(C, dd, parameters): e = sqrt(4 * pi * parameters['alpha_e']) gs = sqrt(4 * pi * parameters['alpha_s']) if dd == 'sb' or dd == 'db': mq = parameters['m_b'] elif dd == 'ds': mq = parameters['m_s'] else: KeyError("Not sure what to do with quark mass for f...
From Fierz to chromomagnetic Bern basis for Class V. dd should be of the form 'sb', 'ds' etc.
def is_cached(self, link):
    """Return whether the given link is cached in this navigator.

    Intended to be overwritten and customized by subclasses.
    """
    if link is None:
        return False
    key = link.uri if hasattr(link, 'uri') else link
    return key in self.id_map
Returns whether the current navigator is cached. Intended to be overwritten and customized by subclasses.
def reconnect(connection):
    """Open a new datafind connection based on an existing connection.

    Required because of https://git.ligo.org/lscsoft/glue/issues/1

    :param connection: an HTTPConnection or FflConnection (need not be open)
    :return: a fresh connection of the same type
    """
    if isinstance(connection, FflConnection):
        return type(connection)(connection.ffldir)
    # Plain HTTP (port 80) connections take no SSL context.
    kwargs = {} if connection.port == 80 else {'context': connection._context}
    return connection.__class__(connection.host, port=connection.port, **kwargs)
Open a new datafind connection based on an existing connection This is required because of https://git.ligo.org/lscsoft/glue/issues/1 Parameters ---------- connection : :class:`~gwdatafind.http.HTTPConnection` or `FflConnection` a connection object (doesn't need to be open) Returns --...
def write_csv_header(mol, csv_writer):
    """Write the CSV header row: 'id', 'status', then one column per
    molecule property query label."""
    header = ['id', 'status']
    header.extend(mol.properties.keys())
    csv_writer.writerow(header)
Write the csv header
def HasColumn(self, table_name, column_name): if not self._connection: raise IOError('Not opened.') if not column_name: return False table_name = table_name.lower() column_names = self._column_names_per_table.get(table_name, None) if column_names is None: column_names = [] se...
Determines if a specific column exists. Args: table_name (str): name of the table. column_name (str): name of the column. Returns: bool: True if the column exists. Raises: IOError: if the database file is not opened. OSError: if the database file is not opened.
def show_syspath(self):
    """Show sys.path in a read-only collections editor dialog."""
    editor = CollectionsEditor(parent=self)
    editor.setup(sys.path, title="sys.path", readonly=True,
                 width=600, icon=ima.icon('syspath'))
    self.dialog_manager.show(editor)
Show sys.path
async def restart_walk(self):
    """Force a re-walk, unless one is already pending."""
    if self._restartwalk:
        return
    self._restartwalk = True
    await self.wait_for_send(
        FlowUpdaterNotification(self, FlowUpdaterNotification.STARTWALK))
Force a re-walk
def source(self, source):
    """When the source gets updated, refresh the main and label panes."""
    BaseView.source.fset(self, source)
    if self.main_pane:
        self.main_pane.object = self.contents
        self.label_pane.object = self.label
When the source gets updated, update the pane object
def _GetAuthCookie(self, auth_token): continue_location = "http://localhost/" args = {"continue": continue_location, "auth": auth_token} req = self._CreateRequest("https://%s/_ah/login?%s" % (self.host, urllib.urlencode(args))) try: response = self.opener.open(req) except urllib2.HTTPError, e: response ...
Fetches authentication cookies for an authentication token. Args: auth_token: The authentication token returned by ClientLogin. Raises: HTTPError: If there was an error fetching the authentication cookies.
def distribution(self, start=None, end=None, normalized=True, mask=None): start, end, mask = self._check_boundaries(start, end, mask=mask) counter = histogram.Histogram() for start, end, _ in mask.iterperiods(value=True): for t0, t1, value in self.iterperiods(start, end): ...
Calculate the distribution of values over the given time range from `start` to `end`. Args: start (orderable, optional): The lower time bound of when to calculate the distribution. By default, the first time point will be used. end (orderable, o...
def query(self, design, view, use_devmode=False, **kwargs):
    """Query a pre-defined MapReduce view, passing parameters.

    :param design: the design document name
    :param view: the view name within the design document
    :param use_devmode: whether to use the development-mode design document
    :return: an iterable result object (``itercls``, :class:`~.View` by default)
    """
    resolved_design = self._mk_devmode(design, use_devmode)
    itercls = kwargs.pop('itercls', View)
    return itercls(self, resolved_design, view, **kwargs)
Query a pre-defined MapReduce view, passing parameters. This method executes a view on the cluster. It accepts various parameters for the view and returns an iterable object (specifically, a :class:`~.View`). :param string design: The design document :param string view: The vie...
def get_container_names(self):
    """Fetch the names of all present containers from Docker.

    :return: all container names with the leading '/' stripped
    :rtype: set
    """
    present = self.containers(all=True)
    return {
        name[1:]
        for container in present
        for name in container['Names']
    }
Fetches names of all present containers from Docker. :return: All container names. :rtype: set
def search_index_simple(self, index, key, search_term):
    """Search the index using a simple key and search term.

    @param index Name of the index
    @param key Search key
    @param search_term The term to be searched for
    """
    url = 'http://%s:%s/%s/_search?q=%s:%s' % (
        self.host, self.port, index, key, search_term)
    return self.session.get(url)
Search the index using a simple key and search_term @param index Name of the index @param key Search Key @param search_term The term to be searched for
def cls_get_by_name(cls, name): try: val = getattr(cls, name) except AttributeError: for attr in (a for a in dir(cls) if not a.startswith('_')): try: val = getattr(cls, attr) except AttributeError: continue valname = getattr(val...
Return a class attribute by searching the attributes `name` attribute.
def login_service_description(self): label = 'Login to ' + self.name if (self.auth_type): label = label + ' (' + self.auth_type + ')' desc = {"@id": self.login_uri, "profile": self.profile_base + self.auth_pattern, "label": label} if (self.head...
Login service description. The login service description _MUST_ include the token service description. The authentication pattern is indicated via the profile URI which is built using self.auth_pattern.
def compute_nats_and_bits_per_dim(data_dim, latent_dim, average_reconstruction, average_prior): with tf.name_scope(None, default_name="compute_nats_per_dim"): data_dim = tf.cast(data_dim, average_reconstruction.d...
Computes negative ELBO, which is an upper bound on the negative likelihood. Args: data_dim: int-like indicating data dimensionality. latent_dim: int-like indicating latent dimensionality. average_reconstruction: Scalar Tensor indicating the reconstruction cost averaged over all data dimensions and ...
def sendContact(self, context={}):
    """Send the contact form message to each configured recipient.

    :param context: template context for the contact-form email.
    """
    # NOTE: the mutable default ``context={}`` is kept for interface
    # compatibility; it is only read here, never mutated.
    for recipient in self.recipients:
        # BUG FIX: ``self.async`` is a SyntaxError on Python >= 3.7, where
        # ``async`` became a reserved keyword; access the attribute
        # dynamically instead.
        super(ContactFormMail, self).__init__(recipient, getattr(self, 'async'))
        self.sendEmail('contactForm', 'New contact form message', context)
Send contact form message to single or multiple recipients
def _build_matches(matches, uuids, no_filtered, fastmode=False): result = [] for m in matches: mk = m[0].uuid if not fastmode else m[0] subset = [uuids[mk]] for id_ in m[1:]: uk = id_.uuid if not fastmode else id_ u = uuids[uk] if u not in subset: ...
Build a list with matching subsets
def has_adjacent_leaves_only(self): leaves = self.leaves() for i in range(len(leaves) - 1): current_interval = leaves[i].interval next_interval = leaves[i + 1].interval if not current_interval.is_adjacent_before(next_interval): return False ret...
Return ``True`` if the sync map fragments which are the leaves of the sync map tree are all adjacent. :rtype: bool .. versionadded:: 1.7.0
def define_zip_index_for_species(names_ppn_world, number_names_ppn_world):
    """Populate the module-level ``cl`` mapping from species name to its
    original index as read from the data file."""
    global cl
    cl = dict(zip(names_ppn_world, number_names_ppn_world))
This just give back cl, that is the original index as it is read from files from a data file.
def send_msg_multi(name, profile, recipients=None, rooms=None): ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} if recipients is None and rooms is None: ret['comment'] = "Recipients and rooms are...
Send a message to an list of recipients or rooms .. code-block:: yaml server-warning-message: xmpp.send_msg: - name: 'This is a server warning message' - profile: my-xmpp-account - recipients: - admins@xmpp.example.com/salt - rooms: ...