code
stringlengths
59
4.4k
docstring
stringlengths
5
7.69k
def get_single_header(headers, key):
    """Return the parsed value of the last header for ``key``, or ``None``.

    Only the final occurrence of the header is considered, and any
    parameters (e.g. ``; charset=utf-8``) are discarded.

    :param twisted.web.http_headers.Headers headers: headers to search
    :param str key: the header name to look up
    """
    values = headers.getRawHeaders(key)
    if values is None:
        return None
    value, _params = cgi.parse_header(values[-1])
    return value
Get a single value for the given key out of the given set of headers. :param twisted.web.http_headers.Headers headers: The set of headers in which to look for the header value :param str key: The header key
def update(self, *args, **kwargs):
    """Update only drafts (status must be ``'draft'``).

    Thin delegate to the parent class; meta information inside
    ``_deposit`` is preserved by the superclass implementation.
    """
    super(Deposit, self).update(*args, **kwargs)
Update only drafts. Status required: ``'draft'``. Meta information inside `_deposit` is preserved.
def aggregate_tree(l_tree): def _aggregate_phase1(tree): n_tree = radix.Radix() for prefix in tree.prefixes(): if tree.search_worst(prefix).prefix == prefix: n_tree.add(prefix) return n_tree def _aggregate_phase2(tree): n_tree = radix.Radix() f...
Walk a py-radix tree and aggregate it. Arguments l_tree -- radix.Radix() object
def switch(template, version):
    """Switch a project's template to a different template.

    Delegates to ``temple.update.update`` with the new template and
    version.

    :param template: the new template (forwarded as ``new_template``)
    :param version: the new version (forwarded as ``new_version``)
    """
    temple.update.update(new_template=template, new_version=version)
Switch a project's template to a different template.
def send_status_response(environ, start_response, e, add_headers=None, is_head=False): status = get_http_status_string(e) headers = [] if add_headers: headers.extend(add_headers) if e in (HTTP_NOT_MODIFIED, HTTP_NO_CONTENT): start_response( status, [("Content-Length", "0"), (...
Start a WSGI response for a DAVError or status code.
def removeTopology(self, topology_name, state_manager_name): topologies = [] for top in self.topologies: if (top.name == topology_name and top.state_manager_name == state_manager_name): if (topology_name, state_manager_name) in self.topologyInfos: self.topologyInfos.pop((topolo...
Removes the topology from the local cache.
def encoder_data(self, data): prev_val = self.digital_response_table[data[self.RESPONSE_TABLE_MODE]][self.RESPONSE_TABLE_PIN_DATA_VALUE] val = int((data[self.MSB] << 7) + data[self.LSB]) if val > 8192: val -= 16384 pin = data[0] with self.pymata.data_lock: ...
This method handles the incoming encoder data message and stores the data in the digital response table. :param data: Message data from Firmata :return: No return value.
def _dot_to_dec(ip, check=True): if check and not is_dot(ip): raise ValueError('_dot_to_dec: invalid IP: "%s"' % ip) octets = str(ip).split('.') dec = 0 dec |= int(octets[0]) << 24 dec |= int(octets[1]) << 16 dec |= int(octets[2]) << 8 dec |= int(octets[3]) return dec
Dotted decimal notation to decimal conversion.
def initialize(self, containers):
    """Initialize a new state file with the given contents.

    A deep copy is stored so later mutation of ``containers`` by the
    caller cannot affect the recorded state.

    NOTE(review): the original ``containers`` (not the copy) is passed
    to ``__write``; presumably ``__write(initialize=True)`` fails when
    the state file already exists — confirm in its implementation.

    :param containers: container state to record
    """
    self._containers = deepcopy(containers)
    self.__write(containers, initialize=True)
Initialize a new state file with the given contents. This function fails in case the state file already exists.
def is_quiet(self):
    """``True`` if this conversation's notification level is QUIET."""
    level = self._conversation.self_conversation_state.notification_level
    return level == hangouts_pb2.NOTIFICATION_LEVEL_QUIET
``True`` if notification level for this conversation is quiet.
def create_dashboard(self, panel_file, data_sources=None, strict=True): es_enrich = self.conf['es_enrichment']['url'] kibana_url = self.conf['panels']['kibiter_url'] mboxes_sources = set(['pipermail', 'hyperkitty', 'groupsio', 'nntp']) if data_sources and any(x in data_sources for x in m...
Upload a panel to Elasticsearch if it does not exist yet. If a list of data sources is specified, upload only those elements (visualizations, searches) that match that data source. :param panel_file: file name of panel (dashboard) to upload :param data_sources: list of data sources ...
def get_constructor_arguments(self) -> str:
    """Return the tuple type signature of the contract constructor's args.

    Contracts without an explicit constructor yield ``'()'``.
    """
    item = self._constructor_abi_item
    if item is None:
        return '()'
    return self.tuple_signature_for_components(item['inputs'])
Returns the tuple type signature for the arguments of the contract constructor.
def _get_operation_input_field_values(self, metadata, file_input): input_args = metadata['request']['ephemeralPipeline']['inputParameters'] vals_dict = metadata['request']['pipelineArgs']['inputs'] names = [ arg['name'] for arg in input_args if ('localCopy' in arg) == file_input ] return {na...
Returns a dictionary of envs or file inputs for an operation. Args: metadata: operation metadata field file_input: True to return a dict of file inputs, False to return envs. Returns: A dictionary of input field name value pairs
def view_maker(self, name, instance=None): if instance is None: instance = self sig = "lang" in [ parameter.name for parameter in inspect.signature(getattr(instance, name)).parameters.values() ] def route(**kwargs): if sig and "lang" not in...
Create a view :param name: Name of the route function to use for the view. :type name: str :return: Route function which makes use of Nemo context (such as menu informations) :rtype: function
def _get_container_port_mappings(app): container = app['container'] port_mappings = container.get('portMappings') if port_mappings is None and 'docker' in container: port_mappings = container['docker'].get('portMappings') return port_mappings
Get the ``portMappings`` field for the app container.
def clear(self):
    """Zero all compound masses; reset P to 1, T to 25 and H to 0."""
    # Multiply instead of reassigning so an array-like container keeps
    # its shape and type.
    self._compound_masses = 0.0 * self._compound_masses
    self._P = 1.0
    self._T = 25.0
    self._H = 0.0
Set all the compound masses in the package to zero. Set the pressure to 1, the temperature to 25 and the enthalpy to zero.
def MessageToJson(message, including_default_value_fields=False):
    """Serialize a protobuf message to a JSON string (2-space indent).

    Args:
      message: protocol buffers message instance to serialize.
      including_default_value_fields: when True, also emit fields that
        hold their default values.
    """
    json_obj = _MessageToJsonObject(message, including_default_value_fields)
    return json.dumps(json_obj, indent=2)
Converts protobuf message to JSON format. Args: message: The protocol buffers message instance to serialize. including_default_value_fields: If True, singular primitive fields, repeated fields, and map fields will always be serialized. If False, only serialize non-empty fields. Singular mes...
def _keep_alive_thread(self):
    """Thread body that pings the WebSocket every 30s to keep it alive.

    Loops forever; when the connection is found closed it calls
    disconnect(), clears the thread reference and exits.
    """
    while True:
        with self._lock:
            if self.connected():
                self._ws.ping()
            else:
                self.disconnect()
                # Drop the reference so a new keep-alive thread can start.
                self._thread = None
                return
        sleep(30)
Used exclusively as a thread which keeps the WebSocket alive.
def scaffolds_to_contigs(infile, outfile, number_contigs=False): seq_reader = sequences.file_reader(infile) fout = utils.open_file_write(outfile) for seq in seq_reader: contigs = seq.contig_coords() counter = 1 for contig in contigs: if number_contigs: nam...
Makes a file of contigs from scaffolds by splitting at every N. Use number_contigs=True to add .1, .2, etc onto end of each contig, instead of default to append coordinates.
def parseStringList(s):
    """Parse a string of whitespace-separated integers into a list.

    :param s: (string) to parse
    :returns: (list) of ints (binary SDR)
    """
    # Bug fix: `basestring` exists only on Python 2 and raises NameError
    # on Python 3; `str` is the correct modern check.
    assert isinstance(s, str), type(s)
    return [int(i) for i in s.split()]
Parse a string of space-separated numbers, returning a Python list. :param s: (string) to parse :returns: (list) binary SDR
def _camelcase_to_underscore(url): def upper2underscore(text): for char in text: if char.islower(): yield char else: yield '_' if char.isalpha(): yield char.lower() return ''.join(upper2underscore(url))
Translate camelCase into underscore format. >>> _camelcase_to_underscore('minutesBetweenSummaries') 'minutes_between_summaries'
def add_loopless(model, zero_cutoff=None): zero_cutoff = normalize_cutoff(model, zero_cutoff) internal = [i for i, r in enumerate(model.reactions) if not r.boundary] s_int = create_stoichiometric_matrix(model)[:, numpy.array(internal)] n_int = nullspace(s_int).T max_bound = max(max(abs(b) for b in r...
Modify a model so all feasible flux distributions are loopless. In most cases you probably want to use the much faster `loopless_solution`. May be used in cases where you want to add complex constraints and objecives (for instance quadratic objectives) to the model afterwards or use an approximation of...
def many_to_one(clsname, **kw):
    """Use an event to build a many-to-one relationship on a class.

    Returns a ``declared_attr`` that, when evaluated on the declarative
    class, registers a reference to ``clsname`` via ``_references`` and
    produces the ``relationship``.

    :param clsname: name of the remote class
    :param kw: extra keyword arguments forwarded to ``relationship``
    """
    @declared_attr
    def m2o(cls):
        cls._references((cls.__name__, clsname))
        return relationship(clsname, **kw)
    return m2o
Use an event to build a many-to-one relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship to the remote table.
def SyntheticRestaurant(n=20):
    """Generate a RestaurantDataSet with n randomly generated examples."""
    def gen():
        # Bug fix: on Python 3, map() returns an iterator which does not
        # support item assignment; materialize it as a list first.
        example = list(map(random.choice, restaurant.values))
        example[restaurant.target] = Fig[18, 2](example)
        return example
    return RestaurantDataSet([gen() for _ in range(n)])
Generate a DataSet with n examples.
def merge_to_one_seq(infile, outfile, seqname='union'): seq_reader = sequences.file_reader(infile) seqs = [] for seq in seq_reader: seqs.append(copy.copy(seq)) new_seq = ''.join([seq.seq for seq in seqs]) if type(seqs[0]) == sequences.Fastq: new_qual = ''.join([seq.qual for seq in se...
Takes a multi fasta or fastq file and writes a new file that contains just one sequence, with the original sequences catted together, preserving their order
def decorator(decorator_func): assert callable(decorator_func), type(decorator_func) def _decorator(func=None, **kwargs): assert func is None or callable(func), type(func) if func: return decorator_func(func, **kwargs) else: def _decorator_helper(func): ...
Allows a decorator to be called with or without keyword arguments.
def _matrix2dict(matrix, etype=False): n = len(matrix) adj = {k: {} for k in range(n)} for k in range(n): for j in range(n): if matrix[k, j] != 0: adj[k][j] = {} if not etype else matrix[k, j] return adj
Takes an adjacency matrix and returns an adjacency list.
def get_or_default(func=None, default=None): def decorator(func): @wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except ObjectDoesNotExist: if callable(default): return default() ...
Wrapper around Django's ORM `get` functionality. Wrap anything that raises ObjectDoesNotExist exception and provide the default value if necessary. `default` by default is None. `default` can be any callable, if it is callable it will be called when ObjectDoesNotExist exception will be raised.
def _expand_consumed_mesh(self, mesh, mesh_index, row_position, passed): if not mesh.is_produced(): return row = mesh.producing_row position = Point( row_position.x + mesh.index_in_producing_row - mesh_index, row_position.y - INSTRUCTION_HEIGHT ) ...
expand the consumed meshes
def register_timer_task_in_sec(self, task, second):
    """Register ``task`` to fire ``second`` seconds from now.

    :param task: function to run when the timer expires
    :param second: delay in seconds (coerced to float)
    """
    delay = float(second)
    deadline = time.time() + delay
    heappush(self.timer_tasks, (deadline, task))
Registers a new timer task :param task: function to be run at a specified second from now :param second: how many seconds to wait before the timer is triggered
def fmt(a, b):
    """Figure of merit in time: percent ratio of the summed elementwise
    minimum of ``a`` and ``b`` over their summed elementwise maximum."""
    lower = np.min([a, b], axis=0).sum()
    upper = np.max([a, b], axis=0).sum()
    return 100 * lower / upper
Figure of merit in time
def submit(course, tid=None, pastebin=False, review=False): if tid is not None: return submit_exercise(Exercise.byid(tid), pastebin=pastebin, request_review=review) else: sel = Exercise.get_selected() if not sel: r...
Submit the selected exercise to the server.
def _get_warped_array( input_file=None, indexes=None, dst_bounds=None, dst_shape=None, dst_crs=None, resampling=None, src_nodata=None, dst_nodata=None ): try: return _rasterio_read( input_file=input_file, indexes=indexes, dst_bounds=dst_bou...
Extract a numpy array from a raster file.
def fromordinal(cls, n):
    """Construct a date from a proleptic Gregorian ordinal.

    January 1 of year 1 is ordinal 1. Only the year, month and day are
    non-zero in the result.
    """
    year, month, day = _ord2ymd(n)
    return cls(year, month, day)
Construct a date from a proleptic Gregorian ordinal. January 1 of year 1 is day 1. Only the year, month and day are non-zero in the result.
def get_own_ip(): own_ip = None interfaces = psutil.net_if_addrs() for _, details in interfaces.items(): for detail in details: if detail.family == socket.AF_INET: ip_address = ipaddress.ip_address(detail.address) if not (ip_address.is_link_local or ip_add...
Gets the IP from the inet interfaces.
def dist_abs( self, src, tar, weights='exponential', max_length=8, normalized=False ): xored = eudex(src, max_length=max_length) ^ eudex( tar, max_length=max_length ) if not weights: binary = bin(xored) distance = binary.count('1') if n...
Calculate the distance between the Eudex hashes of two terms. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison weights : str, iterable, or generator function The weights or weights generator f...
def tradepile(self): method = 'GET' url = 'tradepile' rc = self.__request__(method, url) events = [self.pin.event('page_view', 'Hub - Transfers'), self.pin.event('page_view', 'Transfer List - List View')] if rc.get('auctionInfo'): events.append(self.pin.event('page_vi...
Return items in tradepile.
def get_all_certificates(self):
    """Return a list of Certificate objects for the account."""
    data = self.get_data("certificates")
    certificates = []
    for item in data['certificates']:
        certificate = Certificate(**item)
        # Propagate the API token so each certificate can issue requests.
        certificate.token = self.token
        certificates.append(certificate)
    return certificates
This function returns a list of Certificate objects.
def create_jwt(integration_id, private_key_path): integration_id = int(integration_id) with open(private_key_path, 'rb') as f: cert_bytes = f.read() now = datetime.datetime.now() expiration_time = now + datetime.timedelta(minutes=9) payload = { 'iat': int(now.timestamp()), 'e...
Create a JSON Web Token to authenticate a GitHub Integration or installation. Parameters ---------- integration_id : `int` Integration ID. This is available from the GitHub integration's homepage. private_key_path : `str` Path to the integration's private key (a ``.pem`` fil...
def allow_request(self, request, view):
    """Apply service-user throttling, then defer to the parent throttle.

    If the requesting username is one of the configured service users,
    switch this throttle to the service-user scope/rate first; the
    final decision is still made by the superclass.

    :param request: incoming request being throttled
    :param view: the view the request targets
    :return: whether the request is allowed (per the parent throttle)
    """
    service_users = get_service_usernames()
    if request.user.username in service_users:
        self.update_throttle_scope()
    return super(ServiceUserThrottle, self).allow_request(request, view)
Modify throttling for service users. Updates throttling rate if the request is coming from the service user, and defaults to UserRateThrottle's configured setting otherwise. Updated throttling rate comes from `DEFAULT_THROTTLE_RATES` key in `REST_FRAMEWORK` setting. service user thrott...
def map(self, map_function):
    """Return a new Streamlet applying ``map_function`` to each element.

    The new MapStreamlet is registered as a child of this streamlet so
    it receives this streamlet's elements.

    :param map_function: callable applied to every element
    :return: the newly created MapStreamlet
    """
    from heronpy.streamlet.impl.mapbolt import MapStreamlet
    map_streamlet = MapStreamlet(map_function, self)
    self._add_child(map_streamlet)
    return map_streamlet
Return a new Streamlet by applying map_function to each element of this Streamlet.
def connect(self):
    """Connect the underlying socket to (host, port).

    :raise ConnectionError: if the socket cannot establish a connection
    """
    try:
        logger.info(u'Connecting %s:%d' % (self.host, self.port))
        self.sock.connect((self.host, self.port))
    except socket.error:
        # Translate the low-level socket failure into the library error.
        raise ConnectionError()
    self.state = CONNECTED
Connect to the server :raise ConnectionError: If socket cannot establish a connection
def _get_algorithm_info(self, algorithm_info): if algorithm_info['algorithm'] not in self.ALGORITHMS: raise Exception('Algorithm not supported: %s' % algorithm_info['algorithm']) algorithm = self.ALGORITHMS[algorithm_info['algorithm']] algorithm_info.updat...
Get algorithm info
def path_to_node(tree, path):
    """Return the FST node located at ``path`` inside ``tree``.

    A ``None`` path yields ``None``; an empty path yields ``tree``
    itself. Each path element is resolved with ``child_by_key``.
    """
    if path is None:
        return None
    current = tree
    for step in path:
        current = child_by_key(current, step)
    return current
FST node located at the given path
def INC(cpu, dest): arg0 = dest.read() res = dest.write(arg0 + 1) res &= (1 << dest.size) - 1 SIGN_MASK = 1 << (dest.size - 1) cpu.AF = ((arg0 ^ 1) ^ res) & 0x10 != 0 cpu.ZF = res == 0 cpu.SF = (res & SIGN_MASK) != 0 cpu.OF = res == SIGN_MASK cpu.P...
Increments by 1. Adds 1 to the destination operand, while preserving the state of the CF flag. The destination operand can be a register or a memory location. This instruction allows a loop counter to be updated without disturbing the CF flag. (Use a ADD instruction with an immediate op...
def i2c_stop_reading(self, address):
    """Stop an I2C_READ_CONTINUOUSLY operation for the given device.

    :param address: address of the i2c device
    """
    payload = [address, self.I2C_STOP_READING]
    handler = self._command_handler
    handler.send_sysex(handler.I2C_REQUEST, payload)
This method stops an I2C_READ_CONTINUOUSLY operation for the i2c device address specified. :param address: address of i2c device
def get_last_commit(git_path=None):
    """Return the HEAD commit SHA1 of the repository in the current dir.

    :param git_path: path to the git executable; defaults to GIT_PATH
    """
    if git_path is None:
        git_path = GIT_PATH
    # The SHA1 is the second whitespace-separated token of the log line.
    return get_last_commit_line(git_path).split()[1]
Get the HEAD commit SHA1 of repository in current dir.
def file_serializer(obj):
    """Serialize an ObjectVersion-like object to a plain dict.

    :param obj: object exposing ``file_id``, ``key`` and a ``file``
        with ``size`` and ``checksum`` attributes
    :returns: dict with id, filename, filesize and checksum fields
    """
    file_obj = obj.file
    return dict(
        id=str(obj.file_id),
        filename=obj.key,
        filesize=file_obj.size,
        checksum=file_obj.checksum,
    )
Serialize an object. :param obj: A :class:`invenio_files_rest.models.ObjectVersion` instance. :returns: A dictionary with the fields to serialize.
def _keywords(self): meta = self.find("meta", {"name":"keywords"}) if isinstance(meta, dict) and \ meta.has_key("content"): keywords = [k.strip() for k in meta["content"].split(",")] else: keywords = [] return keywords
Returns the meta keywords in the page.
def update_throttle_scope(self):
    """Switch this throttle to the service-user scope and recompute limits.

    Re-reads the rate for SERVICE_USER_SCOPE and updates num_requests
    and duration accordingly.
    """
    self.scope = SERVICE_USER_SCOPE
    self.rate = self.get_rate()
    self.num_requests, self.duration = self.parse_rate(self.rate)
Update throttle scope so that service user throttle rates are applied.
def predict_proba(self, X):
    """Predict the distances (leverages) for X to the training-set center.

    Parameters
    ----------
    X : array-like or sparse matrix, shape (n_samples, n_features)
        Input samples; validated/converted with ``check_array``.

    Returns
    -------
    Leverages computed from the fitted inverse influence matrix.
    """
    check_is_fitted(self, ['inverse_influence_matrix'])
    X = check_array(X)
    return self.__find_leverages(X, self.inverse_influence_matrix)
Predict the distances for X to center of the training set. Parameters ---------- X : array-like or sparse matrix, shape (n_samples, n_features) The input samples. Internally, it will be converted to ``dtype=np.float32`` and if a sparse matrix is provided to a...
def getch():
    """Read one character from stdin, blocking until a key is pressed.

    Temporarily applies the module-level ``_new_settings`` terminal
    attributes and always restores ``_old_settings`` afterwards.
    """
    try:
        termios.tcsetattr(_fd, termios.TCSANOW, _new_settings)
        ch = sys.stdin.read(1)
    finally:
        # Restore the terminal even if the read is interrupted.
        termios.tcsetattr(_fd, termios.TCSADRAIN, _old_settings)
    return ch
get character. waiting for key
def _in_git_repo():
    """Return True when the current directory is inside a git repo."""
    result = temple.utils.shell(
        'git rev-parse', stderr=subprocess.DEVNULL, check=False)
    return result.returncode == 0
Returns True if inside a git repo, False otherwise
def smooth_fwhm(self, fwhm):
    """Set the smoothing Gaussian kernel FWHM (in mm).

    A changed value invalidates the cached smoothed-data flag.
    """
    if fwhm == self._smooth_fwhm:
        return
    self._is_data_smooth = False
    self._smooth_fwhm = fwhm
Set a smoothing Gaussian kernel given its FWHM in mm.
def record_delete_subfield(rec, tag, subfield_code, ind1=' ', ind2=' '): ind1, ind2 = _wash_indicators(ind1, ind2) for field in rec.get(tag, []): if field[1] == ind1 and field[2] == ind2: field[0][:] = [subfield for subfield in field[0] if subfield_code != subfield...
Delete all subfields with subfield_code in the record.
def sendCommands(comPort, commands): mutex.acquire() try: try: port = serial.Serial(port=comPort) header = '11010101 10101010' footer = '10101101' for command in _translateCommands(commands): _sendBinaryData(port, header + command + footer)...
Send X10 commands using the FireCracker on comPort comPort should be the name of a serial port on the host platform. On Windows, for example, 'com1'. commands should be a string consisting of X10 commands separated by commas. For example. 'A1 On, A Dim, A Dim, A Dim, A Lamps Off'. The letter is a ...
def save_traceback(err): dirname = safe_path(os.path.expanduser( os.path.join('~', '.{0}'.format(__script__)) )) if not os.path.isdir(dirname): os.mkdir(dirname) filename = os.path.join(dirname, '{0}.log'.format(__script__)) with open(filename, 'a+') as handler: traceback.pri...
Save error traceback to bootstrapper log file. :param err: Catched exception.
def configure_modrpaf(self):
    """Install or remove the mod-rpaf Apache module per the env setting.

    https://github.com/gnif/mod_rpaf

    When ``modrpaf_enabled`` is set, install packages and enable the
    module; otherwise disable it — but only if the last deployed
    manifest had it enabled, to avoid disabling something that was
    never turned on.
    """
    r = self.local_renderer
    if r.env.modrpaf_enabled:
        self.install_packages()
        self.enable_mod('rpaf')
    else:
        if self.last_manifest.modrpaf_enabled:
            # NOTE(review): enable uses 'rpaf' but disable uses
            # 'mod_rpaf' — confirm the asymmetry is intentional.
            self.disable_mod('mod_rpaf')
Installs the mod-rpaf Apache module. https://github.com/gnif/mod_rpaf
def _plugin_endpoint_rename(fn_name, instance): if instance and instance.namespaced: fn_name = "r_{0}_{1}".format(instance.name, fn_name[2:]) return fn_name
Rename endpoint function name to avoid conflict when namespacing is set to true :param fn_name: Name of the route function :param instance: Instance bound to the function :return: Name of the new namespaced function name
def get_directory(self, path_to_directory, timeout=30, backoff=0.4, max_wait=4): response = None started_at = None time_elapsed = 0 i = 0 while time_elapsed < timeout: response = self._get('{0}.zip'.format(path_to_directory)) if response: b...
Gets an artifact directory by its path. See the `Go artifact directory documentation`__ for example responses. .. __: http://api.go.cd/current/#get-artifact-directory .. note:: Getting a directory relies on Go creating a zip file of the directory in question. Because of th...
def update(dst, src): stack = [(dst, src)] def isdict(o): return hasattr(o, 'keys') while stack: current_dst, current_src = stack.pop() for key in current_src: if key not in current_dst: current_dst[key] = current_src[key] else: ...
Recursively update the destination dict-like object with the source dict-like object. Useful for merging options and Bunches together! Based on: http://code.activestate.com/recipes/499335-recursively-update-a-dictionary-without-hitting-py/#c1
def remove(name_or_path): click.echo() try: r = cpenv.resolve(name_or_path) except cpenv.ResolveError as e: click.echo(e) return obj = r.resolved[0] if not isinstance(obj, cpenv.VirtualEnvironment): click.echo('{} is a module. Use `cpenv module remove` instead.') ...
Remove an environment
def retrieve_descriptor(descriptor): the_descriptor = descriptor if the_descriptor is None: the_descriptor = {} if isinstance(the_descriptor, six.string_types): try: if os.path.isfile(the_descriptor): with open(the_descriptor, 'r') as f: the_de...
Retrieve descriptor.
def decode(geohash): lat, lon, lat_err, lon_err = decode_exactly(geohash) lats = "%.*f" % (max(1, int(round(-log10(lat_err)))) - 1, lat) lons = "%.*f" % (max(1, int(round(-log10(lon_err)))) - 1, lon) if '.' in lats: lats = lats.rstrip('0') if '.' in lons: lons = lons.rstrip('0') return lats, lon...
Decode geohash, returning two strings with latitude and longitude containing only relevant digits and with trailing zeroes removed.
def validate(self, strict=True): valid = True try: jsonschema.validate(self.__json__, self.__schema__) except jsonschema.ValidationError as invalid: if strict: raise SchemaError(str(invalid)) else: warnings.warn(str(invalid)) ...
Validate a JObject against its schema Parameters ---------- strict : bool Enforce strict schema validation Returns ------- valid : bool True if the jam validates False if not, and `strict==False` Raises ------ ...
def add_tag(self, tag):
    """Add ``tag`` to the tag list, keeping the entries unique."""
    current = set(self.tags) if self.tags else set()
    current.add(tag)
    self.tags = list(current)
Adds a tag to the list of tags and makes sure the result list contains only unique results.
def list_(args): osf = _setup_osf(args) project = osf.project(args.project) for store in project.storages: prefix = store.name for file_ in store.files: path = file_.path if path.startswith('/'): path = path[1:] print(os.path.join(prefix, p...
List all files from all storages for project. If the project is private you need to specify a username.
def filter_config(config, deploy_config):
    """Return a config subset using the filter in the deploy config.

    :param config: full configuration mapping
    :param deploy_config: path to the deploy-config module file
    :return: filtered config, or an empty DotDict when the file is
        missing
    """
    if not os.path.isfile(deploy_config):
        return DotDict()
    return get_config_module(deploy_config).filter(config)
Return a config subset using the filter defined in the deploy config.
def create_session(self): session = None if self.key_file is not None: credfile = os.path.expandvars(os.path.expanduser(self.key_file)) try: with open(credfile, 'r') as f: creds = json.load(f) except json.JSONDecodeError as e: ...
Create a session. First we look in self.key_file for a path to a json file with the credentials. The key file should have 'AWSAccessKeyId' and 'AWSSecretKey'. Next we look at self.profile for a profile name and try to use the Session call to automatically pick up the keys for the profi...
async def dispatch_downstream(self, message, steam_name):
    """Dispatch a downstream message coming from an upstream stream.

    If a handler method exists for this message (resolved via
    ``get_handler_name``), await it; otherwise propagate the message
    further downstream through ``base_send``.

    Note: the parameter is (mis)spelled ``steam_name`` in the public
    signature; it is forwarded to handlers as ``stream_name``.
    """
    handler = getattr(self, get_handler_name(message), None)
    if handler:
        await handler(message, stream_name=steam_name)
    else:
        await self.base_send(message)
Handle a downstream message coming from an upstream stream. If there is no handling method set for this message type it will propagate the message further downstream. This is called as part of the co-routine of an upstream stream, not the same loop as used for upstream messages in the de-multipl...
def clicky(parser, token):
    """Clicky tracking template tag: renders page-visit tracking JS.

    Requires the ``CLICKY_SITE_ID`` setting; the tag takes no
    arguments.
    """
    bits = token.split_contents()
    if len(bits) > 1:
        raise TemplateSyntaxError("'%s' takes no arguments" % bits[0])
    return ClickyNode()
Clicky tracking template tag. Renders Javascript code to track page visits. You must supply your Clicky Site ID (as a string) in the ``CLICKY_SITE_ID`` setting.
def call_dcm2nii(work_dir, arguments=''):
    """Convert DICOM files in ``work_dir`` to NifTi by invoking dcm2nii.

    Parameters
    ----------
    work_dir: str
        Folder containing the DICOM files.
    arguments: str
        Flag arguments appended to the dcm2nii command line.

    Returns
    -------
    Return code of the dcm2nii process (0 on success).

    Raises
    ------
    IOError when ``work_dir`` does not exist;
    subprocess.CalledProcessError when dcm2nii fails.
    """
    if not op.exists(work_dir):
        raise IOError('Folder {} not found.'.format(work_dir))
    # SECURITY NOTE(review): shell=True with interpolated `arguments`
    # and path — safe only if callers never pass untrusted input.
    cmd_line = 'dcm2nii {0} "{1}"'.format(arguments, work_dir)
    log.info(cmd_line)
    return subprocess.check_call(cmd_line, shell=True)
Converts all DICOM files within `work_dir` into one or more NifTi files by calling dcm2nii on this folder. Parameters ---------- work_dir: str Path to the folder that contain the DICOM files arguments: str String containing all the flag arguments for `dcm2nii` CLI. Returns ...
def to_float(option, value):
    """Convert ``value`` to float when it is a numeric string.

    Non-numeric strings and non-string values pass through unchanged.

    :param option: option name (returned untouched)
    :param value: value to coerce
    :return: ``(option, possibly-converted value)`` tuple
    """
    # isinstance is the idiomatic check and also accepts str subclasses.
    if isinstance(value, str):
        try:
            value = float(value)
        except ValueError:
            # Deliberate best-effort: leave non-numeric strings as-is.
            pass
    return (option, value)
Converts string values to floats when appropriate
def print_boggle(board):
    """Print the board in a 2-d array.

    Fixed to use Python 3 print() calls; the original Python 2 print
    statements are syntax errors on Python 3.
    """
    n2 = len(board)
    n = exact_sqrt(n2)
    for i in range(n2):
        if i % n == 0 and i > 0:
            print()
        if board[i] == 'Q':
            print('Qu', end=' ')
        else:
            print(str(board[i]) + ' ', end=' ')
    print()
Print the board in a 2-d array.
def backend_version(backend, childprocess=None):
    """Return the version string of the given back-end.

    :param backend: back-end name (examples: scrot, wx, ...)
    :param childprocess: run the query in a child process; defaults to
        the module-wide setting (see :py:func:`grab`)
    :return: version as string
    """
    if childprocess is None:
        childprocess = childprocess_default_value()
    if childprocess:
        return run_in_childprocess(_backend_version, None, backend)
    return _backend_version(backend)
Back-end version. :param backend: back-end (examples:scrot, wx,..) :param childprocess: see :py:func:`grab` :return: version as string
def make_present_participles(verbs):
    """Turn verb phrases into present participles.

    E.g.: empower -> empowering, drive -> driving. Only the first word
    of each phrase is inflected.
    """
    participles = []
    for phrase in verbs:
        words = phrase.split()
        stem = words[0]
        # Drop a trailing 'e' before adding 'ing' (drive -> driving).
        if stem.endswith("e"):
            stem = stem[:-1]
        words[0] = stem + "ing"
        participles.append(" ".join(words))
    return participles
Make the list of verbs into present participles E.g.: empower -> empowering drive -> driving
def active_env_module_resolver(resolver, path):
    """Resolve ``path`` as a module of the currently active environment.

    :raises ResolveError: when no environment is active or the module
        is not found in it
    """
    from .api import get_active_env
    env = get_active_env()
    if not env:
        raise ResolveError
    module = env.get_module(path)
    if not module:
        raise ResolveError
    return module
Resolves modules in currently active environment.
def execute_sync(self, message):
    """Respond when the server indicates that the client is out of sync.

    Applies the server's authoritative state to the world (with the
    world temporarily unlocked) and lets the world and every actor
    react to the sync response.
    """
    # Bug fix: the f-prefix was missing, so the literal text
    # "{message}" was logged instead of the message itself.
    info(f"synchronizing message: {message}")
    with self.world._unlock_temporarily():
        message._sync(self.world)
        self.world._react_to_sync_response(message)
    for actor in self.actors:
        actor._react_to_sync_response(message)
Respond when the server indicates that the client is out of sync. The server can request a sync when this client sends a message that fails the check() on the server. If the reason for the failure isn't very serious, then the server can decide to send it as usual in the interest of ...
def _concat_queries(queries, operators='__and__'): if not queries: raise ValueError('Expected some `queries`, got {}.'.format(queries)) if len(queries) == 1: return queries[0] if isinstance(operators, str): operators = [operators] * (len(queries) - 1) if len(queries) - 1 != len(o...
Create a tinyDB Query object that is the concatenation of each query in `queries`. The concatenation operator is taken from `operators`. Parameters ---------- queries: list of tinydb.Query The list of tinydb.Query to be joined. operators: str or list of str List of binary operators...
def file_supported(cls, filename):
    """Return True when ``filename`` has an extension this class handles.

    Non-string inputs are rejected.
    """
    if not isinstance(filename, str):
        return False
    _, extension = os.path.splitext(filename)
    return extension in cls.extensions
Returns a boolean indicating whether the filename has an appropriate extension for this class.
def invoke_hook_spout_fail(self, message_id, fail_latency_ns): if len(self.task_hooks) > 0: spout_fail_info = SpoutFailInfo(message_id=message_id, spout_task_id=self.get_task_id(), fail_latency_ms=fail_latency_ns * system_constants.NS...
invoke task hooks for every time spout fails a tuple :type message_id: str :param message_id: message id to which a failed tuple was anchored :type fail_latency_ns: float :param fail_latency_ns: fail latency in nano seconds
def write_extracted_licenses(lics, out): write_value('LicenseID', lics.identifier, out) if lics.full_name is not None: write_value('LicenseName', lics.full_name, out) if lics.comment is not None: write_text_value('LicenseComment', lics.comment, out) for xref in sorted(lics.cross_ref): ...
Write extracted licenses fields to out.
def run_splitted_processing(max_simultaneous_processes, process_name, filenames): pids = [] while len(filenames) > 0: while len(filenames) > 0 and len(pids) < max_simultaneous_processes: filename = filenames.pop() pids.append(service_start(service=proc...
Run processes which push the routing dump of the RIPE in a redis database. The dump has been splitted in multiple files and each process run on one of this files.
def bdp(tickers, flds, **kwargs): logger = logs.get_logger(bdp, level=kwargs.pop('log', logs.LOG_LEVEL)) con, _ = create_connection() ovrds = assist.proc_ovrds(**kwargs) logger.info( f'loading reference data from Bloomberg:\n' f'{assist.info_qry(tickers=tickers, flds=flds)}' ) da...
Bloomberg reference data Args: tickers: tickers flds: fields to query **kwargs: bbg overrides Returns: pd.DataFrame Examples: >>> bdp('IQ US Equity', 'Crncy', raw=True) ticker field value 0 IQ US Equity Crncy USD >>> bdp('IQ US...
def next_item(self):
    """Pop one item from the queue, waiting up to 5s; None on failure."""
    try:
        return self.queue.get(block=True, timeout=5)
    except Exception:
        # Broad on purpose: any failure (timeout included) yields None.
        return None
Get a single item from the queue.
def _run_gevent(app, config, mode): import gevent import gevent.monkey gevent.monkey.patch_all() from gevent.pywsgi import WSGIServer server_args = { "bind_addr": (config["host"], config["port"]), "wsgi_app": app, "keyfile": None, "certfile": None, } protocol ...
Run WsgiDAV using gevent if gevent is installed. See https://github.com/gevent/gevent/blob/master/src/gevent/pywsgi.py#L1356 https://github.com/gevent/gevent/blob/master/src/gevent/server.py#L38 for more options
def on_exit_stage(self):
    """Give the messaging system, actors and world a chance to react to
    the end of the game.

    Order: forum first, then each actor, then the world (mutated with
    the world temporarily unlocked).
    """
    self.forum.on_finish_game()
    for actor in self.actors:
        actor.on_finish_game()
    with self.world._unlock_temporarily():
        self.world.on_finish_game()
Give the actors, the world, and the messaging system a chance to react to the end of the game.
def spin_up_instance(self, command, job_name): command = Template(template_string).substitute(jobname=job_name, user_script=command, linger=str(self.linger).lower(), ...
Start an instance in the VPC in the first available subnet. N instances will be started if nodes_per_block > 1. Not supported. We only do 1 node per block. Parameters ---------- command : str Command string to execute on the node. job_name : str ...
def sort_by_modified(files_or_folders: list) -> list:
    """Sort paths by modification time, most recent first.

    Args:
        files_or_folders: list of file or folder paths

    Returns:
        new list ordered newest-modified first
    """
    entries = list(files_or_folders)
    entries.sort(key=os.path.getmtime, reverse=True)
    return entries
Sort files or folders by modified time Args: files_or_folders: list of files or folders Returns: list
def contains_ignoring_case(self, *items): if len(items) == 0: raise ValueError('one or more args must be given') if isinstance(self.val, str_types): if len(items) == 1: if not isinstance(items[0], str_types): raise TypeError('given arg must be ...
Asserts that val is string and contains the given item or items.
def expose_ancestors_or_children(self, member, collection, lang=None): x = { "id": member.id, "label": str(member.get_label(lang)), "model": str(member.model), "type": str(member.type), "size": member.size, "semantic": self.semantic(member,...
Build an ancestor or descendant dict view based on selected information :param member: Current Member to build for :param collection: Collection from which we retrieved it :param lang: Language to express data in :return:
def get_mongoadmins(self): apps = [] for app_name in settings.INSTALLED_APPS: mongoadmin = "{0}.mongoadmin".format(app_name) try: module = import_module(mongoadmin) except ImportError as e: if str(e).startswith("No module named"): ...
Returns a list of all mongoadmin implementations for the site
def move_next_to(self, body_a, body_b, offset_a, offset_b): ba = self.get_body(body_a) bb = self.get_body(body_b) if ba is None: return bb.relative_offset_to_world(offset_b) if bb is None: return ba.relative_offset_to_world(offset_a) anchor = ba.relative_o...
Move one body to be near another one. After moving, the location described by ``offset_a`` on ``body_a`` will be coincident with the location described by ``offset_b`` on ``body_b``. Parameters ---------- body_a : str or :class:`Body` The body to use as a reference ...
def get_exception_from_status_and_error_codes(status_code, error_code, value): if status_code == requests.codes.bad_request: exception = BadRequest(value) elif status_code == requests.codes.unauthorized: exception = Unauthorized(value) elif status_code == requests.codes.forbidden: ex...
Return an exception given status and error codes. :param status_code: HTTP status code. :type status_code: None | int :param error_code: Midas Server error code. :type error_code: None | int :param value: Message to display. :type value: string :returns: Exception. :rtype : pydas.except...
def add_example(self, example):
    """Validate ``example`` via check_example, then append it to the
    list of examples."""
    self.check_example(example)
    self.examples.append(example)
Add an example to the list of examples, checking it first.
def removeAllRecords(self):
    """Delete every value in the dataset, resetting each field's
    encodings, values and counters."""
    for field in self.fields:
        field.encodings = []
        field.values = []
        field.numRecords = 0
        field.numEncodings = 0
Deletes all the values in the dataset
async def get_size(media): if hasattr(media, 'seek'): await execute(media.seek(0, os.SEEK_END)) size = await execute(media.tell()) await execute(media.seek(0)) elif hasattr(media, 'headers'): size = int(media.headers['Content-Length']) elif isinstance(media, bytes): s...
Get the size of a file Parameters ---------- media : file object The file object of the media Returns ------- int The size of the file
def set_color_list(self, color_list, offset=0): if not len(color_list): return color_list = make.colors(color_list) size = len(self._colors) - offset if len(color_list) > size: color_list = color_list[:size] self._colors[offset:offset + len(color_list)] = ...
Set the internal colors starting at an optional offset. If `color_list` is a list or other 1-dimensional array, it is reshaped into an N x 3 list. If `color_list` too long it is truncated; if it is too short then only the initial colors are set.
def find(self, y): node = self.root while True: edge = self._edgeLabel(node, node.parent) if edge.startswith(y): return node.idx i = 0 while(i < len(edge) and edge[i] == y[0]): y = y[1:] i += 1 if...
Returns starting position of the substring y in the string used for building the Suffix tree. :param y: String :return: Index of the starting position of string y in the string used for building the Suffix tree -1 if y is not a substring.
def add_item(self, jid, node=None, name=None, action=None):
    """Add a new item to the `DiscoItems` object.

    :Parameters:
        - `jid`: item JID.
        - `node`: item node name.
        - `name`: item name.
        - `action`: action for a "disco push".
    :return: the created `DiscoItem` (presumably it registers itself
        with this object via its constructor — confirm in DiscoItem).
    """
    return DiscoItem(self, jid, node, name, action)
Add a new item to the `DiscoItems` object. :Parameters: - `jid`: item JID. - `node`: item node name. - `name`: item name. - `action`: action for a "disco push". :Types: - `jid`: `pyxmpp.JID` - `node`: `unicode` - `name`...