code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def detectTierTablet(self):
    """Detect whether the device belongs to the tablet tier.

    Checks, in order, for iPad, Android tablet, BlackBerry tablet,
    Firefox OS tablet, Ubuntu tablet and WebOS tablet, returning the
    first truthy detection result (or the final falsy one).
    """
    result = False
    for detect in (
        self.detectIpad,
        self.detectAndroidTablet,
        self.detectBlackBerryTablet,
        self.detectFirefoxOSTablet,
        self.detectUbuntuTablet,
        self.detectWebOSTablet,
    ):
        result = detect()
        if result:
            # Short-circuit exactly like the original `or` chain.
            return result
    return result
Return detection of any device in the Tablet Tier The quick way to detect for a tier of devices. This method detects for the new generation of HTML 5 capable, larger screen tablets. Includes iPad, Android (e.g., Xoom), BB Playbook, WebOS, etc.
def __get_line_profile_data(self):
    """Return the line-profile data for this file.

    :return: Mapping of line numbers to profile entries taken from the
        profiler's per-file data, or an empty dict when no line profiler
        is attached.
    """
    profiler = self.line_profiler
    if profiler is None:
        return {}
    file_entry = profiler.file_dict[self.pyfile.path][0]
    return file_entry.line_dict
Method to procure line profiles. @return: Line profiles if the file has been profiled, else an empty dictionary.
def _check_graph(self, graph): if graph.num_vertices != self.size: raise TypeError("The number of vertices in the graph does not " "match the length of the atomic numbers array.") if (self.numbers != graph.numbers).any(): raise TypeError("The atomic numbers in the...
the atomic numbers must match
def right(self):
    """Return the directory entry that is the right sibling of this one.

    :return: The sibling entry looked up in the source directory, or
        ``None`` when ``right_sibling_id`` is the NOSTREAM sentinel.
    """
    if self.right_sibling_id == NOSTREAM:
        return None
    return self.source.directory[self.right_sibling_id]
Return the entry that is the right sibling of the current directory entry.
def mkdir(self, path):
    """Create directory *path* (and any parents) if it does not exist.

    Uses EAFP instead of the original exists-then-create check, which
    was racy: another process could create the directory between
    ``os.path.exists`` and ``os.makedirs``, raising OSError.

    :param path: filesystem path of the directory to create
    """
    try:
        os.makedirs(path)
    except OSError:
        # Mirror the original behaviour: do nothing when the path
        # already exists; re-raise genuine failures.
        if not os.path.exists(path):
            raise
create a directory if it does not exist.
def read_config(self, correlation_id, parameters):
    """Read the configuration object and parameterize it.

    :param correlation_id: (optional) transaction id used to trace
        execution through the call chain.
    :param parameters: values used to parameterize the configuration,
        or None to skip parameterization.
    :return: the resulting ConfigParams configuration.
    """
    raw_value = self._read_object(correlation_id, parameters)
    return ConfigParams.from_value(raw_value)
Reads the configuration and parameterizes it with the given values. :param correlation_id: (optional) transaction id to trace execution through call chain. :param parameters: values to parameterize the configuration or null to skip parameterization. :return: ConfigParams configuration.
def tile_bbox(self, tile_indices):
    """Return the WGS84 bounding box of the given tile.

    :param tile_indices: (z, x, y) tile coordinates
    :return: concatenation of the north-west and south-east corners as
        produced by ``unproject_pixels``.
    """
    z, x, y = tile_indices
    size = self.tilesize
    top_left_px = (x * size, (y + 1) * size)
    bottom_right_px = ((x + 1) * size, y * size)
    north_west = self.unproject_pixels(top_left_px, z)
    south_east = self.unproject_pixels(bottom_right_px, z)
    return north_west + south_east
Returns the WGS84 bbox of the specified tile
def spacing(self):
    """Get the image spacing.

    Returns
    -------
    tuple
        Spacing along each image dimension.
    """
    getter = utils.get_lib_fn('getSpacing{}'.format(self._libsuffix))
    return getter(self.pointer)
Get image spacing Returns ------- tuple
def upload_package(context): if not context.dry_run and build_distributions(context): upload_args = 'twine upload ' upload_args += ' '.join(Path('dist').files()) if context.pypi: upload_args += ' -r %s' % context.pypi upload_result = shell.dry_run(upload_args, context.dry...
Uploads your project packages to pypi with twine.
def printdir(self):
    """Print a table of contents for the zip file to stdout.

    One header row, then one row per archive member showing its name,
    modification date/time and uncompressed size.
    """
    # Parenthesized single-argument print works under both Python 2
    # and Python 3; the original bare print statements were
    # Python-2-only syntax.
    print("%-46s %19s %12s" % ("File Name", "Modified ", "Size"))
    for zinfo in self.filelist:
        date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
        print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size))
Print a table of contents for the zip file.
def _update_record(self, record_id, name, address, ttl): data = json.dumps({'record': {'name': name, 'content': address, 'ttl': ttl}}) headers = {'Content-Type': 'application/json'} request = self._session.put(self._base...
Updates an existing record.
def record(self, partition, num_bytes, num_records): self.unrecorded_partitions.remove(partition) self.total_bytes += num_bytes self.total_records += num_records if not self.unrecorded_partitions: self.sensors.bytes_fetched.record(self.total_bytes) self.sensors.re...
After each partition is parsed, we update the current metric totals with the total bytes and number of records parsed. After all partitions have reported, we write the metric.
async def on_raw_317(self, message):
    """Handle RPL_WHOISIDLE (numeric 317): record WHOIS idle time.

    Stores the idle time (seconds, as int) into the WHOIS info for
    *nickname* when a WHOIS request for that nick is pending.
    """
    # target is unused here — presumably our own nick; confirm against
    # the IRC numeric-317 parameter order.
    target, nickname, idle_time = message.params[:3]
    info = {
        'idle': int(idle_time),
    }
    # Only record if a WHOIS request for this nick is in flight.
    if nickname in self._pending['whois']:
        self._whois_info[nickname].update(info)
WHOIS idle time.
def groups(self, query=None, exclude=None, maxResults=9999): params = {} groups = [] if query is not None: params['query'] = query if exclude is not None: params['exclude'] = exclude if maxResults is not None: params['maxResults'] = maxResults ...
Return a list of groups matching the specified criteria. :param query: filter groups by name with this string :type query: Optional[str] :param exclude: filter out groups by name with this string :type exclude: Optional[Any] :param maxResults: maximum results to return. (Default...
def create_todo_item(self, list_id, content, party_id=None, notify=False): path = '/todos/create_item/%u' % list_id req = ET.Element('request') ET.SubElement(req, 'content').text = str(content) if party_id is not None: ET.SubElement(req, 'responsible-party').text = str(party_...
This call lets you add an item to an existing list. The item is added to the bottom of the list. If a person is responsible for the item, give their id as the party_id value. If a company is responsible, prefix their company id with a 'c' and use that as the party_id value. If the item h...
def _list_keys(self):
    """Retrieve all Keys from the API and rebuild the ``self._keys``
    cache as a mapping of key id -> Key instance.

    Note: despite the historical docstring, this method returns None;
    callers read ``self._keys`` afterwards.
    """
    req = self.request(self.uri + '/keys')
    keys = req.get().json()
    # Always reset the cache, then fill it from the response (if any).
    # The original duplicated `self._keys = {}` in both branches.
    self._keys = {}
    for key in keys or ():
        self._keys[key['id']] = Key(key, self)
Retrieves a list of all added Keys and populates the self._keys dict with Key instances :returns: A list of Keys instances
def _fetch_all_as_dict(self, cursor):
    """Fetch every remaining row from *cursor* as a list of dicts.

    :param cursor: DB-API cursor whose query has been executed
    :return: A list of dictionaries where each row is a dictionary
        keyed by column name
    :rtype: list of dict
    """
    # Hoist the column-name list out of the per-row comprehension; the
    # original rebuilt it for every row.
    column_names = [column[0] for column in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]
Iterates over the result set and converts each row to a dictionary :return: A list of dictionaries where each row is a dictionary :rtype: list of dict
def combine(self, expert_out, multiply_by_gates=True): stitched = common_layers.convert_gradient_to_tensor( tf.concat(expert_out, 0)) if multiply_by_gates: stitched *= tf.expand_dims(self._nonzero_gates, 1) combined = tf.unsorted_segment_sum(stitched, self._batch_index, ...
Sum together the expert output, weighted by the gates. The slice corresponding to a particular batch element `b` is computed as the sum over all experts `i` of the expert output, weighted by the corresponding gate values. If `multiply_by_gates` is set to False, the gate values are ignored. Args: ...
def annotate_intervals(target_file, data): out_file = "%s-gcannotated.tsv" % utils.splitext_plus(target_file)[0] if not utils.file_uptodate(out_file, target_file): with file_transaction(data, out_file) as tx_out_file: params = ["-T", "AnnotateIntervals", "-R", dd.get_ref_file(data), ...
Provide GC annotated intervals for error correction during panels and denoising. TODO: include mappability and segmentation duplication inputs
def make_request_validator(request):
    """Build a validator schema for the incoming OAI-PMH request.

    Uses the plain verb schemas unless a resumptionToken parameter is
    present, in which case the resumption-token variants apply; unknown
    verbs fall back to the base OAISchema.
    """
    verb = request.values.get('verb', '', type=str)
    has_token = request.values.get('resumptionToken', None) is not None
    schema_group = ResumptionVerbs if has_token else Verbs
    schema_class = getattr(schema_group, verb, OAISchema)
    return schema_class(partial=False)
Validate arguments in incoming request.
def _radial_distance(shape):
    """Return an array of Euclidean distances from the array center.

    Parameters
    ----------
    shape : tuple of int
        Size of the output array along each (exactly two) axis.

    Returns
    -------
    `~numpy.ndarray`
        Radial distance of every pixel from the central position.

    Raises
    ------
    ValueError
        If ``shape`` does not have exactly two elements.
    """
    if len(shape) != 2:
        raise ValueError('shape must have only 2 elements')
    center = (np.asarray(shape) - 1) / 2.
    dx = np.arange(shape[1]) - center[1]
    dy = np.arange(shape[0]) - center[0]
    grid_x, grid_y = np.meshgrid(dx, dy)
    return np.sqrt(grid_x**2 + grid_y**2)
Return an array where each value is the Euclidean distance from the array center. Parameters ---------- shape : tuple of int The size of the output array along each axis. Returns ------- result : `~numpy.ndarray` An array containing the Euclidian radial distances from the ...
async def fetch(self, limit: int = None) -> Sequence[StorageRecord]: LOGGER.debug('StorageRecordSearch.fetch >>> limit: %s', limit) if not self.opened: LOGGER.debug('StorageRecordSearch.fetch <!< Storage record search is closed') raise BadSearch('Storage record search is closed')...
Fetch next batch of search results. Raise BadSearch if search is closed, WalletState if wallet is closed. :param limit: maximum number of records to return (default value Wallet.DEFAULT_CHUNK) :return: next batch of records found
def _check_metrics(cls, schema, metrics): for name, value in metrics.items(): metric = schema.get(name) if not metric: message = "Unexpected metric '{}' returned".format(name) raise Exception(message) cls._check_metric(schema, metric, name, val...
Ensure that returned metrics are properly exposed
def purge_queues(self, queues):
    """Purge all messages from one or more queues.

    :param list queues: A list of ('qname', 'vhost') tuples.
    :returns: True on success
    """
    for queue_name, vhost_name in queues:
        encoded_vhost = quote(vhost_name, '')
        encoded_name = quote(queue_name, '')
        path = Client.urls['purge_queue'] % (encoded_vhost, encoded_name)
        self._call(path, 'DELETE')
    return True
Purge all messages from one or more queues. :param list queues: A list of ('qname', 'vhost') tuples. :returns: True on success
def hash_data(data, hashlen=None, alphabet=None): r if alphabet is None: alphabet = ALPHABET_27 if hashlen is None: hashlen = HASH_LEN2 if isinstance(data, stringlike) and len(data) == 0: text = (alphabet[0] * hashlen) else: hasher = hashlib.sha512() _update_h...
r""" Get a unique hash depending on the state of the data. Args: data (object): any sort of loosely organized data hashlen (None): (default = None) alphabet (None): (default = None) Returns: str: text - hash string CommandLine: python -m utool.util_hash hash_d...
async def user_info(self, params=None, **kwargs):
    """Fetch the user's profile with the Facebook-required fields param.

    :param params: extra request parameters; the 'fields' key is always
        overwritten with the full field list below.
    """
    params = params or {}
    params[
        'fields'] = 'id,email,first_name,last_name,name,link,locale,' \
                    'gender,location'
    return await super(FacebookClient, self).user_info(params=params, **kwargs)
Facebook required fields-param.
def load_sources(self, sources):
    """Delete all sources in the ROI and load the input source list.

    :param sources: iterable of Model instances or dicts (dicts are
        converted via ``Model.create_from_dict``).
    """
    self.clear()
    for s in sources:
        if isinstance(s, dict):
            s = Model.create_from_dict(s)
        # Defer index building until all sources are loaded.
        self.load_source(s, build_index=False)
    self._build_src_index()
Delete all sources in the ROI and load the input source list.
def delete_nic(self, instance_id, port_id):
    """Detach a network interface (port) from a server instance.

    :param instance_id: id of the server instance
    :param port_id: id of the port/interface to detach
    :return: True (the underlying client call raises on failure)
    """
    self.client.servers.interface_detach(instance_id, port_id)
    return True
Delete a Network Interface Controller
def issuperset(self, items):
    """Return whether this collection contains all *items*.

    >>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
    True
    """
    # Builtin generator replaces the `_compat.map` shim; `all`
    # short-circuits on the first missing item either way.
    seen = self._seen
    return all(item in seen for item in items)
Return whether this collection contains all items. >>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam']) True
def where_before_entry(query, ref):
    """Generate a where clause selecting entries strictly before *ref*.

    Ordering is by (local_date, id): an entry counts as "before" when
    its date is earlier, or the date is equal and its id is smaller.

    ref -- The entry of reference
    """
    return orm.select(
        e for e in query
        if e.local_date < ref.local_date
        or (e.local_date == ref.local_date and e.id < ref.id)
    )
Generate a where clause for prior entries ref -- The entry of reference
def Describe(self): result = ["\nUsername: %s" % self.urn.Basename()] labels = [l.name for l in self.GetLabels()] result.append("Labels: %s" % ",".join(labels)) if self.Get(self.Schema.PASSWORD) is None: result.append("Password: not set") else: result.append("Password: set") return "...
Return a description of this user.
def _uninstall_signal_handlers(self):
    """Restore the default handlers for SIGINT and SIGTERM."""
    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, signal.SIG_DFL)
Restores default signal handlers.
def _insert_row(self, i, index):
    """Insert a new row in the DataFrame.

    :param i: index location to insert
    :param index: index value to insert into the index list
    :return: nothing
    """
    if i == len(self._index):
        # Appending at the end is delegated to _add_row.
        self._add_row(index)
    else:
        self._index.insert(i, index)
        # Pad every column with None at the new position.
        for c in range(len(self._columns)):
            self._data[c].insert(i, None)
Insert a new row in the DataFrame. :param i: index location to insert :param index: index value to insert into the index list :return: nothing
def _parse_subnet(self, subnet_dict): if not subnet_dict: return alloc_pool = subnet_dict.get('allocation_pools') cidr = subnet_dict.get('cidr') subnet = cidr.split('/')[0] start = alloc_pool[0].get('start') end = alloc_pool[0].get('end') gateway = sub...
Return the subnet, start, end, gateway of a subnet.
def relation_factory(relation_name): role, interface = hookenv.relation_to_role_and_interface(relation_name) if not (role and interface): hookenv.log('Unable to determine role and interface for relation ' '{}'.format(relation_name), hookenv.ERROR) return None return _find...
Get the RelationFactory for the given relation name. Looks for a RelationFactory in the first file matching: ``$CHARM_DIR/hooks/relations/{interface}/{provides,requires,peer}.py``
def mapValues(self, f):
    """Pass each value in the key-value pair RDD through a map function
    without changing the keys; this also retains the original RDD's
    partitioning.

    >>> x = sc.parallelize([("a", ["apple", "banana", "lemon"]), ("b", ["grapes"])])
    >>> def f(x): return len(x)
    >>> x.mapValues(f).collect()
    """
    # A def instead of a lambda assignment (PEP 8 / E731).
    def map_values_fn(kv):
        # Apply f to the value only; the key passes through untouched.
        return kv[0], f(kv[1])

    return self.map(map_values_fn, preservesPartitioning=True)
Pass each value in the key-value pair RDD through a map function without changing the keys; this also retains the original RDD's partitioning. >>> x = sc.parallelize([("a", ["apple", "banana", "lemon"]), ("b", ["grapes"])]) >>> def f(x): return len(x) >>> x.mapValues(f).collect(...
def get_linked_metadata(obj, name=None, context=None, site=None, language=None): Metadata = _get_metadata_model(name) InstanceMetadata = Metadata._meta.get_model('modelinstance') ModelMetadata = Metadata._meta.get_model('model') content_type = ContentType.objects.get_for_model(obj) instances = [] ...
Gets metadata linked from the given object.
def FilterFnTable(fn_table, symbol):
    """Remove a specific symbol from a fn_table.

    :param fn_table: iterable of tuples whose first element is a symbol
    :param symbol: the symbol whose entries should be removed
    :return: new list with all other entries, in original order
    """
    # Comprehension replaces the manual append loop.
    return [entry for entry in fn_table if entry[0] != symbol]
Remove a specific symbol from a fn_table.
def map_legend_attributes(self): LOGGER.debug('InaSAFE Map getMapLegendAttributes called') legend_attribute_list = [ 'legend_notes', 'legend_units', 'legend_title'] legend_attribute_dict = {} for legend_attribute in legend_attribute_list: t...
Get the map legend attribute from the layer keywords if possible. :returns: None on error, otherwise the attributes (notes and units). :rtype: None, str
def write_to_path(self, path=None):
    """Write configuration to a file on disk.

    :param path: target path; defaults to ``self.path`` when None.
    """
    if path is None:
        path = self.path
    f = GitFile(path, 'wb')
    try:
        self.write_to_file(f)
    finally:
        # Always close the file, even if serialization fails.
        f.close()
Write configuration to a file on disk.
def get_languages(self):
    """Get the list of languages we need to start servers and create
    clients for.

    Always includes 'python'; adds every configured option whose name
    matches a known LSP language (case-insensitive).
    """
    languages = ['python']
    # Hoist the lowered language set out of the loop; the original
    # rebuilt the lowered list for every option (O(n*m)).
    lsp_languages = {lang.lower() for lang in LSP_LANGUAGES}
    for option in CONF.options(self.CONF_SECTION):
        if option in lsp_languages:
            languages.append(option)
    return languages
Get the list of languages we need to start servers and create clients for.
def make_error_redirect(self, authorization_error=None): if not self.redirect_uri: return HttpResponseRedirect(self.missing_redirect_uri) authorization_error = (authorization_error or AccessDenied('user denied the request')) response_params = get_error_details(authorization_...
Return a Django ``HttpResponseRedirect`` describing the request failure. If the :py:meth:`validate` method raises an error, the authorization endpoint should return the result of calling this method like so: >>> auth_code_generator = ( >>> AuthorizationCodeGenerator('/oauth2/missing_redirect_u...
def get_image_uri(region_name, repo_name, repo_version=1):
    """Return the algorithm image URI for the given AWS region,
    repository name, and repository version.
    """
    tagged_repo = '{}:{}'.format(repo_name, repo_version)
    registry_host = registry(region_name, repo_name)
    return '{}/{}'.format(registry_host, tagged_repo)
Return algorithm image URI for the given AWS region, repository name, and repository version
def _pick_lead_item(items):
    """Choose lead item for a set of samples.

    Picks the tumor sample for tumor/normal pairs and the first sample
    for batch groups.
    """
    paired = vcfutils.get_paired(items)
    if paired:
        return paired.tumor_data
    else:
        return list(items)[0]
Choose lead item for a set of samples. Picks tumors for tumor/normal pairs and first sample for batch groups.
def get_hierarchy_form(self, *args, **kwargs):
    """Pass through to provider HierarchyAdminSession.

    Dispatches to ``get_hierarchy_form_for_create`` when the last
    positional argument is a list or 'hierarchy_record_types' is given;
    otherwise to ``get_hierarchy_form_for_update``.
    """
    if isinstance(args[-1], list) or 'hierarchy_record_types' in kwargs:
        return self.get_hierarchy_form_for_create(*args, **kwargs)
    else:
        return self.get_hierarchy_form_for_update(*args, **kwargs)
Pass through to provider HierarchyAdminSession.get_hierarchy_form_for_update
def cache_set(key, value, timeout=None, refreshed=False):
    """Wrapper for ``cache.set`` that stores the entry packed with its
    desired expiry time.

    The real cache timeout is extended by ``CACHE_SET_DELAY_SECONDS`` so
    a stale entry can still be served while a refresh is under way;
    ``refreshed`` marks an entry re-stored during that grace window.
    """
    if timeout is None:
        timeout = settings.CACHE_MIDDLEWARE_SECONDS
    refresh_time = time() + timeout
    real_timeout = settings.CACHE_SET_DELAY_SECONDS + timeout
    packed = (value, refresh_time, refreshed)
    return cache.set(_hashed_key(key), packed, real_timeout)
Wrapper for ``cache.set``. Stores the cache entry packed with the desired cache expiry time. When the entry is retrieved from cache, the packed expiry time is also checked, and if past, the stale cache entry is stored again with an expiry that has ``CACHE_SET_DELAY_SECONDS`` added to it. In this case th...
def columns_formatter(cls, colname):
    """Decorator that registers a function as the formatter for column
    *colname* in ``cls.columns_formatters``.
    """
    def register(func):
        # Record the formatter under the column name and return the
        # function unchanged so it remains usable directly.
        cls.columns_formatters[colname] = func
        return func

    return register
Decorator to mark a function as columns formatter.
def vector(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]:
    """Return the vector property in relative coordinates.

    The vector is ((y_start, x_start), (y_end, x_end)).
    """
    ...
Return the vector property in relative coordinates. Vector will be a tuple of tuples ((y_start, x_start), (y_end, x_end)).
def inject_settings(mixed: Union[str, Settings], context: MutableMapping[str, Any], fail_silently: bool = False) -> None: if isinstance(mixed, str): try: mixed = import_module(mixed) except Exception: if fail_silently: r...
Inject settings values to given context. :param mixed: Settings can be a string (that it will be read from Python path), Python module or dict-like instance. :param context: Context to assign settings key values. It should support dict-like item assingment. :param fail_silen...
def validate_signed_elements(self, signed_elements): if len(signed_elements) > 2: return False response_tag = '{%s}Response' % OneLogin_Saml2_Constants.NS_SAMLP assertion_tag = '{%s}Assertion' % OneLogin_Saml2_Constants.NS_SAML if (response_tag in signed_elements and signed_e...
Verifies that the document has the expected signed nodes. :param signed_elements: The signed elements to be checked :type signed_elements: list :param raise_exceptions: Whether to return false on failure or raise an exception :type raise_exceptions: Boolean
def print_object_attributes(thing, heading=None, file=None):
    """Print the attribute names in *thing* vertically.

    :param thing: object whose attribute names are listed
    :param heading: optional heading printed as '== heading =='
    :param file: stream passed through to print()
    """
    if heading:
        print('==', heading, '==', file=file)
    print('\n'.join(object_attributes(thing)), file=file)
Print the attribute names in thing vertically
def execute(self, limit='default', params=None, **kwargs):
    """If this expression is based on physical tables in a database
    backend, execute it against that backend.

    Parameters
    ----------
    limit : integer or None, default 'default'
        Row limit; None means no limit, 'default' uses the backend's
        configured default.
    params : mapping, optional
        Bound parameter values, forwarded to the client executor.
    """
    # Imported lazily — presumably to avoid a circular import at module
    # load time; confirm before moving to the top of the file.
    from ibis.client import execute
    return execute(self, limit=limit, params=params, **kwargs)
If this expression is based on physical tables in a database backend, execute it against that backend. Parameters ---------- limit : integer or None, default 'default' Pass an integer to effect a specific row limit. limit=None means "no limit". The default is whateve...
def call(args, stdout=PIPE, stderr=PIPE):
    """Run *args* in a separate process and return its captured output.

    :param args: argument list handed to ``Popen``
    :return: (stdout, stderr) decoded with the terminal encoding when
        possible, otherwise as the raw bytes.
    """
    process = Popen(args, stdout=stdout, stderr=stderr)
    raw_out, raw_err = process.communicate()
    encoding = sys.stdout.encoding
    try:
        return raw_out.decode(encoding), raw_err.decode(encoding)
    except Exception:
        # encoding may be None (redirected stdout) or decoding may
        # fail; fall back to the undecoded bytes.
        return raw_out, raw_err
Calls the given arguments in a separate process and returns the contents of standard out.
def computePCs(plink_path,k,bfile,ffile): try: output = subprocess.check_output('%s --version --noweb'%plink_path,shell=True) use_plink = float(output.split(' ')[1][1:-3])>=1.9 except: use_plink = False assert bfile!=None, 'Path to bed-file is missing.' assert os.path.exists(bfil...
compute the first k principal components Input: k : number of principal components plink_path : plink path bfile : binary bed file (bfile.bed, bfile.bim and bfile.fam are required) ffile : name of output file
def rescan_file(self, filename, sha256hash, apikey): url = self.base_url + "file/rescan" params = { 'apikey': apikey, 'resource': sha256hash } rate_limit_clear = self.rate_limit() if rate_limit_clear: response = requests.post(url, params=params...
just send the hash, check the date
def create_logger(log_file, name='logger', cmd=True): import logging logger = logging.getLogger(name) logger.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s | %(name)s | %(levelname)s | %(message)s', datefmt='%Y-%m-%d %H:%M:%S') fh = logging.FileH...
define a logger for your program parameters ------------ log_file file name of log name name of logger example ------------ logger = create_logger('example.log',name='logger',) logger.info('This is an example!') logger.warning('This is a warn!')
def greedy_max_inden_setcover(candidate_sets_dict, items, max_covers=None): uncovered_set = set(items) rejected_keys = set() accepted_keys = set() covered_items_list = [] while True: if max_covers is not None and len(covered_items_list) >= max_covers: break maxkey = None ...
greedy algorithm for maximum independent set cover Covers items with sets from candidate sets. Could be made faster. CommandLine: python -m utool.util_alg --test-greedy_max_inden_setcover Example0: >>> # ENABLE_DOCTEST >>> from utool.util_alg import * # NOQA >>> import ut...
def update_state_machine_tab_label(self, state_machine_m): sm_id = state_machine_m.state_machine.state_machine_id if sm_id in self.tabs: sm = state_machine_m.state_machine if not self.tabs[sm_id]['marked_dirty'] == sm.marked_dirty or \ not self.tabs[sm_id]['fi...
Updates tab label if needed because system path, root state name or marked_dirty flag changed :param StateMachineModel state_machine_m: State machine model that has changed :return:
def focus_down(pymux):
    " Move focus down. "
    # Target pane: same x position, y just below the current pane's
    # bottom edge (+2 presumably skips the pane border — confirm).
    _move_focus(pymux, lambda wp: wp.xpos, lambda wp: wp.ypos + wp.height + 2)
Move focus down.
def widget_from_single_value(o): if isinstance(o, string_types): return Text(value=unicode_type(o)) elif isinstance(o, bool): return Checkbox(value=o) elif isinstance(o, Integral): min, max, value = _get_min_max_value(None, None, o) return IntSlide...
Make widgets from single values, which can be used as parameter defaults.
def churn(self):
    """Return the churn ratio: canceled Subscriptions divided by active
    Subscriptions, as a ``decimal.Decimal``.
    """
    canceled_count = self.canceled().count()
    active_count = self.active().count()
    numerator = decimal.Decimal(str(canceled_count))
    denominator = decimal.Decimal(str(active_count))
    return numerator / denominator
Return number of canceled Subscriptions divided by active Subscriptions.
def get_group_for_col(self, table_name, col_name):
    """Check the data model to find the group name for a column header.

    Parameters
    ----------
    table_name: str
    col_name: str

    Returns
    ---------
    group_name: str
        Empty string when the column is not present.
    """
    table_df = self.dm[table_name]
    try:
        return table_df.loc[col_name, 'group']
    except KeyError:
        return ''
Check data model to find group name for a given column header Parameters ---------- table_name: str col_name: str Returns --------- group_name: str
def _set_hyperparameters(self, parameters): for name, value in parameters.iteritems(): try: getattr(self, name) except AttributeError: raise ValueError( 'Each parameter in parameters must be an attribute. ' '{} is no...
Set internal optimization parameters.
def create_backend(self, service_id, version_number, name, address, use_ssl=False, port=80, connect_timeout=1000, first_byte_timeout=15000, between_bytes_timeout=10000, error_threshold=0, max_conn=20, weight=100, auto_loadbalance=False, shield=None, request_condition=None, healthcheck...
Create a backend for a particular service and version.
def prox_soft_plus(X, step, thresh=0):
    """Soft thresholding followed by projection onto non-negative
    numbers.

    :param X: input operand
    :param step: proximal step size
    :param thresh: soft-threshold level (default 0)
    """
    return prox_plus(prox_soft(X, step, thresh=thresh), step)
Soft thresholding with projection onto non-negative numbers
def exclude_chars(text, exclusion=None):
    """Remove the symbols listed in *exclusion* from *text*.

    :param text: the string to clean
    :param exclusion: iterable of characters/symbols to strip out
    :return: *text* with every excluded symbol removed
    """
    if exclusion is None:
        exclusion = []
    pattern = r"|".join(select_regexp_char(char) for char in exclusion) or r''
    return re.sub(pattern, '', text)
Clean a text string of symbols in the exclusion list.
def handle_unset_command(self, line: str, position: int, tokens: ParseResults) -> ParseResults:
    """Handle an ``UNSET X`` statement, removing annotation *X*.

    :raises: MissingAnnotationKeyWarning if the key is not currently
        set — presumably raised by ``validate_unset_command``.
    """
    key = tokens['key']
    self.validate_unset_command(line, position, key)
    del self.annotations[key]
    return tokens
Handle an ``UNSET X`` statement or raises an exception if it is not already set. :raises: MissingAnnotationKeyWarning
def _fetch(self): if 'uri' in self._meta_data: error = "There was an attempt to assign a new uri to this "\ "resource, the _meta_data['uri'] is %s and it should"\ " not be changed." % (self._meta_data['uri']) raise URICreationCollision(error) ...
wrapped by `fetch` override that in subclasses to customize
def paint_agent_trail(self, y, x, val):
    """Paint an agent trail inside cell (y, x) with the colour for *val*.

    Only the interior of the cell (excluding a one-pixel border) is
    painted so that multiple agent trails stay visible in one cell.
    """
    base_x = x * self.cell_width
    base_y = y * self.cell_height
    for row in range(1, self.cell_height - 1):
        for col in range(1, self.cell_width - 1):
            self.img.put(self.agent_color(val), (base_x + col, base_y + row))
paint an agent trail as ONE pixel to allow for multiple agent trails to be seen in the same cell
def standardize_input_data(data):
    """Ensure utf-8 decoded strings are passed to the indico API.

    Decodes a bytes payload, or each bytes element of a list payload;
    anything else passes through unchanged.
    """
    if isinstance(data, bytes):
        return data.decode('utf-8')
    if isinstance(data, list):
        # Bug fix: the original tested `type(data) == bytes` inside the
        # comprehension (always False for a list), so elements were
        # never decoded.
        return [
            el.decode('utf-8') if isinstance(el, bytes) else el
            for el in data
        ]
    return data
Ensure utf-8 encoded strings are passed to the indico API
def is_successful(self, retry=False): if not self.is_terminated(retry=retry): return False retry_num = options.retry_times while retry_num > 0: try: statuses = self.get_task_statuses() return all(task.status == Instance.Task.TaskStatus.SUCC...
If the instance runs successfully. :return: True if successful else False :rtype: bool
def load(fp, class_=None, **kwargs):
    """Convert a JSON-encoded text file to a Physical Information Object
    or a list of such objects.

    :param fp: File-like object supporting .read() to deserialize from.
    :param class_: Subclass of Pio to produce, if not unambiguous.
    :param kwargs: Any options available to json.load().
    """
    return loado(json.load(fp, **kwargs), class_=class_)
Convert content in a JSON-encoded text file to a Physical Information Object or a list of such objects. :param fp: File-like object supporting .read() method to deserialize from. :param class_: Subclass of :class:`.Pio` to produce, if not unambiguous :param kwargs: Any options available to json.load(). ...
def _related_field_data(field, obj): data = _basic_field_data(field, obj) relation_info = { Field.REL_DB_TABLE: field.rel.to._meta.db_table, Field.REL_APP: field.rel.to._meta.app_label, Field.REL_MODEL: field.rel.to.__name__ } data[Field.TYPE] = FieldType.REL data[Field.REL] ...
Returns relation ``field`` as a dict. Dict contains related pk info and some meta information for reconstructing objects.
def make_ioc(name=None, description='Automatically generated IOC', author='IOC_api', links=None, keywords=None, iocid=None): root = ioc_et.make_ioc_root(iocid) root.append(ioc_et.make_metadata_node(name, description, au...
This generates all parts of an IOC, but without any definition. This is a helper function used by __init__. :param name: string, Name of the ioc :param description: string, description of the ioc :param author: string, author name/email address :param links: ist of tuples. Eac...
def escape_unicode_string(u):
    """Escape the nonprintable chars 0-31 and 127, plus backslash.

    Prefers a friendly equivalent such as '\\n' where the module-level
    REPLACEMENT_TABLE provides one, otherwise a Python-style escape.
    """
    def replacer(matchobj):
        # DEL (127) and backslash (92) are handled explicitly; all other
        # matched chars are looked up in REPLACEMENT_TABLE.
        if ord(matchobj.group(1)) == 127:
            return "\\x7f"
        if ord(matchobj.group(1)) == 92:
            return "\\\\"
        return REPLACEMENT_TABLE[ord(matchobj.group(1))]
    # Character class: control chars 0-31, backslash (0134), DEL (0177).
    return re.sub("([\\000-\\037\\134\\177])", replacer, u)
Escapes the nonprintable chars 0-31 and 127, and backslash; preferably with a friendly equivalent such as '\n' if available, but otherwise with a Python-style backslashed hex escape.
def escape_shell_arg(shell_arg):
    """Escape shell argument shell_arg by placing it within single-quotes.

    Any single quotes found within the shell argument string will be
    escaped.

    @param shell_arg: The shell argument to be escaped.
    @type shell_arg: string
    @return: The single-quote-escaped value of the shell argument.
    @rtype: string
    @raise TypeError: if shell_arg is a text (unicode) string.
        NOTE(review): ``six.text_type`` is ``unicode`` on Python 2, so
        this enforces byte-string input there; on Python 3 it rejects
        every ``str`` — confirm intended Python version before porting.
    """
    if isinstance(shell_arg, six.text_type):
        msg = "ERROR: escape_shell_arg() expected string argument but " \
              "got '%s' of type '%s'." % (repr(shell_arg), type(shell_arg))
        raise TypeError(msg)
    return "'%s'" % shell_arg.replace("'", r"'\''")
Escape shell argument shell_arg by placing it within single-quotes. Any single quotes found within the shell argument string will be escaped. @param shell_arg: The shell argument to be escaped. @type shell_arg: string @return: The single-quote-escaped value of the shell argument. @rtype: strin...
def get_customjs(self, references, plot_id=None): if plot_id is None: plot_id = self.plot.id or 'PLACEHOLDER_PLOT_ID' self_callback = self.js_callback.format(comm_id=self.comm.id, timeout=self.timeout, ...
Creates a CustomJS callback that will send the requested attributes back to python.
def merge_urls_data_to(to, food=None):
    """Merge per-URL data from *food* into *to* in place.

    New URLs are copied over; existing URLs are replaced by the result
    of ``existing.merge_with(new_data)``.

    :param to: destination mapping of url -> data (mutated)
    :param food: source mapping of url -> data (defaults to empty)
    """
    # Bug fix: the original used a mutable default argument (food={}).
    if food is None:
        food = {}
    if not to:
        # NOTE(review): after this bulk copy, the loop below merges each
        # entry with itself; preserved for backward compatibility.
        to.update(food)
    for url, data in food.items():
        if url not in to:
            to[url] = data
        else:
            to[url] = to[url].merge_with(data)
Merge urls data
def __make_footprint(input, size, footprint): "Creates a standard footprint element ala scipy.ndimage." if footprint is None: if size is None: raise RuntimeError("no footprint or filter size provided") sizes = _ni_support._normalize_sequence(size, input.ndim) footprint = nump...
Creates a standard footprint element ala scipy.ndimage.
def next_img(self, loop=True):
    """Go to the next image in the channel.

    Shows an error (and returns None) when no channel exists; otherwise
    advances the channel and returns True.

    :param loop: accepted for API compatibility; unused in this body.
    """
    channel = self.get_current_channel()
    if channel is None:
        self.show_error("Please create a channel.", raisetab=True)
        return
    channel.next_image()
    return True
Go to the next image in the channel.
def _onWhat(self, name, line, pos, absPosition):
    """Memorize an imported item on the most recent import statement."""
    self.__lastImport.what.append(ImportWhat(name, line, pos, absPosition))
Memorizes an imported item
def make_fasta_url( ensembl_release, species, sequence_type, server=ENSEMBL_FTP_SERVER): ensembl_release, species, reference_name = normalize_release_properties( ensembl_release, species) subdir = _species_subdir( ensembl_release, species=species, ...
Construct URL to FASTA file with cDNA transcript or protein sequences Parameter examples: ensembl_release = 75 species = "Homo_sapiens" sequence_type = "cdna" (other option: "pep")
def delete_thing_shadow(self, **kwargs):
    r"""Deletes the thing shadow for the specified thing.

    :Keyword Arguments:
        * *thingName* (``string``) -- [REQUIRED] The name of the thing.

    :returns: (``dict``) -- The output from the DeleteThingShadow
        operation, including a *payload* (``bytes``) entry.
    """
    thing_name = self._get_required_parameter('thingName', **kwargs)
    empty_payload = b''
    return self._shadow_op('delete', thing_name, empty_payload)
r""" Deletes the thing shadow for the specified thing. :Keyword Arguments: * *thingName* (``string``) -- [REQUIRED] The name of the thing. :returns: (``dict``) -- The output from the DeleteThingShadow operation * *payload* (``bytes``)...
def national(self):
    """Access the national phone-number list (created lazily).

    :returns: twilio.rest.api.v2010.account.available_phone_number.national.NationalList
    :rtype: twilio.rest.api.v2010.account.available_phone_number.national.NationalList
    """
    if self._national is None:
        # Build once and cache; later accesses reuse the same instance.
        self._national = NationalList(
            self._version,
            account_sid=self._solution['account_sid'],
            country_code=self._solution['country_code'],
        )
    return self._national
Access the national :returns: twilio.rest.api.v2010.account.available_phone_number.national.NationalList :rtype: twilio.rest.api.v2010.account.available_phone_number.national.NationalList
def client_getname(self, encoding=_NOTSET):
    """Get the current connection name (Redis CLIENT GETNAME)."""
    return self.execute(b'CLIENT', b'GETNAME', encoding=encoding)
Get the current connection name.
def contributions(self, request, **kwargs): if Contribution not in get_models(): return Response([]) if request.method == "POST": serializer = ContributionSerializer(data=get_request_data(request), many=True) if not serializer.is_valid(): return Respon...
gets or adds contributions :param request: a WSGI request object :param kwargs: keyword arguments (optional) :return: `rest_framework.response.Response`
def update(self): if not self._track_changes: return True data = self.to_api_data(restrict_keys=self._track_changes) response = self.session.patch(self.build_url(''), data=data) if not response: return False data = response.json() for field in self...
Update this range
def _ParseRecordExtraField(self, byte_stream, file_offset): extra_field_map = self._GetDataTypeMap('asl_record_extra_field') try: record_extra_field = self._ReadStructureFromByteStream( byte_stream, file_offset, extra_field_map) except (ValueError, errors.ParseError) as exception: rais...
Parses a record extra field. Args: byte_stream (bytes): byte stream. file_offset (int): offset of the record extra field relative to the start of the file. Returns: asl_record_extra_field: record extra field. Raises: ParseError: if the record extra field cannot be parsed...
def submodules(self):
    """Returns all documented sub-modules in the module sorted
    alphabetically as a list of `pydoc.Module`.
    """
    # Generator expression replaces the lambda-assigned predicate
    # (PEP 8 / E731); sorted() still returns a list.
    return sorted(
        doc for doc in self.doc.values()
        if isinstance(doc, Module) and self._docfilter(doc)
    )
Returns all documented sub-modules in the module sorted alphabetically as a list of `pydoc.Module`.
def inet_to_str(inet):
    """Convert a packed inet address to its printable string form.

    Args:
        inet (inet struct): packed IPv4 or IPv6 network address
    Returns:
        str: Printable/readable IP address
    """
    try:
        return socket.inet_ntop(socket.AF_INET, inet)
    except ValueError:
        # Not a 4-byte IPv4 address; fall through to IPv6.
        pass
    return socket.inet_ntop(socket.AF_INET6, inet)
Convert inet object to a string Args: inet (inet struct): inet network address Returns: str: Printable/readable IP address
def install_dependencies(self): if self._skip_virtualenv: LOG.info('Skip Virtualenv set ... nothing to do') return has_reqs = _isfile(self._requirements_file) or self._requirements if self._virtualenv is None and has_reqs: LOG.info('Building new virtualenv and...
Creates a virtualenv and installs requirements
def _master_control_program(self):
    """Return an instance of the MasterControlProgram built from the
    parsed command-line arguments.

    :rtype: rejected.mcp.MasterControlProgram
    """
    return mcp.MasterControlProgram(self.config,
                                    consumer=self.args.consumer,
                                    profile=self.args.profile,
                                    quantity=self.args.quantity)
Return an instance of the MasterControlProgram. :rtype: rejected.mcp.MasterControlProgram
def create_switch(type, settings, pin): switch = None if type == "A": group, device = settings.split(",") switch = pi_switch.RCSwitchA(group, device) elif type == "B": addr, channel = settings.split(",") addr = int(addr) channel = int(channel) switch = pi_switch.RCSwitchB(addr, channel) elif type == "C"...
Create a switch. Args: type: (str): type of the switch [A,B,C,D] settings (str): a comma separted list pin (int): wiringPi pin Returns: switch
def on_import1(self, event):
    """Open the dialog to import an arbitrary file into the working
    directory.

    :param event: GUI event (unused beyond triggering the handler)
    """
    pmag_menu_dialogs.MoveFileIntoWD(self.parent, self.parent.WD)
initialize window to import an arbitrary file into the working directory
def init_all_receivers():
    """Initialize all discovered Denon AVR receivers in the LAN zone.

    Per the original docstring, discovery sends SSDP broadcasts up to 3
    times with a 2 second timeout.

    :return: list of created DenonAVR instances.
    """
    # Comprehension replaces the manual append loop.
    return [DenonAVR(receiver["host"]) for receiver in discover()]
Initialize all discovered Denon AVR receivers in LAN zone. Returns a list of created Denon AVR instances. By default SSDP broadcasts are sent up to 3 times with a 2 seconds timeout.
def from_argparse(cls, opts):
    """Initialize an instance of the ethincaParameters class from an
    argparse.OptionParser instance.

    Assumes insert_ethinca_metric_options and
    verify_ethinca_metric_options have already been called.
    """
    return cls(opts.ethinca_pn_order, opts.filter_cutoff,
               opts.ethinca_frequency_step, fLow=None,
               full_ethinca=opts.calculate_ethinca_metric,
               time_ethinca=opts.calculate_time_metric_components)
Initialize an instance of the ethincaParameters class from an argparse.OptionParser instance. This assumes that insert_ethinca_metric_options and verify_ethinca_metric_options have already been called before initializing the class.
def _by_columns(self, columns):
    """Return *columns* usable by select.group / select.order.

    Strings pass through untouched; anything else is backtick-quoted
    via ``_backtick_columns``.
    """
    if self.isstr(columns):
        return columns
    return self._backtick_columns(columns)
Allow select.group and select.order accepting string and list
def _operator_norms(L):
    """Get operator norms if needed.

    Parameters
    ----------
    L : sequence of `Operator` or float
        The operators or the norms of the operators used in the
        `douglas_rachford_pd` method. Scalars are taken as
        already-computed norms; `Operator` entries have their norm
        estimated with ``Operator.norm(estimate=True)``.

    Raises
    ------
    TypeError
        If an entry is neither a scalar nor an Operator.
    """
    L_norms = []
    for Li in L:
        if np.isscalar(Li):
            L_norms.append(float(Li))
        elif isinstance(Li, Operator):
            L_norms.append(Li.norm(estimate=True))
        else:
            raise TypeError('invalid entry {!r} in `L`'.format(Li))
    return L_norms
Get operator norms if needed. Parameters ---------- L : sequence of `Operator` or float The operators or the norms of the operators that are used in the `douglas_rachford_pd` method. For `Operator` entries, the norm is computed with ``Operator.norm(estimate=True)``.
def _first_word_not_cmd(self, first_word: str, command: str, args: tuple, kwargs: dict) -> None: if self.service_interface.is_service(first_word): self._logger.debug(' first word is a serv...
check to see if this is an author or service. This method does high level control handling
def run(cmd_str,cwd='.',verbose=False):
    """Deprecated shim: execute *cmd_str* in an OS-agnostic way.

    Emits a PyemuWarning and forwards to ``pyemu.os_utils.run``.

    Parameters
    ----------
    cmd_str : str
        the command string to execute
    cwd : str
        the directory to execute the command in
    verbose : bool
        flag to echo the complete cmd str to stdout
    """
    warnings.warn("run() has moved to pyemu.os_utils",PyemuWarning)
    pyemu.os_utils.run(cmd_str=cmd_str,cwd=cwd,verbose=verbose)
an OS agnostic function to execute command Parameters ---------- cmd_str : str the str to execute with os.system() cwd : str the directory to execute the command in verbose : bool flag to echo to stdout complete cmd str Note ---- uses platform to detect OS and...