code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def setDriftLength(self, x):
    """Set the length(s) of the drift sections and flag for refresh.

    :param x: single double or list of drift lengths
    :return: None
    """
    if x != self.getDriftLength():
        self._setDriftList(x)
        # matrix must be rebuilt after the geometry changed
        self.refresh = True
set lengths for drift sections :param x: single double or list :return: None :Example: >>> import beamline >>> chi = beamline.mathutils.Chicane(bend_length=1,bend_field=0.5,drift_length=1,gamma=1000) >>> chi.getMatrix() >>> r56 = chi.getR(5,6) # r56 = -0.432 ...
def bind_port(self, context): port = context.current log_context("bind_port: port", port) for segment in context.segments_to_bind: physnet = segment.get(driver_api.PHYSICAL_NETWORK) segment_type = segment[driver_api.NETWORK_TYPE] if not physnet: ...
Bind port to a network segment. Provisioning request to Arista Hardware to plug a host into appropriate network is done when the port is created this simply tells the ML2 Plugin that we are binding the port
def get_machines_by_groups(self, groups): if not isinstance(groups, list): raise TypeError("groups can only be an instance of type list") for a in groups[:10]: if not isinstance(a, basestring): raise TypeError( "array can only contain objec...
Gets all machine references which are in one of the specified groups. in groups of type str What groups to match. The usual group list rules apply, i.e. passing an empty list will match VMs in the toplevel group, likewise the empty string. return machines of type :c...
def _digitize_lons(lons, lon_bins): if cross_idl(lon_bins[0], lon_bins[-1]): idx = numpy.zeros_like(lons, dtype=numpy.int) for i_lon in range(len(lon_bins) - 1): extents = get_longitudinal_extent(lons, lon_bins[i_lon + 1]) lon_idx = extents > 0 if i_lon != 0: ...
Return indices of the bins to which each value in lons belongs. Takes into account the case in which longitude values cross the international date line. :parameter lons: An instance of `numpy.ndarray`. :parameter lons_bins: An instance of `numpy.ndarray`.
def parse(self, string, evaluate_result=True):
    """Match this format against *string* exactly.

    Returns a Result (when *evaluate_result* is true) or a Match
    instance, or None if there is no match.
    """
    matched = self._match_re.match(string)
    if matched is None:
        return None
    return self.evaluate_result(matched) if evaluate_result else Match(self, matched)
Match my format to the string exactly. Return a Result or Match instance or None if there's no match.
def validate(self, url):
    """Validate that *url* is a GitHub repository whose preview passes.

    Returns True on success, False otherwise.
    """
    looks_like_github = url.startswith('http') and 'github' in url
    if not looks_like_github:
        bot.error('Test of preview must be given a Github repostitory.')
        return False
    return bool(self._validate_preview(url))
takes in a Github repository for validation of preview and runtime (and possibly tests passing?
def read_message(self): msg = "" num_blank_lines = 0 while True: with self._reader_lock: line = self.input_stream.readline() if line == "end\n": break elif line == "": raise StormWentAwayError() elif ...
The Storm multilang protocol consists of JSON messages followed by a newline and "end\n". All of Storm's messages (for either bolts or spouts) should be of the form:: '<command or task_id form prior emit>\\nend\\n' Command example, an incoming Tuple to a bolt:: ...
def get_grouped_indices(self, voigt=False, **kwargs): if voigt: array = self.voigt else: array = self indices = list(itertools.product(*[range(n) for n in array.shape])) remaining = indices.copy() grouped = [list(zip(*np.where(np.isclose(array, 0, **kwargs...
Gets index sets for equivalent tensor values Args: voigt (bool): whether to get grouped indices of voigt or full notation tensor, defaults to false **kwargs: keyword args for np.isclose. Can take atol and rtol for absolute and relative to...
def _client_connection(self, conn, addr): log.debug('Established connection with %s:%d', addr[0], addr[1]) conn.settimeout(self.socket_timeout) try: while self.__up: msg = conn.recv(self.buffer_size) if not msg: continue ...
Handle the connecition with one client.
def elekta_icon_fbp(ray_transform, padding=False, filter_type='Hann', frequency_scaling=0.6, parker_weighting=True): fbp_op = odl.tomo.fbp_op(ray_transform, padding=padding, filter_type=filter_type, ...
Approximation of the FDK reconstruction used in the Elekta Icon. Parameters ---------- ray_transform : `RayTransform` The ray transform to be used, should have an Elekta Icon geometry. padding : bool, optional Whether the FBP filter should use padding, increases memory use signi...
def calculate_first_digit(number): sum = 0 if len(number) == 9: weights = CPF_WEIGHTS[0] else: weights = CNPJ_WEIGHTS[0] for i in range(len(number)): sum = sum + int(number[i]) * weights[i] rest_division = sum % DIVISOR if rest_division < 2: return '0' return ...
This function calculates the first check digit of a cpf or cnpj. :param number: cpf (length 9) or cnpj (length 12) string to check the first digit. Only numbers. :type number: string :returns: string -- the first digit
def reset(self, base=0, item=0, leng=None, refs=None, both=True, kind=None, type=None): if base < 0: raise ValueError('invalid option: %s=%r' % ('base', base)) else: self.base = base if item < 0: raise ValueError('invalid op...
Reset all specified attributes.
def is_almost_simplicial(G, n):
    """Determine whether node *n* in graph *G* is almost simplicial.

    A node is almost simplicial when all but at most one of its
    neighbors form a clique: for some neighbor *excluded*, every pair
    of the remaining neighbors is adjacent.
    """
    for excluded in G[n]:
        others_form_clique = all(
            u in G[v]
            for u, v in itertools.combinations(G[n], 2)
            if excluded not in (u, v)
        )
        if others_form_clique:
            return True
    return False
Determines whether a node n in G is almost simplicial. Parameters ---------- G : NetworkX graph The graph on which to check whether node n is almost simplicial. n : node A node in graph G. Returns ------- is_almost_simplicial : bool True if all but one of its neighb...
def get_filter(self): q = self.q().select('name').expand('filter') response = self.session.get(self.build_url(''), params=q.as_params()) if not response: return None data = response.json() return data.get('criteria', None)
Returns the filter applied to this column
def analyse_text(text):
    """Score how likely *text* is REBOL (rather than R) code.

    Returns 1.0 when the text begins with a REBOL header, 0.5 when a
    REBOL header appears later in the text, and None otherwise.
    """
    if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
        return 1.0
    # BUG FIX: the original pattern used an unescaped '[' which opens a
    # character class and raises re.error at runtime; it must be '\['.
    elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
        return 0.5
Check if the code contains a REBOL header, in which case it is probably not R code
def _parse_flowcontrol_receive(self, config):
    """Scan an interface config block for the flowcontrol receive value.

    Args:
        config (str): the interface config block to scan.

    Returns:
        dict: ``{'flowcontrol_receive': value}`` where value defaults
        to 'off' when the setting is absent.
    """
    match = re.search(r'flowcontrol receive (\w+)$', config, re.M)
    setting = match.group(1) if match else 'off'
    return dict(flowcontrol_receive=setting)
Scans the config block and returns the flowcontrol receive value Args: config (str): The interface config block to scan Returns: dict: Returns a dict object with the flowcontrol receive value retrieved from the config block. The returned dict object ...
def hist(darray, figsize=None, size=None, aspect=None, ax=None, **kwargs): ax = get_axis(figsize, size, aspect, ax) xincrease = kwargs.pop('xincrease', None) yincrease = kwargs.pop('yincrease', None) xscale = kwargs.pop('xscale', None) yscale = kwargs.pop('yscale', None) xticks = kwargs.pop('xti...
Histogram of DataArray Wraps :func:`matplotlib:matplotlib.pyplot.hist` Plots N dimensional arrays by first flattening the array. Parameters ---------- darray : DataArray Can be any dimension figsize : tuple, optional A tuple (width, height) of the figure in inches. Mut...
def _module_callers(parser, modname, result): if modname in result: return module = parser.get(modname) mresult = {} if module is not None: for xname, xinst in module.executables(): _exec_callers(xinst, mresult) result[modname] = mresult for depkey in module.d...
Adds any calls to executables contained in the specified module.
def function_application(func): if func not in NUMEXPR_MATH_FUNCS: raise ValueError("Unsupported mathematical function '%s'" % func) @with_doc(func) @with_name(func) def mathfunc(self): if isinstance(self, NumericalExpression): return NumExprFactor( "{func}({e...
Factory function for producing function application methods for Factor subclasses.
def store(self, stream, linesep=os.linesep):
    """Serialize this section and write it to a binary *stream*.

    Each key/value pair is written via write_key_val; the section is
    terminated by one encoded line separator.
    """
    for key, value in self.items():
        write_key_val(stream, key, value, linesep)
    stream.write(linesep.encode('utf-8'))
Serialize this section and write it to a binary stream
def isAboveUpperDetectionLimit(self): if self.isUpperDetectionLimit(): return True result = self.getResult() if result and str(result).strip().startswith(UDL): return True if api.is_floatable(result): return api.to_float(result) > self.getUpperDetectio...
Returns True if the result is above the Upper Detection Limit or if Upper Detection Limit has been manually set
def _non_blocking_wrapper(self, method, *args, **kwargs): exceptions = [] def task_run(task): try: getattr(task, method)(*args, **kwargs) except Exception as e: exceptions.append(e) threads = [threading.Thread(name=f'task_{method}_{i}', target=task...
Runs given method on every task in the job. Blocks until all tasks finish. Propagates exception from first failed task.
def request_help(self, req, msg): if not msg.arguments: for name, method in sorted(self._request_handlers.items()): doc = method.__doc__ req.inform(name, doc) num_methods = len(self._request_handlers) return req.make_reply("ok", str(num_methods...
Return help on the available requests. Return a description of the available requests using a sequence of #help informs. Parameters ---------- request : str, optional The name of the request to return help for (the default is to return help for all reque...
def _repr_html_(): from bonobo.commands.version import get_versions return ( '<div style="padding: 8px;">' ' <div style="float: left; width: 20px; height: 20px;">{}</div>' ' <pre style="white-space: nowrap; padding-left: 8px">{}</pre>' "</div>" ).format(__logo__, "<br/>".jo...
This allows to easily display a version snippet in Jupyter.
def get_master_status(**connection_args): mod = sys._getframe().f_code.co_name log.debug('%s<--', mod) conn = _connect(**connection_args) if conn is None: return [] rtnv = __do_query_into_hash(conn, "SHOW MASTER STATUS") conn.close() if not rtnv: rtnv.append([]) log.debug...
Retrieves the master status from the minion. Returns:: {'host.domain.com': {'Binlog_Do_DB': '', 'Binlog_Ignore_DB': '', 'File': 'mysql-bin.000021', 'Position': 107}} CLI Example: .. code-block:: bash salt '*' mys...
def _merge_report(self, target, new): time = None if 'ts' in new['parsed']: time = new['parsed']['ts'] if (target.get('lastSeenDate', None) and time and target['lastSeenDate'] < time): target['lastSeenDate'] = time query_millis ...
Merges a new report into the target report
def _handle_github(self): value = click.prompt( _BUG + click.style( '1. Open an issue by typing "open";\n', fg='green', ) + click.style( '2. Print human-readable information by typing ' '"print";\n', fg='yell...
Handle exception and submit it as GitHub issue.
def inertial_advective_wind(u, v, u_geostrophic, v_geostrophic, dx, dy, lats): r f = coriolis_parameter(lats) dugdy, dugdx = gradient(u_geostrophic, deltas=(dy, dx), axes=(-2, -1)) dvgdy, dvgdx = gradient(v_geostrophic, deltas=(dy, dx), axes=(-2, -1)) u_component = -(u * dvgdx + v * dvgdy) / f v...
r"""Calculate the inertial advective wind. .. math:: \frac{\hat k}{f} \times (\vec V \cdot \nabla)\hat V_g .. math:: \frac{\hat k}{f} \times \left[ \left( u \frac{\partial u_g}{\partial x} + v \frac{\partial u_g}{\partial y} \right) \hat i + \left( u \frac{\partial v_g} {\partial x...
def _parse_and_sort_accept_header(accept_header):
    """Parse an HTTP Accept header and sort items by descending priority.

    >>> _parse_and_sort_accept_header('application/json;q=0.5, text/*')
    [('text/*', 1.0), ('application/json', 0.5)]
    """
    items = [
        _split_into_mimetype_and_priority(part)
        for part in accept_header.split(',')
    ]
    # list.sort is stable, matching sorted() on equal priorities
    items.sort(key=lambda pair: pair[1], reverse=True)
    return items
Parse and sort the accept header items. >>> _parse_and_sort_accept_header('application/json;q=0.5, text/*') [('text/*', 1.0), ('application/json', 0.5)]
def visit_setcomp(self, node, parent): newnode = nodes.SetComp(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.elt, newnode), [self.visit(child, newnode) for child in node.generators], ) return newnode
visit a SetComp node by returning a fresh instance of it
def MetaGraph(self): if self._meta_graph is None: raise ValueError('There is no metagraph in this EventAccumulator') meta_graph = meta_graph_pb2.MetaGraphDef() meta_graph.ParseFromString(self._meta_graph) return meta_graph
Return the metagraph definition, if there is one. Raises: ValueError: If there is no metagraph for this run. Returns: The `meta_graph_def` proto.
def global_exception_handler(handler):
    """Add a callback for when an exception goes uncaught in any greenlet.

    :param handler: callable taking three arguments
        (``klass``, ``exc``, ``tb``)
    :returns: *handler*, so this can be used as a decorator
    :raises TypeError: if *handler* is not callable
    """
    # callable() is the idiomatic form; hasattr(x, "__call__") predates it.
    if not callable(handler):
        raise TypeError("exception handlers must be callable")
    log.info("setting a new global exception handler")
    # store a weak reference so registration does not keep the handler alive
    state.global_exception_handlers.append(weakref.ref(handler))
    return handler
add a callback for when an exception goes uncaught in any greenlet :param handler: the callback function. must be a function taking 3 arguments: - ``klass`` the exception class - ``exc`` the exception instance - ``tb`` the traceback object :type handler: function Note also...
def get_or_none(cls, video_id, language_code): try: transcript = cls.objects.get(video__edx_video_id=video_id, language_code=language_code) except cls.DoesNotExist: transcript = None return transcript
Returns a data model object if found or none otherwise. Arguments: video_id(unicode): video id to which transcript may be associated language_code(unicode): language of the requested transcript
def make_response(self, data: Any = None, **kwargs: Any) -> Any: r if not self._valid_request: logger.error('Request not validated, cannot make response') raise self.make_error('Request not validated before, cannot make ' ...
r"""Validate response data and wrap it inside response factory. :param data: Response data. Could be ommited. :param \*\*kwargs: Keyword arguments to be passed to response factory.
def encodeSentence(self, *words):
    """Encode given sentence in API format.

    :param words: words to encode.
    :returns: encoded sentence as bytes, terminated by a zero byte.
    """
    payload = b''.join(self.encodeWord(word) for word in words)
    return payload + b'\x00'
Encode given sentence in API format. :param words: Words to encode. :returns: Encoded sentence.
def json_2_text(inp, out, verbose = False): for root, dirs, filenames in os.walk(inp): for f in filenames: log = codecs.open(os.path.join(root, f), 'r') j_obj = json.load(log) j_obj = json_format(j_obj) textWriter(j_obj, out, verbose)
Convert a Wikipedia article to Text object. Concatenates the sections in wikipedia file and rearranges other information so it can be interpreted as a Text object. Links and other elements with start and end positions are annotated as layers. Parameters ---------- inp: directory of parsed ...
def register(self, notification_cls=None): self.loaded = True display_names = [n.display_name for n in self.registry.values()] if ( notification_cls.name not in self.registry and notification_cls.display_name not in display_names ): self.registry.updat...
Registers a Notification class unique by name.
def fetch(self, url, body=None, headers=None): if body: method = 'POST' else: method = 'GET' if headers is None: headers = {} if not (url.startswith('http://') or url.startswith('https://')): raise ValueError('URL is not a HTTP URL: %r' % (...
Perform an HTTP request @raises Exception: Any exception that can be raised by httplib2 @see: C{L{HTTPFetcher.fetch}}
def add(self, scene): if not isinstance(scene, Scene): raise TypeError() self.__scenes.append(scene)
Add scene.
def _find_ancillary_vars(self, ds, refresh=False): if self._ancillary_vars.get(ds, None) and refresh is False: return self._ancillary_vars[ds] self._ancillary_vars[ds] = [] for name, var in ds.variables.items(): if hasattr(var, 'ancillary_variables'): for ...
Returns a list of variable names that are defined as ancillary variables in the dataset ds. An ancillary variable generally is a metadata container and referenced from other variables via a string reference in an attribute. - via ancillary_variables (3.4) - "grid mapping var" (...
def markInputline(self, markerString=">!<"):
    """Return the exception's input line with *markerString* inserted
    at the error column, stripped of surrounding whitespace.
    """
    text = self.line
    col = self.column - 1
    if markerString:
        text = text[:col] + markerString + text[col:]
    return text.strip()
Extracts the exception line from the input string, and marks the location of the exception with a special symbol.
def wr_tsv(self, fout_tsv): with open(fout_tsv, 'w') as prt: kws_tsv = { 'fld2fmt': {f:'{:8.2e}' for f in self.flds_cur if f[:2] == 'p_'}, 'prt_flds':self.flds_cur} prt_tsv_sections(prt, self.desc2nts['sections'], **kws_tsv) print(" WROTE: {TS...
Print grouped GOEA results into a tab-separated file.
def geometry_identifiers(self):
    """Look up geometries by identifier MD5.

    Returns
    ---------
    identifiers: dict, identifier md5 -> key in self.geometry
    """
    return {
        mesh.identifier_md5: name
        for name, mesh in self.geometry.items()
    }
Look up geometries by identifier MD5 Returns --------- identifiers: dict, identifier md5: key in self.geometry
def place_oceans_at_map_borders(world): ocean_border = int(min(30, max(world.width / 5, world.height / 5))) def place_ocean(x, y, i): world.layers['elevation'].data[y, x] = \ (world.layers['elevation'].data[y, x] * i) / ocean_border for x in range(world.width): for i in range(oce...
Lower the elevation near the border of the map
def spread_stats(stats, spreader=False): spread = spread_t() if spreader else True descendants = deque(stats) while descendants: _stats = descendants.popleft() if spreader: spread.clear() yield _stats, spread else: yield _stats if spread: ...
Iterates all descendant statistics under the given root statistics. When ``spreader=True``, each iteration yields a descendant statistics and `spread()` function together. You should call `spread()` if you want to spread the yielded statistics also.
def filter_channels_by_status( channel_states: List[NettingChannelState], exclude_states: Optional[List[str]] = None, ) -> List[NettingChannelState]: if exclude_states is None: exclude_states = [] states = [] for channel_state in channel_states: if channel.get_status(channel_...
Filter the list of channels by excluding ones for which the state exists in `exclude_states`.
def add_environment_vars(config: MutableMapping[str, Any]): for e in os.environ: if re.match("BELBIO_", e): val = os.environ.get(e) if val: e.replace("BELBIO_", "") env_keys = e.lower().split("__") if len(env_keys) > 1: ...
Override config with environment variables Environment variables have to be prefixed with BELBIO_ which will be stripped before splitting on '__' and lower-casing the environment variable name that is left into keys for the config dictionary. Example: BELBIO_BEL_API__SERVERS__API_URL=http:...
def get_db_versions(self, conn): curs = conn.cursor() query = 'select version from {}'.format(self.version_table) try: curs.execute(query) return set(version for version, in curs.fetchall()) except: raise VersioningNotInstalled('Run oq engine --upgrade...
Get all the versions stored in the database as a set. :param conn: a DB API 2 connection
def _fingerprint(public_key, fingerprint_hash_type): if fingerprint_hash_type: hash_type = fingerprint_hash_type.lower() else: hash_type = 'sha256' try: hash_func = getattr(hashlib, hash_type) except AttributeError: raise CommandExecutionError( 'The fingerprin...
Return a public key fingerprint based on its base64-encoded representation The fingerprint string is formatted according to RFC 4716 (ch.4), that is, in the form "xx:xx:...:xx" If the key is invalid (incorrect base64 string), return None public_key The public key to return the fingerprint for...
def add_node_from_appliance(self, appliance_id, x=0, y=0, compute_id=None): try: template = self.controller.appliances[appliance_id].data except KeyError: msg = "Appliance {} doesn't exist".format(appliance_id) log.error(msg) raise aiohttp.web.HTTPNotFound...
Create a node from an appliance
def LoadPlugins(cls): if cls.PLUGINS_LOADED: return reg = ComponentRegistry() for _, record in reg.load_extensions('iotile.update_record'): cls.RegisterRecordType(record) cls.PLUGINS_LOADED = True
Load all registered iotile.update_record plugins.
def parse_barcode_file(fp, primer=None, header=False): tr = trie.trie() reader = csv.reader(fp) if header: next(reader) records = (record for record in reader if record) for record in records: specimen, barcode = record[:2] if primer is not None: pr = primer ...
Load label, barcode, primer records from a CSV file. Returns a map from barcode -> label Any additional columns are ignored
def pois_from_address(address, distance, amenities=None): point = geocode(query=address) return pois_from_point(point=point, amenities=amenities, distance=distance)
Get OSM points of Interests within some distance north, south, east, and west of an address. Parameters ---------- address : string the address to geocode to a lat-long point distance : numeric distance in meters amenities : list List of amenities that will be used for f...
def memoize(func):
    """Memoize a nullary method so it returns the same result every
    time on a given instance.

    The value is cached in ``self._cache`` under the function name.
    """
    @wraps(func)
    def wrapper(self):
        if not hasattr(self, '_cache'):
            self._cache = {}
        key = func.__name__
        try:
            return self._cache[key]
        except KeyError:
            result = self._cache[key] = func(self)
            return result
    return wrapper
Memoize a method that should return the same result every time on a given instance.
def setupTable_glyf(self): if not {"glyf", "loca"}.issubset(self.tables): return self.otf["loca"] = newTable("loca") self.otf["glyf"] = glyf = newTable("glyf") glyf.glyphs = {} glyf.glyphOrder = self.glyphOrder hmtx = self.otf.get("hmtx") allGlyphs = s...
Make the glyf table.
def get_item(self): try: item_lookup_session = get_item_lookup_session(runtime=self._runtime, proxy=self._proxy) item_lookup_session.use_federated_bank_view() item = item_lookup_session.get_item(self._item_id) except errors.NotFound: if self._section is no...
Gets the ``Item``. return: (osid.assessment.Item) - the assessment item *compliance: mandatory -- This method must be implemented.*
def get_positions(self, attr=None): pos = self.parent.get_positions(self) try: if attr is not None: attr = attr.replace("quantity", "position") return pos[attr] except Exception as e: return pos
Get the positions data for the instrument :Optional: attr : string Position attribute to get (optional attributes: symbol, position, avgCost, account) :Retruns: positions : dict (positions) / float/str (attribute) positions data f...
def _print_divide(self):
    """Append one table line-divider row to self.StrTable.

    Each column contributes "+ " followed by "- " repeated for its
    width; the row ends with "+" and a newline.
    """
    pieces = []
    for width in self.AttributesLength:
        pieces.append("+ " + "- " * width)
    self.StrTable += "".join(pieces) + "+" + "\n"
Prints all those table line dividers.
def removeIndividual(self): self._openRepo() dataset = self._repo.getDatasetByName(self._args.datasetName) individual = dataset.getIndividualByName(self._args.individualName) def func(): self._updateRepo(self._repo.removeIndividual, individual) self._confirmDelete("In...
Removes an individual from this repo
def collect_summands(cls, ops, kwargs): from qnet.algebra.core.abstract_quantum_algebra import ( ScalarTimesQuantumExpression) coeff_map = OrderedDict() for op in ops: if isinstance(op, ScalarTimesQuantumExpression): coeff, term = op.coeff, op.term else: coeff...
Collect summands that occur multiple times into a single summand Also filters out zero-summands. Example: >>> A, B, C = (OperatorSymbol(s, hs=0) for s in ('A', 'B', 'C')) >>> collect_summands( ... OperatorPlus, (A, B, C, ZeroOperator, 2 * A, B, -C) , {}) ((3 * A^(0), 2 * B^...
def reset_image_attribute(self, image_id, attribute='launchPermission'):
    """Reset an attribute of an AMI to its default value.

    :param image_id: ID of the AMI whose attribute will be reset
    :param attribute: the attribute to reset
    :return: whether the operation succeeded
    """
    params = {
        'ImageId': image_id,
        'Attribute': attribute,
    }
    return self.get_status('ResetImageAttribute', params, verb='POST')
Resets an attribute of an AMI to its default value. :type image_id: string :param image_id: ID of the AMI for which an attribute will be described :type attribute: string :param attribute: The attribute to reset :rtype: bool :return: Whether the operation succeeded or ...
def get_file_client(opts, pillar=False): client = opts.get('file_client', 'remote') if pillar and client == 'local': client = 'pillar' return { 'remote': RemoteClient, 'local': FSClient, 'pillar': PillarClient, }.get(client, RemoteClient)(opts)
Read in the ``file_client`` option and return the correct type of file server
def filter(self, predicate: Callable[[FileLine], 'FileLineSet'] ) -> 'FileLineSet': filtered = [fileline for fileline in self if predicate(fileline)] return FileLineSet.from_list(filtered)
Returns a subset of the file lines within this set that satisfy a given filtering criterion.
def normalize_job_id(job_id):
    """Convert a value to a job id.

    :param job_id: value to convert (a UUID or a UUID string)
    :return: the job id
    :rtype: :py:class:`uuid.UUID`
    """
    if isinstance(job_id, uuid.UUID):
        return job_id
    return uuid.UUID(job_id)
Convert a value to a job id. :param job_id: Value to convert. :type job_id: int, str :return: The job id. :rtype: :py:class:`uuid.UUID`
def clean_previous_run(self): super(Alignak, self).clean_previous_run() self.pollers.clear() self.reactionners.clear() self.brokers.clear()
Clean variables from previous configuration :return: None
def _pb_timestamp_to_datetime(timestamp_pb):
    """Convert a Timestamp protobuf to a UTC datetime.

    The seconds/nanos fields are turned into a timedelta and added to
    the module-level _EPOCH.
    """
    delta = datetime.timedelta(
        seconds=timestamp_pb.seconds,
        microseconds=timestamp_pb.nanos / 1000.0,
    )
    return _EPOCH + delta
Convert a Timestamp protobuf to a datetime object. :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp` :param timestamp_pb: A Google returned timestamp protobuf. :rtype: :class:`datetime.datetime` :returns: A UTC datetime object converted from a protobuf timestamp.
def _drop_gracefully(self): shard_id = self.request.headers[util._MR_SHARD_ID_TASK_HEADER] mr_id = self.request.headers[util._MR_ID_TASK_HEADER] shard_state, mr_state = db.get([ model.ShardState.get_key_by_shard_id(shard_id), model.MapreduceState.get_key_by_job_id(mr_id)]) if shard_state...
Drop worker task gracefully. Set current shard_state to failed. Controller logic will take care of other shards and the entire MR.
def track_parallel(items, sub_type): out = [] for i, args in enumerate(items): item_i, item = _get_provitem_from_args(args) if item: sub_entity = "%s.%s.%s" % (item["provenance"]["entity"], sub_type, i) item["provenance"]["entity"] = sub_entity args = list(arg...
Create entity identifiers to trace the given items in sub-commands. Helps handle nesting in parallel program execution: run id => sub-section id => parallel ids
def find_external_metabolites(model):
    """Return all metabolites in the external compartment."""
    external = find_external_compartment(model)
    return [
        metabolite
        for metabolite in model.metabolites
        if metabolite.compartment == external
    ]
Return all metabolites in the external compartment.
def access_keys(opts): keys = {} publisher_acl = opts['publisher_acl'] acl_users = set(publisher_acl.keys()) if opts.get('user'): acl_users.add(opts['user']) acl_users.add(salt.utils.user.get_user()) for user in acl_users: log.info('Preparing the %s key for local communication', ...
A key needs to be placed in the filesystem with permissions 0400 so clients are required to run as root.
def FindFileContainingSymbol(self, symbol): symbol = _NormalizeFullyQualifiedName(symbol) try: return self._descriptors[symbol].file except KeyError: pass try: return self._enum_descriptors[symbol].file except KeyError: pass try: return self._FindFileContainingSymbo...
Gets the FileDescriptor for the file containing the specified symbol. Args: symbol: The name of the symbol to search for. Returns: A FileDescriptor that contains the specified symbol. Raises: KeyError: if the file cannot be found in the pool.
def _add_junction(item): type_, channels = _expand_one_key_dictionary(item) junction = UnnamedStatement(type='junction') for item in channels: type_, value = _expand_one_key_dictionary(item) channel = UnnamedStatement(type='channel') for val in value: if _is_reference(val...
Adds a junction to the _current_statement.
def _processEscapeSequences(replaceText): def _replaceFunc(escapeMatchObject): char = escapeMatchObject.group(0)[1] if char in _escapeSequences: return _escapeSequences[char] return escapeMatchObject.group(0) return _seqReplacer.sub(_replaceFunc, replaceText)
Replace symbols like \n \\, etc
def get_channel(self, name):
    """Details about an individual channel.

    :param name: The channel name
    :type name: str
    """
    quoted = urllib.parse.quote_plus(name)
    return self._api_get('/api/channels/{0}'.format(quoted))
Details about an individual channel. :param name: The channel name :type name: str
def geocode(self): submit_set = [] data_map = {} for address, o in self.gen: submit_set.append(address) data_map[address] = o if len(submit_set) >= self.submit_size: results = self._send(submit_set) submit_set = [] ...
A Generator that reads from the address generators and returns geocode results. The generator yields ( address, geocode_results, object)
def IsEquivalent(self, other): if self.name and other.name: return self.name == other.name if self.name: self_family, self_version_tuple = self._FAMILY_AND_VERSION_PER_NAME.get( self.name, self._DEFAULT_FAMILY_AND_VERSION) return ( self_family == other.family and ...
Determines if 2 operating system artifacts are equivalent. This function compares the operating systems based in order of: * name derived from product * family and version * family Args: other (OperatingSystemArtifact): operating system artifact attribute container to compare with....
def graph_to_gluon(self, graph, ctx): sym, arg_params, aux_params = self.from_onnx(graph) metadata = self.get_graph_metadata(graph) data_names = [input_tensor[0] for input_tensor in metadata['input_tensor_data']] data_inputs = [symbol.var(data_name) for data_name in data_names] f...
Construct SymbolBlock from onnx graph. Parameters ---------- graph : onnx protobuf object The loaded onnx graph ctx : Context or list of Context Loads the model into one or many context(s). Returns ------- sym_block :gluon.nn.SymbolBlock ...
def filepaths(self) -> List[str]:
    """Absolute path names of the files contained in the current
    working directory (self.currentpath joined with each filename)."""
    base = self.currentpath
    paths = []
    for filename in self.filenames:
        paths.append(os.path.join(base, filename))
    return paths
Absolute path names of the files contained in the current working directory. Files names starting with underscores are ignored: >>> from hydpy.core.filetools import FileManager >>> filemanager = FileManager() >>> filemanager.BASEDIR = 'basename' >>> filemanager.projectd...
def echo_utc(string): from datetime import datetime click.echo('{} | {}'.format(datetime.utcnow().isoformat(), string))
Echo the string to standard out, prefixed with the current date and time in UTC format. :param string: string to echo
def interested_in(self):
    """A list of strings describing the genders the user is interested in."""
    # list() copies the cached sequence directly instead of a manual
    # append loop; behavior is identical for any iterable.
    return list(self.cache['interested_in'])
A list of strings describing the genders the user is interested in.
def get_depts(self, dept_name=None): depts = self.json_response.get("department", None) params = self.kwargs.get("params", None) fetch_child = params.get("fetch_child", True) if params else True if dept_name is not None: depts = [dept for dept in depts if dept["name"] ==...
Method to get department by name.
def stop_recording_skipped(cls):
    """Stop collecting recorded OptionErrors and return a copy of them.

    :raises Exception: if recording was never started
    """
    if cls._errors_recorded is None:
        raise Exception('Cannot stop recording before it is started')
    recorded = list(cls._errors_recorded)
    cls._errors_recorded = None
    return recorded
Stop collecting OptionErrors recorded with the record_skipped_option method and return them
def make_epub_base(location): log.info('Making EPUB base files in {0}'.format(location)) with open(os.path.join(location, 'mimetype'), 'w') as out: out.write('application/epub+zip') os.mkdir(os.path.join(location, 'META-INF')) os.mkdir(os.path.join(location, 'EPUB')) os.mkdir(os.path.join(lo...
Creates the base structure for an EPUB file in a specified location. This function creates constant components for the structure of the EPUB in a specified directory location. Parameters ---------- location : str A path string to a local directory in which the EPUB is to be built
def get_iter(self, times, seconds, chunk_size=2000): def entry_generator(): with ConstantRateLimit(times, seconds, sleep_func=self._steam.sleep) as r: for entries in chunks(self, chunk_size): if not entries: return for e...
Make a iterator over the entries See :class:`steam.util.throttle.ConstantRateLimit` for ``times`` and ``seconds`` parameters. :param chunk_size: number of entries per request :type chunk_size: :class:`int` :returns: generator object :rtype: :class:`generator` The itera...
def render_document(template_name, data_name, output_name): env = Environment(loader=PackageLoader('aide_document')) with open(output_name, 'w') as output_file: output = env.get_template(template_name).render(yaml.load(open(data_name))) output_file.write(output)
Combines a MarkDown template file from the aide_document package with a local associated YAML data file, then outputs the rendered combination to a local MarkDown output file. Parameters ========== template_name : String Exact name of the MarkDown template file from the aide_document/templates fold...
def at_depth(self, level): return Zconfig(lib.zconfig_at_depth(self._as_parameter_, level), False)
Locate the last config item at a specified depth
def add_entry(self, net_type, cn, addresses): self.entries.append({ 'cn': cn, 'addresses': addresses})
Add a request to the batch :param net_type: str network space name the request is for :param cn: str Canonical Name for certificate :param addresses: [] List of addresses to be used as SANs
def set_file_filters(self, file_filters): file_filters = util.return_list(file_filters) self.file_filters = file_filters
Sets internal file filters to `file_filters` by tossing old state. `file_filters` can be single object or iterable.
def search(ctx, tags, prefix=None): _generate_api(ctx) for i, match in enumerate(ctx.obj.api.search(*tags, prefix=prefix)): click.echo(match, nl=False) print('')
List all archives matching tag search criteria
def update(self, argv): if len(argv) == 0: error("Command requires an index name", 2) name = argv[0] if name not in self.service.indexes: error("Index '%s' does not exist" % name, 2) index = self.service.indexes[name] fields = self.service.indexes.itemmet...
Update an index according to the given argument vector.
def set_version(version): global UNIVERSION global UNIVERSION_INFO if version is None: version = unicodedata.unidata_version UNIVERSION = version UNIVERSION_INFO = tuple([int(x) for x in UNIVERSION.split('.')])
Set version.
def on_key_down(self, event): keycode = event.GetKeyCode() meta_down = event.MetaDown() or event.GetCmdDown() if keycode == 86 and meta_down: self.do_fit(event)
If user does command v, re-size window in case pasting has changed the content size.
def _gcs_list_keys(bucket, pattern): data = [{'Name': obj.metadata.name, 'Type': obj.metadata.content_type, 'Size': obj.metadata.size, 'Updated': obj.metadata.updated_on} for obj in _gcs_get_keys(bucket, pattern)] return google.datalab.utils.commands.render_dictionary(data...
List all Google Cloud Storage keys in a specified bucket that match a pattern.
def _metric_value(value_str, metric_type): if metric_type in (int, float): try: return metric_type(value_str) except ValueError: raise ValueError("Invalid {} metric value: {!r}". format(metric_type.__class__.__name__, value_str)) elif metric_t...
Return a Python-typed metric value from a metric value string.
def get_current(): global current if exists( SETTINGSFILE ): f = open( SETTINGSFILE ).read() current = re.findall('config[^\s]+.+', f)[1].split('/')[-1] return current else: return "** Not Set **"
return current Xresources color theme
def status(queue, munin, munin_config): if munin_config: return status_print_config(queue) queues = get_queues(queue) for queue in queues: status_print_queue(queue, munin=munin) if not munin: print('-' * 40)
List queued tasks aggregated by name
def DeactivateCard(self, card): if hasattr(card, 'connection'): card.connection.disconnect() if None != self.parent.apdutracerpanel: card.connection.deleteObserver(self.parent.apdutracerpanel) delattr(card, 'connection') self.dialogpanel.OnDeactivateCa...
Deactivate a card.
def set_motion_detect(self, enable): if enable: return api.request_motion_detection_enable(self.sync.blink, self.network_id, self.camera_id) return api.request_motion_detection_disab...
Set motion detection.
def lint(filename, lines, config): _, ext = os.path.splitext(filename) if ext in config: output = collections.defaultdict(list) for linter in config[ext]: linter_output = linter(filename, lines) for category, values in linter_output[filename].items(): outp...
Lints a file. Args: filename: string: filename to lint. lines: list[int]|None: list of lines that we want to capture. If None, then all lines will be captured. config: dict[string: linter]: mapping from extension to a linter function. Returns: dict: if there were er...
def socket(self): if not hasattr(self, '_socket'): self._socket = self.context.socket(zmq.REQ) if hasattr(zmq, 'RECONNECT_IVL_MAX'): self._socket.setsockopt( zmq.RECONNECT_IVL_MAX, 5000 ) self._set_tcp_keepalive() ...
Lazily create the socket.