code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def set_peer_link(self, value=None, default=False, disable=False):
    """Configure the mlag peer-link value.

    Args:
        value (str): The value to configure the peer-link.
        default (bool): Configure the peer-link using the default keyword.
        disable (bool): Negate the peer-link using the no keyword.

    Returns:
        bool: The result of the underlying mlag configure call.
    """
    cmd = 'peer-link'
    return self._configure_mlag(cmd, value, default, disable)
Configures the mlag peer-link value Args: value (str): The value to configure the peer-link default (bool): Configures the peer-link using the default keyword disable (bool): Negates the peer-link using the no keyword Returns: bool: Retur...
def _css_helper(self): entries = [entry for entry in self._plugin_manager.call_hook("css") if entry is not None] entries += self._get_ctx()["css"] entries = ["<link href='" + entry + "' rel='stylesheet'>" for entry in entries] return "\n".join(entries)
Add CSS links for the current page and for the plugins
def _get_stats_from_socket(self, name): try: json_blob = subprocess.check_output( [self.config['ceph_binary'], '--admin-daemon', name, 'perf', 'dump', ]) except subprocess.CalledProcessError ...
Return the parsed JSON data returned when ceph is told to dump the stats from the named socket. In the event of an error, the exception is logged, and an empty result set is returned.
def inventory(self, all=False, ssid=None): if all or self.api_key is None: if ssid is not None: return self._ssid_inventory(self.full_inventory, ssid) else: return self.full_inventory else: if ssid is not None: return se...
Returns a node inventory. If an API key is specified, only the nodes provisioned by this key will be returned. :return: { inventory }
def __store_cash_balances_per_currency(self, cash_balances): cash = self.model.get_cash_asset_class() for cur_symbol in cash_balances: item = CashBalance(cur_symbol) item.parent = cash quantity = cash_balances[cur_symbol]["total"] item.value = Decimal(quan...
Store balance per currency as Stock records under Cash class
def _create_response_record(self, response): record = dict() record['id'] = response['id'] record['type'] = response['type'] record['name'] = self._full_name(response['name']) if 'content' in response: record['content'] = response['content'] or "" if 'ttl' in ...
Creates record for lexicon API calls
def calcDrawingProbs(self): wmg = self.wmg phi = self.phi weights = [] for i in range(0, len(wmg.keys())): weights.append(phi**i) totalWeight = sum(weights) for i in range(0, len(wmg.keys())): weights[i] = weights[i]/totalWeight return weig...
Returns a vector that contains the probability of an item being drawn from each position. We say that every item in an order vector is drawn with weight phi^i where i is its position.
def generate(env): SCons.Tool.createSharedLibBuilder(env) SCons.Tool.createProgBuilder(env) env['LINK'] = '$CC' env['LINKFLAGS'] = SCons.Util.CLVar('') env['LINKCOM'] = '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS' env['LIBDIRPREFIX']='' env['LIBDIRSUFFIX']='' env['LIBLINKP...
Add Builders and construction variables for Borland ilink to an Environment.
def timedelta_seconds(timedelta):
    """Return the total duration of *timedelta* in seconds (float)."""
    if hasattr(timedelta, "total_seconds"):
        return timedelta.total_seconds()
    # Fallback for very old Python versions lacking total_seconds().
    return (timedelta.days * 24 * 3600
            + timedelta.seconds
            + timedelta.microseconds / 1000000.)
Returns the total timedelta duration in seconds.
def group(self): split_count = self._url.lower().find("/content/") len_count = len('/content/') gURL = self._url[:self._url.lower().find("/content/")] + \ "/community/" + self._url[split_count+ len_count:] return CommunityGroup(url=gURL, security...
returns the community.Group class for the current group
def get_thermostat_state_by_id(self, id_):
    """Retrieve a thermostat state object by its id.

    :param id_: The id of the thermostat state.
    :return: The matching thermostat state object, or None if not found.
    """
    for state in self.thermostat_states:
        if state.id == id_:
            return state
    return None
Retrieves a thermostat state object by its id :param id_: The id of the thermostat state :return: The thermostat state object
def get_cluster_port_names(self, cluster_name):
    """Return a flat list of the port names under the XIV cluster."""
    hosts = self.get_hosts_by_clusters()[cluster_name]
    port_names = []
    for host in hosts:
        port_names += self.get_hosts_by_name(host)
    return port_names
Return a list of the port names under an XIV Cluster.
def find_link(self, href_pattern, make_absolute=True): if make_absolute: self.tree.make_links_absolute(self.doc.url) if isinstance(href_pattern, six.text_type): raise GrabMisuseError('Method `find_link` accepts only ' 'byte-string argument') ...
Find link in response body which href value matches ``href_pattern``. Returns found url or None.
def update(self, **kwargs):
    """Django ``update`` override that emits post_bulk_operation on completion."""
    result = super(ManagerUtilsQuerySet, self).update(**kwargs)
    post_bulk_operation.send(sender=self.model, model=self.model)
    return result
Overrides Django's update method to emit a post_bulk_operation signal when it completes.
def unsubscribe(self, topic):
    """Send an UNSUBSCRIBE for *topic*.

    Returns ERR_NO_CONN when the socket is not connected.
    """
    if self.sock == NC.INVALID_SOCKET:
        return NC.ERR_NO_CONN
    self.logger.info("UNSUBSCRIBE: %s", topic)
    return self.send_unsubscribe(False, [utf8encode(topic)])
Unsubscribe from a topic.
def write(notebook, file_or_stream, fmt, version=nbformat.NO_CONVERT, **kwargs):
    """Write a notebook to a file or stream, ensuring a trailing newline."""
    # The u'' + ... coercion guarantees a unicode string on Python 2.
    text = u'' + writes(notebook, fmt, version, **kwargs)
    file_or_stream.write(text)
    if not text.endswith(u'\n'):
        file_or_stream.write(u'\n')
Write a notebook to a file
def _parse_signed_int_components(buf): sign_bit = 0 value = 0 first = True while True: ch = buf.read(1) if ch == b'': break octet = ord(ch) if first: if octet & _SIGNED_INT_SIGN_MASK: sign_bit = 1 value = octet & _SIGNED...
Parses the remainder of a file-like object as a signed magnitude value. Returns: Returns a pair of the sign bit and the unsigned magnitude.
def rprof(self):
    """Radial profiles data of the time step.

    Returns None if no radial profiles data is available for this step.
    """
    available_steps = self.sdat.rprof.index.levels[0]
    if self.istep not in available_steps:
        return None
    return self.sdat.rprof.loc[self.istep]
Radial profiles data of the time step. Set to None if no radial profiles data is available for this time step.
def dict_stack(dict_list, key_prefix=''):
    r"""Stack values from several dicts into a single dict of lists.

    Args:
        dict_list (list): list of dicts with similar keys.
        key_prefix (str): optional prefix applied to every output key.

    Returns:
        dict: maps each (prefixed) key to the list of values seen for it,
        in input order.
    """
    # Use dict.items() directly instead of six.iteritems(); this drops the
    # third-party six dependency with identical behavior on Python 2 and 3.
    dict_stacked_ = defaultdict(list)
    for dict_ in dict_list:
        for key, val in dict_.items():
            dict_stacked_[key_prefix + key].append(val)
    return dict(dict_stacked_)
r""" stacks values from two dicts into a new dict where the values are list of the input values. the keys are the same. DEPRICATE in favor of dict_stack2 Args: dict_list (list): list of dicts with similar keys Returns: dict dict_stacked CommandLine: python -m utool.ut...
def within_polygon(self, polygon, distance=None, **kwargs): if distance: zone_polygon = polygon.dilate(distance) else: zone_polygon = polygon upper_depth, lower_depth = _check_depth_limits(kwargs) valid_depth = np.logical_and( self.catalogue.data['dept...
Select earthquakes within polygon :param polygon: Centre point as instance of nhlib.geo.polygon.Polygon class :param float distance: Buffer distance (km) (can take negative values) :returns: Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`...
def annotate(self, sent):
    """Annotate a sequence of words with entity tags.

    Args:
        sent: sequence of strings/words.

    Returns:
        zip of (word, predicted_tag) pairs.
    """
    words = []
    preds = []
    for word, fv in self.sent2examples(sent):
        probs = self.predictor(fv)
        # argsort ascending: the last index is the highest-probability tag.
        best = probs.argsort()[-1]
        words.append(word)
        preds.append(self.ID_TAG[best])
    return zip(words, preds)
Annotate a sequence of words with entity tags. Args: sent: sequence of strings/words.
def generate(variables, template):
    """Yield the template rendered once per expanded config set.

    Each config dict produced by ``expand(variables)`` is augmented with the
    module-level ``rc`` helper before rendering; undefined template variables
    raise immediately (StrictUndefined).
    """
    env = jinja2.Environment(undefined=jinja2.StrictUndefined)
    for config in expand(variables):
        config['rc'] = rc
        yield env.from_string(template).render(config)
Yields a resolved "template" for each config set and dumps on output This function will extrapolate the ``template`` file using the contents of ``variables`` and will output individual (extrapolated, expanded) files in the output directory ``output``. Parameters: variables (str): A string stream contain...
def transform(self, data=None): if data is None: return self.xform_data else: formatted = format_data( data, semantic=self.semantic, vectorizer=self.vectorizer, corpus=self.corpus, ppca=True) ...
Return transformed data, or transform new data using the same model parameters Parameters ---------- data : numpy array, pandas dataframe or list of arrays/dfs The data to transform. If no data is passed, the xform_data from the DataGeometry object will be retur...
def _function_add_node(self, cfg_node, function_addr): snippet = self._to_snippet(cfg_node=cfg_node) self.kb.functions._add_node(function_addr, snippet)
Adds node to function manager, converting address to CodeNode if possible :param CFGNode cfg_node: A CFGNode instance. :param int function_addr: Address of the current function. :return: None
def _py_ex_argtype(executable): result = [] for p in executable.ordered_parameters: atypes = p.argtypes if atypes is not None: result.extend(p.argtypes) else: print(("No argtypes for: {}".format(p.definition()))) if type(executable).__name__ == "Function": ...
Returns the code to create the argtype to assign to the methods argtypes attribute.
def check_config(self, config, name=''): config = config.get(self.config_name, {}) extras = set(config.keys()).difference(self.default_config) if 'config' not in self.services and extras: raise ConfigurationError( 'Unsupported config options for "%s": %s' ...
Check that the configuration for this object is valid. This is a more restrictive check than for most :mod:`yakonfig` objects. It will raise :exc:`yakonfig.ConfigurationError` if `config` contains any keys that are not in the underlying callable's parameter list (that is, extra unused ...
def skipline(self):
    """Skip the next record and return its (position, size).

    Raises:
        IOError: if the record's prefix and suffix markers do not match.
    """
    position = self.tell()
    prefix = self._fix()
    # Jump over the record payload (seek relative to current position).
    self.seek(prefix, 1)
    if prefix != self._fix():
        raise IOError(_FIX_ERROR)
    return position, prefix
Skip the next line and returns position and size of line. Raises IOError if pre- and suffix of line do not match.
def unmatched_quotes_in_line(text):
    """Return the quote character left open in *text*, or '' if balanced.

    Escaped quotes (\\' and \\") are removed before counting; a quote type
    is "open" when it occurs an odd number of times.
    """
    stripped = text.replace("\\'", "").replace('\\"', '')
    for quote in ('"', "'"):
        if stripped.count(quote) % 2:
            return quote
    return ''
Return whether a string has open quotes. This simply counts whether the number of quote characters of either type in the string is odd. Taken from the IPython project (in IPython/core/completer.py in v0.13) Spyder team: Added some changes to deal with escaped quotes - Copyright (C) 2008-2011 IPython...
def get_processing_block_ids():
    """Return the list of Processing Block ids found in the scheduling DB."""
    ids = []
    for key in sorted(DB.keys(pattern='scheduling_block/*')):
        config = json.loads(DB.get(key))
        ids.extend(block['id'] for block in config['processing_blocks'])
    return ids
Return an array of Processing Block ids
def _handle_info(self, *args, **kwargs): if 'version' in kwargs: self.api_version = kwargs['version'] print("Initialized API with version %s" % self.api_version) return try: info_code = str(kwargs['code']) except KeyError: raise FaultyP...
Handles info messages and executes corresponding code
def fetch(self): returnResults = [] results = self._query.run() for result in results: if self._join: item = self._model.fromRawEntry(**result["left"]) joined = self._join.fromRawEntry(**result["right"]) item.protectedItems = self._join...
Fetches the query and then tries to wrap the data in the model, joining as needed, if applicable.
def system(self):
    """Create a reference to the System operations for Portal."""
    return _System(url=self._url + "/system",
                   securityHandler=self._securityHandler,
                   proxy_url=self._proxy_url,
                   proxy_port=self._proxy_port)
Creates a reference to the System operations for Portal
def get_endpoints_using_catalog_api(domain, token): headers = {"X-App-Token": token} uri = "http://api.us.socrata.com/api/catalog/v1?domains={0}&offset={1}&limit=1000" ret = [] endpoints_thus_far = set() offset = 0 while True: try: r = requests.get(uri.format(domain, offset),...
Implements a raw HTTP GET against the entire Socrata portal for the domain in question. This method uses the second of the two ways of getting this information, the catalog API. Parameters ---------- domain: str A Socrata data portal domain. "data.seattle.gov" or "data.cityofnewyork.us" for exa...
def warp_locations(locations, y_center=None, return_ellipsoid=False, verbose=False): locations = np.asarray(locations) if y_center is None: c, r = _fit_ellipsoid_full(locations) else: c, r = _fit_ellipsoid_partial(locations, y_center) elliptic_locations = _project_on_ellipsoid(c, r, loca...
Warp EEG electrode locations to spherical layout. EEG Electrodes are warped to a spherical layout in three steps: 1. An ellipsoid is least-squares-fitted to the electrode locations. 2. Electrodes are displaced to the nearest point on the ellipsoid's surface. 3. The ellipsoid is transformed ...
def all_library_calls(self):
    """Recursive version of library calls; computed lazily and cached."""
    if self._all_library_calls is None:
        explore = self._explore_functions
        self._all_library_calls = explore(lambda func: func.library_calls)
    return self._all_library_calls
recursive version of library calls
def reply_inform(cls, req_msg, *args):
    """Create an inform message in reply to a request.

    Copies the message name and message identifier from *req_msg*.

    Parameters
    ----------
    req_msg : katcp.core.Message instance
        The request message that this inform is in reply to.
    args : list of strings
        The inform message arguments.
    """
    return cls(cls.INFORM, req_msg.name, args, req_msg.mid)
Helper method for creating inform messages in reply to a request. Copies the message name and message identifier from request message. Parameters ---------- req_msg : katcp.core.Message instance The request message that this inform if in reply to args : list of stri...
def cancel_room(self, booking_id):
    """Cancel a room given a booking id.

    :param booking_id: A booking id or a comma-separated list of booking ids.
    :type booking_id: str
    :return: the decoded JSON response.
    """
    path = "/1.1/space/cancel/{}".format(booking_id)
    return self._request("POST", path).json()
Cancel a room given a booking id. :param booking_id: A booking id or a list of booking ids (separated by commas) to cancel. :type booking_id: str
def build(self): if self.colour: embed = discord.Embed( title=self.title, type='rich', description=self.description, colour=self.colour) else: embed = discord.Embed( title=self.title, ...
Builds Discord embed GUI Returns: discord.Embed: Built GUI
def printParams(paramDictionary, all=False, log=None): if log is not None: def output(msg): log.info(msg) else: def output(msg): print(msg) if not paramDictionary: output('No parameters were supplied') else: for key in sorted(paramDictionary): ...
Print nicely the parameters from the dictionary.
def check_num(self, checks, radl): prefixes = {} for f in self.features: if not isinstance(f, Feature): continue (prefix, sep, tail) = f.prop.partition(".") if not sep or prefix not in checks: continue checks0 = checks[prefi...
Check types, operators and units in features with numbers. Args: - checks(dict of dict of str:tuples): keys are property name prefixes, and the values are dict with keys are property name suffixes and values are iterable as in ``_check_feature``. - radl: passed to ``_check_...
def template(self):
    """Create a rules file in iptables-restore format."""
    mapping = {
        'filtertable': '\n'.join(self.filters),
        'rawtable': '\n'.join(self.raw),
        'mangletable': '\n'.join(self.mangle),
        'nattable': '\n'.join(self.nat),
        'date': datetime.today(),
    }
    return Template(self._IPTABLES_TEMPLATE).substitute(mapping)
Create a rules file in iptables-restore format
def bb_get_instr_max_width(basic_block):
    """Return the width of the longest instruction mnemonic in the block."""
    return max((len(instr.mnemonic) for instr in basic_block), default=0)
Get maximum instruction mnemonic width
def _get_object_key(self, p_object): matched_key = None matched_index = None if hasattr(p_object, self._searchNames[0]): return getattr(p_object, self._searchNames[0]) for x in xrange(len(self._searchNames)): key = self._searchNames[x] if hasattr(p_obj...
Get key from object
def grep_file(query, item):
    """Perform the actual grep on a given file.

    Args:
        query: regular expression searched for in each line.
        item: path of the file to scan.

    Returns:
        list of '<path>: <line>' strings, one per matching line (lines keep
        their trailing newline).
    """
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(item) as handle:
        return ['%s: %s' % (item, line)
                for line in handle if re.search(query, line)]
This function performs the actual grep on a given file.
def _make_sync_method(name): def sync_wrapper(self, *args, **kwds): method = getattr(self, name) future = method(*args, **kwds) return future.get_result() return sync_wrapper
Helper to synthesize a synchronous method from an async method name. Used by the @add_sync_methods class decorator below. Args: name: The name of the synchronous method. Returns: A method (with first argument 'self') that retrieves and calls self.<name>, passing its own arguments, expects it to ret...
def save_credentials(self, profile):
    """Persist the access/secret keys to a dotfile for later retrieval.

    Parameters
    ----------
    profile: str
        name for your profile (i.e. "dev", "prod")
    """
    creds = {"access_key": self.access_key,
             "secret_key": self.secret_key}
    dump_to_json(profile_path(S3_PROFILE_ID, profile), creds)
Saves credentials to a dotfile so you can grab them later. Parameters ---------- profile: str name for your profile (i.e. "dev", "prod")
def _create_serial_ports(serial_ports): ports = [] keys = range(-9000, -9050, -1) if serial_ports: devs = [serial['adapter'] for serial in serial_ports] log.trace('Creating serial ports %s', devs) for port, key in zip(serial_ports, keys): serial_port_device = _apply_seria...
Returns a list of vim.vm.device.VirtualDeviceSpec objects representing the serial ports to be created for a virtual machine serial_ports Serial port properties
def warning(self, msg, indent=0, **kwargs):
    """Invoke ``self.logger.warning`` with *msg* indented by *indent*."""
    indented = self._indent(msg, indent)
    return self.logger.warning(indented, **kwargs)
invoke ``self.logger.warning``
def get_top_tags(self, limit=None, cacheable=True): doc = _Request(self, "tag.getTopTags").execute(cacheable) seq = [] for node in doc.getElementsByTagName("tag"): if limit and len(seq) >= limit: break tag = Tag(_extract(node, "name"), self) we...
Returns the most used tags as a sequence of TopItem objects.
def set_extent_location(self, new_location, main_vd_extent, reserve_vd_extent): if not self._initialized: raise pycdlibexception.PyCdlibInternalError('UDF Anchor Volume Structure not yet initialized') self.new_extent_loc = new_location self.desc_tag.tag_location = new_location ...
A method to set a new location for this Anchor Volume Structure. Parameters: new_location - The new extent that this Anchor Volume Structure should be located at. main_vd_extent - The extent containing the main Volume Descriptors. reserve_vd_extent - The extent containing the reserve...
def destination(self):
    """Destination path for the output file.

    Recomputed on every call because the user may change ``outdir`` or
    ``filename`` dynamically.
    """
    outdir = os.path.abspath(self.outdir)
    return os.path.join(outdir, self.filename)
Get the destination path. This is the property should be calculated every time it is used because a user could change the outdir and filename dynamically.
def write(self, obj, **kwargs): super().write(obj, **kwargs) for name, ss in obj.items(): key = 'sparse_series_{name}'.format(name=name) if key not in self.group._v_children: node = self._handle.create_group(self.group, key) else: node ...
write it as a collection of individual sparse series
def add2python(self, module=None, up=0, down=None, front=False, must_exist=True): if module: try: return import_module(module) except ImportError: pass dir = self.dir().ancestor(up) if down: dir = dir.join(*do...
Add a directory to the python path. :parameter module: Optional module name to try to import once we have found the directory :parameter up: number of level to go up the directory three from :attr:`local_path`. :parameter down: Optional tuple of directory names to travel...
def _linux_stp(br, state):
    """Internal: set the STP state of bridge *br* via brctl."""
    cmd = '{0} stp {1} {2}'.format(_tool_path('brctl'), br, state)
    return __salt__['cmd.run'](cmd, python_shell=False)
Internal, sets STP state
def fail_request(self, orig_request, message, start_response):
    """Write an immediate failure response, no redirect.

    Calls start_response and returns the error body.

    Args:
        orig_request: An ApiRequest, the original request from the user.
        message: error message string displayed to the user.
        start_response: WSGI start_response callable.
    """
    cors_handler = self._create_cors_handler(orig_request)
    return util.send_wsgi_error_response(
        message, start_response, cors_handler=cors_handler)
Write an immediate failure response to outfile, no redirect. This calls start_response and returns the error body. Args: orig_request: An ApiRequest, the original request from the user. message: A string containing the error message to be displayed to user. start_response: A function with se...
def adj_nodes_aws(aws_nodes): for node in aws_nodes: node.cloud = "aws" node.cloud_disp = "AWS" node.private_ips = ip_to_str(node.private_ips) node.public_ips = ip_to_str(node.public_ips) node.zone = node.extra['availability'] node.size = node.extra['instance_type'] ...
Adjust details specific to AWS.
def timestamp(num_params, p_levels, k_choices, N): string = "_v%s_l%s_gs%s_k%s_N%s_%s.txt" % (num_params, p_levels, k_choices, N, ...
Returns a uniform timestamp with parameter values for file identification
def mod(self):
    """Cached compiled binary of the Generic_Code class.

    To clear the cache invoke :meth:`clear_mod_cache`.
    """
    cached = self._mod
    if cached is None:
        cached = self.compile_and_import_binary()
        self._mod = cached
    return cached
Cached compiled binary of the Generic_Code class. To clear cache invoke :meth:`clear_mod_cache`.
def get_users(self, fetch=True):
    """Return this Application's users object, populating it when *fetch* is True."""
    return Users(self.resource.users, self.client, populate=fetch)
Return this Application's users object, populating it if fetch is True.
def setCmd(self, cmd):
    """Validate and store *cmd*; raise FrameError if it is not a valid command."""
    cmd = cmd.upper()
    if cmd in VALID_COMMANDS:
        self._cmd = cmd
    else:
        raise FrameError(
            "The cmd '%s' is not valid! It must be one of '%s' (STOMP v%s)." % (
                cmd, VALID_COMMANDS, STOMP_VERSION)
        )
Check the cmd is valid, FrameError will be raised if its not.
def expression_list_to_conjunction(expression_list): if not isinstance(expression_list, list): raise AssertionError(u'Expected `list`, Received {}.'.format(expression_list)) if len(expression_list) == 0: return TrueLiteral if not isinstance(expression_list[0], Expression): raise Asse...
Convert a list of expressions to an Expression that is the conjunction of all of them.
def modify(self, vals):
    """Update the latent representation with *vals* and refresh the output view."""
    # View as a plain ndarray and copy so later mutation of *vals* cannot
    # affect the stored latent point.
    self.vals = vals.view(np.ndarray).copy()
    # Predict the output for the new latent point (first row of the result).
    y = self.model.predict(self.vals)[0]
    self.data_visualize.modify(y)
    # Move the marker to the two latent dimensions currently visualized.
    self.latent_handle.set_data(self.vals[0,self.latent_index[0]], self.vals[0,self.latent_index[1]])
    # Redraw the canvas so the changes appear immediately.
    self.axes.figure.canvas.draw()
When latent values are modified, update the latent representation and also update the output visualization.
def to_feature(self, name=None, feature_type='misc_feature'): if name is None: if not self.name: raise ValueError('name attribute missing from DNA instance' ' and arguments') name = self.name return Feature(name, start=0, stop=len(...
Create a feature from the current object. :param name: Name for the new feature. Must be specified if the DNA instance has no .name attribute. :type name: str :param feature_type: The type of feature (genbank standard). :type feature_type: str
def account(transition, direction=Direction.BIDIRECTIONAL):
    """Return the set of all causal links for a |Transition|.

    Args:
        transition (Transition): The transition of interest.

    Keyword Args:
        direction (Direction): By default the account contains both actual
            causes and actual effects.
    """
    if direction == Direction.BIDIRECTIONAL:
        causes = directed_account(transition, Direction.CAUSE)
        effects = directed_account(transition, Direction.EFFECT)
        return Account(causes + effects)
    return directed_account(transition, direction)
Return the set of all causal links for a |Transition|. Args: transition (Transition): The transition of interest. Keyword Args: direction (Direction): By default the account contains actual causes and actual effects.
def from_file(cls, filename="CTRL", **kwargs):
    """Create a CTRL file object from an existing file.

    Args:
        filename: The name of the CTRL file. Defaults to 'CTRL'.

    Returns:
        An LMTOCtrl object.
    """
    with zopen(filename, "rt") as file_obj:
        data = file_obj.read()
    return LMTOCtrl.from_string(data, **kwargs)
Creates a CTRL file object from an existing file. Args: filename: The name of the CTRL file. Defaults to 'CTRL'. Returns: An LMTOCtrl object.
async def detach_tip(data): global session if not feature_flags.use_protocol_api_v2(): pipette = session.pipettes[session.current_mount] if not pipette.tip_attached: log.warning('detach tip called with no tip') pipette._remove_tip(session.tip_length) else: session...
Detach the tip from the current pipette :param data: Information obtained from a POST request. The content type is application/json. The correct packet form should be as follows: { 'token': UUID token from current session start 'command': 'detach tip' }
def attention_lm_decoder(decoder_input, decoder_self_attention_bias, hparams, name="decoder"): x = decoder_input with tf.variable_scope(name): for layer in range(hparams.num_hidden_layers): with tf.variable_scope("layer_%d" % layer...
A stack of attention_lm layers. Args: decoder_input: a Tensor decoder_self_attention_bias: bias Tensor for self-attention (see common_attention.attention_bias()) hparams: hyperparameters for model name: a string Returns: y: a Tensors
def partition_ordered(sequence, key=None):
    """Partition an already-ordered *sequence* by *key*.

    Yields
    ------
    (partition key, list of items) tuples, one per run of equal keys.
    """
    for part_key, grouped in groupby(sequence, key=key):
        yield part_key, list(grouped)
Partition ordered sequence by key. Sequence is expected to already be ordered. Parameters ---------- sequence: iterable data. key: partition key function Yields ------- iterable tuple(s) of partition key, data list pairs. Examples -------- 1. By object attributes. Pa...
def Rz_matrix(theta):
    """Rotation matrix around the Z axis (angle *theta* in radians)."""
    c, s = np.cos(theta), np.sin(theta)
    return np.array([
        [c, -s, 0],
        [s, c, 0],
        [0, 0, 1],
    ])
Rotation matrix around the Z axis
def _any(objs, query):
    """Whether any of a collection of objects satisfies a given query predicate.

    Args:
        objs (seq[Model or Document]):
        query (callable):

    Returns:
        True, if ``query(obj)`` is True for some object in ``objs``, else False.
    """
    for obj in objs:
        if isinstance(obj, Document):
            # Documents are searched recursively through their roots.
            if _any(obj.roots, query):
                return True
        else:
            # Non-documents: test every object reachable via references().
            if any(query(ref) for ref in obj.references()):
                return True
    else:
        # NOTE(review): for-else — reached only when no object matched.
        return False
Whether any of a collection of objects satisfies a given query predicate Args: objs (seq[Model or Document]) : query (callable) Returns: True, if ``query(obj)`` is True for some object in ``objs``, else False
def decrease_user_property(self, user_id, property_name, value=0, headers=None, endpoint_url=None): endpoint_url = endpoint_url or self._endpoint_url url = endpoint_url + "/users/" + user_id + "/properties/" + property_name + "/decrease/" + value.__str__() headers = headers or self._default_head...
Decrease a user's property by a value. :param str user_id: identified user's ID :param str property_name: user property name to decrease :param number value: amount by which to decrease the property :param dict headers: custom request headers (if not set, default values are used) ...
def frag2text(endpoint, stype, selector, clean=False, raw=False, verbose=False):
    """Return Markdown text of the selected fragment.

    Args:
        endpoint: URL, file, or HTML string
        stype: { 'css' | 'xpath' }
        selector: CSS selector or XPath expression

    Options:
        clean: cleans fragment (lxml.html.clean defaults)
        raw: returns raw HTML instead of Markdown
        verbose: print diagnostic output

    Returns:
        Markdown text on success, or the caught exception on failure.
    """
    try:
        return main(endpoint, stype, selector, clean, raw, verbose)
    except Exception as err:
        # Was `except StandardError`, which only exists on Python 2 and
        # raises NameError on Python 3; Exception is the py2/py3 equivalent.
        return err
returns Markdown text of selected fragment. Args: endpoint: URL, file, or HTML string stype: { 'css' | 'xpath' } selector: CSS selector or XPath expression Returns: Markdown text Options: clean: cleans fragment (lxml.html.clean defaults) raw: returns raw HTML...
def _resolve_subkeys(key, separator="."): parts = key.split(separator, 1) if len(parts) > 1: return parts else: return parts[0], None
Resolve a potentially nested key. If the key contains the ``separator`` (e.g. ``.``) then the key will be split on the first instance of the subkey:: >>> _resolve_subkeys('a.b.c') ('a', 'b.c') >>> _resolve_subkeys('d|e|f', separator='|') ('d', 'e|f') If not, the subkey will be...
def generalized_negative_binomial(mu=1, alpha=1, shape=_Null, dtype=_Null, **kwargs):
    """Draw random samples from a generalized negative binomial distribution.

    Samples are parametrized by *mu* (mean) and *alpha* (dispersion), where
    *alpha* is 1/k and k is the failure limit of unsuccessful experiments.
    """
    return _random_helper(_internal._random_generalized_negative_binomial,
                          _internal._sample_generalized_negative_binomial,
                          [mu, alpha], shape, dtype, kwargs)
Draw random samples from a generalized negative binomial distribution. Samples are distributed according to a generalized negative binomial distribution parametrized by *mu* (mean) and *alpha* (dispersion). *alpha* is defined as *1/k* where *k* is the failure limit of the number of unsuccessful experim...
def warning_handler(self, handler):
    """Setter for the warning handler function.

    No-op while the DLL is open, so call this prior to ``open()``.

    Args:
        self (JLink): the ``JLink`` instance
        handler (function): function to call on warning messages

    Returns:
        ``None``
    """
    if self.opened():
        return
    wrapped = enums.JLinkFunctions.LOG_PROTOTYPE(handler or util.noop)
    self._warning_handler = wrapped
    self._dll.JLINKARM_SetWarnOutHandler(self._warning_handler)
Setter for the warning handler function. If the DLL is open, this function is a no-op, so it should be called prior to calling ``open()``. Args: self (JLink): the ``JLink`` instance handler (function): function to call on warning messages Returns: ``None`...
def nansum(values, axis=None, skipna=True, min_count=0, mask=None): values, mask, dtype, dtype_max, _ = _get_values(values, skipna, 0, mask=mask) dtype_sum = dtype_max if is_float_dtype(dtype): dtype_sum = dtype elif is_timedelta64_dtype(dtype)...
Sum the elements along an axis ignoring NaNs Parameters ---------- values : ndarray[dtype] axis: int, optional skipna : bool, default True min_count: int, default 0 mask : ndarray[bool], optional nan-mask if known Returns ------- result : dtype Examples -------...
def merge_figures(figures):
    """Generate a single Figure from a list of figures.

    Parameters:
    -----------
    figures : list(Figures)
        List of figures to be merged.
    """
    merged_data = []
    for fig in figures:
        merged_data.extend(fig['data'])
    return {'data': merged_data, 'layout': get_base_layout(figures)}
Generates a single Figure from a list of figures Parameters: ----------- figures : list(Figures) List of figures to be merged.
def clear_masters(self):
    """Drop master packages that already exist in dependencies or are duplicated."""
    deduped = Utils().remove_dbs(self.packages)
    self.packages = [pkg for pkg in deduped
                     if pkg not in self.dependencies]
Clear master packages if already exist in dependencies or if added to install two or more times
def is_empty(self):
    """Return whether the block of user data is empty."""
    return not any((self.breakpoint, self.code_analysis,
                    self.todo, self.bookmarks))
Return whether the block of user data is empty.
def initialize_remaining_constants(self, value=0): remaining = [] for node, _inputs, _outputs in self.iterate_bfs(): streams = node.input_streams() + [node.stream] for stream in streams: if stream.stream_type is not DataStream.ConstantType: con...
Ensure that all constant streams referenced in the sensor graph have a value. Constant streams that are automatically created by the compiler are initialized as part of the compilation process but it's possible that the user references other constant streams but never assigns them an explicit i...
def get_default():
    """Return the attributes associated with the default configuration.

    Raises:
        JutException: if no configurations have been added yet.
    """
    if not is_configured():
        raise JutException('No configurations available, please run `jut config add`')
    for section in _CONFIG.sections():
        if _CONFIG.has_option(section, 'default'):
            return dict(_CONFIG.items(section))
return the attributes associated with the default configuration
def forward(self, channel, date_s, fragment): time_s, sep, nick = fragment.rpartition('.') time = datetime.datetime.strptime(time_s, '%H.%M.%S') date = datetime.datetime.strptime(date_s, '%Y-%m-%d') dt = datetime.datetime.combine(date, time.time()) loc_dt = self.timezone.localize(dt) utc_dt = loc_dt.astimez...
Given an HREF in the legacy timezone, redirect to an href for UTC.
def FilterMessages( self, Channel, FromID, ToID, Mode): try: res = self.__m_dllBasic.CAN_FilterMessages(Channel,FromID,ToID,Mode) return TPCANStatus(res) except: logger.error("Exception on PCANBasic.FilterMessages") ...
Configures the reception filter Remarks: The message filter will be expanded with every call to this function. If it is desired to reset the filter, please use the 'SetValue' function. Parameters: Channel : A TPCANHandle representing a PCAN Channel FromID : A c...
def _create_token(token_type, value, lineno, lexpos):
    """Helper for creating ply.lex.LexToken objects.

    LexToken defines no constructor, so the attributes must be assigned
    individually after instantiation.
    """
    token = lex.LexToken()
    token.type, token.value = token_type, value
    token.lineno, token.lexpos = lineno, lexpos
    return token
Helper for creating ply.lex.LexToken objects. Unfortunately, LexToken does not have a constructor defined to make settings these values easy.
def _accumulate_sufficient_statistics(self, stats, X, framelogprob, posteriors, fwdlattice, bwdlattice): stats['nobs'] += 1 if 's' in self.params: stats['start'] += posteriors[0] if 't' in self.params: n_samples, n_components = fr...
Updates sufficient statistics from a given sample. Parameters ---------- stats : dict Sufficient statistics as returned by :meth:`~base._BaseHMM._initialize_sufficient_statistics`. X : array, shape (n_samples, n_features) Sample sequence. fr...
def find_steam_location():
    """Find the location of the current Steam installation on Windows.

    Returns None for non-Windows machines (where ``registry`` is None) or
    when the registry lookup is unavailable.
    """
    if registry is None:
        return None
    # Raw string: "\V" and "\S" are invalid escape sequences and trigger a
    # SyntaxWarning (an error in future Python versions); the bytes are
    # identical to the previous literal.
    key = registry.CreateKey(registry.HKEY_CURRENT_USER, r"Software\Valve\Steam")
    return registry.QueryValueEx(key, "SteamPath")[0]
Finds the location of the current Steam installation on Windows machines. Returns None for any non-Windows machines, or for Windows machines where Steam is not installed.
def deregister(): for type_, cls in get_pairs(): if type(units.registry.get(type_)) is cls: units.registry.pop(type_) for unit, formatter in _mpl_units.items(): if type(formatter) not in {DatetimeConverter, PeriodConverter, TimeConverter}: ...
Remove pandas' formatters and converters Removes the custom converters added by :func:`register`. This attempts to set the state of the registry back to the state before pandas registered its own units. Converters for pandas' own types like Timestamp and Period are removed completely. Converters for ty...
def print_rendered_results(results_dict): class _HubComponentEncoder(json.JSONEncoder): def default(self, o): if isinstance(o, base.HubComponent): return repr(o) return json.JSONEncoder.default(self, o) formatted = json.dumps(results_dict, indent=4, cls=_HubCompon...
Pretty-prints the rendered results dictionary. Rendered results can be multiply-nested dictionaries; this uses JSON serialization to print a nice representation.
def prepare_impact_function(self): impact_function = ImpactFunction() impact_function.callback = self.progress_callback impact_function.hazard = self.parent.hazard_layer impact_function.exposure = self.parent.exposure_layer aggregation = self.parent.aggregation_layer if a...
Create an analysis as a representation of the current situation of IFCW.
def update(self): stats = self.get_init_value() if self.input_method == 'local': stats['cpu'] = cpu_percent.get() stats['percpu'] = cpu_percent.get(percpu=True) stats['mem'] = psutil.virtual_memory().percent stats['swap'] = psutil.swap_memory().percent ...
Update quicklook stats using the input method.
def set_ipcsem_params(self, ftok=None, persistent=None):
    """Sets ipcsem lock engine params.

    :param str|unicode ftok: Set the ipcsem key via ftok() for avoiding duplicates.

    :param bool persistent: Do not remove ipcsem's on shutdown.
    """
    # Each option maps to one underlying _set() call; extra keyword
    # arguments (the bool cast) ride along per option.
    options = (
        ('ftok', ftok, {}),
        ('persistent-ipcsem', persistent, {'cast': bool}),
    )
    for option_name, option_value, extra in options:
        self._set(option_name, option_value, **extra)
    return self._section
Sets ipcsem lock engine params. :param str|unicode ftok: Set the ipcsem key via ftok() for avoiding duplicates. :param bool persistent: Do not remove ipcsem's on shutdown.
def is_broker_action_done(action, rid=None, unit=None): rdata = relation_get(rid, unit) or {} broker_rsp = rdata.get(get_broker_rsp_key()) if not broker_rsp: return False rsp = CephBrokerRsp(broker_rsp) unit_name = local_unit().partition('/')[2] key = "unit_{}_ceph_broker_action.{}".form...
Check whether broker action has completed yet. @param action: name of action to be performed @returns True if action complete otherwise False
def get_proficiencies_for_objectives(self, objective_ids): collection = JSONClientValidated('learning', collection='Proficiency', runtime=self._runtime) result = collection.find( dict({'objectiveId': str(object...
Gets a ``ProficiencyList`` relating to the given objectives. arg: objective_ids (osid.id.IdList): the objective ``Ids`` return: (osid.learning.ProficiencyList) - the returned ``Proficiency`` list raise: NullArgument - ``objective_ids`` is ``null`` raise: OperationFa...
def get_pg_core(connection_string, *, cursor_factory=None, edit_connection=None): import psycopg2 as pq from psycopg2.extras import NamedTupleCursor def opener(): cn = pq.connect(connection_string) cn.cursor_factory = cursor_factory or NamedTupleCursor if edit_connection: edit_connection(cn) ...
Creates a simple PostgreSQL core. Requires the psycopg2 library.
def getDiskFreeSpace( self, freeBytesAvailable, totalNumberOfBytes, totalNumberOfFreeBytes, dokanFileInfo, ): ret = self.operations('getDiskFreeSpace') ctypes.memmove( freeBytesAvailable, ctypes.byref(ctypes.c_longlong(ret['freeBytesAva...
Get the amount of free space on this volume. :param freeBytesAvailable: pointer for free bytes available :type freeBytesAvailable: ctypes.c_void_p :param totalNumberOfBytes: pointer for total number of bytes :type totalNumberOfBytes: ctypes.c_void_p :param totalNumberOfFreeBytes...
def async_session_handler(self, signal: str) -> None: if signal == 'data': self.async_event_handler(self.websocket.data) elif signal == 'state': if self.async_connection_status_callback: self.async_connection_status_callback( self.websocket.sta...
Signalling from websocket. data - new data available for processing. state - network state has changed.
def countExtn(fimg, extname='SCI'): closefits = False if isinstance(fimg, string_types): fimg = fits.open(fimg) closefits = True n = 0 for e in fimg: if 'extname' in e.header and e.header['extname'] == extname: n += 1 if closefits: fimg.close() return ...
Return the number of 'extname' extensions, defaulting to counting the number of SCI extensions.
def frame2string(frame):
    """Render a single traceback-style entry describing *frame*.

    Returns a string of the form::

        \\tFile "{file name}", line {line number}, in {code object name}
        \\t\\t{source line at that line number, leading whitespace stripped}
    """
    code = frame.f_code
    header = '\tFile "{0}", line {1}, in {2}'.format(
        code.co_filename, frame.f_lineno, code.co_name)
    # Pass f_globals so linecache can resolve module source via loaders
    # (e.g. zipimport) when the file is not on disk.
    source = linecache.getline(
        code.co_filename, frame.f_lineno, frame.f_globals).lstrip()
    return '{0}\n\t\t{1}'.format(header, source)
Return info about frame. Keyword arg: frame Return string in format: File {file name}, line {line number}, in {name of parent of code object} {newline} Line from file at line number
def __init_url(self): portals_self_url = "{}/portals/self".format(self._url) params = { "f" :"json" } if not self._securityHandler is None: params['token'] = self._securityHandler.token res = self._get(url=portals_self_url, param...
Loads the information into the class.
def get_ignored_lines(self): ignored_lines = set() for line_number, line in enumerate(self.content.split('\n'), 1): if ( WHITELIST_REGEX['yaml'].search(line) or ( self.exclude_lines_regex and self.exclude_lines_regex.sea...
Return a set of integers that refer to line numbers that were whitelisted by the user and should be ignored. We need to parse the file separately from PyYAML parsing because the parser drops the comments (at least up to version 3.13): https://github.com/yaml/pyyaml/blob/a2d481b8dbd2b352...