positive
stringlengths
100
30.3k
anchor
stringlengths
1
15k
def normalize_topic(topic): """ Get a canonical representation of a Wikipedia topic, which may include a disambiguation string in parentheses. Returns (name, disambig), where "name" is the normalized topic name, and "disambig" is a string corresponding to the disambiguation text or None. ""...
Get a canonical representation of a Wikipedia topic, which may include a disambiguation string in parentheses. Returns (name, disambig), where "name" is the normalized topic name, and "disambig" is a string corresponding to the disambiguation text or None.
def _errcheck(result, func, arguments): """ Error checker for functions returning an integer indicating success (0) / failure (1). Raises a XdoException in case of error, otherwise just returns ``None`` (returning the original code, 0, would be useless anyways..) """ if result != 0: ...
Error checker for functions returning an integer indicating success (0) / failure (1). Raises a XdoException in case of error, otherwise just returns ``None`` (returning the original code, 0, would be useless anyways..)
def update(self, webhook_url=values.unset, friendly_name=values.unset, reachability_webhooks_enabled=values.unset, acl_enabled=values.unset): """ Update the ServiceInstance :param unicode webhook_url: A URL that will receive event updates when objects are manipulat...
Update the ServiceInstance :param unicode webhook_url: A URL that will receive event updates when objects are manipulated. :param unicode friendly_name: Human-readable name for this service instance :param bool reachability_webhooks_enabled: True or false - controls whether this instance fires ...
def init_jcrop(min_size=None): """Initialize jcrop. :param min_size: The minimal size of crop area. """ init_x = current_app.config['AVATARS_CROP_INIT_POS'][0] init_y = current_app.config['AVATARS_CROP_INIT_POS'][1] init_size = current_app.config['AVATARS_CROP_INIT_SIZE'...
Initialize jcrop. :param min_size: The minimal size of crop area.
def scalarDecorator(func): """Decorator to return scalar outputs as a set""" @wraps(func) def scalar_wrapper(*args,**kwargs): if nu.array(args[0]).shape == (): scalarOut= True newargs= () for ii in range(len(args)): newargs= newargs+(nu.array([args...
Decorator to return scalar outputs as a set
def step_impl10(context): """Create application list. :param context: test context. """ assert context.app_list and len( context.app_list) > 0, "ENSURE: app list is provided." assert context.file_list and len( context.file_list) > 0, "ENSURE: file list is provided." context.fuzz...
Create application list. :param context: test context.
def removeFixedEffect(self, index=None): """ set sample and trait designs F: NxK sample design A: LxP sample design REML: REML for this term? index: index of which fixed effect to replace. If None, remove last term. """ if self._n_terms==0: ...
set sample and trait designs F: NxK sample design A: LxP sample design REML: REML for this term? index: index of which fixed effect to replace. If None, remove last term.
def with_config_dir(self, dir_path): """ Configure current resolver to use every valid YAML configuration files available in the given directory path. To be taken into account, a configuration file must conform to the following naming convention: * 'lexicon.yml' for a global ...
Configure current resolver to use every valid YAML configuration files available in the given directory path. To be taken into account, a configuration file must conform to the following naming convention: * 'lexicon.yml' for a global Lexicon config file (see with_config_file doc) ...
def _reset(cls): """If we have forked since the watch dictionaries were initialized, all that has is garbage, so clear it.""" if os.getpid() != cls._cls_pid: cls._cls_pid = os.getpid() cls._cls_instances_by_target.clear() cls._cls_thread_by_target.clear()
If we have forked since the watch dictionaries were initialized, all that has is garbage, so clear it.
def select(i): """ Input: { dict - dict with values being dicts with 'name' as string to display and 'sort' as int (for ordering) (title) - print title (error_if_empty) - if 'yes' and Enter, make error (skip_sort) - if 'yes', do ...
Input: { dict - dict with values being dicts with 'name' as string to display and 'sort' as int (for ordering) (title) - print title (error_if_empty) - if 'yes' and Enter, make error (skip_sort) - if 'yes', do not sort array ...
def present(name, running=None, source=None, profiles=None, config=None, devices=None, architecture='x86_64', ephemeral=False, restart_on_change=False, remote_addr=None, cert=None, key=Non...
Create the named container if it does not exist name The name of the container to be created running : None * If ``True``, ensure that the container is running * If ``False``, ensure that the container is stopped * If ``None``, do nothing with regards to the running state of th...
def start(self): """ This method must be called immediately after the class is instantiated. It instantiates the serial interface and then performs auto pin discovery. It is intended for use by pymata3 applications that do not use asyncio coroutines directly. :re...
This method must be called immediately after the class is instantiated. It instantiates the serial interface and then performs auto pin discovery. It is intended for use by pymata3 applications that do not use asyncio coroutines directly. :returns: No return value.
def returnTradeHistory(self, currencyPair, start=None, end=None): """Returns the past 200 trades for a given market, or up to 50,000 trades between a range specified in UNIX timestamps by the "start" and "end" GET parameters.""" return self._public('returnTradeHistory', currencyPair=curr...
Returns the past 200 trades for a given market, or up to 50,000 trades between a range specified in UNIX timestamps by the "start" and "end" GET parameters.
def _get_app_path(url): ''' Extract the app path from a Bokeh server URL Args: url (str) : Returns: str ''' app_path = urlparse(url).path.rstrip("/") if not app_path.startswith("/"): app_path = "/" + app_path return app_path
Extract the app path from a Bokeh server URL Args: url (str) : Returns: str
def fileRefDiscovery(self): ''' Finds the missing components for file nodes by parsing the Doxygen xml (which is just the ``doxygen_output_dir/node.refid``). Additional items parsed include adding items whose ``refid`` tag are used in this file, the <programlisting> for the file...
Finds the missing components for file nodes by parsing the Doxygen xml (which is just the ``doxygen_output_dir/node.refid``). Additional items parsed include adding items whose ``refid`` tag are used in this file, the <programlisting> for the file, what it includes and what includes it, as well...
def _json_safe_dump(self, data): """ Make a json dump of `data`, that can be used directly in a `<script>` tag. Available as json() inside templates """ return json.dumps(data).replace(u'<', u'\\u003c') \ .replace(u'>', u'\\u003e') \ .replace(u'&', u'\\u0026') \ .repl...
Make a json dump of `data`, that can be used directly in a `<script>` tag. Available as json() inside templates
def delete(self, *names): """ Remove the key from redis :param names: tuple of strings - The keys to remove from redis. :return: Future() """ names = [self.redis_key(n) for n in names] with self.pipe as pipe: return pipe.delete(*names)
Remove the key from redis :param names: tuple of strings - The keys to remove from redis. :return: Future()
def _finalize_batch(self): """ Method to finalize the batch, this will iterate over the _batches dict and create a PmtInf node for each batch. The correct information (from the batch_key and batch_totals) will be inserted and the batch transaction nodes will be folded. Finally, t...
Method to finalize the batch, this will iterate over the _batches dict and create a PmtInf node for each batch. The correct information (from the batch_key and batch_totals) will be inserted and the batch transaction nodes will be folded. Finally, the batches will be added to the main XM...
def manifest(self, entry): """Returns manifest as a list. :param entry: :class:`jicimagelib.image.FileBackend.Entry` :returns: list """ entries = [] for fname in self._sorted_nicely(os.listdir(entry.directory)): if fname == 'manifest.json': ...
Returns manifest as a list. :param entry: :class:`jicimagelib.image.FileBackend.Entry` :returns: list
async def peer_delete(self, *, dc=None, address): """Remove the server with given address from the Raft configuration Parameters: dc (str): Specify datacenter that will be used. Defaults to the agent's local datacenter. address (str): "IP:port" of the serve...
Remove the server with given address from the Raft configuration Parameters: dc (str): Specify datacenter that will be used. Defaults to the agent's local datacenter. address (str): "IP:port" of the server to remove. Returns: bool: ``True`` on s...
def to_dict(self): """Returns attributes formatted as a dictionary.""" d = {'id': self.id, 'classes': self.classes} d.update(self.kvs) return d
Returns attributes formatted as a dictionary.
def degrees_of_freedom(self): """ Returns the number of degrees of freedom. """ if len(self._set_xdata)==0 or len(self._set_ydata)==0: return None # Temporary hack: get the studentized residuals, which uses the massaged data # This should later be changed to get_...
Returns the number of degrees of freedom.
def to_satoshis(input_quantity, input_type): ''' convert to satoshis, no rounding ''' assert input_type in UNIT_CHOICES, input_type # convert to satoshis if input_type in ('btc', 'mbtc', 'bit'): satoshis = float(input_quantity) * float(UNIT_MAPPINGS[input_type]['satoshis_per']) elif input_t...
convert to satoshis, no rounding
def raw(self): """Return raw key. returns: str: raw key """ if self._raw: return text_type(self._raw).strip("\r\n") else: return text_type(base64decode(self._b64encoded)).strip("\r\n")
Return raw key. returns: str: raw key
def check(path_dir, requirements_name='requirements.txt'): '''Look for unused packages listed on project requirements''' requirements = _load_requirements(requirements_name, path_dir) imported_modules = _iter_modules(path_dir) installed_packages = _list_installed_packages() imported_modules.update(...
Look for unused packages listed on project requirements
def _delete_redundancy_routers(self, context, router_db): """To be called in delete_router() BEFORE router has been deleted in DB. The router should have not interfaces. """ e_context = context.elevated() for binding in router_db.redundancy_bindings: self.delete_route...
To be called in delete_router() BEFORE router has been deleted in DB. The router should have not interfaces.
def infoObject(object, cat, format, *args): """ Log an informational message in the given category. """ doLog(INFO, object, cat, format, args)
Log an informational message in the given category.
def _standalone_init(self, spark_master_address, pre_20_mode, requests_config, tags): """ Return a dictionary of {app_id: (app_name, tracking_url)} for the running Spark applications """ metrics_json = self._rest_request_to_json( spark_master_address, SPARK_MASTER_STATE_PATH,...
Return a dictionary of {app_id: (app_name, tracking_url)} for the running Spark applications
def get_interface_detail_output_interface_configured_line_speed(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_interface_detail = ET.Element("get_interface_detail") config = get_interface_detail output = ET.SubElement(get_interface_detail, "...
Auto Generated Code
def parseFullScan(self, i, modifications=True): """ parses scan info for giving a Spectrum Obj for plotting. takes significantly longer since it has to unzip/parse xml """ scanObj = PeptideObject() peptide = str(i[1]) pid=i[2] if modifications: sql = '...
parses scan info for giving a Spectrum Obj for plotting. takes significantly longer since it has to unzip/parse xml
def natsort_key(val, key, string_func, bytes_func, num_func): """ Key to sort strings and numbers naturally. It works by splitting the string into components of strings and numbers, and then converting the numbers into actual ints or floats. Parameters ---------- val : str | unicode | byte...
Key to sort strings and numbers naturally. It works by splitting the string into components of strings and numbers, and then converting the numbers into actual ints or floats. Parameters ---------- val : str | unicode | bytes | int | float | iterable key : callable | None A key to appl...
def _set_auth_type(self, v, load=False): """ Setter method for auth_type, mapped from YANG variable /routing_system/interface/ve/ipv6/ipv6_vrrp_extended/auth_type (container) If this variable is read-only (config: false) in the source YANG file, then _set_auth_type is considered as a private method....
Setter method for auth_type, mapped from YANG variable /routing_system/interface/ve/ipv6/ipv6_vrrp_extended/auth_type (container) If this variable is read-only (config: false) in the source YANG file, then _set_auth_type is considered as a private method. Backends looking to populate this variable should ...
def sqlwhere(criteria=None): """Generates SQL where clause. Returns (sql, values). Criteria is a dictionary of {field: value}. >>> sqlwhere() ('', []) >>> sqlwhere({'id': 5}) ('id=%s', [5]) >>> sqlwhere({'id': 3, 'name': 'toto'}) ('id=%s and name=%s', [3, 'toto']) >>> sqlwhere({'id'...
Generates SQL where clause. Returns (sql, values). Criteria is a dictionary of {field: value}. >>> sqlwhere() ('', []) >>> sqlwhere({'id': 5}) ('id=%s', [5]) >>> sqlwhere({'id': 3, 'name': 'toto'}) ('id=%s and name=%s', [3, 'toto']) >>> sqlwhere({'id': 3, 'name': 'toto', 'createdon': '2...
def auto_populate(mode='all'): """ Overrides translation fields population mode (population mode decides which unprovided translations will be filled during model construction / loading). Example: with auto_populate('all'): s = Slugged.objects.create(title='foo') s.title_en...
Overrides translation fields population mode (population mode decides which unprovided translations will be filled during model construction / loading). Example: with auto_populate('all'): s = Slugged.objects.create(title='foo') s.title_en == 'foo' // True s.title_de == 'fo...
def submodel_has_python_callbacks(models): ''' Traverses submodels to check for Python (event) callbacks ''' has_python_callback = False for model in collect_models(models): if len(model._callbacks) > 0 or len(model._event_callbacks) > 0: has_python_callback = True break...
Traverses submodels to check for Python (event) callbacks
def request(self, method, uri, params=None, data=None, headers=None, auth=None, timeout=None, allow_redirects=False): """ Make an HTTP request. """ url = self.relative_uri(uri) return self.domain.request( method, url, params=par...
Make an HTTP request.
def fetch_and_parse(url, bodyLines): """Takes a url, and returns a dictionary of data with 'bodyLines' lines""" pageHtml = fetch_page(url) return parse(url, pageHtml, bodyLines)
Takes a url, and returns a dictionary of data with 'bodyLines' lines
def is_public(self): """Return True iff this function should be considered public.""" if self.dunder_all is not None: return self.name in self.dunder_all else: return not self.name.startswith('_')
Return True iff this function should be considered public.
def _anime_add(self, data): """ Adds an anime to a user's list. :param data: A :class:`Pymoe.Mal.Objects.Anime` object with the anime data :raises: SyntaxError on invalid data type :raises: ServerError on failure to add :rtype: Bool :return: True on success ...
Adds an anime to a user's list. :param data: A :class:`Pymoe.Mal.Objects.Anime` object with the anime data :raises: SyntaxError on invalid data type :raises: ServerError on failure to add :rtype: Bool :return: True on success
def _keepVol(self, vol): """ Mark this volume to be kept in path. """ if vol is None: return if vol in self.extraVolumes: del self.extraVolumes[vol] return if vol not in self.paths: raise Exception("%s not in %s" % (vol, self)) p...
Mark this volume to be kept in path.
def _set_dscp_to_cos_mapping(self, v, load=False): """ Setter method for dscp_to_cos_mapping, mapped from YANG variable /qos/map/dscp_cos/dscp_to_cos_mapping (list) If this variable is read-only (config: false) in the source YANG file, then _set_dscp_to_cos_mapping is considered as a private method....
Setter method for dscp_to_cos_mapping, mapped from YANG variable /qos/map/dscp_cos/dscp_to_cos_mapping (list) If this variable is read-only (config: false) in the source YANG file, then _set_dscp_to_cos_mapping is considered as a private method. Backends looking to populate this variable should do so vi...
def write_shortstr(self, s): """Write a string up to 255 bytes long (after any encoding). If passed a unicode string, encode with UTF-8. """ self._flushbits() if isinstance(s, string): s = s.encode('utf-8') if len(s) > 255: raise FrameSyntaxError...
Write a string up to 255 bytes long (after any encoding). If passed a unicode string, encode with UTF-8.
def colorize_invoke_command(self, string): """ Apply various heuristics to return a colorized version the invoke command string. If these fail, simply return the string in plaintext. Inspired by colorize_log_entry(). """ final_string = string try: ...
Apply various heuristics to return a colorized version the invoke command string. If these fail, simply return the string in plaintext. Inspired by colorize_log_entry().
def _onFunction(self, name, line, pos, absPosition, keywordLine, keywordPos, colonLine, colonPos, level, isAsync, returnAnnotation): """Memorizes a function""" self.__flushLevel(level) f = Function(name, line, pos, absPosition, keywordL...
Memorizes a function
def _compute_mean(self, C, rup, dists, sites, imt): """ Compute mean value for PGA and pseudo-velocity response spectrum, as given in equation 1. Converts also pseudo-velocity response spectrum values to SA, using: SA = (PSV * W)/ratio(SA_larger/SA_geo_mean) W = (2 ...
Compute mean value for PGA and pseudo-velocity response spectrum, as given in equation 1. Converts also pseudo-velocity response spectrum values to SA, using: SA = (PSV * W)/ratio(SA_larger/SA_geo_mean) W = (2 * pi / T) T = period (sec)
def _eq__(self, other): """ Compare the current place object to another passed to the comparison method. The two place objects must have the same identification, even if some of their attributes might be different. @param other: a ``Place`` instance to compare with the current ...
Compare the current place object to another passed to the comparison method. The two place objects must have the same identification, even if some of their attributes might be different. @param other: a ``Place`` instance to compare with the current place object. @return: ...
def guess_depth_cutoff(cb_histogram): ''' Guesses at an appropriate barcode cutoff ''' with read_cbhistogram(cb_histogram) as fh: cb_vals = [int(p.strip().split()[1]) for p in fh] histo = np.histogram(np.log10(cb_vals), bins=50) vals = histo[0] edges = histo[1] mids = np.array([(edge...
Guesses at an appropriate barcode cutoff
def _ExpandArtifactFilesSource(self, source, requested): """Recursively expands an artifact files source.""" expanded_source = rdf_artifacts.ExpandedSource(base_source=source) sub_sources = [] artifact_list = [] if "artifact_list" in source.attributes: artifact_list = source.attributes["artifa...
Recursively expands an artifact files source.
def copy(self, sql, data, **kwargs): """ EXAMPLE: >> with open("/tmp/file.csv", "rb") as fs: >> cursor.copy("COPY table(field1,field2) FROM STDIN DELIMITER ',' ENCLOSED BY ''''", >> fs, buffer_size=65536) """ sql = as_text(sql) ...
EXAMPLE: >> with open("/tmp/file.csv", "rb") as fs: >> cursor.copy("COPY table(field1,field2) FROM STDIN DELIMITER ',' ENCLOSED BY ''''", >> fs, buffer_size=65536)
def free_numeric(self): """Free numeric data""" if self._numeric is not None: self.funs.free_numeric(self._numeric) self._numeric = None self.free_symbolic()
Free numeric data
def __modify(self, withdrawal_id, **kwargs): """Call documentation: `/withdrawal/modify <https://www.wepay.com/developer/reference/withdrawal#modify>`_, plus extra keyword parameters: :keyword str access_token: will be used instead of instance's ``access_token``, with...
Call documentation: `/withdrawal/modify <https://www.wepay.com/developer/reference/withdrawal#modify>`_, plus extra keyword parameters: :keyword str access_token: will be used instead of instance's ``access_token``, with ``batch_mode=True`` will set `authorization` ...
def requirements(requirements_file): """Return packages mentioned in the given file. Args: requirements_file (str): path to the requirements file to be parsed. Returns: (list): 3rd-party package dependencies contained in the file. """ return [ str(pkg.req) for pkg in parse_...
Return packages mentioned in the given file. Args: requirements_file (str): path to the requirements file to be parsed. Returns: (list): 3rd-party package dependencies contained in the file.
def all_selected_options(self): """Returns a list of all selected options belonging to this select tag""" ret = [] for opt in self.options: if opt.is_selected(): ret.append(opt) return ret
Returns a list of all selected options belonging to this select tag
def from_eocube(eocube, ji): """Create a EOCubeChunk object from an EOCube object.""" eocubewin = EOCubeChunk(ji, eocube.df_layers, eocube.chunksize, eocube.wdir) return eocubewin
Create a EOCubeChunk object from an EOCube object.
def render(engine, format, filepath, renderer=None, formatter=None, quiet=False): """Render file with Graphviz ``engine`` into ``format``, return result filename. Args: engine: The layout commmand used for rendering (``'dot'``, ``'neato'``, ...). format: The output format used for rendering (`...
Render file with Graphviz ``engine`` into ``format``, return result filename. Args: engine: The layout commmand used for rendering (``'dot'``, ``'neato'``, ...). format: The output format used for rendering (``'pdf'``, ``'png'``, ...). filepath: Path to the DOT source file to render. ...
def report(rel): """Fires if the machine is running Fedora.""" if "Fedora" in rel.product: return make_pass("IS_FEDORA", product=rel.product) else: return make_fail("IS_NOT_FEDORA", product=rel.product)
Fires if the machine is running Fedora.
def get_compression_extension(self): """ Find the filename extension for the 'docker save' output, which may or may not be compressed. Raises OsbsValidationException if the extension cannot be determined due to a configuration error. :returns: str including leading dot,...
Find the filename extension for the 'docker save' output, which may or may not be compressed. Raises OsbsValidationException if the extension cannot be determined due to a configuration error. :returns: str including leading dot, or else None if no compression
def ed25519_generate_key_pair_from_secret(secret): """ Generate a new key pair. Args: secret (:class:`string`): A secret that serves as a seed Returns: A tuple of (private_key, public_key) encoded in base58. """ # if you want to do this correctly, use a key derivation function! ...
Generate a new key pair. Args: secret (:class:`string`): A secret that serves as a seed Returns: A tuple of (private_key, public_key) encoded in base58.
def find_mrms_tracks(self): """ Identify objects from MRMS timesteps and link them together with object matching. Returns: List of STObjects containing MESH track information. """ obs_objects = [] tracked_obs_objects = [] if self.mrms_ew is not None: ...
Identify objects from MRMS timesteps and link them together with object matching. Returns: List of STObjects containing MESH track information.
def update_function_configuration(self, vpc_config): """Update existing Lambda function configuration. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda """ LOG.info('Updating configuration for lam...
Update existing Lambda function configuration. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda
def run_gblocks(align_fasta_file, **kwargs): """ remove poorly aligned positions and divergent regions with Gblocks """ cl = GblocksCommandline(aln_file=align_fasta_file, **kwargs) r, e = cl.run() print("Gblocks:", cl, file=sys.stderr) if e: print("***Gblocks could not run", file=s...
remove poorly aligned positions and divergent regions with Gblocks
def kill_mprocess(process): """kill process Args: process - Popen object for process """ if process and proc_alive(process): process.terminate() process.communicate() return not proc_alive(process)
kill process Args: process - Popen object for process
def createLoanOffer(self, currency, amount, duration, autoRenew, lendingRate): """Creates a loan offer for a given currency. Required POST parameters are "currency", "amount", "duration", "autoRenew" (0 or 1), and "lendingRate". """ return self._private('createLoa...
Creates a loan offer for a given currency. Required POST parameters are "currency", "amount", "duration", "autoRenew" (0 or 1), and "lendingRate".
def addProxyObject(self, obj, proxied): """ Stores a reference to the unproxied and proxied versions of C{obj} for later retrieval. @since: 0.6 """ self.proxied_objects[id(obj)] = proxied self.proxied_objects[id(proxied)] = obj
Stores a reference to the unproxied and proxied versions of C{obj} for later retrieval. @since: 0.6
def to_repr(value, ctx): """ Converts a value back to its representation form, e.g. x -> "x" """ as_string = to_string(value, ctx) if isinstance(value, str) or isinstance(value, datetime.date) or isinstance(value, datetime.time): as_string = as_string.replace('"', '""') # escape quotes by ...
Converts a value back to its representation form, e.g. x -> "x"
def bin_matrix(M, subsampling_factor=3): """Bin either sparse or dense matrices. """ try: from scipy.sparse import issparse if issparse(M): return bin_sparse(M, subsampling_factor=subsampling_factor) else: raise ImportError except ImportError: ret...
Bin either sparse or dense matrices.
def get_overlaps(self, offset, length): """Returns chunks overlapped with the given range. Args: offset (int): Begin offset of the range. length (int): Length of the range. Returns: Overlapped chunks. (:obj:`budou.chunk.ChunkList`) """ # In case entity's offset points to a space ...
Returns chunks overlapped with the given range. Args: offset (int): Begin offset of the range. length (int): Length of the range. Returns: Overlapped chunks. (:obj:`budou.chunk.ChunkList`)
def get_devices(self): """ Helper that retuns a dict of devices for this server. :return: Returns a tuple of two elements: - dict<tango class name : list of device names> - dict<device names : tango class name> :rtype: tuple<dict, dict> ""...
Helper that retuns a dict of devices for this server. :return: Returns a tuple of two elements: - dict<tango class name : list of device names> - dict<device names : tango class name> :rtype: tuple<dict, dict>
def init(opts): ''' This function gets called when the proxy starts up. ''' if 'host' not in opts['proxy']: log.critical('No \'host\' key found in pillar for this proxy.') return False if 'username' not in opts['proxy']: log.critical('No \'username\' key found in pillar for t...
This function gets called when the proxy starts up.
def nodeDumpOutput(self, buf, cur, level, format, encoding): """Dump an XML node, recursive behaviour, children are printed too. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called """ if buf is None: buf__o = ...
Dump an XML node, recursive behaviour, children are printed too. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called
def add_application(self, application, sync=True): """ add an application to this OS instance. :param application: the application to add on this OS instance :param sync: If sync=True(default) synchronize with Ariane server. If sync=False, add the application object on list to be...
add an application to this OS instance. :param application: the application to add on this OS instance :param sync: If sync=True(default) synchronize with Ariane server. If sync=False, add the application object on list to be added on next save(). :return:
def requires_user(fn): """ Requires that the calling Subject be *either* authenticated *or* remembered via RememberMe services before allowing access. This method essentially ensures that subject.identifiers IS NOT None :raises UnauthenticatedException: indicating that the deco...
Requires that the calling Subject be *either* authenticated *or* remembered via RememberMe services before allowing access. This method essentially ensures that subject.identifiers IS NOT None :raises UnauthenticatedException: indicating that the decorated method is ...
def create(cls, *props, **kwargs): """ Call to CREATE with parameters map. A new instance will be created and saved. :param props: dict of properties to create the nodes. :type props: tuple :param lazy: False by default, specify True to get nodes with id only without the paramet...
Call to CREATE with parameters map. A new instance will be created and saved. :param props: dict of properties to create the nodes. :type props: tuple :param lazy: False by default, specify True to get nodes with id only without the parameters. :type: bool :rtype: list
def IsPathSuffix(mod_path, path): """Checks whether path is a full path suffix of mod_path. Args: mod_path: Must be an absolute path to a source file. Must not have file extension. path: A relative path. Must not have file extension. Returns: True if path is a full path suffix of mod_p...
Checks whether path is a full path suffix of mod_path. Args: mod_path: Must be an absolute path to a source file. Must not have file extension. path: A relative path. Must not have file extension. Returns: True if path is a full path suffix of mod_path. False otherwise.
def print_tools(self, pattern=None, buf=sys.stdout): """Print a list of visible tools. Args: pattern (str): Only list tools that match this glob pattern. """ seen = set() rows = [] context = self.context if context: data = context.get_too...
Print a list of visible tools. Args: pattern (str): Only list tools that match this glob pattern.
def transform_data_fasttext(data, vocab, idx_to_counts, cbow, ngram_buckets, ngrams, batch_size, window_size, frequent_token_subsampling=1E-4, dtype='float32', index_dtype='int64'): """Transform a DataStream of coded DataSets to a D...
Transform a DataStream of coded DataSets to a DataStream of batches. Parameters ---------- data : gluonnlp.data.DataStream DataStream where each sample is a valid input to gluonnlp.data.EmbeddingCenterContextBatchify. vocab : gluonnlp.Vocab Vocabulary containing all tokens whose...
def create(self, target, configuration_url=values.unset, configuration_method=values.unset, configuration_filters=values.unset, configuration_triggers=values.unset, configuration_flow_sid=values.unset, configuration_retry_count=values.unset, ...
Create a new WebhookInstance :param WebhookInstance.Target target: The target of this webhook. :param unicode configuration_url: The absolute url the webhook request should be sent to. :param WebhookInstance.Method configuration_method: The HTTP method to be used when sending a webhook request....
def taskrouter(self): """ Access the Taskrouter Twilio Domain :returns: Taskrouter Twilio Domain :rtype: twilio.rest.taskrouter.Taskrouter """ if self._taskrouter is None: from twilio.rest.taskrouter import Taskrouter self._taskrouter = Taskrouter...
Access the Taskrouter Twilio Domain :returns: Taskrouter Twilio Domain :rtype: twilio.rest.taskrouter.Taskrouter
def create_page_move(self, page_move_parameters, project, wiki_identifier, comment=None): """CreatePageMove. Creates a page move operation that updates the path and order of the page as provided in the parameters. :param :class:`<WikiPageMoveParameters> <azure.devops.v5_0.wiki.models.WikiPageMov...
CreatePageMove. Creates a page move operation that updates the path and order of the page as provided in the parameters. :param :class:`<WikiPageMoveParameters> <azure.devops.v5_0.wiki.models.WikiPageMoveParameters>` page_move_parameters: Page move operation parameters. :param str project: Proje...
def close(self):
    """Release every static mask object and reset the mask registry.

    Each entry in ``self.masklist`` is first overwritten with ``None`` (so
    any other holder of the old dict sees the masks dropped), then the
    registry itself is replaced with a fresh empty dict.

    :returns: None
    """
    for name in list(self.masklist):
        self.masklist[name] = None
    self.masklist = {}
Deletes all static mask objects.
def check_next_arg(self, atype, avalue, add=True, check_extension=True): """Argument validity checking This method is usually used by the parser to check if detected argument is allowed for this command. We make a distinction between required and optional arguments. Optional (o...
Argument validity checking This method is usually used by the parser to check if detected argument is allowed for this command. We make a distinction between required and optional arguments. Optional (or tagged) arguments can be provided unordered but not the required ones. ...
def p_ArrayLiteralContentList(p):
    '''
    ArrayLiteralContentList : ArrayLiteralContent
                            | ArrayLiteralContentList COMMA ArrayLiteralContent
    '''
    # PLY grammar action: the docstring above IS the grammar rule.
    # len(p) == 2 -> single-element production; len(p) == 4 -> list COMMA element.
    if len(p) == 2:
        p[0] = ArrayLiteralContentList(None, p[1])
    else:
        p[0] = ArrayLiteralContentList(p[1], p[3])
ArrayLiteralContentList : ArrayLiteralContent | ArrayLiteralContentList COMMA ArrayLiteralContent
def execute(self, logger: Logger, options: Dict[str, Dict[str, Any]]) -> T: """ Called to parse the object as described in this parsing plan, using the provided arguments for the parser. * Exceptions are caught and wrapped into ParsingException * If result does not match expected type, a...
Called to parse the object as described in this parsing plan, using the provided arguments for the parser. * Exceptions are caught and wrapped into ParsingException * If result does not match expected type, an error is thrown :param logger: the logger to use during parsing (optional: None is su...
def filter_noexpand_columns(columns): """Return columns not containing and containing the noexpand prefix. Parameters ---------- columns: sequence of str A sequence of strings to be split Returns ------- Two lists, the first containing strings without the noexpand prefix, the ...
Return columns not containing and containing the noexpand prefix. Parameters ---------- columns: sequence of str A sequence of strings to be split Returns ------- Two lists, the first containing strings without the noexpand prefix, the second containing those that do with the pre...
def _qualifiers_tomof(qualifiers, indent, maxline=MAX_MOF_LINE): """ Return a MOF string with the qualifier values, including the surrounding square brackets. The qualifiers are ordered by their name. Return empty string if no qualifiers. Normally multiline output and may fold qualifiers into mult...
Return a MOF string with the qualifier values, including the surrounding square brackets. The qualifiers are ordered by their name. Return empty string if no qualifiers. Normally multiline output and may fold qualifiers into multiple lines. The order of qualifiers is preserved. Parameters: ...
def _write_parameter_file(params): """ Write the parameter file in the format that elaxtix likes. """ # Get path path = os.path.join(get_tempdir(), 'params.txt') # Define helper function def valToStr(val): if val in [True, False]: return '"%s"' % str(val).lower() ...
Write the parameter file in the format that elastix likes.
def _get_rule_changes(rules, _rules): ''' given a list of desired rules (rules) and existing rules (_rules) return a list of rules to delete (to_delete) and to create (to_create) ''' to_delete = [] to_create = [] # for each rule in state file # 1. validate rule # 2. determine if rule...
given a list of desired rules (rules) and existing rules (_rules) return a list of rules to delete (to_delete) and to create (to_create)
def bprecess(ra0, dec0, mu_radec=None, parallax=None, rad_vel=None, epoch=None): """ NAME: BPRECESS PURPOSE: Precess positions from J2000.0 (FK5) to B1950.0 (FK4) EXPLANATION: Calculates the mean place of a star at B1950.0 on the FK4 system from the mean place at J...
NAME: BPRECESS PURPOSE: Precess positions from J2000.0 (FK5) to B1950.0 (FK4) EXPLANATION: Calculates the mean place of a star at B1950.0 on the FK4 system from the mean place at J2000.0 on the FK5 system. CALLING SEQUENCE: bprecess, ra, dec, ra_1950, de...
def remove(self, key): """ Removes the mapping for a key from this map if it is present. The map will not contain a mapping for the specified key once the call returns. **Warning: This method uses __hash__ and __eq__ methods of binary form of the key, not the actual implementations ...
Removes the mapping for a key from this map if it is present. The map will not contain a mapping for the specified key once the call returns. **Warning: This method uses __hash__ and __eq__ methods of binary form of the key, not the actual implementations of __hash__ and __eq__ defined in key's...
def create_dep(self, ): """Create a dep and store it in the self.dep :returns: None :rtype: None :raises: None """ name = self.name_le.text() short = self.short_le.text() assetflag = self.asset_rb.isChecked() ordervalue = self.ordervalue_sb.value(...
Create a dep and store it in self.dep :returns: None :rtype: None :raises: None
def bbduk_trim(forward_in, forward_out, reverse_in='NA', reverse_out='NA', returncmd=False, **kwargs): """ Wrapper for using bbduk to quality trim reads. Contains arguments used in OLC Assembly Pipeline, but these can be overwritten by using keyword parameters. :param forward_in: Forward reads you want ...
Wrapper for using bbduk to quality trim reads. Contains arguments used in OLC Assembly Pipeline, but these can be overwritten by using keyword parameters. :param forward_in: Forward reads you want to quality trim. :param returncmd: If set to true, function will return the cmd string passed to subprocess as ...
def draw_chimera_embedding(G, *args, **kwargs): """Draws an embedding onto the chimera graph G, according to layout. If interaction_edges is not None, then only display the couplers in that list. If embedded_graph is not None, the only display the couplers between chains with intended couplings accord...
Draws an embedding onto the chimera graph G, according to layout. If interaction_edges is not None, then only display the couplers in that list. If embedded_graph is not None, then only display the couplers between chains with intended couplings according to embedded_graph. Parameters ----------
def __add_token_annotation_tier(self, tier): """ adds a tier to the document graph, in which each event annotates exactly one token. """ for i, event in enumerate(tier.iter('event')): anno_key = '{0}:{1}'.format(self.ns, tier.attrib['category']) anno_val =...
adds a tier to the document graph, in which each event annotates exactly one token.
def current_changed(self, i): """Slot for when the current index changes. Emits the :data:`AbstractLevel.new_root` signal. :param index: the new current index :type index: int :returns: None :rtype: None :raises: None """ m = self.model() ...
Slot for when the current index changes. Emits the :data:`AbstractLevel.new_root` signal. :param index: the new current index :type index: int :returns: None :rtype: None :raises: None
def namePop(ctxt):
    """Pop and return the top element name from the parser context's name stack.

    ``ctxt`` may be ``None``, in which case a null handle is passed to the
    underlying libxml2 binding.
    """
    handle = ctxt._o if ctxt is not None else None
    return libxml2mod.namePop(handle)
Pops the top element name from the name stack
def get_string(self, key, is_list=False, is_optional=False, is_secret=False, is_local=False, default=None, options=None): """ Get a the value corresponding to the key and ...
Get the value corresponding to the key and convert it to `str`/`list(str)`. Args: key: the dict key. is_list: If this is one element or a list of elements. is_optional: To raise an error if key was not found. is_secret: If the key is a secret. is_l...
def get_map(name, map_type, number, reverse=False): """ Return a `BrewerMap` representation of the specified color map. Parameters ---------- name : str Name of color map. Use `print_maps` to see available color maps. map_type : {'Sequential', 'Diverging', 'Qualitative'} Select ...
Return a `BrewerMap` representation of the specified color map. Parameters ---------- name : str Name of color map. Use `print_maps` to see available color maps. map_type : {'Sequential', 'Diverging', 'Qualitative'} Select color map type. number : int Number of defined color...
def get_handler(self): """Create a fully configured CloudLoggingHandler. Returns: (obj): Instance of `google.cloud.logging.handlers. CloudLoggingHandler` """ gcl_client = gcl_logging.Client( project=self.project_id, credentials=se...
Create a fully configured CloudLoggingHandler. Returns: (obj): Instance of `google.cloud.logging.handlers. CloudLoggingHandler`
def h_all_pairs(gbm, array_or_frame, indices_or_columns = 'all'): """ PURPOSE Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient- boosting model between each pair of variables represented by the elements of the passed array or frame and s...
PURPOSE Compute Friedman and Popescu's two-variable H statistic, in order to look for an interaction in the passed gradient- boosting model between each pair of variables represented by the elements of the passed array or frame and specified by the passed indices or columns. See Jerome H. Friedman and...
def away(self, msg=''): """ Sets/unsets your away status. Optional arguments: * msg='' - Away reason. """ with self.lock: self.send('AWAY :%s' % msg) if self.readable(): msg = self._recv(expected_replies=('306', '305')) ...
Sets/unsets your away status. Optional arguments: * msg='' - Away reason.