positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def delete(cls, label='default', path=None):
"""Delete a server configuration.
This method is thread safe.
:param label: A string. The configuration identified by ``label`` is
deleted.
:param path: A string. The configuration file to be manipulated.
Defaults to ... | Delete a server configuration.
This method is thread safe.
:param label: A string. The configuration identified by ``label`` is
deleted.
:param path: A string. The configuration file to be manipulated.
Defaults to what is returned by
:func:`nailgun.config._g... |
def main(argv=None): # IGNORE:C0111
''' Main Entry '''
by = None
reqres = check_requirements()
if reqres == CheckResult.Error:
perr("Requirement checking failed")
sys.exit(const.EFatal)
try:
result = const.ENoError
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
setuphandlers()
... | Main Entry |
def autodetect(self):
"""
Try to guess the device_type using SNMP GET based on the SNMP_MAPPER dict. The type which
is returned is directly matching the name in *netmiko.ssh_dispatcher.CLASS_MAPPER_BASE*
dict.
Thus you can use this name to retrieve automatically the right Connec... | Try to guess the device_type using SNMP GET based on the SNMP_MAPPER dict. The type which
is returned is directly matching the name in *netmiko.ssh_dispatcher.CLASS_MAPPER_BASE*
dict.
Thus you can use this name to retrieve automatically the right ConnectionClass
Returns
-------... |
def find_channel_groups(chan):
"""Channels are often organized in groups (different grids / strips or
channels in different brain locations), so we use a simple heuristic to
get these channel groups.
Parameters
----------
chan : instance of Channels
channels to group
Returns
--... | Channels are often organized in groups (different grids / strips or
channels in different brain locations), so we use a simple heuristic to
get these channel groups.
Parameters
----------
chan : instance of Channels
channels to group
Returns
-------
groups : dict
channe... |
def draw(self, can=None):
"Draw the charts."
if can == None:
can = canvas.default_canvas()
assert self.check_integrity()
for plot in self.__plots:
plot.check_integrity()
self.x_range, self.x_grid_interval = \
self.__get_data_range(self.x_ra... | Draw the charts. |
def _attend_process(self, proc, sleeptime):
"""
Waits on a process for a given time to see if it finishes, returns True
if it's still running after the given time or False as soon as it
returns.
:param psutil.Popen proc: Process object opened by psutil.Popen()
:param fl... | Waits on a process for a given time to see if it finishes, returns True
if it's still running after the given time or False as soon as it
returns.
:param psutil.Popen proc: Process object opened by psutil.Popen()
:param float sleeptime: Time to wait
:return bool: True if proces... |
def energy(mesh, uniform_density=False):
"""The mesh energy is defined as
E = int_Omega |u_l(x) - u(x)| rho(x) dx
where u(x) = ||x||^2 and u_l is its piecewise linearization on the mesh.
"""
# E = 1/(d+1) sum_i ||x_i||^2 |omega_i| - int_Omega_i ||x||^2
dim = mesh.cells["nodes"].shape[1] - 1
... | The mesh energy is defined as
E = int_Omega |u_l(x) - u(x)| rho(x) dx
where u(x) = ||x||^2 and u_l is its piecewise linearization on the mesh. |
def lon(self):
"""Longitude of grid centers (degrees)
:getter: Returns the points of axis ``'lon'`` if availible in the
process's domains.
:type: array
:raises: :exc:`ValueError`
if no ``'lon'`` axis can be found.
"""
try:... | Longitude of grid centers (degrees)
:getter: Returns the points of axis ``'lon'`` if availible in the
process's domains.
:type: array
:raises: :exc:`ValueError`
if no ``'lon'`` axis can be found. |
def get_logger(cls):
"""
Initializes and returns our logger instance.
"""
if cls.logger is None:
cls.logger = logging.getLogger("django_auth_ldap")
cls.logger.addHandler(logging.NullHandler())
return cls.logger | Initializes and returns our logger instance. |
def string_to_macaddr(value, strict=False):
"""
Return a tuple corresponding to the string representation of a Media
Access Control address (MAC address) of a device, which is a unique
identifier assigned to a network interface controller (NIC) for
communications at the data link layer of a network ... | Return a tuple corresponding to the string representation of a Media
Access Control address (MAC address) of a device, which is a unique
identifier assigned to a network interface controller (NIC) for
communications at the data link layer of a network segment.
The standard (IEEE 802) format for printin... |
def parse_cstring(stream, offset):
"""
parse_cstring will parse a null-terminated string in a bytestream.
The string will be decoded with UTF-8 decoder, of course since we are
doing this byte-a-byte, it won't really work for all Unicode strings.
TODO: add proper Unicode support
... | parse_cstring will parse a null-terminated string in a bytestream.
The string will be decoded with UTF-8 decoder, of course since we are
doing this byte-a-byte, it won't really work for all Unicode strings.
TODO: add proper Unicode support |
def match(select, tag, namespaces=None, flags=0, **kwargs):
"""Match node."""
return compile(select, namespaces, flags, **kwargs).match(tag) | Match node. |
def cross_join(*tables, **kwargs):
"""
Perform a cross join (cartesian product) amongst a list of tables, with
optional set of prefixes to apply to overlapping column names
Parameters
----------
tables : ibis.expr.types.TableExpr
Returns
-------
joined : TableExpr
Examples
... | Perform a cross join (cartesian product) amongst a list of tables, with
optional set of prefixes to apply to overlapping column names
Parameters
----------
tables : ibis.expr.types.TableExpr
Returns
-------
joined : TableExpr
Examples
--------
>>> import ibis
>>> schemas =... |
def _progenitor_setup(self,progenitor,leading,useTMHessian):
"""The part of the setup relating to the progenitor's orbit"""
#Progenitor orbit: Calculate actions, frequencies, and angles for the progenitor
self._progenitor= progenitor() #call to get new Orbit
# Make sure we do not use phy... | The part of the setup relating to the progenitor's orbit |
def validate(self, value):
"""Validate the length of a list.
:param value: List of values.
:raises: :class:`halogen.exception.ValidationError` exception when length of the list is less than
minimum or greater than maximum.
"""
try:
length = len(value)
... | Validate the length of a list.
:param value: List of values.
:raises: :class:`halogen.exception.ValidationError` exception when length of the list is less than
minimum or greater than maximum. |
def is_valid_url(url):
"""Checks if a given string is an url"""
from .misc import to_text
if not url:
return url
pieces = urllib_parse.urlparse(to_text(url))
return all([pieces.scheme, pieces.netloc]) | Checks if a given string is an url |
def idle_task(self):
'''called on idle'''
if self.module('console') is not None and not self.menu_added_console:
self.menu_added_console = True
self.module('console').add_menu(self.menu) | called on idle |
def main():
"""Read GeoTiff raster data and perform log transformation.
"""
input_tif = "../tests/data/Jamaica_dem.tif"
output_tif = "../tests/data/tmp_results/log_dem.tif"
rst = RasterUtilClass.read_raster(input_tif)
# raster data (with noDataValue as numpy.nan) as numpy array
rst_valid = r... | Read GeoTiff raster data and perform log transformation. |
def _setup_logger(self, logging_level: int, log_to_console: bool):
"""Sets up the internal logger
Args:
logging_level: what logging level to use
log_to_console: whether or not to log to the console
"""
self.logger = logging.getLogger('discord')
self.logge... | Sets up the internal logger
Args:
logging_level: what logging level to use
log_to_console: whether or not to log to the console |
def decr(self, member, by=1):
""" Decrements @member by @by within the sorted set """
return self._client.zincrby(
self.key_prefix, self._dumps(member), by * -1) | Decrements @member by @by within the sorted set |
def send(self, event_data):
"""
Sends an event data and blocks until acknowledgement is
received or operation times out.
:param event_data: The event to be sent.
:type event_data: ~azure.eventhub.common.EventData
:raises: ~azure.eventhub.common.EventHubError if the messa... | Sends an event data and blocks until acknowledgement is
received or operation times out.
:param event_data: The event to be sent.
:type event_data: ~azure.eventhub.common.EventData
:raises: ~azure.eventhub.common.EventHubError if the message fails to
send.
:return: The ... |
def create_space(self, space_name, add_users=True):
"""
Create a new space with the given name in the current target
organization.
"""
body = {
'name': space_name,
'organization_guid': self.api.config.get_organization_guid()
}
# MAINT: may... | Create a new space with the given name in the current target
organization. |
def _pos(self, k):
"""
Description:
Position k breaking
Parameters:
k: position k is used for the breaking
"""
if k < 2:
raise ValueError("k smaller than 2")
G = np.zeros((self.m, self.m))
for i in range(self.m):
... | Description:
Position k breaking
Parameters:
k: position k is used for the breaking |
def _create_port_profile(self, handle, profile_name, vlan_id,
vnic_type, ucsm_ip, trunk_vlans, qos_policy):
"""Creates a Port Profile on the UCS Manager.
Significant parameters set in the port profile are:
1. Port profile name - Should match what was set in vif_deta... | Creates a Port Profile on the UCS Manager.
Significant parameters set in the port profile are:
1. Port profile name - Should match what was set in vif_details
2. High performance mode - For VM-FEX to be enabled/configured on
the port using this port profile, this mode should be enabled.... |
def doc_paragraph(s, indent=0):
'''Takes in a string without wrapping corresponding to a paragraph,
and returns a version of that string wrapped to be at most 80
characters in length on each line.
If indent is given, ensures each line is indented to that number
of spaces.
'''
ret... | Takes in a string without wrapping corresponding to a paragraph,
and returns a version of that string wrapped to be at most 80
characters in length on each line.
If indent is given, ensures each line is indented to that number
of spaces. |
def appendleft(self, value):
"""Add *value* to the left side of the collection."""
def appendleft_trans(pipe):
self._appendleft_helper(value, pipe)
self._transaction(appendleft_trans) | Add *value* to the left side of the collection. |
def _verb(self, verb):
""" Posts minimal activity with verb and bare self object.
:param verb: verb to be used.
"""
activity = {
"verb": verb,
"object": {
"id": self.id,
"objectType": self.object_type,
}
}
... | Posts minimal activity with verb and bare self object.
:param verb: verb to be used. |
def remove_from_tor(self, protocol):
'''
Returns a Deferred which fires with None
'''
r = yield protocol.queue_command('DEL_ONION %s' % self.hostname[:-6])
if r.strip() != 'OK':
raise RuntimeError('Failed to remove hidden service: "%s".' % r) | Returns a Deferred which fires with None |
def get_model(self):
"""
Get a model if the formula was previously satisfied.
"""
if self.lingeling and self.status == True:
model = pysolvers.lingeling_model(self.lingeling)
return model if model != None else [] | Get a model if the formula was previously satisfied. |
def transform(self, X, y=None, **params):
"""
Transforms *X* from phase-space to Fourier-space, returning the design
matrix produced by :func:`Fourier.design_matrix` for input to a
regressor.
**Parameters**
X : array-like, shape = [n_samples, 1]
Column vecto... | Transforms *X* from phase-space to Fourier-space, returning the design
matrix produced by :func:`Fourier.design_matrix` for input to a
regressor.
**Parameters**
X : array-like, shape = [n_samples, 1]
Column vector of phases.
y : None, optional
Unused arg... |
def execute_nb(fname, metadata=None, save=True, show_doc_only=False):
"Execute notebook `fname` with `metadata` for preprocessing."
# Any module used in the notebook that isn't inside must be in the same directory as this script
with open(fname) as f: nb = nbformat.read(f, as_version=4)
ep_class = Execu... | Execute notebook `fname` with `metadata` for preprocessing. |
def nextSolarReturn(date, lon):
""" Returns the next date when sun is at longitude 'lon'. """
jd = eph.nextSolarReturn(date.jd, lon)
return Datetime.fromJD(jd, date.utcoffset) | Returns the next date when sun is at longitude 'lon'. |
def basic_stats(self):
"""Return a markdown representation of simple statistics."""
comment_score = sum(comment.score for comment in self.comments)
if self.comments:
comment_duration = (self.comments[-1].created_utc -
self.comments[0].created_utc)
... | Return a markdown representation of simple statistics. |
def _get_label_or_level_values(self, key, axis=0):
"""
Return a 1-D array of values associated with `key`, a label or level
from the given `axis`.
Retrieval logic:
- (axis=0): Return column values if `key` matches a column label.
Otherwise return index level values... | Return a 1-D array of values associated with `key`, a label or level
from the given `axis`.
Retrieval logic:
- (axis=0): Return column values if `key` matches a column label.
Otherwise return index level values if `key` matches an index
level.
- (axis=1): Ret... |
def GetPresetsByOperatingSystem(self, operating_system):
"""Retrieves preset definitions for a specific operating system.
Args:
operating_system (OperatingSystemArtifact): an operating system artifact
attribute container.
Returns:
list[PresetDefinition]: preset definition that corres... | Retrieves preset definitions for a specific operating system.
Args:
operating_system (OperatingSystemArtifact): an operating system artifact
attribute container.
Returns:
list[PresetDefinition]: preset definition that correspond with the
operating system. |
def poll_queue(job_id, pid, poll_time):
"""
Check the queue of executing/submitted jobs and exit when there is
a free slot.
"""
if config.distribution.serialize_jobs:
first_time = True
while True:
jobs = logs.dbcmd(GET_JOBS)
failed = [job.id for job in jobs if... | Check the queue of executing/submitted jobs and exit when there is
a free slot. |
def generative(func):
"""
Marks an instance method as generative.
"""
def wrap(inst, *args, **kw):
clone = type(inst).__new__(type(inst))
clone.__dict__ = inst.__dict__.copy()
return func(clone, *args, **kw)
return update_wrapper(wrap, func) | Marks an instance method as generative. |
def setFont(self, font):
""" Sets the font that will be returned when data() is called with the Qt.FontRole.
Can be a QFont or None if no font is set.
"""
check_class(font, QtGui.QFont, allow_none=True)
self._font = font | Sets the font that will be returned when data() is called with the Qt.FontRole.
Can be a QFont or None if no font is set. |
def has_fingerprint_moduli(self, modulus):
"""
Returns true if the fingerprint was detected in the key
:param modulus:
:return:
"""
if not self.is_acceptable_modulus(modulus):
return False
self.tested += 1
for i in range(0, len(self.primes)):
... | Returns true if the fingerprint was detected in the key
:param modulus:
:return: |
def names_singleton(self):
"""Returns True if this URI names a file or
if URI represents input/output stream.
"""
if self.stream:
return True
else:
return os.path.isfile(self.object_name) | Returns True if this URI names a file or
if URI represents input/output stream. |
def _set_bundle_message(self, v, load=False):
"""
Setter method for bundle_message, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface/rsvp/interface_refresh_reduction/bundle_message (container)
If this variable is read-only (config: false) in the
source YANG file, then _... | Setter method for bundle_message, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface/rsvp/interface_refresh_reduction/bundle_message (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bundle_message is considered as a private
method. ... |
def _get_namespace2go2term(go2terms):
"""Group GO IDs by namespace."""
namespace2go2term = cx.defaultdict(dict)
for goid, goterm in go2terms.items():
namespace2go2term[goterm.namespace][goid] = goterm
return namespace2go2term | Group GO IDs by namespace. |
def configureEndpoint(self, hostName, portNumber):
"""
**Description**
Used to configure the host name and port number the client tries to connect to. Should be called
before connect.
**Syntax**
.. code:: python
myAWSIoTMQTTClient.configureEndpoint("random.i... | **Description**
Used to configure the host name and port number the client tries to connect to. Should be called
before connect.
**Syntax**
.. code:: python
myAWSIoTMQTTClient.configureEndpoint("random.iot.region.amazonaws.com", 8883)
**Parameters**
*hostN... |
def get_charset(self, default: str) -> str:
"""Returns charset parameter from Content-Type header or default."""
ctype = self.headers.get(CONTENT_TYPE, '')
mimetype = parse_mimetype(ctype)
return mimetype.parameters.get('charset', default) | Returns charset parameter from Content-Type header or default. |
async def connect_controller(self, controller_name=None):
"""Connect to a controller by name. If the name is empty, it
connect to the current controller.
"""
if not controller_name:
controller_name = self.jujudata.current_controller()
if not controller_name:
... | Connect to a controller by name. If the name is empty, it
connect to the current controller. |
def is_normal(self, k):
'''
lmap.is_normal(k) yields True if k is a key in the given lazy map lmap that is neither lazy
nor a formerly-lazy memoized key.
'''
v = ps.PMap.__getitem__(self, k)
if not isinstance(v, (types.FunctionType, partial)) or [] != getargspec_py27like(... | lmap.is_normal(k) yields True if k is a key in the given lazy map lmap that is neither lazy
nor a formerly-lazy memoized key. |
def plot_segmentation(data, peaks, segment_indexes, figsize=(10, 5)):
""" Will plot the data and segmentation based on the peaks and segment indexes.
:param 1d-array data: The orginal axis of the data that was segmented into sections.
:param 1d-array peaks: Peaks of the data.
:param 1d-... | Will plot the data and segmentation based on the peaks and segment indexes.
:param 1d-array data: The orginal axis of the data that was segmented into sections.
:param 1d-array peaks: Peaks of the data.
:param 1d-array segment_indexes: These are the different classes, corresponding to each ... |
def is_callable_type(tp):
"""Test if the type is a generic callable type, including subclasses
excluding non-generic types and callables.
Examples::
is_callable_type(int) == False
is_callable_type(type) == False
is_callable_type(Callable) == True
is_callable_type(Callable[..... | Test if the type is a generic callable type, including subclasses
excluding non-generic types and callables.
Examples::
is_callable_type(int) == False
is_callable_type(type) == False
is_callable_type(Callable) == True
is_callable_type(Callable[..., int]) == True
is_calla... |
def intersect_exposure_and_aggregate_hazard(self):
"""This function intersects the exposure with the aggregate hazard.
If the the exposure is a continuous raster exposure, this function
will set the aggregate hazard layer.
However, this function will set the impact layer.
""... | This function intersects the exposure with the aggregate hazard.
If the the exposure is a continuous raster exposure, this function
will set the aggregate hazard layer.
However, this function will set the impact layer. |
def postpro_fisher(data, report=None):
"""
Performs fisher transform on everything in data.
If report variable is passed, this is added to the report.
"""
if not report:
report = {}
# Due to rounding errors
data[data < -0.99999999999999] = -1
data[data > 0.99999999999999] = 1
... | Performs fisher transform on everything in data.
If report variable is passed, this is added to the report. |
def dump_package_data(data, buf, format_=FileFormat.py, skip_attributes=None):
"""Write package data to `buf`.
Args:
data (dict): Data source - must conform to `package_serialise_schema`.
buf (file-like object): Destination stream.
format_ (`FileFormat`): Format to dump data in.
... | Write package data to `buf`.
Args:
data (dict): Data source - must conform to `package_serialise_schema`.
buf (file-like object): Destination stream.
format_ (`FileFormat`): Format to dump data in.
skip_attributes (list of str): List of attributes to not print. |
def save(self, *args, **kwargs):
"""
**uid**: :code:`electiontype:{name}`
"""
self.uid = 'electiontype:{}'.format(self.slug)
super(ElectionType, self).save(*args, **kwargs) | **uid**: :code:`electiontype:{name}` |
def uclust_cluster_from_sorted_fasta_filepath(
fasta_filepath,
uc_save_filepath=None,
percent_ID=0.97,
max_accepts=1,
max_rejects=8,
stepwords=8,
word_length=8,
optimal=False,
exact=False,
suppress_sort=False,
enable_rev_strand_matc... | Returns clustered uclust file from sorted fasta |
def translate(patterns, flags):
"""Translate patterns."""
positive = []
negative = []
if isinstance(patterns, (str, bytes)):
patterns = [patterns]
flags |= _TRANSLATE
for pattern in patterns:
for expanded in expand_braces(pattern, flags):
(negative if is_negative(e... | Translate patterns. |
def split_window(self, fpath, vertical=False, size=None, bufopts=None):
"""Open file in a new split window.
Args:
fpath (str): Path of the file to open. If ``None``, a new empty
split is created.
vertical (bool): Whether to open a vertical split.
size... | Open file in a new split window.
Args:
fpath (str): Path of the file to open. If ``None``, a new empty
split is created.
vertical (bool): Whether to open a vertical split.
size (Optional[int]): The height (or width) to set for the new window.
bufo... |
def decode(self, subtokens):
"""Converts list of int subtokens ids into a string."""
if isinstance(subtokens, np.ndarray):
# Note that list(subtokens) converts subtokens to a python list, but the
# items remain as np.int32. This converts both the array and its items.
subtokens = subtokens.toli... | Converts list of int subtokens ids into a string. |
def top_features(self, featureset_name, topn=20, by='counts',
perslice=False, slice_kwargs={}):
"""
Retrieves the top ``topn`` most numerous features in the corpus.
Parameters
----------
featureset_name : str
Name of a :class:`.FeatureSet` in the... | Retrieves the top ``topn`` most numerous features in the corpus.
Parameters
----------
featureset_name : str
Name of a :class:`.FeatureSet` in the :class:`.Corpus`\.
topn : int
(default: ``20``) Number of features to return.
by : str
(default:... |
def change_text(self,
text,
fname,
pattern=None,
before=False,
force=False,
delete=False,
note=None,
replace=False,
line_oriented=True,
create=True,
... | Change text in a file.
Returns None if there was no match for the regexp, True if it was matched
and replaced, and False if the file did not exist or there was some other
problem.
@param text: Text to insert.
@param fname: Filename to insert text to
@param pattern: Regexp for a line... |
def save_tag_to_audio_file(audio_file, tracklisting):
"""
Saves tag to audio file.
"""
print("Trying to tag {}".format(audio_file))
f = mediafile.MediaFile(audio_file)
if not f.lyrics:
print("No tracklisting present. Creating lyrics tag.")
f.lyrics = 'Tracklisting' + '\n' + trac... | Saves tag to audio file. |
def get_authentication_info(self, identifier):
"""
The default authentication caching policy is to cache an account's
credentials that are queried from an account store, for a specific
user, so to facilitate any subsequent authentication attempts for
that user. Naturally, in orde... | The default authentication caching policy is to cache an account's
credentials that are queried from an account store, for a specific
user, so to facilitate any subsequent authentication attempts for
that user. Naturally, in order to cache one must have a CacheHandler.
If a user were to ... |
def watch_children(kzclient,
path, func, allow_session_lost=True, send_event=False,
ChildrenWatch=ChildrenWatch):
"""
Install a Kazoo :obj:`ChildrenWatch` on the given path.
The given `func` will be called in the reactor thread when any children are
created or dele... | Install a Kazoo :obj:`ChildrenWatch` on the given path.
The given `func` will be called in the reactor thread when any children are
created or deleted, or if the node itself is deleted.
Returns a Deferred which usually has no result, but may fail with an
exception if e.g. the path does not exist. |
def table(text):
"""Format the text as a table.
Text in format:
first | second
row 2 col 1 | 4
Will be formatted as::
+-------------+--------+
| first | second |
+-------------+--------+
| row 2 col 1 | 4 |
+-------------+--------+... | Format the text as a table.
Text in format:
first | second
row 2 col 1 | 4
Will be formatted as::
+-------------+--------+
| first | second |
+-------------+--------+
| row 2 col 1 | 4 |
+-------------+--------+
Args:
te... |
def magicrun(
text,
shell,
prompt_template="default",
aliases=None,
envvars=None,
extra_commands=None,
speed=1,
test_mode=False,
commentecho=False,
):
"""Echo out each character in ``text`` as keyboard characters are pressed,
wait for a RETURN keypress, then run the ``text`` ... | Echo out each character in ``text`` as keyboard characters are pressed,
wait for a RETURN keypress, then run the ``text`` in a shell context. |
def check_process_counts(self):
"""Check for the minimum consumer process levels and start up new
processes needed.
"""
LOGGER.debug('Checking minimum consumer process levels')
for name in self.consumers:
processes_needed = self.process_spawn_qty(name)
if... | Check for the minimum consumer process levels and start up new
processes needed. |
def get_node(self, index: int) -> Optional[Node]:
"""
Returns the node with the given index if such a node currently exists in the node list.
Arguments:
index (int): The index of the queried node.
Returns:
The node with the given index if such a node cur... | Returns the node with the given index if such a node currently exists in the node list.
Arguments:
index (int): The index of the queried node.
Returns:
The node with the given index if such a node currently exists in the node list,
`None` otherwise. |
def main():
"""
Starts the Application.
:return: Definition success.
:rtype: bool
"""
args = get_command_line_arguments()
args.css_file = args.css_file if foundations.common.path_exists(args.css_file) else CSS_FILE
return reStructuredText_to_html(args.input,
... | Starts the Application.
:return: Definition success.
:rtype: bool |
def _get_commit_msg(self, repo, sha):
'''
:param repo: the repo full name, ``{owner}/{project}``.
e.g. ``buildbot/buildbot``
'''
headers = {
'User-Agent': 'Buildbot'
}
if self._token:
headers['Authorization'] = 'token ' + self._token
... | :param repo: the repo full name, ``{owner}/{project}``.
e.g. ``buildbot/buildbot`` |
def isAuxilied(self):
""" Returns if the object is separating and applying to
a benefic considering good aspects.
"""
benefics = [const.VENUS, const.JUPITER]
return self.__sepApp(benefics, aspList=[0, 60, 120]) | Returns if the object is separating and applying to
a benefic considering good aspects. |
def ipath_from_ext(self, ext):
"""
Returns the path of the input file with extension ext.
Use it when the file does not exist yet.
"""
return os.path.join(self.workdir, self.prefix.idata + "_" + ext) | Returns the path of the input file with extension ext.
Use it when the file does not exist yet. |
def set_project_permission(project_id, usernames, read, write, share,**kwargs):
"""
Set permissions on a project to a list of users, identifed by
their usernames.
The read flag ('Y' or 'N') sets read access, the write
flag sets write access. If the read flag is 'N', then there is
... | Set permissions on a project to a list of users, identifed by
their usernames.
The read flag ('Y' or 'N') sets read access, the write
flag sets write access. If the read flag is 'N', then there is
automatically no write access or share access. |
def fetch(self, addon_id, data={}, **kwargs):
""""
Fetch addon for given Id
Args:
addon_id : Id for which addon object has to be retrieved
Returns:
addon dict for given subscription Id
"""
return super(Addon, self).fetch(addon_id, data, **kwargs) | Fetch addon for given Id
Args:
addon_id : Id for which addon object has to be retrieved
Returns:
addon dict for given subscription Id |
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
:raises ValueError:
if imt is instance of :class:`openquake.hazardlib... | See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
:raises ValueError:
if imt is instance of :class:`openquake.hazardlib.imt.SA` with
unsupported period. |
def set_style(network_id, ndex_cred=None, template_id=None):
"""Set the style of the network to a given template network's style
Parameters
----------
network_id : str
The UUID of the NDEx network whose style is to be changed.
ndex_cred : dict
A dictionary of NDEx credentials.
t... | Set the style of the network to a given template network's style
Parameters
----------
network_id : str
The UUID of the NDEx network whose style is to be changed.
ndex_cred : dict
A dictionary of NDEx credentials.
template_id : Optional[str]
The UUID of the NDEx network whos... |
def get_value(cls, bucket, key):
"""Get tag value."""
obj = cls.get(bucket, key)
return obj.value if obj else None | Get tag value. |
def update_link_rel_based(self, old_rel, new_rel=None, new_text=None, single_link=False):
"""
Update link nodes, based on the existing link/@rel values.
This requires specifying a link/@rel value to update, and either a new
link/@rel value, or a new link/text() value for all links which... | Update link nodes, based on the existing link/@rel values.
This requires specifying a link/@rel value to update, and either a new
link/@rel value, or a new link/text() value for all links which match
the link/@rel value. Optionally, only the first link which matches the
link/@rel value... |
def human_repr(self):
"""Return decoded human readable string for URL representation."""
return urlunsplit(
SplitResult(
self.scheme,
self._make_netloc(
self.user, self.password, self.host, self._val.port, encode=False
),
... | Return decoded human readable string for URL representation. |
def deny(ip,
port=None,
proto='tcp',
direction='in',
port_origin='d',
ip_origin='d',
ttl=None,
comment=''):
'''
Add an rule to csf denied hosts
See :func:`_access_rule`.
1- Deny an IP:
CLI Example:
.. code-block:: bash
salt '*' cs... | Add an rule to csf denied hosts
See :func:`_access_rule`.
1- Deny an IP:
CLI Example:
.. code-block:: bash
salt '*' csf.deny 127.0.0.1
salt '*' csf.deny 127.0.0.1 comment="Too localhosty" |
def _update_vdr_vxrheadtail(self, f, vdr_offset, VXRoffset):
'''
This sets a VXR to be the first and last VXR in the VDR
'''
# VDR's VXRhead
self._update_offset_value(f, vdr_offset+28, 8, VXRoffset)
# VDR's VXRtail
self._update_offset_value(f, vdr_offset+36, 8, VX... | This sets a VXR to be the first and last VXR in the VDR |
def _get_numeric_status(self, key):
"""Extract the numeric value from the statuses object."""
value = self._get_status(key)
if value and any(i.isdigit() for i in value):
return float(re.sub("[^0-9.]", "", value))
return None | Extract the numeric value from the statuses object. |
def download_manifest_v2(self, manifest, replica,
num_retries=10,
min_delay_seconds=0.25,
download_dir='.'):
"""
Process the given manifest file in TSV (tab-separated values) format and download the files referenced b... | Process the given manifest file in TSV (tab-separated values) format and download the files referenced by it.
The files are downloaded in the version 2 format.
This download format will serve as the main storage format for downloaded files. If a user specifies a different
format for download (c... |
def nextprefix(self):
"""
Get the next available prefix. This means a prefix starting with 'ns'
with a number appended as (ns0, ns1, ..) that is not already defined
on the wsdl document.
"""
used = [ns[0] for ns in self.prefixes]
used += [ns[0] for ns in self.wsd... | Get the next available prefix. This means a prefix starting with 'ns'
with a number appended as (ns0, ns1, ..) that is not already defined
on the wsdl document. |
def deleteMessage(self, date):
"""
消息历史记录删除方法(删除 APP 内指定某天某小时内的所有会话消息记录。调用该接口返回成功后,date参数指定的某小时的消息记录文件将在随后的5-10分钟内被永久删除。) 方法
@param date:指定北京时间某天某小时,格式为2014010101,表示:2014年1月1日凌晨1点。(必传)
@return code:返回码,200 为正常。
@return errorMessage:错误信息。
"""
desc = {
... | 消息历史记录删除方法(删除 APP 内指定某天某小时内的所有会话消息记录。调用该接口返回成功后,date参数指定的某小时的消息记录文件将在随后的5-10分钟内被永久删除。) 方法
@param date:指定北京时间某天某小时,格式为2014010101,表示:2014年1月1日凌晨1点。(必传)
@return code:返回码,200 为正常。
@return errorMessage:错误信息。 |
def steady_connection(self):
    """Get a steady, non-persistent PyGreSQL connection.

    Hands the pool's stored connection parameters straight through to
    a fresh ``SteadyPgConnection``.
    """
    return SteadyPgConnection(
        self._maxusage,
        self._setsession,
        self._closeable,
        *self._args,
        **self._kwargs)
def flatlist_dropdup(list_of_lists):
    """Make a single list out of a list of lists, and drop all duplicates.

    Args:
        list_of_lists: List of lists.

    Returns:
        list: Unique items, each converted to ``str``; order is
        arbitrary because deduplication goes through a set.
    """
    # Set comprehension instead of list(set([...])): skips building the
    # intermediate throwaway list.
    return list({str(item) for sublist in list_of_lists for item in sublist})
Args:
list_of_lists: List of lists.
Returns:
list: List of single objects. |
def command_publish(self, command, **kwargs):
""" Publish a MQTT message """
mqttc = mqtt.Client()
mqttc.connect(
command['host'],
port=int(command['port']))
mqttc.loop_start()
try:
mqttc.publish(
command['endpoint'],
... | Publish a MQTT message |
def available_backends():
    """List the currently available LiveSync backend types on stdout."""
    # Ported from Python 2: `print` statement -> print() function,
    # dict.iteritems() -> dict.items().
    print('The following LiveSync agents are available:')
    for name, backend in current_plugin.backend_classes.items():
        print(cformat(' - %{white!}{}%{reset}: {} ({})').format(
            name, backend.title, backend.description))
def get_queue_name(queue_name):
"""Determine which queue MR should run on.
How to choose the queue:
1. If user provided one, use that.
2. If we are starting a mr from taskqueue, inherit that queue.
If it's a special queue, fall back to the default queue.
3. Default queue.
If user is using any MR pipe... | Determine which queue MR should run on.
How to choose the queue:
1. If user provided one, use that.
2. If we are starting a mr from taskqueue, inherit that queue.
If it's a special queue, fall back to the default queue.
3. Default queue.
If user is using any MR pipeline interface, pipeline.start takes ... |
def nnash(A, B1, B2, R1, R2, Q1, Q2, S1, S2, W1, W2, M1, M2,
beta=1.0, tol=1e-8, max_iter=1000, random_state=None):
r"""
Compute the limit of a Nash linear quadratic dynamic game. In this
problem, player i minimizes
.. math::
\sum_{t=0}^{\infty}
\left\{
x_t' r_i x_... | r"""
Compute the limit of a Nash linear quadratic dynamic game. In this
problem, player i minimizes
.. math::
\sum_{t=0}^{\infty}
\left\{
x_t' r_i x_t + 2 x_t' w_i
u_{it} +u_{it}' q_i u_{it} + u_{jt}' s_i u_{jt} + 2 u_{jt}'
m_i u_{it}
\right\}
... |
def dens2lip(dens_gcm3, dens_lipid=0.9007, dens_prot=1.34, dens_water=0.994,
dens_ash=2.3):
'''Get percent composition of animal from body density
The equation calculating animal density is from Biuw et al. (2003), and
default values for component densities are from human studies collected in
t... | Get percent composition of animal from body density
The equation calculating animal density is from Biuw et al. (2003), and
default values for component densities are from human studies collected in
the book by Moore et al. (1963).
Args
----
dens_gcm3: float or ndarray
An array of seal... |
def sync_client(self):
"""Synchronous OAuth 2.0 Bearer client"""
if not self._sync_client:
self._sync_client = AlfSyncClient(
token_endpoint=self.config.get('OAUTH_TOKEN_ENDPOINT'),
client_id=self.config.get('OAUTH_CLIENT_ID'),
client_secret=se... | Synchronous OAuth 2.0 Bearer client |
def normalize_encoding(encoding, default=DEFAULT_ENCODING):
"""Normalize the encoding name, replace ASCII w/ UTF-8."""
if encoding is None:
return default
encoding = encoding.lower().strip()
if encoding in ['', 'ascii']:
return default
try:
codecs.lookup(encoding)
ret... | Normalize the encoding name, replace ASCII w/ UTF-8. |
def is_get_query_with_results(results):
    """
    :param results: the response from Elasticsearch
    :return: true if the get query returned a result, false otherwise
    """
    # Explicit guards mirroring the short-circuit `and` chain: each
    # falsy operand is returned as-is, exactly as `a and b` would.
    if not results:
        return results
    if EsConst.FOUND not in results:
        return False
    found = results[EsConst.FOUND]
    if not found:
        return found
    return EsConst.FIELDS in results
:return: true if the get query returned a result, false otherwise |
def unbind_key(pymux, variables):
    """Remove a custom key binding.

    ``variables['<key>']`` names the key to unbind; the ``-n`` flag
    selects a binding that does not require the prefix key.
    """
    pymux.key_bindings_manager.remove_custom_binding(
        variables['<key>'], needs_prefix=not variables['-n'])
def execute(path, argv=None, environ=None, command_class=ExternalSearchCommand):
"""
:param path:
:type path: basestring
:param argv:
:type: argv: list, tuple, or None
:param environ:
:type environ: dict
:param command_class: External search command class to instantiate and execute.
... | :param path:
:type path: basestring
:param argv:
:type: argv: list, tuple, or None
:param environ:
:type environ: dict
:param command_class: External search command class to instantiate and execute.
:type command_class: type
:return:
:rtype: None |
def _maybe_download_corpora(tmp_dir):
"""Download corpora for multinli.
Args:
tmp_dir: a string
Returns:
a string
"""
mnli_filename = "MNLI.zip"
mnli_finalpath = os.path.join(tmp_dir, "MNLI")
if not tf.gfile.Exists(mnli_finalpath):
zip_filepath = generator_utils.maybe_download(
tmp_di... | Download corpora for multinli.
Args:
tmp_dir: a string
Returns:
a string |
def expand_indent(line):
"""
Return the amount of indentation.
Tabs are expanded to the next multiple of 8.
>>> expand_indent(' ')
4
>>> expand_indent('\\t')
8
>>> expand_indent(' \\t')
8
>>> expand_indent(' \\t')
8
>>> expand_indent(' \\t')
16
... | Return the amount of indentation.
Tabs are expanded to the next multiple of 8.
>>> expand_indent(' ')
4
>>> expand_indent('\\t')
8
>>> expand_indent(' \\t')
8
>>> expand_indent(' \\t')
8
>>> expand_indent(' \\t')
16 |
def __format_row(self, row: AssetAllocationViewModel):
""" display-format one row
Formats one Asset Class record """
output = ""
index = 0
# Name
value = row.name
# Indent according to depth.
for _ in range(0, row.depth):
value = f" {value}"... | display-format one row
Formats one Asset Class record |
def SynchronizedClassMethod(*locks_attr_names, **kwargs):
# pylint: disable=C1801
"""
A synchronizer decorator for class methods. An AttributeError can be raised
at runtime if the given lock attribute doesn't exist or if it is None.
If a parameter ``sorted`` is found in ``kwargs`` and its value is ... | A synchronizer decorator for class methods. An AttributeError can be raised
at runtime if the given lock attribute doesn't exist or if it is None.
If a parameter ``sorted`` is found in ``kwargs`` and its value is True,
then the list of locks names will be sorted before locking.
:param locks_attr_names... |
def dot(r1, r2):
"""Compute the dot product
Arguments:
| ``r1``, ``r2`` -- two :class:`Vector3` objects
(Returns a Scalar)
"""
if r1.size != r2.size:
raise ValueError("Both arguments must have the same input size.")
if r1.deriv != r2.deriv:
raise ValueError("Both... | Compute the dot product
Arguments:
| ``r1``, ``r2`` -- two :class:`Vector3` objects
(Returns a Scalar) |
def filter_factory(global_conf, **local_conf):
    """Return a WSGI filter app factory for use with paste.deploy.

    ``local_conf`` entries override matching ``global_conf`` entries.
    """
    conf = global_conf.copy()
    conf.update(local_conf)

    def blacklist(app):
        # Wrap the WSGI app, sharing the merged configuration.
        return BlacklistFilter(app, conf)

    return blacklist
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.