positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def create_json(self, create_missing=None):
"""Create an entity.
Call :meth:`create_raw`. Check the response status code, decode JSON
and return the decoded JSON as a dict.
:return: A dict. The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError``... | Create an entity.
Call :meth:`create_raw`. Check the response status code, decode JSON
and return the decoded JSON as a dict.
:return: A dict. The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` if the response has an HTTP
4XX or 5XX sta... |
async def create_collection(db, model_class: MongoCollectionMixin):
'''
Creates a MongoDB collection and all the declared indices in the model's ``Meta`` class
:param db:
A database handle
:type db:
motor.motor_asyncio.AsyncIOMotorClient
:param model_class:
The model to cre... | Creates a MongoDB collection and all the declared indices in the model's ``Meta`` class
:param db:
A database handle
:type db:
motor.motor_asyncio.AsyncIOMotorClient
:param model_class:
The model to create
:type model_class:
Subclass of ``Model`` mixed with ``MongoColle... |
def get_default_frame(self):
'''default frame for waypoints'''
if self.settings.terrainalt == 'Auto':
if self.get_mav_param('TERRAIN_FOLLOW',0) == 1:
return mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT
return mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT
i... | default frame for waypoints |
def scan_to_graph(python_modules, graph, ignore=tuple()):
"""
Scans `python_modules` with :py:func:`scan` and registers found providers
in `graph`.
`ignore` argument is passed through to :py:func:`scan`.
"""
def callback(specification, provider):
graph.register_provider(specification, p... | Scans `python_modules` with :py:func:`scan` and registers found providers
in `graph`.
`ignore` argument is passed through to :py:func:`scan`. |
def device_is_attached_to_network(device, network_name):
"""
Checks if the device has a backing with of the right network name
:param <vim.vm.Device> device: instance of adapter
:param <str> network_name: network name
:return:
"""
try:
backing = device... | Checks if the device has a backing with of the right network name
:param <vim.vm.Device> device: instance of adapter
:param <str> network_name: network name
:return: |
def are_budget_data_package_fields_filled_in(self, resource):
"""
Check if the budget data package fields are all filled in because
if not then this can't be a budget data package
"""
fields = ['country', 'currency', 'year', 'status']
return all([self.in_resource(f, resou... | Check if the budget data package fields are all filled in because
if not then this can't be a budget data package |
def symmetrize(A):
"""
Returns a symmetric matrix from a sparse square matrix :math:`A`. Only the
lower triangular entries of :math:`A` are accessed.
"""
assert type(A) is spmatrix, "argument must be a sparse matrix"
assert A.size[0] == A.size[1], "argument must me a square matrix"
idx = [i ... | Returns a symmetric matrix from a sparse square matrix :math:`A`. Only the
lower triangular entries of :math:`A` are accessed. |
def get(self, request, *args, **kwargs):
"""
Do the login and password protection.
"""
response = super(EntryProtectionMixin, self).get(
request, *args, **kwargs)
if self.object.login_required and not request.user.is_authenticated:
return self.login()
... | Do the login and password protection. |
def sanitize(name):
"""
Sanitize the specified ``name`` for use with breathe directives.
**Parameters**
``name`` (:class:`python:str`)
The name to be sanitized.
**Return**
:class:`python:str`
The input ``name`` sanitized to use with breathe directives (primarily for use
... | Sanitize the specified ``name`` for use with breathe directives.
**Parameters**
``name`` (:class:`python:str`)
The name to be sanitized.
**Return**
:class:`python:str`
The input ``name`` sanitized to use with breathe directives (primarily for use
with ``.. doxygenfunction::``... |
def _get_slot(self):
"Returns the next coordinates for a preview"
x = y = 10
for k, p in self.previews.items():
y += p.height() + self.padding
return x, y | Returns the next coordinates for a preview |
def determine_type(filename):
'''Determine the file type and return it.'''
ftype = magic.from_file(filename, mime=True).decode('utf8')
if ftype == 'text/plain':
ftype = 'text'
elif ftype == 'image/svg+xml':
ftype = 'svg'
else:
ftype = ftype.split('/')[1]
return ftype | Determine the file type and return it. |
def magic_postpone_execution(self, line):
"""
Postpone execution of instructions until explicitly run
Usage:
Call this magic with `true` or nothing to postpone execution,
or call with `false` to execute each instruction when evaluated.
This defaults to True.
Not... | Postpone execution of instructions until explicitly run
Usage:
Call this magic with `true` or nothing to postpone execution,
or call with `false` to execute each instruction when evaluated.
This defaults to True.
Note that each cell is executed only executed after all lines in
... |
def signup(request, **kwargs):
"""
Overrides allauth.account.views.signup
"""
if not ALLAUTH:
return http.HttpResponse(_('allauth not installed...'))
if request.method == "POST" and 'login' in request.POST:
form_class = LoginForm
form = form_class(request.POST)
redire... | Overrides allauth.account.views.signup |
def read_frames(cls, reader):
'''
Read one or more frames from an IO stream. Buffer must support file
object interface.
After reading, caller will need to check if there are bytes remaining
in the stream. If there are, then that implies that there is one or
more incompl... | Read one or more frames from an IO stream. Buffer must support file
object interface.
After reading, caller will need to check if there are bytes remaining
in the stream. If there are, then that implies that there is one or
more incomplete frames and more data needs to be read. The po... |
def interpolate_where(self, condition):
"""
Remove then interpolate across
"""
raise NotImplementedError()
self[self < 0] = np.nan
return self.interpolate() | Remove then interpolate across |
def _plot_go_group(self, hdrgo, usrgos, pltargs, go2parentids):
"""Plot an exploratory GO DAG for a single Group of user GOs."""
gosubdagplotnts = self._get_gosubdagplotnts(hdrgo, usrgos, pltargs, go2parentids)
# Create pngs and return png names
pngs = [obj.wrplt(pltargs.fout_dir, pltarg... | Plot an exploratory GO DAG for a single Group of user GOs. |
def _validate(self, writing=False):
"""Self-validate the box before writing."""
box_ids = [box.box_id for box in self.box]
if len(box_ids) != 1 or box_ids[0] != 'flst':
msg = ("Fragment table boxes must have a single fragment list "
"box as a child box.")
... | Self-validate the box before writing. |
def _new_alloc_handle(stype, shape, ctx, delay_alloc, dtype, aux_types, aux_shapes=None):
"""Return a new handle with specified storage type, shape, dtype and context.
Empty handle is only used to hold results
Returns
-------
handle
A new empty ndarray handle
"""
hdl = NDArrayHandl... | Return a new handle with specified storage type, shape, dtype and context.
Empty handle is only used to hold results
Returns
-------
handle
A new empty ndarray handle |
def gait(self, x):
"""
Extract gait features from estimated heel strikes and accelerometer data.
:param x: The time series to assess freeze of gait on. This could be x, y, z or mag_sum_acc.
:type x: pandas.Series
:return number_of_steps: Estimated number of step... | Extract gait features from estimated heel strikes and accelerometer data.
:param x: The time series to assess freeze of gait on. This could be x, y, z or mag_sum_acc.
:type x: pandas.Series
:return number_of_steps: Estimated number of steps based on heel strikes [number of steps].
... |
def hashcomplement(a, b, strict=False):
"""
Alternative implementation of :func:`petl.transform.setops.complement`,
where the complement is executed by constructing an in-memory set for all
rows found in the right hand table, then iterating over rows from the
left hand table.
May be faster and/... | Alternative implementation of :func:`petl.transform.setops.complement`,
where the complement is executed by constructing an in-memory set for all
rows found in the right hand table, then iterating over rows from the
left hand table.
May be faster and/or more resource efficient where the right table is ... |
def get_status(self, channel=Channel.CHANNEL_CH0):
"""
Returns the error status of a specific CAN channel.
:param int channel: CAN channel, to be used (:data:`Channel.CHANNEL_CH0` or :data:`Channel.CHANNEL_CH1`).
:return: Tuple with CAN and USB status (see structure :class:`Status`).
... | Returns the error status of a specific CAN channel.
:param int channel: CAN channel, to be used (:data:`Channel.CHANNEL_CH0` or :data:`Channel.CHANNEL_CH1`).
:return: Tuple with CAN and USB status (see structure :class:`Status`).
:rtype: tuple(int, int) |
def parseAcceptHeader(value):
"""Parse an accept header, ignoring any accept-extensions
returns a list of tuples containing main MIME type, MIME subtype,
and quality markdown.
str -> [(str, str, float)]
"""
chunks = [chunk.strip() for chunk in value.split(',')]
accept = []
for chunk in... | Parse an accept header, ignoring any accept-extensions
returns a list of tuples containing main MIME type, MIME subtype,
and quality markdown.
str -> [(str, str, float)] |
def load_le32(buf, pos):
"""Load little-endian 32-bit integer"""
end = pos + 4
if end > len(buf):
raise BadRarFile('cannot load le32')
return S_LONG.unpack_from(buf, pos)[0], pos + 4 | Load little-endian 32-bit integer |
def _run(*args, **kwargs):
"""
Run current executable via subprocess and given args
"""
verbose = kwargs.pop("verbose", False)
if verbose:
click.secho(" ".join([repr(i) for i in args]), bg='blue', fg='white')
executable = args[0]
if not os.path.isfile(executable):
raise Runt... | Run current executable via subprocess and given args |
def i2c_master_write_read(self, i2c_address, data, length):
"""Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices... | Make an I2C write/read access.
First an I2C write access is issued. No stop condition will be
generated. Instead the read access begins with a repeated start.
This method is useful for accessing most addressable I2C devices like
EEPROMs, port expander, etc.
Basically, this is ... |
def _scale_gradient_op(dtype):
"""Create an op that scales gradients using a Defun.
The tensorflow Defun decorator creates an op and tensorflow caches these ops
automatically according to `func_name`. Using a Defun decorator twice with the
same `func_name` does not create a new op, instead the cached op is use... | Create an op that scales gradients using a Defun.
The tensorflow Defun decorator creates an op and tensorflow caches these ops
automatically according to `func_name`. Using a Defun decorator twice with the
same `func_name` does not create a new op, instead the cached op is used.
This method produces a new op ... |
def reboot_adb_server():
""" execute 'adb devices' to start adb server """
_reboot_count = 0
_max_retry = 1
def _reboot():
nonlocal _reboot_count
if _reboot_count >= _max_retry:
raise RuntimeError('fail after retry {} times'.format(_max_retry))
_reboot_count += 1
... | execute 'adb devices' to start adb server |
def read_ASCII_cols(infile, cols=[1, 2, 3]): # noqa: N802
""" Interpret input ASCII file to return arrays for specified columns.
Notes
-----
The specification of the columns should be expected to have lists for
each 'column', with all columns in each list combined into a single
... | Interpret input ASCII file to return arrays for specified columns.
Notes
-----
The specification of the columns should be expected to have lists for
each 'column', with all columns in each list combined into a single
entry.
For example::
cols = ['1,2,3','4,... |
def find_zonefile_origins( self, missing_zfinfo, peer_hostports ):
"""
Find out which peers can serve which zonefiles
"""
zonefile_origins = {} # map peer hostport to list of zonefile hashes
# which peers can serve each zonefile?
for zfhash in missing_zfinfo.keys():
... | Find out which peers can serve which zonefiles |
def override_familly(self, args):
"""Look in the current wrapped object to find a cache configuration to
override the current default configuration."""
resourceapi = args[0]
cache_cfg = resourceapi.cache
if cache_cfg.has_key('familly'):
self.familly = cache_cfg['famil... | Look in the current wrapped object to find a cache configuration to
override the current default configuration. |
def expand_source_declarations(map_el, dirs, local_conf):
""" This provides mechanism for externalizing and sharing data sources. The datasource configs are
python files, and layers reference sections within that config:
<DataSourcesConfig src="datasources.cfg" />
<Layer class="road major" source_... | This provides mechanism for externalizing and sharing data sources. The datasource configs are
python files, and layers reference sections within that config:
<DataSourcesConfig src="datasources.cfg" />
<Layer class="road major" source_name="planet_osm_major_roads" />
<Layer class="road minor" sou... |
def _init_browser(self):
"""Overide in appropriate way to prepare a logged in browser."""
self.browser = splinter.Browser('phantomjs')
self.browser.visit(self.server_url + "/youraccount/login")
try:
self.browser.fill('nickname', self.user)
self.browser.fill('passw... | Overide in appropriate way to prepare a logged in browser. |
def inc_from_lat(lat):
"""
Calculate inclination predicted from latitude using the dipole equation
Parameter
----------
lat : latitude in degrees
Returns
-------
inc : inclination calculated using the dipole equation
"""
rad = old_div(np.pi, 180.)
inc = old_div(np.arctan(2 ... | Calculate inclination predicted from latitude using the dipole equation
Parameter
----------
lat : latitude in degrees
Returns
-------
inc : inclination calculated using the dipole equation |
def _load_modules(self):
"""
Load modules-related configuration listened in modules section
Before loading:
"modules": {
"mal": "myanimelist.json",
"ann": "animenewsnetwork.json"
}
After loading:
"modules": {
"mal": {
....
},
"ann": {
... | Load modules-related configuration listened in modules section
Before loading:
"modules": {
"mal": "myanimelist.json",
"ann": "animenewsnetwork.json"
}
After loading:
"modules": {
"mal": {
....
},
"ann": {
....
}
} |
def addResource(self, content, uri, headers):
"""
Adds the a hendrix.contrib.cache.resource.CachedResource to the
ReverseProxy cache connection
"""
self.cache[uri] = CachedResource(content, headers) | Adds the a hendrix.contrib.cache.resource.CachedResource to the
ReverseProxy cache connection |
def array_values(expr):
"""Given an expression expr denoting a list of values, array_values(expr)
returns a list of values for that expression."""
if isinstance(expr, Array):
return expr.get_elems(all_subs(expr._bounds))
elif isinstance(expr, list):
vals = [array_values(x) for x in e... | Given an expression expr denoting a list of values, array_values(expr)
returns a list of values for that expression. |
def snmp_server_group_write(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
snmp_server = ET.SubElement(config, "snmp-server", xmlns="urn:brocade.com:mgmt:brocade-snmp")
group = ET.SubElement(snmp_server, "group")
group_name_key = ET.SubElement(g... | Auto Generated Code |
def _print_tasks(env, tasks, mark_active=False):
""" Prints task information using io stream.
`env`
``Environment`` object.
`tasks`
List of tuples (task_name, options, block_options).
`mark_active`
Set to ``True`` to mark active task.
"""
if ... | Prints task information using io stream.
`env`
``Environment`` object.
`tasks`
List of tuples (task_name, options, block_options).
`mark_active`
Set to ``True`` to mark active task. |
def _get_new_connection(self, conn_params):
"""Opens a connection to the database."""
self.__connection_string = conn_params.get('connection_string', '')
conn = self.Database.connect(**conn_params)
return conn | Opens a connection to the database. |
def default_bucket(self):
"""Return the name of the default bucket to use in relevant Amazon SageMaker interactions.
Returns:
str: The name of the default bucket, which is of the form: ``sagemaker-{region}-{AWS account ID}``.
"""
if self._default_bucket:
return s... | Return the name of the default bucket to use in relevant Amazon SageMaker interactions.
Returns:
str: The name of the default bucket, which is of the form: ``sagemaker-{region}-{AWS account ID}``. |
def wait_for_any_log(nodes, pattern, timeout, filename='system.log', marks=None):
"""
Look for a pattern in the system.log of any in a given list
of nodes.
@param nodes The list of nodes whose logs to scan
@param pattern The target pattern
@param timeout How long to wait for the pattern. Note th... | Look for a pattern in the system.log of any in a given list
of nodes.
@param nodes The list of nodes whose logs to scan
@param pattern The target pattern
@param timeout How long to wait for the pattern. Note that
strictly speaking, timeout is not really a timeout,
... |
def url_api_version(self, api_version):
""" Return base API url string for the QualysGuard api_version and server.
"""
# Set base url depending on API version.
if api_version == 1:
# QualysGuard API v1 url.
url = "https://%s/msp/" % (self.server,)
elif ap... | Return base API url string for the QualysGuard api_version and server. |
def update_model_dict(self):
"""Updates the model dictionary"""
dct = {}
models = self.chimera.openModels
for md in models.list():
dct[md.name] = md.id
self.model_dict = dct | Updates the model dictionary |
def expand(self, line, do_expand, force=False, vislevels=0, level=-1):
"""Multi-purpose expand method from original STC class"""
lastchild = self.GetLastChild(line, level)
line += 1
while line <= lastchild:
if force:
if vislevels > 0:
sel... | Multi-purpose expand method from original STC class |
def showmapfile(self,datastore_id=None, datasource_id=None):
""" GET /datastores/{datastore_id}/datasources/{datasource_id}/mapfile: Get
the JSON representation of a specific datasource's default MapFile LAYER
block. """
datastore = self._get_datastore_by_id(datastore_id)
... | GET /datastores/{datastore_id}/datasources/{datasource_id}/mapfile: Get
the JSON representation of a specific datasource's default MapFile LAYER
block. |
def cric__ridge():
""" Ridge Regression
"""
model = sklearn.linear_model.LogisticRegression(penalty="l2")
# we want to explain the raw probability outputs of the trees
model.predict = lambda X: model.predict_proba(X)[:,1]
return model | Ridge Regression |
def load_yaml_by_relpath(cls, directories, rel_path, log_debug=False):
"""Load a yaml file with path that is relative to one of given directories.
Args:
directories: list of directories to search
name: relative path of the yaml file to load
log_debug: log all message... | Load a yaml file with path that is relative to one of given directories.
Args:
directories: list of directories to search
name: relative path of the yaml file to load
log_debug: log all messages as debug
Returns:
tuple (fullpath, loaded yaml structure) or... |
def upload(self, tool: Tool) -> bool:
"""
Attempts to upload the Docker image for a given tool to
`DockerHub <https://hub.docker.com>`_.
"""
return self.__installation.build.upload(tool.image) | Attempts to upload the Docker image for a given tool to
`DockerHub <https://hub.docker.com>`_. |
def pinv_array(a, cond=None):
"""Calculate the Moore-Penrose pseudo inverse of each block of the three dimensional array a.
Parameters
----------
a : {dense array}
Is of size (n, m, m)
cond : {float}
Used by gelss to filter numerically zeros singular values.
If None, a sui... | Calculate the Moore-Penrose pseudo inverse of each block of the three dimensional array a.
Parameters
----------
a : {dense array}
Is of size (n, m, m)
cond : {float}
Used by gelss to filter numerically zeros singular values.
If None, a suitable value is chosen for you.
R... |
def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
"""
Make a generic function which calls a validator with the right arguments.
Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow,
hence this laborious way of doing things.
... | Make a generic function which calls a validator with the right arguments.
Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow,
hence this laborious way of doing things.
It's done like this so validators don't all need **kwargs in their signature, eg. any c... |
def byte(self):
"""Return a byte representation of ControlFlags."""
flags = int(self._in_use) << 7 \
| int(self._controller) << 6 \
| int(self._bit5) << 5 \
| int(self._bit4) << 4 \
| int(self._used_before) << 1
return flags | Return a byte representation of ControlFlags. |
def rgb_to_vector(image):
"""
Convert an RGB ANTsImage to a Vector ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mni = ants.image_read(ants.get_data('mni'))
>>> mni_rg... | Convert an RGB ANTsImage to a Vector ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mni = ants.image_read(ants.get_data('mni'))
>>> mni_rgb = mni.scalar_to_rgb()
>>> mni_ve... |
def triangle_area(p0, p1, p2):
if p2.ndim < 2:
p2 = p2[np.newaxis, :]
'''p2 can be a vector'''
area = 0.5 * np.abs(p0[0] * p1[1] - p0[0] * p2[:,1] +
p1[0] * p2[:,1] - p1[0] * p0[1] +
p2[:,0] * p0[1] - p2[:,0] * p1[1])
return area | p2 can be a vector |
def addFrame(self):
"""Add frame to current video."""
fr = "/tmp/vpvid/" + str(len(self.frames)) + ".png"
screenshot(fr)
self.frames.append(fr) | Add frame to current video. |
def add_source(source, key=None):
"""Add a package source to this system.
@param source: a URL with a rpm package
@param key: A key to be added to the system's keyring and used
to verify the signatures on packages. Ideally, this should be an
ASCII format GPG public key including the block headers.... | Add a package source to this system.
@param source: a URL with a rpm package
@param key: A key to be added to the system's keyring and used
to verify the signatures on packages. Ideally, this should be an
ASCII format GPG public key including the block headers. A GPG key
id may also be used, but b... |
def _set_tcp_keepalive(zmq_socket, opts):
'''
Ensure that TCP keepalives are set as specified in "opts".
Warning: Failure to set TCP keepalives on the salt-master can result in
not detecting the loss of a minion when the connection is lost or when
it's host has been terminated without first closing... | Ensure that TCP keepalives are set as specified in "opts".
Warning: Failure to set TCP keepalives on the salt-master can result in
not detecting the loss of a minion when the connection is lost or when
it's host has been terminated without first closing the socket.
Salt's Presence System depends on thi... |
def legendValue(requestContext, seriesList, *valueTypes):
"""
Takes one metric or a wildcard seriesList and a string in quotes.
Appends a value to the metric name in the legend. Currently one or several
of: `last`, `avg`, `total`, `min`, `max`. The last argument can be `si`
(default) or `binary`, in... | Takes one metric or a wildcard seriesList and a string in quotes.
Appends a value to the metric name in the legend. Currently one or several
of: `last`, `avg`, `total`, `min`, `max`. The last argument can be `si`
(default) or `binary`, in that case values will be formatted in the
corresponding system.
... |
def task_path(cls, project, location, queue, task):
"""Return a fully-qualified task string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}/tasks/{task}",
project=project,
location=location,
queue=qu... | Return a fully-qualified task string. |
def next_run_in(self, utc_now=None):
""" :param utc_now: optional parameter to be used by Unit Tests as a definition of "now"
:return: timedelta instance presenting amount of time before the trigger is triggered next time
or None if the EventClock instance is not running """
if utc_... | :param utc_now: optional parameter to be used by Unit Tests as a definition of "now"
:return: timedelta instance presenting amount of time before the trigger is triggered next time
or None if the EventClock instance is not running |
def update_coordinates(self, coordinates=None):
"""Update the coordinates (and derived quantities)
Argument:
coordinates -- new Cartesian coordinates of the system
"""
if coordinates is not None:
self.coordinates = coordinates
self.numc = len(self.c... | Update the coordinates (and derived quantities)
Argument:
coordinates -- new Cartesian coordinates of the system |
def emit(self, sound, exclude=set()):
"""Send text to entities nearby this one."""
nearby = self.nearby()
try:
exclude = set(exclude)
except TypeError:
exclude = set([exclude])
exclude.add(self.entity)
listeners = nearby - exclude
for liste... | Send text to entities nearby this one. |
def print_output(self):
""" Outputs final list of measures to screen a csv file.
The .csv file created has the same name as the input file, with
"vfclust_TYPE_CATEGORY" appended to the filename, where TYPE indicates
the type of task performed done (SEMANTIC or PHONETIC) and CATEGORY
... | Outputs final list of measures to screen a csv file.
The .csv file created has the same name as the input file, with
"vfclust_TYPE_CATEGORY" appended to the filename, where TYPE indicates
the type of task performed done (SEMANTIC or PHONETIC) and CATEGORY
indicates the category requirem... |
def options_string_builder(option_mapping, args):
"""Return arguments for CLI invocation of kal."""
options_string = ""
for option, flag in option_mapping.items():
if option in args:
options_string += str(" %s %s" % (flag, str(args[option])))
return options_string | Return arguments for CLI invocation of kal. |
def gaussian(f=Ellipsis, mu=0, sigma=1, scale=1, invert=False, normalize=False):
'''
gaussian() yields a potential function f(x) that calculates a Gaussian function over x; the
formula used is given below.
gaussian(g) yields a function h(x) such that, if f(x) is yielded by gaussian(), h(x) = f(g(x)).
... | gaussian() yields a potential function f(x) that calculates a Gaussian function over x; the
formula used is given below.
gaussian(g) yields a function h(x) such that, if f(x) is yielded by gaussian(), h(x) = f(g(x)).
The formula employed by the Gaussian function is as follows, with mu, sigma, and scale a... |
def set_recent_config(self, max_samples=0):
"""Update/configure recent data settings for this Feed. If the container does not support recent storage or it
is not enabled for this owner, this function will have no effect.
`max_samples` (optional) (int) how many shares to store for later retrieva... | Update/configure recent data settings for this Feed. If the container does not support recent storage or it
is not enabled for this owner, this function will have no effect.
`max_samples` (optional) (int) how many shares to store for later retrieval. If not supported by container, this
argument... |
def _call(self, x, out=None):
"""Take the power of ``x`` and write to ``out`` if given."""
if out is None:
return x ** self.exponent
elif self.__domain_is_field:
raise ValueError('cannot use `out` with field')
else:
out.assign(x)
out **= se... | Take the power of ``x`` and write to ``out`` if given. |
def update_network_asset(self, asset_id, name, asset_type):
"""
Updates a Network Asset
Args:
name: The name provided to the network asset
asset_type: The type provided to the network asset
asset_id:
Returns:
"""
self.update_asset('NE... | Updates a Network Asset
Args:
name: The name provided to the network asset
asset_type: The type provided to the network asset
asset_id:
Returns: |
def set_state(key, value, namespace=None, table_name=None, environment=None,
layer=None, stage=None, shard_id=None, consistent=True,
serializer=json.dumps, wait_exponential_multiplier=500,
wait_exponential_max=5000, stop_max_delay=10000, ttl=None):
"""Set Lambda state value... | Set Lambda state value. |
def _read_with_sitk(datapath):
"""Reads file using SimpleITK. Returns array of pixels (image located in datapath) and its metadata.
:param datapath: path to file (img or dicom)
:return: tuple (data3d, metadata), where data3d is array of pixels
"""
try:
import SimpleI... | Reads file using SimpleITK. Returns array of pixels (image located in datapath) and its metadata.
:param datapath: path to file (img or dicom)
:return: tuple (data3d, metadata), where data3d is array of pixels |
def map_df(self, df):
"""
Map df
"""
if len(df) == 0:
return
aesthetics = set(self.aesthetics) & set(df.columns)
for ae in aesthetics:
df[ae] = self.map(df[ae])
return df | Map df |
def fix_schema_node_ordering(parent):
"""
Fix the ordering of children under the criteria node to ensure that IndicatorItem/Indicator order
is preserved, as per XML Schema.
:return:
"""
children = parent.getchildren()
i_nodes = [node for node in children if node.... | Fix the ordering of children under the criteria node to ensure that IndicatorItem/Indicator order
is preserved, as per XML Schema.
:return: |
def collectLocations(self):
"""
Return a dictionary with all objects.
"""
pts = []
for l, (value, deltaName) in self.items():
pts.append(Location(l))
return pts | Return a dictionary with all objects. |
def add(self, promise, bitoffset, *, _offsetideal=None):
"""Add a promise to the promise collection at an optional offset.
Args:
promise: A TDOPromise to add to this collection.
bitoffset: An integer offset for this new promise in the collection.
_offsetideal: An int... | Add a promise to the promise collection at an optional offset.
Args:
promise: A TDOPromise to add to this collection.
bitoffset: An integer offset for this new promise in the collection.
_offsetideal: An integer offset for this new promise in the collection if the associated... |
def is_probably_packed( pe ):
"""Returns True is there is a high likelihood that a file is packed or contains compressed data.
The sections of the PE file will be analyzed, if enough sections
look like containing compressed data and the data makes
up for more than 20% of the total file size, the functi... | Returns True is there is a high likelihood that a file is packed or contains compressed data.
The sections of the PE file will be analyzed, if enough sections
look like containing compressed data and the data makes
up for more than 20% of the total file size, the function will
return True. |
def cleanup(self):
'''
Cleans up existing connections giving them time to finish the currect
request.
'''
self.debug("Cleanup called on Site.")
if not self.connections:
return defer.succeed(None)
self.debug("Waiting for all the connections to close.")
... | Cleans up existing connections giving them time to finish the currect
request. |
def letter():
'''Parse a letter in alphabet.'''
@Parser
def letter_parser(text, index=0):
if index < len(text) and text[index].isalpha():
return Value.success(index + 1, text[index])
else:
return Value.failure(index, 'a letter')
return letter_parser | Parse a letter in alphabet. |
def p_with_statement(self, p):
"""with_statement : WITH LPAREN expr RPAREN statement"""
p[0] = ast.With(expr=p[3], statement=p[5]) | with_statement : WITH LPAREN expr RPAREN statement |
def _watches_belonging_to_user(cls, user_or_email, object_id=None,
**filters):
"""Return a QuerySet of watches having the given user or email, having
(only) the given filters, and having the event_type and content_type
attrs of the class.
Matched Watch... | Return a QuerySet of watches having the given user or email, having
(only) the given filters, and having the event_type and content_type
attrs of the class.
Matched Watches may be either confirmed or unconfirmed. They may
include duplicates if the get-then-create race condition in
... |
def active():
'''
Return current active profile
CLI Example:
.. code-block:: bash
salt '*' tuned.active
'''
# turn off all profiles
result = __salt__['cmd.run']('tuned-adm active')
pattern = re.compile(r'''(?P<stmt>Current active profile:) (?P<profile>\w+.*)''')
match = r... | Return current active profile
CLI Example:
.. code-block:: bash
salt '*' tuned.active |
def log_player_trades_with_other_player(self, player, to_other, other, to_player):
"""
:param player: catan.game.Player
:param to_other: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
:param other: catan.board.Player
:param to_player: list of tuples, [(int... | :param player: catan.game.Player
:param to_other: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)]
:param other: catan.board.Player
:param to_player: list of tuples, [(int, game.board.Terrain), (int, game.board.Terrain)] |
def get_referenced_object(self):
"""
:rtype: core.BunqModel
:raise: BunqException
"""
if self._BunqMeTab is not None:
return self._BunqMeTab
if self._BunqMeTabResultResponse is not None:
return self._BunqMeTabResultResponse
if self._Bunq... | :rtype: core.BunqModel
:raise: BunqException |
def convert(self, value, param, ctx):
"""Convert to URL scheme."""
if not isinstance(value, tuple):
parsed = urlparse.urlparse(value)
if parsed.scheme not in ('telnet', 'ssh'):
self.fail('invalid URL scheme (%s). Only telnet and ssh URLs are '
... | Convert to URL scheme. |
def base_url(self, space_id, content_type_id, environment_id=None, **kwargs):
"""
Returns the URI for the editor interface.
"""
return "spaces/{0}{1}/content_types/{2}/editor_interface".format(
space_id,
'/environments/{0}'.format(environment_id) if environment_i... | Returns the URI for the editor interface. |
def _init_metadata(self):
"""stub"""
QuestionTextFormRecord._init_metadata(self)
self._choices_metadata = {
'element_id': Id(self.my_osid_object_form._authority,
self.my_osid_object_form._namespace,
'choices'),
'el... | stub |
def _setChoiceDict(self):
"""Create dictionary for choice list"""
# value is name of choice parameter (same as key)
self.choiceDict = {}
for c in self.choice: self.choiceDict[c] = c | Create dictionary for choice list |
def main():
"""Populate the templates with release-specific fields.
Requires user input for the CircleCI, AppVeyor, Coveralls.io and Travis
build IDs.
"""
version = get_version()
circleci_build = six.moves.input("CircleCI Build ID: ")
appveyor_build = six.moves.input("AppVeyor Build ID: ")
... | Populate the templates with release-specific fields.
Requires user input for the CircleCI, AppVeyor, Coveralls.io and Travis
build IDs. |
def raster(times, indices, max_time=None, max_index=None,
x_label="Timestep", y_label="Index", **kwargs):
"""Plots a raster plot given times and indices of events."""
# set default size to 1
if 's' not in kwargs:
kwargs['s'] = 1
scatter(times, indices, **kwargs)
if max_time ... | Plots a raster plot given times and indices of events. |
def _parse_caps_devices_features(node):
'''
Parse the devices or features list of the domain capabilities
'''
result = {}
for child in node:
if child.get('supported') == 'yes':
enums = [_parse_caps_enum(node) for node in child.findall('enum')]
result[child.tag] = {ite... | Parse the devices or features list of the domain capatilities |
def unmake(self):
    """Delete the instance via message-passing.

    Equivalent to delete, except that the removal is requested through
    the CLIPS message system instead of deleting the instance directly.
    """
    outcome = lib.EnvUnmakeInstance(self._env, self._ist)
    # The C API signals success with 1; anything else is an error.
    if outcome != 1:
        raise CLIPSError(self._env)
def key_source(self):
"""
:return: the relation whose primary key values are passed, sequentially, to the
``make`` method when populate() is called.
The default value is the join of the parent relations.
Users may override to change the granularity or the ... | :return: the relation whose primary key values are passed, sequentially, to the
``make`` method when populate() is called.
The default value is the join of the parent relations.
Users may override to change the granularity or the scope of populate() calls. |
def translate_markers(pipfile_entry):
"""Take a pipfile entry and normalize its markers
Provide a pipfile entry which may have 'markers' as a key or it may have
any valid key from `packaging.markers.marker_context.keys()` and standardize
the format into {'markers': 'key == "some_value"'}.
:param p... | Take a pipfile entry and normalize its markers
Provide a pipfile entry which may have 'markers' as a key or it may have
any valid key from `packaging.markers.marker_context.keys()` and standardize
the format into {'markers': 'key == "some_value"'}.
:param pipfile_entry: A dictionary of keys and value...
async def _retrieve_guilds_before_strategy(self, retrieve):
"""Retrieve guilds using before parameter."""
before = self.before.id if self.before else None
data = await self.get_guilds(retrieve, before=before)
if len(data):
if self.limit is not None:
self.limit... | Retrieve guilds using before parameter. |
def get_single_file_info(self, rel_path):
    """Return the last-change info for a single file.

    Resolves *rel_path* to a full path and delegates to the
    module-level ``get_single_file_info`` helper.
    """
    full_path = self.get_full_file_path(rel_path)
    # The bare name below resolves to the module-level helper, not this method.
    return get_single_file_info(full_path, rel_path)
def DbGetAttributeAlias(self, argin):
""" Get the attribute name for the given alias.
If alias not found in database, returns an empty string.
:param argin: The attribute alias name
:type: tango.DevString
:return: The attribute name (device/attribute)
:rtype: tango.DevSt... | Get the attribute name for the given alias.
If alias not found in database, returns an empty string.
:param argin: The attribute alias name
:type: tango.DevString
:return: The attribute name (device/attribute)
:rtype: tango.DevString |
def replaceNode(self, cur):
"""Unlink the old node from its current context, prune the new
one at the same place. If @cur was already inserted in a
document it is first unlinked from its existing context. """
if cur is None: cur__o = None
else: cur__o = cur._o
ret = ... | Unlink the old node from its current context, prune the new
one at the same place. If @cur was already inserted in a
document it is first unlinked from its existing context. |
def Map(self, function):
"""Applies the function to every row in the table.
Args:
function: A function applied to each row.
Returns:
A new TextTable()
Raises:
TableError: When transform is not invalid row entry. The transform
must be compatible with Append().
... | Applies the function to every row in the table.
Args:
function: A function applied to each row.
Returns:
A new TextTable()
Raises:
TableError: When transform is not invalid row entry. The transform
must be compatible with Append(). |
def north_arrow(self, north_arrow_path):
"""Set image that will be used as north arrow in reports.
:param north_arrow_path: Path to the north arrow image.
:type north_arrow_path: str
"""
if isinstance(north_arrow_path, str) and os.path.exists(
north_arrow_path):
... | Set image that will be used as north arrow in reports.
:param north_arrow_path: Path to the north arrow image.
:type north_arrow_path: str |
def remote(self, username=None, password=None, inquire=None):
""" Configures remote access """
self.set_netloc(sdss=True) # simplifies things to have a single sdss machine in .netrc
self.set_auth(username=username, password=password, inquire=inquire)
self.set_netloc(dtn=not self.public... | Configures remote access |
def unzoom_all(self, event=None):
    """Zoom out to the full data range by delegating to the plot panel."""
    # No-op when no panel has been attached yet.
    if self.panel is None:
        return
    self.panel.unzoom_all(event=event)
def pem_finger(path=None, key=None, sum_type='sha256'):
'''
Pass in either a raw pem string, or the path on disk to the location of a
pem file, and the type of cryptographic hash to use. The default is SHA256.
The fingerprint of the pem will be returned.
If neither a key nor a path are passed in, a... | Pass in either a raw pem string, or the path on disk to the location of a
pem file, and the type of cryptographic hash to use. The default is SHA256.
The fingerprint of the pem will be returned.
If neither a key nor a path are passed in, a blank string will be returned. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.