code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def append_row(self, values, value_input_option='RAW'):
params = {
'valueInputOption': value_input_option
}
body = {
'values': [values]
}
return self.spreadsheet.values_append(self.title, params, body) | Adds a row to the worksheet and populates it with values.
Widens the worksheet if there are more values than columns.
:param values: List of values for the new row.
:param value_input_option: (optional) Determines how input data should
be interpreted. See `Va... |
def redirect_territory(level, code):
territory = GeoZone.objects.valid_at(datetime.now()).filter(
code=code, level='fr:{level}'.format(level=level)).first()
return redirect(url_for('territories.territory', territory=territory)) | Implicit redirect given the INSEE code.
Optimistically redirect to the latest valid/known INSEE code. |
def assertTraceDoesNotContain(response, message):
if not hasattr(response, "verify_trace"):
raise AttributeError("Response object does not contain verify_trace method!")
if response.verify_trace(message, False):
raise TestStepFail('Assert: Message(s) "%s" in response' % message) | Raise TestStepFail if response.verify_trace finds message from response traces.
:param response: Response. Must contain method verify_trace
:param message: Message to look for
:return: Nothing
:raises: AttributeError if response does not contain verify_trace method.
TestStepFail if verify_trace ret... |
def SensorMetatagsPost(self, sensor_id, metatags, namespace = None):
ns = "default" if namespace is None else namespace
if self.__SenseApiCall__("/sensors/{0}/metatags.json?namespace={1}".format(sensor_id, ns), "POST", parameters = metatags):
return True
else:
self._... | Attach metatags to a sensor for a specific namespace
@param sensor_id (int) - Id of the sensor to attach metatags to
@param namespace (string) - Namespace for which to attach metatags
@param metatags (dictionary) - Metatags to attach to the sensor
... |
def interp_like(self, other, method='linear', assume_sorted=False,
kwargs={}):
if self.dtype.kind not in 'uifc':
raise TypeError('interp only works for a numeric type array. '
'Given {}.'.format(self.dtype))
ds = self._to_temp_dataset().interp_... | Interpolate this object onto the coordinates of another object,
filling out of range values with NaN.
Parameters
----------
other : Dataset or DataArray
Object with an 'indexes' attribute giving a mapping from dimension
names to an 1d array-like, which provides c... |
def _translate_language_name(self, language_name):
languages = self.languages()
language_id = None
for ideone_index, ideone_language in languages.items():
if ideone_language.lower() == language_name.lower():
return ideone_index
simple_languages = dict((k,v.spl... | Translate a human readable langauge name into its Ideone
integer representation.
Keyword Arguments
-----------------
* langauge_name: a string of the language (e.g. "c++")
Returns
-------
An integer representation of the language.
Notes
-----
... |
def _merge_filters(self) -> None:
for opts in (["-filter:a", "-af"], ["-filter:v", "-vf"]):
filter_list = []
new_argv = []
cmd_iter = iter(self._argv)
for element in cmd_iter:
if element in opts:
filter_list.insert(0, next(cmd_i... | Merge all filter config in command line. |
def get_changes(self, changers, in_hierarchy=False, resources=None,
task_handle=taskhandle.NullTaskHandle()):
function_changer = _FunctionChangers(self.pyname.get_object(),
self._definfo(), changers)
return self._change_calls(function_chan... | Get changes caused by this refactoring
`changers` is a list of `_ArgumentChanger`\s. If `in_hierarchy`
is `True` the changers are applyed to all matching methods in
the class hierarchy.
`resources` can be a list of `rope.base.resource.File`\s that
should be searched for occurre... |
def cree_ws_lecture(self, champs_ligne):
for c in champs_ligne:
label = ASSOCIATION[c][0]
w = ASSOCIATION[c][3](self.acces[c], False)
w.setObjectName("champ-lecture-seule-details")
self.widgets[c] = (w, label) | Alternative to create read only widgets. They should be set after. |
def get_sql(self):
test_method = [
self.is_time,
self.is_date,
self.is_datetime,
self.is_decimal,
self.is_year,
self.is_tinyint,
self.is_smallint,
self.is_mediumint,
self.is_int,
self.is_bigin... | Retrieve the data type for a data record. |
def align_after(self, offset):
f = self.reader
if offset <= 0:
f.seek(0)
self._block_count = 0
self._read_header()
return
sm = self.sync_marker
sml = len(sm)
pos = offset
while pos < self.file_length - sml:
f.see... | Search for a sync point after offset and align just after that. |
def render_form(form, **kwargs):
renderer_cls = get_form_renderer(**kwargs)
return renderer_cls(form, **kwargs).render() | Render a form to a Bootstrap layout |
def depart_heading(self, _):
assert isinstance(self.current_node, nodes.title)
text = self.current_node.astext()
if self.translate_section_name:
text = self.translate_section_name(text)
name = nodes.fully_normalize_name(text)
section = self.current_node.parent
... | Finish establishing section
Wrap up title node, but stick in the section node. Add the section names
based on all the text nodes added to the title. |
def bound_spec(self, name):
if isinstance(name, BaseData):
name = name.name
spec = self.data_spec(name)
try:
bound = self._inputs[name]
except KeyError:
if not spec.derived and spec.default is None:
raise ArcanaMissingDataException(
... | Returns an input selector or derived spec bound to the study, i.e.
where the repository tree is checked for existing outputs
Parameters
----------
name : Str
A name of a fileset or field |
def rows(self):
from ambry.orm import Config as SAConfig
from sqlalchemy import or_
rows = []
configs = self.dataset.session\
.query(SAConfig)\
.filter(or_(SAConfig.group == 'config', SAConfig.group == 'process'),
SAConfig.d_vid == self.dataset... | Return configuration in a form that can be used to reconstitute a
Metadata object. Returns all of the rows for a dataset.
This is distinct from get_config_value, which returns the value
for the library. |
def detect_phantomjs(version='2.1'):
if settings.phantomjs_path() is not None:
phantomjs_path = settings.phantomjs_path()
else:
if hasattr(shutil, "which"):
phantomjs_path = shutil.which("phantomjs") or "phantomjs"
else:
phantomjs_path = "phantomjs"
try:
... | Detect if PhantomJS is avaiable in PATH, at a minimum version.
Args:
version (str, optional) :
Required minimum version for PhantomJS (mostly for testing)
Returns:
str, path to PhantomJS |
def get_user_groups(name, sid=False):
if name == 'SYSTEM':
groups = [name]
else:
groups = win32net.NetUserGetLocalGroups(None, name)
if not sid:
return groups
ret_groups = set()
for group in groups:
ret_groups.add(get_sid_from_name(group))
return ret_groups | Get the groups to which a user belongs
Args:
name (str): The user name to query
sid (bool): True will return a list of SIDs, False will return a list of
group names
Returns:
list: A list of group names or sids |
def hashitem(item):
norm = normitem(item)
byts = s_msgpack.en(norm)
return hashlib.md5(byts).hexdigest() | Generate a uniq hash for the JSON compatible primitive data structure. |
def from_ssl(self,
ca_certs,
client_cert,
client_key,
hosts=default.ELASTICSEARCH_HOSTS,
use_ssl=True,
verify_certs=True, **kwargs):
self.client = Elasticsearch(hosts=hosts,
... | Initialize a Elasticsearch client by SSL.
:param ca_certs: optional path to CA bundle. See
https://urllib3.readthedocs.io/en/latest/security.html#using-certifi-with-urllib3
:param client_cert: path to the file containing the private key and the
certificate, or cert only if using client_... |
def _return_base_data(self, url, container, container_object=None,
container_headers=None, object_headers=None):
headers = self.job_args['base_headers']
headers.update({'X-Auth-Token': self.job_args['os_token']})
_container_uri = url.geturl().rstrip('/')
if cont... | Return headers and a parsed url.
:param url:
:param container:
:param container_object:
:param container_headers:
:return: ``tuple`` |
def add(self, cls_or_branch, *args, **kwargs):
if isinstance(cls_or_branch, Branch):
self.tasks.append(cls_or_branch)
else:
self.__validate_task(cls_or_branch, '__init__', args, kwargs)
self.tasks.append({'cls_or_branch': cls_or_branch, 'args': args, 'kwargs': kwargs}... | Adds a task or branch to the lane.
Parameters
----------
cls_or_branch : Class
*args
Variable length argument list to be passed to `cls_or_branch` during instantiation
**kwargs
Variable length keyword arguments to be passed to `cls_or_branch` during insta... |
def add_etag(self, overwrite=False, weak=False):
if overwrite or "etag" not in self.headers:
self.set_etag(generate_etag(self.get_data()), weak) | Add an etag for the current response if there is none yet. |
def refresh(self):
if lib.EnvRefresh(self._env, self._rule) != 1:
raise CLIPSError(self._env) | Refresh the Rule.
The Python equivalent of the CLIPS refresh command. |
def _update_sid_to_last_existing_pid_map(pid):
last_pid = _find_head_or_latest_connected(pid)
chain_model = _get_chain_by_pid(last_pid)
if not chain_model:
return
chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid)
chain_model.save() | Set chain head PID to the last existing object in the chain to which ``pid``
belongs. If SID has been set for chain, it resolves to chain head PID.
Intended to be called in MNStorage.delete() and other chain manipulation.
Preconditions:
- ``pid`` must exist and be verified to be a PID.
d1_gmn.ap... |
def _stage_input_files(self, file_mapping, dry_run=True):
if self._file_stage is None:
return
self._file_stage.copy_to_scratch(file_mapping, dry_run) | Stage the input files to the scratch area and adjust the arguments accordingly |
def _add_record(table, data, buffer_size):
fields = table.fields
for invalid_key in set(data).difference([f.name for f in fields]):
del data[invalid_key]
table.append(Record.from_dict(fields, data))
if buffer_size is not None and table.is_attached():
if (len(table) - 1) - table._last_syn... | Prepare and append a Record into its Table; flush to disk if necessary. |
def predictions(self):
for prediction in self.api.predictions(vid=self.vid)['prd']:
pobj = Prediction.fromapi(self.api, prediction)
pobj._busobj = self
yield pobj | Generator that yields prediction objects from an API response. |
def main(argv=None):
arguments = cli_common(__doc__, argv=argv)
benet = BeNet(arguments['CAMPAIGN_FILE'])
benet.run()
if argv is not None:
return benet | ben-nett entry point |
def bifurcated_extend(self, corpus, max_size):
temp_fd, temp_path = tempfile.mkstemp(text=True)
try:
self._prepare_bifurcated_extend_data(corpus, max_size, temp_path,
temp_fd)
finally:
try:
os.remove(temp_pa... | Replaces the results with those n-grams that contain any of the
original n-grams, and that represent points at which an n-gram
is a constituent of multiple larger n-grams with a lower label
count.
:param corpus: corpus of works to which results belong
:type corpus: `Corpus`
... |
def cmd_zf(self, ch=None):
viewer = self.get_viewer(ch)
if viewer is None:
self.log("No current viewer/channel.")
return
viewer.zoom_fit()
cur_lvl = viewer.get_zoom()
self.log("zoom=%f" % (cur_lvl)) | zf ch=chname
Zoom the image for the given viewer/channel to fit the window. |
def calc_2d_forces(self,x1,y1,x2,y2,width):
if x1>x2:
a = x1-x2
else:
a = x2-x1
a_sq=a*a
if y1>y2:
b = y1-y2
else:
b = y2-y1
b_sq=b*b
from math import sqrt
c_sq = a_sq+b_sq
c = sqrt(c_sq)
if c... | Calculate overlap in 2D space |
def on_channel_closed(self, channel, reply_code, reply_text):
for future in self.messages.values():
future.set_exception(AMQPException(reply_code, reply_text))
self.messages = {}
if self.closing:
LOGGER.debug('Channel %s was intentionally closed (%s) %s',
... | Invoked by pika when RabbitMQ unexpectedly closes the channel.
Channels are usually closed if you attempt to do something that
violates the protocol, such as re-declare an exchange or queue with
different parameters.
In this case, we just want to log the error and create a new channel
... |
def _parsemeta_tmy2(columns, line):
rawmeta = " ".join(line.split()).split(" ")
meta = rawmeta[:3]
meta.append(int(rawmeta[3]))
longitude = (
float(rawmeta[5]) + float(rawmeta[6])/60) * (2*(rawmeta[4] == 'N') - 1)
latitude = (
float(rawmeta[8]) + float(rawmeta[9])/60) * (2*(rawmeta[7... | Retrieves metadata from the top line of the tmy2 file.
Parameters
----------
columns : string
String of column headings in the header
line : string
Header string containing DataFrame
Returns
-------
meta : Dict of metadata contained in the header string |
def sqrt(n):
if isinstance(n, Rational):
n = Constructible(n)
elif not isinstance(n, Constructible):
raise ValueError('the square root is not implemented for the type %s' % type(n))
r = n._try_sqrt()
if r is not None:
return r
return Constructible(Constructible.lift_rational_... | return the square root of n in an exact representation |
def add_markdown_cell(self, content, tags=None):
self.notebook["cells"].append(nb.v4.new_markdown_cell(content, **{"metadata":
{"tags": tags}})) | Class method responsible for adding a markdown cell with content 'content' to the
Notebook object.
----------
Parameters
----------
content : str
Text/HTML code/... to include in the markdown cell (triple quote for multiline text).
tags : list
A ... |
async def flush(self, request: 'Request'):
from bernard.middleware import MiddlewareManager
for stack in self._stacks:
await stack.convert_media(self.platform)
func = MiddlewareManager.instance().get('flush', self._flush)
await func(request, self._stacks) | Send all queued messages.
The first step is to convert all media in the stacked layers then the
second step is to send all messages as grouped in time as possible. |
def consume(self):
if self.match:
self.pos = self.match.end()
if self.match.group()[-1] == '\n':
self._update_prefix()
self.match = None | Consume the body of source. ``pos`` will move forward. |
def new_iteration(self, prefix):
self.flush()
self.prefix[-1] = prefix
self.reset_formatter() | When inside a loop logger, created a new iteration |
def quantile(q, variable, weight_variable = None, filter_variable = None):
def formula(entity, period):
value = entity(variable, period)
if weight_variable is not None:
weight = entity(weight_variable, period)
weight = entity.filled_array(1)
if filter_variable is not None... | Return quantile of a variable with weight provided by a specific wieght variable potentially filtered |
def _CheckLegacyPassword(self, password):
import crypt
salt = self._value[:2]
return crypt.crypt(password, salt) == self._value | Check password with legacy crypt based method. |
def common_ancestor(c):
span1 = _to_span(c[0])
span2 = _to_span(c[1])
ancestor1 = np.array(span1.sentence.xpath.split("/"))
ancestor2 = np.array(span2.sentence.xpath.split("/"))
min_len = min(ancestor1.size, ancestor2.size)
return list(ancestor1[: np.argmin(ancestor1[:min_len] == ancestor2[:min_... | Return the path to the root that is shared between a binary-Mention Candidate.
In particular, this is the common path of HTML tags.
:param c: The binary-Mention Candidate to evaluate
:rtype: list of strings |
def make_pkgng_aware(jname):
ret = {'changes': {}}
cdir = _config_dir()
if not os.path.isdir(cdir):
os.makedirs(cdir)
if os.path.isdir(cdir):
ret['changes'] = 'Created poudriere make file dir {0}'.format(cdir)
else:
return 'Could not create or find required di... | Make jail ``jname`` pkgng aware
CLI Example:
.. code-block:: bash
salt '*' poudriere.make_pkgng_aware <jail name> |
def read(self):
data = bytearray()
while True:
incoming_bytes = self.comport.inWaiting()
if incoming_bytes == 0:
break
else:
content = self.comport.read(size=incoming_bytes)
data.extend(bytearray(content))
return... | Read data from serial port and returns a ``bytearray``. |
def find_spectrum_match(spec, spec_lib, method='euclidian'):
spec = spec / np.max(spec)
if method == 'dot':
d1 = (spec_lib * lil_matrix(spec).T).sum(axis=1).A ** 2
d2 = np.sum(spec ** 2) * spec_lib.multiply(spec_lib).sum(axis=1).A
dist = d1 / d2
elif method == 'euclidian':
st... | Find spectrum in spec_lib most similar to spec. |
def file_id(self):
if self.type.lower() == "directory":
return None
if self.file_uuid is None:
raise exceptions.MetsError(
"No FILEID: File %s does not have file_uuid set" % self.path
)
if self.is_aip:
return os.path.splitext(os.pat... | Returns the fptr @FILEID if this is not a Directory. |
def _update_linear_bucket_count(a_float, dist):
buckets = dist.linearBuckets
if buckets is None:
raise ValueError(_BAD_UNSET_BUCKETS % (u'linear buckets'))
bucket_counts = dist.bucketCounts
num_finite_buckets = buckets.numFiniteBuckets
if len(bucket_counts) < num_finite_buckets + 2:
... | Adds `a_float` to `dist`, updating the its linear buckets.
Args:
a_float (float): a new value
dist (:class:`endpoints_management.gen.servicecontrol_v1_messages.Distribution`):
the Distribution being updated
Raises:
ValueError: if `dist` does not already have linear buckets defined
... |
def get_matrix(self):
if self.parent:
return self.get_local_matrix() * (self._prev_parent_matrix or self.parent.get_matrix())
else:
return self.get_local_matrix() | return sprite's current transformation matrix |
def get_parent_id(self, resource, document):
parent_type = self._get_parent_type(resource)
if parent_type and document:
return document.get(parent_type.get('field'))
return None | Get the Parent Id of the document
:param resource: resource name
:param document: document containing the parent id |
def _compute(self, data):
local_ts = self._local_ts(*data)
dt = local_ts[internal_names.TIME_WEIGHTS_STR]
dt = dt / np.timedelta64(1, 'D')
return local_ts, dt | Perform the calculation. |
def get_property(self, name):
with self.__properties_lock:
return self.__properties.get(name, os.getenv(name)) | Retrieves a framework or system property. As framework properties don't
change while it's running, this method don't need to be protected.
:param name: The property name |
def _update_xyz(self, change):
self.x,self.y,self.z = self.position.X(),self.position.Y(),self.position.Z() | Keep x,y,z in sync with position |
def profile_loglike(self, x):
if self._prof_interp is None:
return self._profile_loglike(x)[1]
x = np.array(x, ndmin=1)
return self._prof_interp(x) | Profile log-likelihood.
Returns ``L_prof(x,y=y_min|z')`` : where y_min is the
value of y that minimizes
L for a given x.
This will used the cached '~fermipy.castro.Interpolator' object
if possible, and ... |
def vn_delete(call=None, kwargs=None):
if call != 'function':
raise SaltCloudSystemExit(
'The vn_delete function must be called with -f or --function.'
)
if kwargs is None:
kwargs = {}
name = kwargs.get('name', None)
vn_id = kwargs.get('vn_id', None)
if vn_id:
... | Deletes the given virtual network from OpenNebula. Either a name or a vn_id must
be supplied.
.. versionadded:: 2016.3.0
name
The name of the virtual network to delete. Can be used instead of ``vn_id``.
vn_id
The ID of the virtual network to delete. Can be used instead of ``name``.
... |
def get_config(config_file):
def load(fp):
try:
return yaml.safe_load(fp)
except yaml.YAMLError as e:
sys.stderr.write(text_type(e))
sys.exit(1)
if config_file == '-':
return load(sys.stdin)
if not os.path.exists(config_file):
sys.stderr.wr... | Get configuration from a file. |
def setKeepAliveTimeOut(self, iTimeOut):
print '%s call setKeepAliveTimeOut' % self.port
print iTimeOut
try:
cmd = WPANCTL_CMD + 'setprop NCP:SleepyPollInterval %s' % str(iTimeOut*1000)
print cmd
return self.__sendCommand(cmd)[0] != 'Fail'
except Excep... | set keep alive timeout for device
has been deprecated and also set SED polling rate
Args:
iTimeOut: data poll period for sleepy end device
Returns:
True: successful to set the data poll period for SED
False: fail to set the data poll period for SED |
def disable_if_no_tty(cls):
if sys.stdout.isatty() or sys.stderr.isatty():
return False
cls.disable_all_colors()
return True | Disable all colors only if there is no TTY available.
:return: True if colors are disabled, False if stderr or stdout is a TTY.
:rtype: bool |
def import_class(clspath):
modpath, clsname = split_clspath(clspath)
__import__(modpath)
module = sys.modules[modpath]
return getattr(module, clsname) | Given a clspath, returns the class.
Note: This is a really simplistic implementation. |
def setExpertLevel(self):
g = get_root(self).globals
level = g.cpars['expert_level']
if level == 0:
if self.val.get() == 'CCD TECs':
self.val.set('Observe')
self._changed()
self.tecs.grid_forget()
else:
self.tecs.grid(ro... | Modifies widget according to expertise level, which in this
case is just matter of hiding or revealing the button to
set CCD temps |
def transform(self, maps):
out = {}
out["chi_p"] = conversions.chi_p(
maps[parameters.mass1], maps[parameters.mass2],
maps[parameters.spin1x], maps[parameters.spin1y],
maps[parameters.spin2x], maps[parameters.spin2y])... | This function transforms from component masses and caretsian spins
to chi_p.
Parameters
----------
maps : a mapping object
Examples
--------
Convert a dict of numpy.array:
Returns
-------
out : dict
A dict with key as paramet... |
def load_metrics(event_dir, epoch):
metrics = {}
for filename in tf.gfile.ListDirectory(event_dir):
path = os.path.join(event_dir, filename)
for event in tf.train.summary_iterator(path):
if event.step == epoch and event.HasField("summary"):
value = event.summary.value[0]
metrics[value.... | Loads metrics for this epoch if they have already been written.
This reads the entire event file but it's small with just per-epoch metrics.
Args:
event_dir: TODO(koz4k): Document this.
epoch: TODO(koz4k): Document this.
Returns:
metrics. |
def create_multiple_replace_func(*args, **kwds):
adict = dict(*args, **kwds)
rx = re.compile('|'.join(map(re.escape, adict)))
def one_xlat(match):
return adict[match.group(0)]
def xlat(text):
return rx.sub(one_xlat, text)
return xlat | You can call this function and pass it a dictionary, or any other
combination of arguments you could pass to built-in dict in order to
construct a dictionary. The function will return a xlat closure that
takes as its only argument text the string on which the substitutions
are desired and returns a copy... |
def _eval(self, v, in_bounds, der):
result = np.zeros_like(v, dtype='float')
x_indices = np.searchsorted(self._x, v, side='rigth')
ids = x_indices[in_bounds] - 1
u = v[in_bounds] - self._x[ids]
result[in_bounds] = self._poly_eval(u, ids, der)
return result | Eval polynomial inside bounds. |
def sanitizeStructTime(struct):
maxValues = (9999, 12, 31, 23, 59, 59)
minValues = (1, 1, 1, 0, 0, 0)
newstruct = []
for value, maxValue, minValue in zip(struct[:6], maxValues, minValues):
newstruct.append(max(minValue, min(value, maxValue)))
return tuple(newstruct) + struct[6:] | Convert struct_time tuples with possibly invalid values to valid
ones by substituting the closest valid value. |
def from_table(self, table=None, fields='*', schema=None, **kwargs):
self.tables.append(TableFactory(
table=table,
fields=fields,
schema=schema,
owner=self,
**kwargs
))
return self | Adds a ``Table`` and any optional fields to the list of tables
this query is selecting from.
:type table: str or dict or :class:`Table <querybuilder.tables.Table>`
or :class:`Query <querybuilder.query.Query>` or
:class:`ModelBase <django:django.db.models.base.ModelBase>`
... |
async def flexible_api_handler(service, action_type, payload, props, **kwds):
if action_type == intialize_service_action():
model = json.loads(payload) if isinstance(payload, str) else payload
models = service._external_service_data['models']
connections = service._external_service_data['con... | This query handler builds the dynamic picture of availible services. |
def build_payload(self, payload):
for segment in self.segments:
segment.pack(payload, commit=self.autocommit) | Build payload of message. |
def head(self, n=5):
self._reset_group_selection()
mask = self._cumcount_array() < n
return self._selected_obj[mask] | Return first n rows of each group.
Essentially equivalent to ``.apply(lambda x: x.head(n))``,
except ignores as_index flag.
%(see_also)s
Examples
--------
>>> df = pd.DataFrame([[1, 2], [1, 4], [5, 6]],
columns=['A', 'B'])
>>> df.gr... |
def disable_busy_cursor():
while QgsApplication.instance().overrideCursor() is not None and \
QgsApplication.instance().overrideCursor().shape() == \
QtCore.Qt.WaitCursor:
QgsApplication.instance().restoreOverrideCursor() | Disable the hourglass cursor and listen for layer changes. |
def validate_field(field, allowed_keys, allowed_types):
for key, value in field.items():
if key not in allowed_keys:
raise exceptions.ParametersFieldError(key, "property")
if key == defs.TYPE:
if value not in allowed_types:
raise exceptions.ParametersFieldErro... | Validate field is allowed and valid. |
def join(self, iterable):
return self.__class__(super(ColorStr, self).join(iterable), keep_tags=True) | Return a string which is the concatenation of the strings in the iterable.
:param iterable: Join items in this iterable. |
def _parse_multifile(self, desired_type: Type[T], obj: PersistedObject,
parsing_plan_for_children: Dict[str, ParsingPlan], logger: Logger,
options: Dict[str, Dict[str, Any]]) -> T:
pass | First parse all children from the parsing plan, then calls _build_object_from_parsed_children
:param desired_type:
:param obj:
:param parsing_plan_for_children:
:param logger:
:param options:
:return: |
def ensure_unicoded_and_unique(args_list, application):
unicoded_args = []
for argument in args_list:
argument = (six.u(argument)
if not isinstance(argument, six.text_type) else argument)
if argument not in unicoded_args or argument == application:
unicoded_args.a... | Iterate over args_list, make it unicode if needed and ensure that there
are no duplicates.
Returns list of unicoded arguments in the same order. |
def edge_has_annotation(edge_data: EdgeData, key: str) -> Optional[Any]:
annotations = edge_data.get(ANNOTATIONS)
if annotations is None:
return None
return annotations.get(key) | Check if an edge has the given annotation.
:param edge_data: The data dictionary from a BELGraph's edge
:param key: An annotation key
:return: If the annotation key is present in the current data dictionary
For example, it might be useful to print all edges that are annotated with 'Subgraph':
>>>... |
def _mean_dict(dict_list):
return {k: np.array([d[k] for d in dict_list]).mean()
for k in dict_list[0].keys()} | Compute the mean value across a list of dictionaries |
def skip_redundant(iterable, skipset=None):
if skipset is None:
skipset = set()
for item in iterable:
if item not in skipset:
skipset.add(item)
yield item | Redundant items are repeated items or items in the original skipset. |
def setNumberRange(key, keyType, start, end):
return And(
And(keyType, error=SCHEMA_TYPE_ERROR % (key, keyType.__name__)),
And(lambda n: start <= n <= end, error=SCHEMA_RANGE_ERROR % (key, '(%s,%s)' % (start, end))),
) | check number range |
def erase_in_display(self, how=0, *args, **kwargs):
if how == 0:
interval = range(self.cursor.y + 1, self.lines)
elif how == 1:
interval = range(self.cursor.y)
elif how == 2 or how == 3:
interval = range(self.lines)
self.dirty.update(interval)
... | Erases display in a specific way.
Character attributes are set to cursor attributes.
:param int how: defines the way the line should be erased in:
* ``0`` -- Erases from cursor to end of screen, including
cursor position.
* ``1`` -- Erases from beginning of scree... |
def _on_stackexchange_request(self, future, response):
content = escape.json_decode(response.body)
if 'error' in content:
future.set_exception(Exception('StackExchange error: %s' %
str(content['error'])))
return
future.set_result... | Invoked as a response to the StackExchange API request. Will decode
the response and set the result for the future to return the callback or
raise an exception |
def get_node(self, node_name):
for node in self.nodes:
if node.__name__ == node_name:
return node | Retrieve node with passed name |
def fetch_token(self):
grant_type = 'client_credentials'
channel = yield self._tvm.ticket_full(
self._client_id, self._client_secret, grant_type, {})
ticket = yield channel.rx.get()
raise gen.Return(self._make_token(ticket)) | Gains token from secure backend service.
:return: Token formatted for Cocaine protocol header. |
def get(self, *args, **kwargs):
return self.session.get(*args, **self.get_kwargs(**kwargs)) | Executes an HTTP GET.
:Parameters:
- `args`: Non-keyword arguments
- `kwargs`: Keyword arguments |
def get_reservation_ports(session, reservation_id, model_name='Generic Traffic Generator Port'):
reservation_ports = []
reservation = session.GetReservationDetails(reservation_id).ReservationDescription
for resource in reservation.Resources:
if resource.ResourceModelName == model_name:
r... | Get all Generic Traffic Generator Port in reservation.
:return: list of all Generic Traffic Generator Port resource objects in reservation |
def _sid_subdir_path(sid):
padded_sid = format(sid, '06')
return os.path.join(
padded_sid[0:2],
padded_sid[2:4],
"{0}.bcolz".format(str(padded_sid))
) | Format subdir path to limit the number directories in any given
subdirectory to 100.
The number in each directory is designed to support at least 100000
equities.
Parameters
----------
sid : int
Asset identifier.
Returns
-------
out : string
A path for the bcolz ro... |
def is_encodable(self, typ: TypeStr, arg: Any) -> bool:
encoder = self._registry.get_encoder(typ)
try:
encoder.validate_value(arg)
except EncodingError:
return False
except AttributeError:
try:
encoder(arg)
except EncodingEr... | Determines if the python value ``arg`` is encodable as a value of the
ABI type ``typ``.
:param typ: A string representation for the ABI type against which the
python value ``arg`` will be checked e.g. ``'uint256'``,
``'bytes[]'``, ``'(int,int)'``, etc.
:param arg: The py... |
def update_health(self, reporter, info):
with self.changes_squashed:
alarm = info.alarm
if alarm.is_ok():
self._faults.pop(reporter, None)
else:
self._faults[reporter] = alarm
if self._faults:
faults = sorted(self._f... | Set the health attribute. Called from part |
def defocus_blur(x, severity=1):
c = [(3, 0.1), (4, 0.5), (6, 0.5), (8, 0.5), (10, 0.5)][severity - 1]
x = np.array(x) / 255.
kernel = disk(radius=c[0], alias_blur=c[1])
channels = []
for d in range(3):
channels.append(tfds.core.lazy_imports.cv2.filter2D(x[:, :, d], -1, kernel))
channels = np.array(chan... | Defocus blurring to images.
Apply defocus blurring to images using Gaussian kernel.
Args:
x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255].
severity: integer, severity of corruption.
Returns:
numpy array, image with uint8 pixels in [0,255]. Applied defocus blur. |
def _populateBuffer(self, stream, n):
try:
for x in xrange(n):
output = stream.next()
self._buffer.write(output)
except StopIteration, e:
self._deferred.callback(None)
except Exception, e:
self._deferred.errback(e)
else:... | Iterator that returns N steps of
the genshi stream.
Found that performance really sucks for
n = 1 (0.5 requests/second for the root resources
versus 80 requests/second for a blocking algorithm).
Hopefully increasing the number of steps per timeslice will
significantly i... |
def _extract_alphabet(self, grammar):
alphabet = set([])
for terminal in grammar.Terminals:
alphabet |= set([x for x in terminal])
self.alphabet = list(alphabet) | Extract an alphabet from the given grammar. |
def Compile(self, filter_implementation):
arguments = [self.attribute]
for argument in self.args:
arguments.append(argument.Compile(filter_implementation))
expander = filter_implementation.FILTERS['ValueExpander']
context_cls = filter_implementation.FILTERS['Context']
return context_cls(argume... | Compile the expression. |
def get_users_by_ids(self, user_ids):
    """Fetch a :class:`User` object for each id in *user_ids*.

    Builds one ``<id>.json`` endpoint per id relative to
    ``self.user_url``, fetches them concurrently via
    ``self._run_async``, and wraps each non-empty response in a
    :class:`User`.

    :param user_ids: iterable of user ids.
    :returns: list of ``User`` objects (empty/falsy responses are
        dropped).
    """
    endpoints = []
    for uid in user_ids:
        endpoints.append(urljoin(self.user_url, f"{uid}.json"))
    responses = self._run_async(urls=endpoints)
    # Skip falsy payloads (e.g. missing/deleted users).
    return [User(payload) for payload in responses if payload]
def set(self, prop, value):
prop_parts = prop.split(".")
if self.copy_dict:
new_dict = copy.deepcopy(self.obj)
else:
new_dict = self.obj
pointer = None
parts_length = len(prop_parts) - 1
for i, part in enumerate(prop_parts):
if pointer ... | sets the dot notated property to the passed in value
args:
prop: a string of the property to retrieve
"a.b.c" ~ dictionary['a']['b']['c']
value: the value to set the prop object |
def censor_entity_types(self, entity_types):
assert type(entity_types) == set
self._entity_types_to_censor = entity_types
self._feats_from_spacy_doc = FeatsFromSpacyDoc(
use_lemmas=self._use_lemmas,
entity_types_to_censor=self._entity_types_to_censor
)
ret... | Entity types to exclude from feature construction. Terms matching
specified entities, instead of labeled by their lower case orthographic
form or lemma, will be labeled by their entity type.
Parameters
----------
entity_types : set of entity types outputted by spaCy
'... |
def from_dict(self, document):
identifier = str(document['_id'])
active = document['active']
timestamp = datetime.datetime.strptime(document['timestamp'], '%Y-%m-%dT%H:%M:%S.%f')
properties = document['properties']
directory = self.get_directory(identifier)
return ImageHa... | Create image object from JSON document retrieved from database.
Parameters
----------
document : JSON
Json document in database
Returns
-------
ImageHandle
Handle for image object |
def get_files(self, file_paths):
results = []
def get_file_thunk(path, interface):
result = error = None
try:
result = interface.get_file(path)
except Exception as err:
error = err
print(err)
content, encoding = result
content = compression.decompress(cont... | returns a list of files faster by using threads |
def make_tree(self):
self.tree['is_ready'] = False
leaf_count = len(self.tree['leaves'])
if leaf_count > 0:
self._unshift(self.tree['levels'], self.tree['leaves'])
while len(self.tree['levels'][0]) > 1:
self._unshift(self.tree['levels'], self._calculate_ne... | Generates the merkle tree. |
def get_all_hits(self):
page_size = 100
search_rs = self.search_hits(page_size=page_size)
total_records = int(search_rs.TotalNumResults)
get_page_hits = lambda(page): self.search_hits(page_size=page_size, page_number=page)
page_nums = self._get_pages(page_size, total_records)
... | Return all of a Requester's HITs
Despite what search_hits says, it does not return all hits, but
instead returns a page of hits. This method will pull the hits
from the server 100 at a time, but will yield the results
iteratively, so subsequent requests are made on demand. |
def homepage():
if current_user.is_authenticated():
if not login_fresh():
logging.debug('User needs a fresh token')
abort(login.needs_refresh())
auth.claim_invitations(current_user)
build_list = operations.UserOps(current_user.get_id()).get_builds()
return render_temp... | Renders the homepage. |
def get_switch_macs(self, switch_ip=None, node=None, vlan=None, mac=None, port=None, verbose=0):
if (switch_ip == None):
if (node == None):
raise Exception('get_switch_macs() requires switch_ip or node parameter')
return None
switch_ip = node.get_ipaddr()
... | Get the CAM table from a switch.
Args:
switch_ip IP address of the device
node natlas_node from new_node()
vlan Filter results by VLAN
MAC Filter results by MAC address (regex)
port ... |
def get_cdn_auth_token(self, app_id, hostname):
return self.send_job_and_wait(MsgProto(EMsg.ClientGetCDNAuthToken),
{
'app_id': app_id,
'host_name': hostname,
... | Get CDN authentication token
:param app_id: app id
:type app_id: :class:`int`
:param hostname: cdn hostname
:type hostname: :class:`str`
:return: `CMsgClientGetCDNAuthTokenResponse <https://github.com/ValvePython/steam/blob/39627fe883feeed2206016bacd92cf0e4580ead6/protobufs/stea... |
def normalize_signature(func):
    """Decorator. Combine args and kwargs. Unpack single item tuples.

    The wrapped ``func`` always receives exactly one positional
    argument:

    * positional-only call with one arg  -> that arg itself
    * positional-only call with several  -> the args tuple
    * any keyword args present           -> a ``(args_tuple, kwargs_dict)`` pair

    :param func: callable taking a single positional argument.
    :returns: the wrapping callable.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if kwargs:
            # Fold keywords in: args becomes the 2-tuple (args, kwargs).
            args = args, kwargs
        # BUG FIX: original used `len(args) is 1` — identity comparison
        # against an int literal, which only works via CPython's
        # small-int caching. Use equality.
        if len(args) == 1:
            args = args[0]
        return func(args)
    return wrapper
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.