code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def save_to(self, obj):
if isinstance(obj, dict):
obj = dict(obj)
for key in self.changed_fields:
if key in self.cleaned_data:
val = self.cleaned_data.get(key)
set_obj_value(obj, key, val)
return obj | Save the cleaned data to an object. |
def execute_script(self, name, keys, *args, **options):
script = get_script(name)
if not script:
raise redis.RedisError('No such script "%s"' % name)
address = self.address()
if address not in all_loaded_scripts:
all_loaded_scripts[address] = set()
loaded ... | Execute a script.
makes sure all required scripts are loaded. |
def register_channel_post_handler(self, callback, *custom_filters, commands=None, regexp=None, content_types=None,
state=None, run_task=None, **kwargs):
filters_set = self.filters_factory.resolve(self.channel_post_handlers,
... | Register handler for channel post
:param callback:
:param commands: list of commands
:param regexp: REGEXP
:param content_types: List of content types.
:param state:
:param custom_filters: list of custom filters
:param run_task: run callback in task (no wait resu... |
def disable_switchport(self, inter_type, inter):
config = ET.Element('config')
interface = ET.SubElement(config, 'interface',
xmlns=("urn:brocade.com:mgmt:"
"brocade-interface"))
int_type = ET.SubElement(interface, inter_... | Change an interface's operation to L3.
Args:
inter_type: The type of interface you want to configure. Ex.
tengigabitethernet, gigabitethernet, fortygigabitethernet.
inter: The ID for the interface you want to configure. Ex. 1/0/1
Returns:
True if com... |
def write_intro (self):
self.comment(_("created by %(app)s at %(time)s") %
{"app": configuration.AppName,
"time": strformat.strtime(self.starttime)})
self.comment(_("Get the newest version at %(url)s") %
{'url': configuration.Url})
se... | Write intro comments. |
def get_field_info(self, field):
field_info = self.get_attributes(field)
field_info["required"] = getattr(field, "required", False)
field_info["type"] = self.get_label_lookup(field)
if getattr(field, "child", None):
field_info["child"] = self.get_field_info(field.child)
... | This method is basically a mirror from rest_framework==3.3.3
We are currently pinned to rest_framework==3.1.1. If we upgrade,
this can be refactored and simplified to rely more heavily on
rest_framework's built in logic. |
def detect_traits(item):
return traits.detect_traits(
name=item.name, alias=item.alias,
filetype=(list(item.fetch("kind_51")) or [None]).pop(),
) | Build traits list from attributes of the passed item. Currently,
"kind_51", "name" and "alias" are considered.
See pyrocore.util.traits:dectect_traits for more details. |
def partition(pred, iterable):
trues = []
falses = []
for item in iterable:
if pred(item):
trues.append(item)
else:
falses.append(item)
return trues, falses | split the results of an iterable based on a predicate |
def _lint():
project_python_files = [filename for filename in get_project_files()
if filename.endswith(b'.py')]
retcode = subprocess.call(
['flake8', '--max-complexity=10'] + project_python_files)
if retcode == 0:
print_success_message('No style errors')
retur... | Run lint and return an exit code. |
def map_sections(fun, neurites, neurite_type=NeuriteType.all, iterator_type=Tree.ipreorder):
return map(fun, iter_sections(neurites,
iterator_type=iterator_type,
neurite_filter=is_type(neurite_type))) | Map `fun` to all the sections in a collection of neurites |
def bestfit_func(self, bestfit_x):
if not self.bestfit_func:
raise KeyError("Do do_bestfit first")
return self.args["func"](self.fit_args, bestfit_x) | Returns y value |
def ekifld(handle, tabnam, ncols, nrows, cnmlen, cnames, declen, decls):
handle = ctypes.c_int(handle)
tabnam = stypes.stringToCharP(tabnam)
ncols = ctypes.c_int(ncols)
nrows = ctypes.c_int(nrows)
cnmlen = ctypes.c_int(cnmlen)
cnames = stypes.listToCharArray(cnames)
declen = ctypes.c_int(dec... | Initialize a new E-kernel segment to allow fast writing.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekifld_c.html
:param handle: File handle.
:type handle: int
:param tabnam: Table name.
:type tabnam: str
:param ncols: Number of columns in the segment.
:type ncols: int
:pa... |
def get_grid_data(xall, yall, zall, nbins=100, method='nearest'):
from scipy.interpolate import griddata
x, y = _np.meshgrid(
_np.linspace(xall.min(), xall.max(), nbins),
_np.linspace(yall.min(), yall.max(), nbins),
indexing='ij')
z = griddata(
_np.hstack([xall[:,None], yall[... | Interpolate unstructured two-dimensional data.
Parameters
----------
xall : ndarray(T)
Sample x-coordinates.
yall : ndarray(T)
Sample y-coordinates.
zall : ndarray(T)
Sample z-coordinates.
nbins : int, optional, default=100
Number of histogram bins used in x/y-di... |
def _onEncoding(self, encString, line, pos, absPosition):
self.encoding = Encoding(encString, line, pos, absPosition) | Memorizes module encoding |
def wait_for_keys(self, *keys, timeout=0):
if len(keys) == 1 and _is_iterable(keys[0]):
keys = keys[0]
return self.listen_until_return(Handler.key_press(keys), timeout=timeout) | Waits until one of the specified keys was pressed, and returns
which key was pressed.
:param keys: iterable of integers of pygame-keycodes, or simply
multiple keys passed via multiple arguments
:type keys: iterable
:param timeout: number of seconds to wait till the functio... |
def tearpage_backend(filename, teared_pages=None):
if teared_pages is None:
teared_pages = [0]
with tempfile.NamedTemporaryFile() as tmp:
shutil.copy(filename, tmp.name)
try:
input_file = PdfFileReader(open(tmp.name, 'rb'))
except PdfReadError:
fix_pdf(fil... | Copy filename to a tempfile, write pages to filename except the teared one.
..note ::
Adapted from sciunto's code, https://github.com/sciunto/tear-pages
:param filename: PDF filepath
:param teared_pages: Numbers of the pages to tear. Default to first page \
only. |
def is_canonical(version, loosedev=False):
if loosedev:
return loose440re.match(version) is not None
return pep440re.match(version) is not None | Return whether or not the version string is canonical according to Pep 440 |
def __create_preview_object_base(self, dct):
if dct.get("_id"):
del dct["_id"]
preview_object_id = yield self.previews.insert(dct)
raise Return(preview_object_id) | The starting point for a preview of a future object.
This is the object which will have future revisions iterated and applied to.
:param dict dct: The starting object dictionary
:return: The preview object id
:rtype: str |
def create_spooled_temporary_file(filepath=None, fileobj=None):
spooled_file = tempfile.SpooledTemporaryFile(
max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
if filepath:
fileobj = open(filepath, 'r+b')
if fileobj is not None:
fileobj.seek(0)
copyfileobj(fi... | Create a spooled temporary file. if ``filepath`` or ``fileobj`` is
defined its content will be copied into temporary file.
:param filepath: Path of input file
:type filepath: str
:param fileobj: Input file object
:type fileobj: file
:returns: Spooled temporary file
:rtype: :class:`tempfil... |
def padDigitalData(self, dig_data, n):
n = int(n)
l0 = len(dig_data)
if l0 % n == 0:
return dig_data
else:
ladd = n - (l0 % n)
dig_data_add = np.zeros(ladd, dtype="uint32")
dig_data_add.fill(dig_data[-1])
return np.concatenate((... | Pad dig_data with its last element so that the new array is a
multiple of n. |
def upvotes(self, option):
params = join_params(self.parameters, {"upvotes": option})
return self.__class__(**params) | Set whether to filter by a user's upvoted list. Options available are
user.ONLY, user.NOT, and None; default is None. |
def where(self, column_or_label, value_or_predicate=None, other=None):
column = self._get_column(column_or_label)
if other is not None:
assert callable(value_or_predicate), "Predicate required for 3-arg where"
predicate = value_or_predicate
other = self._get_column(ot... | Return a new ``Table`` containing rows where ``value_or_predicate``
returns True for values in ``column_or_label``.
Args:
``column_or_label``: A column of the ``Table`` either as a label
(``str``) or an index (``int``). Can also be an array of booleans;
only the rows... |
def every(predicate, *iterables):
r
try:
if len(iterables) == 1: ifilterfalse(predicate, iterables[0]).next()
else: ifilterfalse(bool, starmap(predicate, izip(*iterables))).next()
except StopIteration: return True
else: return False | r"""Like `some`, but only returns `True` if all the elements of `iterables`
satisfy `predicate`.
Examples:
>>> every(bool, [])
True
>>> every(bool, [0])
False
>>> every(bool, [1,1])
True
>>> every(operator.eq, [1,2,3],[1,2])
True
>>> every(operator.eq, [1,2,3],[0,2])
Fal... |
def for_category(self, category, live_only=False):
filters = {'tag': category.tag}
if live_only:
filters.update({'entry__live': True})
return self.filter(**filters) | Returns queryset of EntryTag instances for specified category.
:param category: the Category instance.
:param live_only: flag to include only "live" entries.
:rtype: django.db.models.query.QuerySet. |
def _imagpart(self, f):
def f_im(x, **kwargs):
result = np.asarray(f(x, **kwargs),
dtype=self.scalar_out_dtype)
return result.imag
if is_real_dtype(self.out_dtype):
return self.zero()
else:
return self.real_space.ele... | Function returning the imaginary part of the result from ``f``. |
def get_top_artists(self, limit=None, cacheable=True):
params = {}
if limit:
params["limit"] = limit
doc = _Request(self, "chart.getTopArtists", params).execute(cacheable)
return _extract_top_artists(doc, self) | Returns the most played artists as a sequence of TopItem objects. |
def make_directory_if_not_exists(path):
try:
os.makedirs(path)
except OSError, error:
if error.errno <> errno.EEXIST:
raise error | Create the specified path, making all intermediate-level directories
needed to contain the leaf directory. Ignore any error that would
occur if the leaf directory already exists.
@note: all the intermediate-level directories are created with the
default mode is 0777 (octal).
@param path: the ... |
def bulk_record_workunits(self, engine_workunits):
for workunit in engine_workunits:
duration = workunit['end_timestamp'] - workunit['start_timestamp']
span = zipkin_span(
service_name="pants",
span_name=workunit['name'],
duration=duration,
span_storage=self.span_storage,... | A collection of workunits from v2 engine part |
def build_single(mode):
if mode == 'force':
amode = ['-a']
else:
amode = []
if executable.endswith('uwsgi'):
_executable = executable[:-5] + 'python'
else:
_executable = executable
p = subprocess.Popen([_executable, '-m', 'nikola', 'build'] + amode,
... | Build, in the single-user mode. |
def get_assessments_offered_by_search(self, assessment_offered_query, assessment_offered_search):
if not self._can('search'):
raise PermissionDenied()
return self._provider_session.get_assessments_offered_by_search(assessment_offered_query, assessment_offered_search) | Pass through to provider AssessmentOfferedSearchSession.get_assessments_offered_by_search |
def ToJSonResponse(self, columns_order=None, order_by=(), req_id=0,
response_handler="google.visualization.Query.setResponse"):
response_obj = {
"version": "0.6",
"reqId": str(req_id),
"table": self._ToJSonObj(columns_order, order_by),
"status": "ok"
}
en... | Writes a table as a JSON response that can be returned as-is to a client.
This method writes a JSON response to return to a client in response to a
Google Visualization API query. This string can be processed by the calling
page, and is used to deliver a data table to a visualization hosted on
a differ... |
def to_one_hot(dataY):
nc = 1 + np.max(dataY)
onehot = [np.zeros(nc, dtype=np.int8) for _ in dataY]
for i, j in enumerate(dataY):
onehot[i][j] = 1
return onehot | Convert the vector of labels dataY into one-hot encoding.
:param dataY: vector of labels
:return: one-hot encoded labels |
def execfile(fname, variables):
with open(fname) as f:
code = compile(f.read(), fname, 'exec')
exec(code, variables) | This is builtin in python2, but we have to roll our own on py3. |
def SetEventTag(self, event_tag):
event_identifier = event_tag.GetEventIdentifier()
lookup_key = event_identifier.CopyToString()
self._index[lookup_key] = event_tag.GetIdentifier() | Sets an event tag in the index.
Args:
event_tag (EventTag): event tag. |
def inherited_labels(cls):
return [scls.__label__ for scls in cls.mro()
if hasattr(scls, '__label__') and not hasattr(
scls, '__abstract_node__')] | Return list of labels from nodes class hierarchy.
:return: list |
def translate(self):
value = super().translate()
if value is None or (isinstance(value, str) and value.strip() == ''):
return None
return int(value) | Gets the value in the current language, or
in the configured fallbck language. |
def get_absl_log_prefix(record):
created_tuple = time.localtime(record.created)
created_microsecond = int(record.created % 1.0 * 1e6)
critical_prefix = ''
level = record.levelno
if _is_non_absl_fatal_record(record):
level = logging.ERROR
critical_prefix = _CRITICAL_PREFIX
severity = converter.get_in... | Returns the absl log prefix for the log record.
Args:
record: logging.LogRecord, the record to get prefix for. |
def userinfo_json(request, user_id):
data = {'first_name': '',
'last_name': '',
'email': '',
'slug': '',
'bio': '',
'phone': '',
'is_active': False}
try:
member = StaffMember.objects.get(pk=user_id)
for key in data.keys():
... | Return the user's information in a json object |
def option(self, *args, **kwargs):
args, kwargs = _config_parameter(args, kwargs)
return self._click.option(*args, **kwargs) | Registers a click.option which falls back to a configmanager Item
if user hasn't provided a value in the command line.
Item must be the last of ``args``.
Examples::
config = Config({'greeting': 'Hello'})
@click.command()
@config.click.option('--greeting', ... |
def white_noise(dur=None, low=-1., high=1.):
if dur is None or (isinf(dur) and dur > 0):
while True:
yield random.uniform(low, high)
for x in xrange(rint(dur)):
yield random.uniform(low, high) | White noise stream generator.
Parameters
----------
dur :
Duration, in number of samples; endless if not given (or None).
low, high :
Lower and higher limits. Defaults to the [-1; 1] range.
Returns
-------
Stream yielding random numbers between -1 and 1. |
def rmse(targets, predictions):
r
_supervised_evaluation_error_checking(targets, predictions)
return _turicreate.extensions._supervised_streaming_evaluator(targets,
predictions, "rmse", {}) | r"""
Compute the root mean squared error between two SArrays.
Parameters
----------
targets : SArray[float or int]
An Sarray of ground truth target values.
predictions : SArray[float or int]
The prediction that corresponds to each target value.
This vector must have the sam... |
def verify_edge_segments(edge_infos):
if edge_infos is None:
return
for edge_info in edge_infos:
num_segments = len(edge_info)
for index in six.moves.xrange(-1, num_segments - 1):
index1, start1, end1 = edge_info[index]
if not 0.0 <= start1 < end1 <= 1.0:
... | Verify that the edge segments in an intersection are valid.
.. note::
This is a helper used only by :func:`generic_intersect`.
Args:
edge_infos (Optional[list]): List of "edge info" lists. Each list
represents a curved polygon and contains 3-tuples of edge index,
start ... |
def run(self, func=None):
args = self.parser.parse_args()
if self.__add_vq is not None and self.__config_logging:
self.__config_logging(args)
if self.__show_version_func and args.version and callable(self.__show_version_func):
self.__show_version_func(self, args)
... | Run the app |
def dataReceived(self, data):
self.bytes_in += (len(data))
self.buffer_in = self.buffer_in + data
while self.CheckDataReceived():
pass | Called from Twisted whenever data is received. |
def has_mixture_channel(val: Any) -> bool:
mixture_getter = getattr(val, '_has_mixture_', None)
result = NotImplemented if mixture_getter is None else mixture_getter()
if result is not NotImplemented:
return result
result = has_unitary(val)
if result is not NotImplemented and result:
... | Returns whether the value has a mixture channel representation.
In contrast to `has_mixture` this method falls back to checking whether
the value has a unitary representation via `has_channel`.
Returns:
If `val` has a `_has_mixture_` method and its result is not
NotImplemented, that result... |
def get_compound_pd(self):
entry1 = PDEntry(self.entry1.composition, 0)
entry2 = PDEntry(self.entry2.composition, 0)
cpd = CompoundPhaseDiagram(
self.rxn_entries + [entry1, entry2],
[Composition(entry1.composition.reduced_formula),
Composition(entry2.composit... | Get the CompoundPhaseDiagram object, which can then be used for
plotting.
Returns:
(CompoundPhaseDiagram) |
def subpnt(method, target, et, fixref, abcorr, obsrvr):
method = stypes.stringToCharP(method)
target = stypes.stringToCharP(target)
et = ctypes.c_double(et)
fixref = stypes.stringToCharP(fixref)
abcorr = stypes.stringToCharP(abcorr)
obsrvr = stypes.stringToCharP(obsrvr)
spoint = stypes.empty... | Compute the rectangular coordinates of the sub-observer point on
a target body at a specified epoch, optionally corrected for
light time and stellar aberration.
This routine supersedes :func:`subpt`.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/subpnt_c.html
:param method: Computation ... |
def OnPrintPreview(self, event):
print_area = self._get_print_area()
print_data = self.main_window.print_data
self.main_window.actions.print_preview(print_area, print_data) | Print preview handler |
def probe_async(self, callback):
topics = MQTTTopicValidator(self.prefix)
self.client.publish(topics.probe, {'type': 'command', 'operation': 'probe', 'client': self.name})
callback(self.id, True, None) | Probe for visible devices connected to this DeviceAdapter.
Args:
callback (callable): A callback for when the probe operation has completed.
callback should have signature callback(adapter_id, success, failure_reason) where:
success: bool
fail... |
def proxy(host='localhost', port=4304, flags=0, persistent=False,
verbose=False, ):
try:
gai = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM,
socket.IPPROTO_TCP)
except socket.gaierror as err:
raise ConnError(*err.args)
assert gai
for (fa... | factory function that returns a proxy object for an owserver at
host, port. |
def parse(cls, uri):
uri_components = urlsplit(uri)
adapter_fn = lambda x: x if x is not None and (isinstance(x, str) is False or len(x)) > 0 else None
return cls(
scheme=adapter_fn(uri_components.scheme),
username=adapter_fn(uri_components.username),
password=adapter_fn(uri_components.password),
host... | Parse URI-string and return WURI object
:param uri: string to parse
:return: WURI |
def _allow_custom_expire(self, load):
expire_override = self.opts.get('token_expire_user_override', False)
if expire_override is True:
return True
if isinstance(expire_override, collections.Mapping):
expire_whitelist = expire_override.get(load['eauth'], [])
if... | Return bool if requesting user is allowed to set custom expire |
def plural(self, text, count=None):
pre, word, post = self.partition_word(text)
if not word:
return text
plural = self.postprocess(
word,
self._pl_special_adjective(word, count)
or self._pl_special_verb(word, count)
or self._plnoun(word... | Return the plural of text.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved. |
def get(self, *args, **kwargs):
if not mqueue.qsize():
return None
message_data, content_type, content_encoding = mqueue.get()
return self.Message(backend=self, body=message_data,
content_type=content_type,
content_encoding=content_encodi... | Get the next waiting message from the queue.
:returns: A :class:`Message` instance, or ``None`` if there is
no messages waiting. |
def find(name):
if op.exists(name):
return name
path = op.dirname(__file__) or '.'
paths = [path] + config['include_path']
for path in paths:
filename = op.abspath(op.join(path, name))
if op.exists(filename):
return filename
for d in os.listdir(path):
... | Locate a filename into the shader library. |
def ref(self, orm_classpath, cls_pk=None):
orm_module, orm_class = get_objects(orm_classpath)
q = orm_class.query
if cls_pk:
found = False
for fn, f in orm_class.schema.fields.items():
cls_ref_s = f.schema
if cls_ref_s and self.schema == cl... | takes a classpath to allow query-ing from another Orm class
the reason why it takes string paths is to avoid infinite recursion import
problems because an orm class from module A might have a ref from module B
and sometimes it is handy to have module B be able to get the objects from
m... |
def get_planet(planet_id):
result = _get(planet_id, settings.PLANETS)
return Planet(result.content) | Return a single planet |
def close(self):
try:
self.dut.close()
except Exception:
logging.warning('Closing DUT was not successful')
else:
logging.debug('Closed DUT') | Releasing hardware resources. |
def set(self, name, value, overwrite=False):
if hasattr(self, name):
if overwrite:
setattr(self, name, value)
else:
self._log.warning("Configuration parameter %s exists and overwrite not allowed" % name)
raise Exception("Configuration param... | Sets a new value for a given configuration parameter.
If it already exists, an Exception is thrown.
To overwrite an existing value, set overwrite to True.
:param name: Unique name of the parameter
:param value: Value of the configuration parameter
:param overwrite: If true, an ... |
def output(self, value, normal=False, color=None, error=False,
arrow=False, indent=None):
if error and value and (normal or self.verbose):
return self._print(value, color='red', indent=indent)
if self.verbose or normal:
return self._print(value, color, arrow, inden... | Handles verbosity of this calls.
if priority is set to 1, the value is printed
if class instance verbose is True, the value is printed
:param value:
a string representing the message to be printed
:type value:
String
:param normal:
if set to ... |
def _set_status_data(self, userdata):
self._on_mask = userdata['d3']
self._off_mask = userdata['d4']
self._x10_house_code = userdata['d5']
self._x10_unit = userdata['d6']
self._ramp_rate = userdata['d7']
self._on_level = userdata['d8']
self._led_brightness = userd... | Set status properties from userdata response.
Response values:
d3: On Mask
d4: Off Mask
d5: X10 House Code
d6: X10 Unit
d7: Ramp Rate
d8: On-Level
d9: LED Brightness
d10: Non-Toggle Mask
d11: LED ... |
def calculate_manual_reading(basic_data: BasicMeterData) -> Reading:
t_start = basic_data.previous_register_read_datetime
t_end = basic_data.current_register_read_datetime
read_start = basic_data.previous_register_read
read_end = basic_data.current_register_read
value = basic_data.quantity
uom =... | Calculate the interval between two manual readings |
def list_rocs_files(url=ROCS_URL):
soup = BeautifulSoup(get(url))
if not url.endswith('/'):
url += '/'
files = []
for elem in soup.findAll('a'):
if elem['href'].startswith('?'):
continue
if elem.string.lower() == 'parent directory':
continue
files.... | Gets the contents of the given url. |
def _git_diff(self):
if self._diff_dict is None:
result_dict = dict()
for diff_str in self._get_included_diff_results():
diff_dict = self._parse_diff_str(diff_str)
for src_path in diff_dict.keys():
if self._is_path_excluded(src_path):
... | Run `git diff` and returns a dict in which the keys
are changed file paths and the values are lists of
line numbers.
Guarantees that each line number within a file
is unique (no repeats) and in ascending order.
Returns a cached result if called multiple times.
Raises a... |
def environ_setenv(self, tag, data):
environ = data.get('environ', None)
if environ is None:
return False
false_unsets = data.get('false_unsets', False)
clear_all = data.get('clear_all', False)
import salt.modules.environ as mod_environ
return mod_environ.sete... | Set the salt-minion main process environment according to
the data contained in the minion event data |
def _create_event(self, event_state, event_type, event_value,
proc_list, proc_desc, peak_time):
if event_state == "WARNING" or event_state == "CRITICAL":
self.set_process_sort(event_type)
item = [
time.mktime(datetime.now().timetuple()),
... | Add a new item in the log list.
Item is added only if the criticity (event_state) is WARNING or CRITICAL. |
def patchURL(self, url, headers, body):
return self._load_resource("PATCH", url, headers, body) | Request a URL using the HTTP method PATCH. |
def predict_is(self, h):
result = pd.DataFrame([self.run(h=h)[2]]).T
result.index = self.index[-h:]
return result | Outputs predictions for the Aggregate algorithm on the in-sample data
Parameters
----------
h : int
How many steps to run the aggregating algorithm on
Returns
----------
- pd.DataFrame of ensemble predictions |
def start_tasks(self):
while self.tasks_at_once > len(self.pending_results) and self._has_more_tasks():
task, parent_result = self.tasks.popleft()
self.execute_task(task, parent_result) | Start however many tasks we can based on our limits and what we have left to finish. |
def re_evaluate(local_dict=None):
try:
compiled_ex = _numexpr_last['ex']
except KeyError:
raise RuntimeError("not a previous evaluate() execution found")
argnames = _numexpr_last['argnames']
args = getArguments(argnames, local_dict)
kwargs = _numexpr_last['kwargs']
with evaluate_... | Re-evaluate the previous executed array expression without any check.
This is meant for accelerating loops that are re-evaluating the same
expression repeatedly without changing anything else than the operands.
If unsure, use evaluate() which is safer.
Parameters
----------
local_dict : dicti... |
def thread_pool(*workers, results=None, end_of_queue=EndOfQueue):
if results is None:
results = Queue(end_of_queue=end_of_queue)
count = thread_counter(results.close)
@pull
def thread_pool_results(source):
for worker in workers:
t = threading.Thread(
target=co... | Returns a |pull| object, call it ``r``, starting a thread for each given
worker. Each thread pulls from the source that ``r`` is connected to, and
the returned results are pushed to a |Queue|. ``r`` yields from the other
end of the same |Queue|.
The target function for each thread is |patch|, which c... |
def get_value_as_list(self, dictionary, key):
if key not in dictionary:
return None
value = dictionary[key]
if not isinstance(value, list):
return [value]
else:
return value | Helper function to check and convert a value to list.
Helper function to check and convert a value to json list.
This helps the ribcl data to be generalized across the servers.
:param dictionary: a dictionary to check in if key is present.
:param key: key to be checked if thats present... |
def _get_future_tasks(self):
self.alerts = {}
now = std_now()
for task in objectmodels['task'].find({'alert_time': {'$gt': now}}):
self.alerts[task.alert_time] = task
self.log('Found', len(self.alerts), 'future tasks') | Assemble a list of future alerts |
def getAvg(self,varname,**kwds):
if hasattr(self,varname):
return np.mean(getattr(self,varname),**kwds)
else:
return np.nan | Calculates the average of an attribute of this instance. Returns NaN if no such attribute.
Parameters
----------
varname : string
The name of the attribute whose average is to be calculated. This attribute must be an
np.array or other class compatible with np.mean.
... |
def parse_yaml(self, y):
self._targets = []
if 'targets' in y:
for t in y['targets']:
if 'waitTime' in t['condition']:
new_target = WaitTime()
elif 'preceding' in t['condition']:
new_target = Preceding()
... | Parse a YAML speficication of a message sending object into this
object. |
def create_prefix_dir(self, path, fmt):
create_prefix_dir(self._get_os_path(path.strip('/')), fmt) | Create the prefix dir, if missing |
def transform_cur_commands_interactive(_, **kwargs):
event_payload = kwargs.get('event_payload', {})
cur_commands = event_payload.get('text', '').split(' ')
_transform_cur_commands(cur_commands)
event_payload.update({
'text': ' '.join(cur_commands)
}) | Transform any aliases in current commands in interactive into their respective commands. |
def alias_package(package, alias, extra_modules={}):
path = package.__path__
alias_prefix = alias + '.'
prefix = package.__name__ + '.'
for _, name, _ in pkgutil.walk_packages(path, prefix):
if name.startswith('tango.databaseds.db_access.'):
continue
try:
if name ... | Alias a python package properly.
It ensures that modules are not duplicated by trying
to import and alias all the submodules recursively. |
def _check(self, args):
if sum(bool(args[arg]) for arg in self._mapping) > 1:
raise DocoptExit(_('These options are mutually exclusive: {0}',
', '.join(self._mapping))) | Exit in case of multiple exclusive arguments. |
def get_snapshots(self,**kwargs):
commits = self.repository.get_commits(**kwargs)
snapshots = []
for commit in commits:
for key in ('committer_date','author_date'):
commit[key] = datetime.datetime.fromtimestamp(commit[key+'_ts'])
snapshot = GitSnapshot(com... | Returns a list of snapshots in a given repository. |
def in_query(expression):
def _in(index, expression=expression):
ev = expression() if callable(expression) else expression
try:
iter(ev)
except TypeError:
raise AttributeError('$in argument must be an iterable!')
hashed_ev = [index.get_hash_for(v) for v in ev]... | Match any of the values that exist in an array specified in query. |
def call(method, *args, **kwargs):
kwargs = clean_kwargs(**kwargs)
return getattr(pyeapi_device['connection'], method)(*args, **kwargs) | Calls an arbitrary pyeapi method. |
def resizeEvent(self, event):
curr_item = self.currentItem()
self.closePersistentEditor(curr_item)
super(XMultiTagEdit, self).resizeEvent(event) | Overloads the resize event to control if we are still editing.
If we are resizing, then we are no longer editing. |
def server_session(model=None, session_id=None, url="default", relative_urls=False, resources="default"):
if session_id is None:
raise ValueError("Must supply a session_id")
url = _clean_url(url)
app_path = _get_app_path(url)
elementid = make_id()
modelid = "" if model is None else model.id
... | Return a script tag that embeds content from a specific existing session on
a Bokeh server.
This function is typically only useful for serving from a a specific session
that was previously created using the ``bokeh.client`` API.
Bokeh apps embedded using these methods will NOT set the browser window t... |
def string_asset(class_obj: type) -> type:
assert isinstance(class_obj, type), "class_obj is not a Class"
global _string_asset_resource_type
_string_asset_resource_type = class_obj
return class_obj | Decorator to annotate the StringAsset class. Registers the decorated class
as the StringAsset known type. |
def from_custom_template(cls, searchpath, name):
loader = ChoiceLoader([
FileSystemLoader(searchpath),
cls.loader,
])
class MyStyler(cls):
env = Environment(loader=loader)
template = env.get_template(name)
return MyStyler | Factory function for creating a subclass of ``Styler``
with a custom template and Jinja environment.
Parameters
----------
searchpath : str or list
Path or paths of directories containing the templates
name : str
Name of your custom template to use for re... |
def maybe_pause_consumer(self):
if self.load >= 1.0:
if self._consumer is not None and not self._consumer.is_paused:
_LOGGER.debug("Message backlog over load at %.2f, pausing.", self.load)
self._consumer.pause() | Check the current load and pause the consumer if needed. |
def unique(iterable, key=None):
ensure_iterable(iterable)
key = hash if key is None else ensure_callable(key)
def generator():
seen = set()
for elem in iterable:
k = key(elem)
if k not in seen:
seen.add(k)
yield elem
return generato... | Removes duplicates from given iterable, using given key as criterion.
:param key: Key function which returns a hashable,
uniquely identifying an object.
:return: Iterable with duplicates removed |
def _bsd_addif(br, iface):
kernel = __grains__['kernel']
if kernel == 'NetBSD':
cmd = _tool_path('brconfig')
brcmd = 'add'
else:
cmd = _tool_path('ifconfig')
brcmd = 'addem'
if not br or not iface:
return False
return __salt__['cmd.run']('{0} {1} {2} {3}'.form... | Internal, adds an interface to a bridge |
def special_validate(data, schema):
    """Validate ``data`` against ``schema``, then flag the special cat.

    After successful validation, stores the lowercase string ``'true'``
    under ``data['special']`` exactly when the cat's name is
    ``'Garfield'``, and ``'false'`` otherwise (mutates ``data`` in place).
    """
    jsonschema.validate(data, schema)
    data['special'] = 'true' if data['name'] == 'Garfield' else 'false'
def resolve_response_data(head_key, data_key, data):
new_data = []
if isinstance(data, list):
for data_row in data:
if head_key in data_row and data_key in data_row[head_key]:
if isinstance(data_row[head_key][data_key], list):
new_d... | Resolves the responses you get from billomat
If you have done a get_one_element request then you will get a dictionary
If you have done a get_all_elements request then you will get a list with all elements in it
:param head_key: the head key e.g: CLIENTS
:param data_key: the data key e.... |
def update(self, version, reason=None):
_check_version_format(version)
return self.collection.update({'_id': 'manifest'}, {
'$set': {'version': version},
'$push': {'history': {
'timestamp': datetime.utcnow(), 'version': version,
'reason': reason}}
... | Modify the datamodel's manifest
:param version: New version of the manifest
:param reason: Optional reason of the update (i.g. "Update from x.y.z") |
def get_project(self, project_id):
try:
UUID(project_id, version=4)
except ValueError:
raise aiohttp.web.HTTPBadRequest(text="Project ID {} is not a valid UUID".format(project_id))
if project_id not in self._projects:
raise aiohttp.web.HTTPNotFound(text="Proje... | Returns a Project instance.
:param project_id: Project identifier
:returns: Project instance |
def getKeplerFov(fieldnum):
info = getFieldInfo(fieldnum)
ra, dec, scRoll = info["ra"], info["dec"], info["roll"]
fovRoll = fov.getFovAngleFromSpacecraftRoll(scRoll)
brokenChannels = [5, 6, 7, 8, 17, 18, 19, 20]
if fieldnum > 10:
brokenChannels.extend([9, 10, 11, 12])
if fieldnum == 100... | Returns a `fov.KeplerFov` object for a given campaign.
Parameters
----------
fieldnum : int
K2 Campaign number.
Returns
-------
fovobj : `fov.KeplerFov` object
Details the footprint of the requested K2 campaign. |
def make_parser():
parser = ArgumentParser(
description='Start an IRC bot instance from the command line.',
formatter_class=ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'-v', '--version',
action='version',
version='{0} v{1}'.format(NAME, VERSION)
)
pa... | Creates an argument parser configured with options to run a bot
from the command line.
:return: configured argument parser
:rtype: :class:`argparse.ArgumentParser` |
def add_shellwidget(self, shellwidget):
shellwidget_id = id(shellwidget)
if shellwidget_id not in self.shellwidgets:
self.options_button.setVisible(True)
nsb = NamespaceBrowser(self, options_button=self.options_button)
nsb.set_shellwidget(shellwidget)
... | Register shell with variable explorer.
This function opens a new NamespaceBrowser for browsing the variables
in the shell. |
def chain_tasks(tasks):
    """Chain tasks so each runs after the previous non-None task.

    ``None`` entries are skipped and do not break the chain.

    :param tasks: Tasks list (may be empty or None).
    :return: The given tasks list, unchanged.
    """
    prev = None
    for current in tasks or ():
        if current is None:
            continue
        if prev is not None:
            current.set_run_after(prev)
        prev = current
    return tasks
async def dict(self, full):
    """Open a HiveDict at the given full path.

    Resolves the node at ``full`` first, then wraps it in a ``HiveDict``.
    """
    return await HiveDict.anit(self, await self.open(full))
def write_metadata(self, key, values):
    """Write out a metadata array to the store under *key* as a Series.

    Parameters
    ----------
    key : string
    values : ndarray
    """
    target = self._get_metadata_path(key)
    self.parent.put(
        target,
        Series(values),
        format='table',
        encoding=self.encoding,
        errors=self.errors,
        nan_rep=self.nan_rep,
    )
def cancel(self, request, *args, **kwargs):
    """Cancel the task associated with the specified status record.

    Arguments:
        request (Request): A POST including a task status record ID.

    Returns
    -------
    Response: JSON-serialized status record after the cancel attempt.
    """
    record = self.get_object()
    record.cancel()
    return Response(
        StatusSerializer(record, context={'request': request}).data
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.