code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def date_to_datetime(self, time_input, tz=None):
dt = None
try:
dt = parser.parse(time_input)
if tz is not None and tz != dt.tzname():
if dt.tzinfo is None:
dt = self._replace_timezone(dt)
dt = dt.astimezone(timezone(tz))
... | Convert ISO 8601 and other date strings to datetime.datetime type.
Args:
time_input (string): The time input string (see formats above).
tz (string): The time zone for the returned data.
Returns:
(datetime.datetime): Python datetime.datetime object. |
def _on_wheel_event(self, event):
try:
delta = event.angleDelta().y()
except AttributeError:
delta = event.delta()
if int(event.modifiers()) & QtCore.Qt.ControlModifier > 0:
if delta < self.prev_delta:
self.editor.zoom_out()
eve... | Increments or decrements editor fonts settings on mouse wheel event
if ctrl modifier is on.
:param event: wheel event
:type event: QWheelEvent |
def display_exc(self):
errmsg = self.get_error()
if errmsg is not None:
if self.path is not None:
errmsg_lines = ["in " + self.path + ":"]
for line in errmsg.splitlines():
if line:
line = " " * taberrfmt + line
... | Properly prints an exception in the exception context. |
def load_event_list(filename, **kwargs):
return dcase_util.containers.MetaDataContainer().load(filename=filename, **kwargs) | Load event list from csv formatted text-file
Supported formats (see more `dcase_util.containers.MetaDataContainer.load()` method):
- [event onset (float >= 0)][delimiter][event offset (float >= 0)]
- [event onset (float >= 0)][delimiter][event offset (float >= 0)][delimiter][label]
- [filename][delimi... |
def iiscgi(application):
try:
from wsgiref.handlers import IISCGIHandler
except ImportError:
print("Python 3.2 or newer is required.")
if not __debug__:
warnings.warn("Interactive debugging and other persistence-based processes will not work.")
IISCGIHandler().run(application) | A specialized version of the reference WSGI-CGI server to adapt to Microsoft IIS quirks.
This is not a production quality interface and will behave badly under load. |
def emit_containers(self, containers, verbose=True):
containers = sorted(containers, key=lambda c: c.get('name'))
task_definition = {
'family': self.family,
'containerDefinitions': containers,
'volumes': self.volumes or []
}
if verbose:
ret... | Emits the task definition and sorts containers by name
:param containers: List of the container definitions
:type containers: list of dict
:param verbose: Print out newlines and indented JSON
:type verbose: bool
:returns: The text output
:rtype: str |
def create_config(name=None,
subvolume=None,
fstype=None,
template=None,
extra_opts=None):
def raise_arg_error(argname):
raise CommandExecutionError(
'You must provide a "{0}" for the new configuration'.format(argname)
... | Creates a new Snapper configuration
name
Name of the new Snapper configuration.
subvolume
Path to the related subvolume.
fstype
Filesystem type of the subvolume.
template
Configuration template to use. (Default: default)
extra_opts
Extra Snapper configuration... |
def register_components(self):
unregistered_components = []
for path in self.paths:
for file in foundations.walkers.files_walker(path, ("\.{0}$".format(self.__extension),), ("\._",)):
if not self.register_component(file):
unregistered_components.append(fil... | Registers the Components.
Usage::
>>> manager = Manager(("./manager/tests/tests_manager/resources/components/core",))
>>> manager.register_components()
True
>>> manager.components.keys()
[u'core.tests_component_a', u'core.tests_component_b']
... |
def get_all_external_accounts(resource_root, type_name, view=None):
return call(resource_root.get,
EXTERNAL_ACCOUNT_FETCH_PATH % ("type", type_name,),
ApiExternalAccount, True, params=view and dict(view=view) or None) | Lookup all external accounts of a particular type, by type name.
@param resource_root: The root Resource object.
@param type_name: Type name
@param view: View
@return: An ApiList of ApiExternalAccount objects matching the specified type |
def addUnexpectedSuccess(self, test):
result = self._handle_result(
test, TestCompletionStatus.unexpected_success)
self.unexpectedSuccesses.append(result) | Register a test that passed unexpectedly.
Parameters
----------
test : unittest.TestCase
The test that has completed. |
def get_what_follows_raw(s: str,
prefix: str,
onlyatstart: bool = True,
stripwhitespace: bool = True) -> Tuple[bool, str]:
prefixstart = s.find(prefix)
if ((prefixstart == 0 and onlyatstart) or
(prefixstart != -1 and not only... | Find the part of ``s`` that is after ``prefix``.
Args:
s: string to analyse
prefix: prefix to find
onlyatstart: only accept the prefix if it is right at the start of
``s``
stripwhitespace: remove whitespace from the result
Returns:
tuple: ``(found, result)`` |
def get_all():
info_dir = _get_info_dir()
results = []
for filename in os.listdir(info_dir):
filepath = os.path.join(info_dir, filename)
try:
with open(filepath) as infile:
contents = infile.read()
except IOError as e:
if e.errno == errno.EACCES:
continue
else:
... | Return TensorBoardInfo values for running TensorBoard processes.
This function may not provide a perfect snapshot of the set of running
processes. Its result set may be incomplete if the user has cleaned
their /tmp/ directory while TensorBoard processes are running. It may
contain extraneous entries if TensorB... |
def drop_columns(self, colnames, **kwargs):
new_arr = rfn.drop_fields(
self, colnames, usemask=False, asrecarray=True, **kwargs
)
return self.__class__(
new_arr,
h5loc=self.h5loc,
split_h5=self.split_h5,
name=self.name,
h5si... | Drop columns from the table.
See the docs for ``numpy.lib.recfunctions.drop_fields`` for an
explanation of the remaining options. |
def reference(self):
if self.__reference is None:
self.__reference = _ConstructReference(self.__class__,
pairs=self.__pairs,
app=self.__app,
namespace=self.__namespace)
re... | Return the Reference object for this Key.
This is a entity_pb.Reference instance -- a protocol buffer class
used by the lower-level API to the datastore.
NOTE: The caller should not mutate the return value. |
def add_relationship(self, term1, relationship, term2):
url = self.base_path + 'term/add-relationship'
data = {'term1_id': term1['id'],
'relationship_tid': relationship['id'],
'term2_id': term2['id'],
'term1_version': term1['version'],
'rel... | Creates a relationship between 3 entities in database |
def step(self, provided_inputs):
for wire, value in provided_inputs.items():
wire = self.block.get_wirevector_by_name(wire) if isinstance(wire, str) else wire
if value > wire.bitmask or value < 0:
raise PyrtlError("Wire {} has value {} which cannot be represented"
... | Run the simulation for a cycle
:param provided_inputs: a dictionary mapping WireVectors (or their names)
to their values for this step
eg: {wire: 3, "wire_name": 17} |
def get_background_sids(self, src_filter):
branch_key = self.idx_set["grid_key"]
idist = src_filter.integration_distance(DEFAULT_TRT)
with h5py.File(self.source_file, 'r') as hdf5:
bg_locations = hdf5["Grid/Locations"].value
distances = min_geodetic_distance(
... | We can apply the filtering of the background sites as a pre-processing
step - this is done here rather than in the sampling of the ruptures
themselves |
def get(self, key, fallback=None):
value = None
if key in self._config:
value = self._config[key]
if isinstance(value, Section):
value = None
if value is None:
value = fallback
return value | look up global config values from alot's config
:param key: key to look up
:type key: str
:param fallback: fallback returned if key is not present
:type fallback: str
:returns: config value with type as specified in the spec-file |
def delete(self, db_session=None):
db_session = get_db_session(db_session, self)
db_session.delete(self) | Deletes the object via session, this will permanently delete the
object from storage on commit
:param db_session:
:return: |
def _range_along_dimension(range_dim, shape):
rank = len(shape)
if range_dim >= rank:
raise ValueError("Cannot calculate range along non-existent index.")
indices = tf.range(start=0, limit=shape[range_dim])
indices = tf.reshape(
indices,
shape=[1 if i != range_dim else shape[range_dim] for i in ... | Construct a Tensor whose values are the index along a dimension.
Construct a Tensor that counts the distance along a single dimension. This is
useful, for example, when constructing an identity matrix,
>>> x = _range_along_dimension(0, [2, 2]).eval()
>>> x
array([[0, 0],
[1, 1]], dtype=int3... |
def toggle(path_or_id, badge_kind):
if exists(path_or_id):
with open(path_or_id) as open_file:
for id_or_slug in open_file.readlines():
toggle_badge(id_or_slug.strip(), badge_kind)
else:
toggle_badge(path_or_id, badge_kind) | Toggle a `badge_kind` for a given `path_or_id`
The `path_or_id` is either an id, a slug or a file containing a list
of ids or slugs. |
def GetName(obj):
precondition.AssertType(obj, (type, types.FunctionType))
if PY2:
return obj.__name__.decode("ascii")
else:
return obj.__name__ | A compatibility wrapper for getting object's name.
In Python 2 class names are returned as `bytes` (since class names can contain
only ASCII characters) whereas in Python 3 they are `unicode` (since class
names can contain arbitrary unicode characters).
This function makes this behaviour consistent and always... |
def drop_namespaces(self):
self.session.query(NamespaceEntry).delete()
self.session.query(Namespace).delete()
self.session.commit() | Drop all namespaces. |
def load_file(self, file):
if not foundations.common.path_exists(file):
raise foundations.exceptions.FileExistsError(
"{0} | '{1}' file doesn't exists!".format(self.__class__.__name__,
file))
LOGGER.debug("> Loading '{... | Reads and loads given file into the editor.
:param File: File to load.
:type File: unicode
:return: Method success.
:rtype: bool |
def deserialize(self, data, status_code):
if status_code == 204:
return data
return serializer.Serializer().deserialize(
data)['body'] | Deserializes a JSON string into a dictionary. |
def _get_bmu(self, activations):
if self.argfunc == 'argmax':
activations = -activations
sort = np.argsort(activations, 1)
return sort.argsort() | Get indices of bmus, sorted by their distance from input. |
def imread(path, grayscale=False, size=None, interpolate="bilinear",
channel_first=False, as_uint16=False, num_channels=-1):
_imread_before(grayscale, num_channels)
r_mode = cv2.IMREAD_GRAYSCALE if grayscale else cv2.IMREAD_UNCHANGED
img = _imread_helper(path, r_mode)
if as_uint16 and img.dty... | Read image by cv2 module.
Args:
path (str or 'file object'): File path or object to read.
grayscale (bool):
size (tupple of int):
(width, height).
If None, output img shape depends on the files to read.
channel_first (bool):
This argument specifie... |
def create_primary_zone_by_axfr(self, account_name, zone_name, master, tsig_key=None, key_value=None):
zone_properties = {"name": zone_name, "accountName": account_name, "type": "PRIMARY"}
if tsig_key is not None and key_value is not None:
name_server_info = {"ip": master, "tsigKey": tsig_ke... | Creates a new primary zone by zone transferring off a master.
Arguments:
account_name -- The name of the account that will contain this zone.
zone_name -- The name of the zone. It must be unique.
master -- Primary name server IP address.
Keyword Arguments:
tsig_key -- ... |
def coinc(self, s0, s1, slide, step):
loglr = - s0 - s1
threshes = [self.fits_by_tid[i]['thresh'] for i in self.ifos]
loglr += sum([t**2. / 2. for t in threshes])
return (2. * loglr) ** 0.5 | Calculate the final coinc ranking statistic |
def expect(instr, expected, context):
if not isinstance(instr, expected):
raise DecompilationError(
"Expected a {expected} instruction {context}. Got {instr}.".format(
instr=instr, expected=expected, context=context,
)
)
return instr | Check that an instruction is of the expected type. |
def annotatedcore(self):
logging.info('Calculating annotated core')
self.total_core()
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
sample[self.analysistype].coreset = set()
if sample.general.referencegenus == 'Escherichia':
... | Calculates the core genome of organisms using custom databases |
def attach(self, engine, start=Events.STARTED, pause=Events.COMPLETED, resume=None, step=None):
engine.add_event_handler(start, self.reset)
engine.add_event_handler(pause, self.pause)
if resume is not None:
engine.add_event_handler(resume, self.resume)
if step is not None:
... | Register callbacks to control the timer.
Args:
engine (Engine):
Engine that this timer will be attached to.
start (Events):
Event which should start (reset) the timer.
pause (Events):
Event which should pause the timer.
... |
def reactivate(self):
self._protocol.connectionLost(None)
self._protocol = None
self.terminal.reset()
self._window.filthy()
self._window.repaint() | Called when a sub-protocol is finished. This disconnects the
sub-protocol and redraws the main menu UI. |
def generate_sigv4_auth_request(header_value=None):
request = requests.Request(
method='POST',
url='https://sts.amazonaws.com/',
headers={'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', 'Host': 'sts.amazonaws.com'},
data='Action=GetCallerIdentity&Version=2011-06-1... | Helper function to prepare a AWS API request to subsequently generate a "AWS Signature Version 4" header.
:param header_value: Vault allows you to require an additional header, X-Vault-AWS-IAM-Server-ID, to be present
to mitigate against different types of replay attacks. Depending on the configuration of ... |
def get_descriptions(self, description_type):
(desc_type, max_units) = description_type
results = [None] * max_units
self.elk._descriptions_in_progress[desc_type] = (max_units,
results,
... | Gets the descriptions for specified type.
When complete the callback is called with a list of descriptions |
def enter_room(self, sid, namespace, room):
if namespace not in self.rooms:
self.rooms[namespace] = {}
if room not in self.rooms[namespace]:
self.rooms[namespace][room] = {}
self.rooms[namespace][room][sid] = True | Add a client to a room. |
def hangup(self):
if self.active:
self._gsmModem.write('ATH')
self.answered = False
self.active = False
if self.id in self._gsmModem.activeCalls:
del self._gsmModem.activeCalls[self.id] | End the phone call.
Does nothing if the call is already inactive. |
def check(self, feature):
found = False
for handler in self.handlers:
try:
if handler(feature):
return True
except StopCheckingFeatureFlags:
return False
except NoFeatureFlagFound:
pass
else:
found = True
if not found:
message = u"No f... | Loop through all our feature flag checkers and return true if any of them are true.
The order of handlers matters - we will immediately return True if any handler returns true.
If you want to a handler to return False and stop the chain, raise the StopCheckingFeatureFlags exception. |
def ordered(self, ord='desc'):
if ord not in ('asc', 'desc', ):
raise
ord_f = getattr(PIDRelation.index, ord)()
return self.order_by(ord_f) | Order the query result on the relations' indexes. |
def get_settings(config_file):
default_settings = {
'general': {
'endpoint': 'http://guacamole.antojitos.io/files/',
'shortener': 'http://t.antojitos.io/api/v1/urls',
}
}
settings = configparser.ConfigParser()
try:
settings.read_dict(default_settings)
... | Search and load a configuration file. |
def get_colormap(name, *args, **kwargs):
if isinstance(name, BaseColormap):
cmap = name
else:
if not isinstance(name, string_types):
raise TypeError('colormap must be a Colormap or string name')
if name not in _colormaps:
raise KeyError('colormap name %s not found... | Obtain a colormap
Some colormaps can have additional configuration parameters. Refer to
their corresponding documentation for more information.
Parameters
----------
name : str | Colormap
Colormap name. Can also be a Colormap for pass-through.
Examples
--------
>>> get_co... |
def rm(pattern):
paths = glob.glob(pattern)
for path in paths:
if path.startswith('.git/'):
continue
if os.path.isdir(path):
def onerror(fun, path, excinfo):
exc = excinfo[1]
if exc.errno != errno.ENOENT:
raise
... | Recursively remove a file or dir by pattern. |
def create_and_configure_wrapper(context_or_world):
context_or_world.driver_wrapper = DriverWrappersPool.get_default_wrapper()
context_or_world.utils = context_or_world.driver_wrapper.utils
try:
behave_properties = context_or_world.config.userdata
except AttributeError:
behave_properties... | Create and configure driver wrapper in behave or lettuce tests
:param context_or_world: behave context or lettuce world |
def iter(self, start=0, end=None):
if end is None:
end = self._index + 1
elif end == 0:
raise StopIteration()
if start >= end:
raise StopIteration()
assert 0 <= end <= len(self._history)
assert 0 <= start <= end - 1
for i in range(start... | Iterate through successive history items.
Parameters
----------
end : int
Index of the last item to loop through + 1.
start : int
Initial index for the loop (0 by default). |
def _write_to_datastore(self):
roots_and_submissions = zip([ATTACKS_ENTITY_KEY,
TARGET_ATTACKS_ENTITY_KEY,
DEFENSES_ENTITY_KEY],
[self._attacks,
self._targeted_attacks,
... | Writes all submissions to datastore. |
def list_to_bytes_list(strList):
pList = c_char_p * len(strList)
if isinstance(strList, (pList, type(None))):
return strList
if not isinstance(strList, (list, set, tuple)):
raise TypeError("strList must be list, set or tuple, not " +
str(type(strList)))
pList = pList()
... | This function turns an array of strings into a pointer array
with pointers pointing to the encodings of those strings
Possibly contained bytes are kept as they are.
:param strList: List of strings that shall be converted
:type strList: List of strings
:returns: Pointer array with pointers pointing ... |
def pop_scope(self):
child_scope = self.stack.current.current.copy()
self.stack.current.pop()
parent_scope = self.stack.current.current.copy()
self.stack.current.current = {
key: child_scope[key] for key in child_scope if key in parent_scope
} | Delete the current scope in the current scope. |
def in_reply_to(self) -> Optional[UnstructuredHeader]:
try:
return cast(UnstructuredHeader, self[b'in-reply-to'][0])
except (KeyError, IndexError):
return None | The ``In-Reply-To`` header. |
def is_console(self, users_text):
if users_text is None:
self.log("Console information not collected")
return None
for line in users_text.split('\n'):
if '*' in line:
match = re.search(self.vty_re, line)
if match:
se... | Return if device is connected over console. |
def _format_volume_string(self, volume_string):
self.actual_volume = int(volume_string.split(self.volume_string)[1].split(',')[0].split()[0])
return '[Vol: {}%] '.format(int(100 * self.actual_volume / self.max_volume)) | format vlc's volume |
def vicinity(self):
if self._vicinity == '' and self.details != None and 'vicinity' in self.details:
self._vicinity = self.details['vicinity']
return self._vicinity | Returns a feature name of a nearby location.
Often this feature refers to a street or neighborhood within the given
results. |
def get_messages(self, page=0):
endpoint = 'https://outlook.office.com/api/v2.0/me/messages'
if page > 0:
endpoint = endpoint + '/?%24skip=' + str(page) + '0'
log.debug('Getting messages from endpoint: {} with Headers: {}'.format(endpoint, self._headers))
r = requests.get(end... | Get first 10 messages in account, across all folders.
Keyword Args:
page (int): Integer representing the 'page' of results to fetch
Returns:
List[:class:`Message <pyOutlook.core.message.Message>`] |
def mobile_template(template):
def decorator(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
ctx = stack.top
if ctx is not None and hasattr(ctx, 'request'):
request = ctx.request
is_mobile = getattr(request, 'MOBILE', None)
... | Mark a function as mobile-ready and pass a mobile template if MOBILE.
For example::
@mobile_template('a/{mobile/}b.html')
def view(template=None):
...
if ``request.MOBILE=True`` the template will be `a/mobile/b.html`.
if ``request.MOBILE=False`` the template will be `a/b.html... |
def DisplayEstimate(message, min_estimate, max_estimate):
mean_avg_cpc = (_CalculateMean(min_estimate['averageCpc']['microAmount'],
max_estimate['averageCpc']['microAmount'])
if 'averageCpc' in min_estimate
and min_estimate['averageCpc'] else None)
... | Displays mean average cpc, position, clicks, and total cost for estimate.
Args:
message: str message to display for the given estimate.
min_estimate: sudsobject containing a minimum estimate from the
TrafficEstimatorService response.
max_estimate: sudsobject containing a maximum estimate from the
... |
def show_tooltip(self, pos, tooltip, _sender_deco=None):
if _sender_deco is not None and _sender_deco not in self.decorations:
return
QtWidgets.QToolTip.showText(pos, tooltip[0: 1024], self) | Show a tool tip at the specified position
:param pos: Tooltip position
:param tooltip: Tooltip text
:param _sender_deco: TextDecoration which is the sender of the show
tooltip request. (for internal use only). |
def load_reader_options():
options = os.environ['PANDOC_READER_OPTIONS']
options = json.loads(options, object_pairs_hook=OrderedDict)
return options | Retrieve Pandoc Reader options from the environment |
def _make_minimal(dictionary):
new_dict = {}
for key, value in dictionary.items():
if value is not None:
if isinstance(value, dict):
new_value = _make_minimal(value)
if new_value:
new_dict[key] = new_value
else:
... | This function removes all the keys whose value is either None or an empty
dictionary. |
def is_valid_vpnv4_prefix(prefix):
if not isinstance(prefix, str):
return False
tokens = prefix.split(':', 2)
if len(tokens) != 3:
return False
if not is_valid_route_dist(':'.join([tokens[0], tokens[1]])):
return False
return is_valid_ipv4_prefix(tokens[2]) | Returns True if given prefix is a string represent vpnv4 prefix.
Vpnv4 prefix is made up of RD:Ipv4, where RD is represents route
distinguisher and Ipv4 represents valid dot-decimal ipv4 notation string. |
def start(self):
if not self._done_event.is_set():
return
self._done_event.clear()
nb_pending_tasks = self._queue.qsize()
if nb_pending_tasks > self._max_threads:
nb_threads = self._max_threads
nb_pending_tasks = self._max_threads
elif nb_pendi... | Starts the thread pool. Does nothing if the pool is already started. |
def write(self, file_or_path, append=False, timeout=10):
if isinstance(file_or_path, six.string_types):
if self.coverage:
file_or_path = get_smother_filename(
file_or_path, self.coverage.config.parallel)
outfile = Lock(
file_or_path, mo... | Write Smother results to a file.
Parameters
----------
fiile_or_path : str
Path to write report to
append : bool
If True, read an existing smother report from `outpath`
and combine it with this file before writing.
timeout : int
Ti... |
def error_log(self, msg='', level=20, traceback=False):
sys.stderr.write(msg + '\n')
sys.stderr.flush()
if traceback:
tblines = traceback_.format_exc()
sys.stderr.write(tblines)
sys.stderr.flush() | Write error message to log.
Args:
msg (str): error message
level (int): logging level
traceback (bool): add traceback to output or not |
def handleOneClientMsg(self, wrappedMsg):
try:
vmsg = self.validateClientMsg(wrappedMsg)
if vmsg:
self.unpackClientMsg(*vmsg)
except BlowUp:
raise
except Exception as ex:
msg, frm = wrappedMsg
friendly = friendlyEx(ex)
... | Validate and process a client message
:param wrappedMsg: a message from a client |
def GetUserinfo(credentials, http=None):
http = http or httplib2.Http()
url = _GetUserinfoUrl(credentials)
response, content = http.request(url)
if response.status == http_client.BAD_REQUEST:
credentials.refresh(http)
url = _GetUserinfoUrl(credentials)
response, content = http.re... | Get the userinfo associated with the given credentials.
This is dependent on the token having either the userinfo.email or
userinfo.profile scope for the given token.
Args:
credentials: (oauth2client.client.Credentials) incoming credentials
http: (httplib2.Http, optional) http instance to use
... |
def compute_colors_for_labels(self, labels):
colors = labels[:, None] * self.palette
colors = (colors % 255).numpy().astype("uint8")
return colors | Simple function that adds fixed colors depending on the class |
def _process_batch_write_response(request, response, table_crypto_config):
try:
unprocessed_items = response["UnprocessedItems"]
except KeyError:
return response
for table_name, unprocessed in unprocessed_items.items():
original_items = request[table_name]
crypto_config = tab... | Handle unprocessed items in the response from a transparently encrypted write.
:param dict request: The DynamoDB plaintext request dictionary
:param dict response: The DynamoDB response from the batch operation
:param Dict[Text, CryptoConfig] table_crypto_config: table level CryptoConfig used in encrypting... |
def publish_server_heartbeat_succeeded(self, connection_id, duration,
reply):
event = ServerHeartbeatSucceededEvent(duration, reply, connection_id)
for subscriber in self.__server_heartbeat_listeners:
try:
subscriber.succeeded(event)... | Publish a ServerHeartbeatSucceededEvent to all server heartbeat
listeners.
:Parameters:
- `connection_id`: The address (host/port pair) of the connection.
- `duration`: The execution time of the event in the highest possible
resolution for the platform.
- `reply`:... |
def put(self, body, priority=DEFAULT_PRIORITY, delay=0, ttr=DEFAULT_TTR):
assert isinstance(body, str), 'Job body must be a str instance'
jid = self._interact_value('put %d %d %d %d\r\n%s\r\n' % (
priority, delay, ttr, len(body), body),
... | Put a job into the current tube. Returns job id. |
def _char_density(self, c, font=ImageFont.load_default()):
image = Image.new('1', font.getsize(c), color=255)
draw = ImageDraw.Draw(image)
draw.text((0, 0), c, fill="white", font=font)
return collections.Counter(image.getdata())[0] | Count the number of black pixels in a rendered character. |
def _tile_ticks(self, frac, tickvec):
origins = np.tile(self.axis._vec, (len(frac), 1))
origins = self.axis.pos[0].T + (origins.T*frac).T
endpoints = tickvec + origins
return origins, endpoints | Tiles tick marks along the axis. |
def _peek_buffer(self, i=0):
while len(self._buffer) <= i:
self._buffer.append(next(self._source))
return self._buffer[i] | Get the next line without consuming it. |
def filing_history(self, num, transaction=None, **kwargs):
baseuri = self._BASE_URI + "company/{}/filing-history".format(num)
if transaction is not None:
baseuri += "/{}".format(transaction)
res = self.session.get(baseuri, params=kwargs)
self.handle_http_error(res)
re... | Search for a company's filling history by company number.
Args:
num (str): Company number to search on.
transaction (Optional[str]): Filing record number.
kwargs (dict): additional keywords passed into
requests.session.get params keyword. |
def validate_reaction(self):
if self.reaction not in self._reaction_valid_values:
raise ValueError("reaction should be one of: {valid}".format(
valid=", ".join(self._reaction_valid_values)
)) | Ensure reaction is of a certain type.
Mainly for future expansion. |
def collection(name=None):
if name is None:
collection = Collection.query.get_or_404(1)
else:
collection = Collection.query.filter(
Collection.name == name).first_or_404()
return render_template([
'invenio_collections/collection_{0}.html'.format(collection.id),
'i... | Render the collection page.
It renders it either with a collection specific template (aka
collection_{collection_name}.html) or with the default collection
template (collection.html). |
def merge(cls, *others):
for other in others:
for k, v in other:
setattr(cls, k, BoundValue(cls, k, v.value)) | Merge the `others` schema into this instance.
The values will all be read from the provider of the original object. |
def extract_params(params):
values = []
if isinstance(params, dict):
for key, value in params.items():
values.extend(extract_params(value))
elif isinstance(params, list):
for value in params:
values.extend(extract_params(value))
else:
values.append(params)... | Extracts the values of a set of parameters, recursing into nested dictionaries. |
def _check_list_minions(self, expr, greedy, ignore_missing=False):
if isinstance(expr, six.string_types):
expr = [m for m in expr.split(',') if m]
minions = self._pki_minions()
return {'minions': [x for x in expr if x in minions],
'missing': [] if ignore_missing else ... | Return the minions found by looking via a list |
def create_password_reset(cls, email, valid_for=3600) -> str:
user = cls.where_email(email)
if user is None:
return None
PasswordResetModel.delete_where_user_id(user.id)
token = JWT().create_token({
'code': Security.random_string(5),
'user_id': u... | Create a password reset request in the user_password_resets
database table. Hashed code gets stored in the database.
Returns unhashed reset code |
def VerifyStructure(self, parser_mediator, lines):
match_generator = self._VERIFICATION_GRAMMAR.scanString(lines, maxMatches=1)
return bool(list(match_generator)) | Verifies that this is a bash history file.
Args:
parser_mediator (ParserMediator): mediates interactions between
parsers and other components, such as storage and dfvfs.
lines (str): one or more lines from the text file.
Returns:
bool: True if this is the correct parser, False othe... |
def link_sources(self):
"Returns potential Link or Stream sources."
if isinstance(self, GenericOverlayPlot):
zorders = []
elif self.batched:
zorders = list(range(self.zorder, self.zorder+len(self.hmap.last)))
else:
zorders = [self.zorder]
if is... | Returns potential Link or Stream sources. |
def visit_Call(self, node: AST, dfltChaining: bool = True) -> str:
args = node.args
try:
kwds = node.keywords
except AttributeError:
kwds = []
self.compact = True
args_src = (self.visit(arg) for arg in args)
kwds_src = (self.visit(kwd) for kwd in k... | Return `node`s representation as function call. |
def abundances(self, ids=None):
if ids is None:
return self.table()
else:
res = self.table()
return res[res["tax_id"].isin(ids)] | Query the results table to get abundance data for all or some tax ids |
def add_external_reference_to_entity(self,entity_id, external_ref):
if self.entity_layer is not None:
self.entity_layer.add_external_reference_to_entity(entity_id,external_ref) | Adds an external reference to the given entity identifier in the entity layer
@type entity_id: string
@param entity_id: the entity identifier
@param external_ref: an external reference object
@type external_ref: L{CexternalReference} |
def binOp(op, indx, amap, bmap, fill_vec):
def op_or_missing(id):
va = amap.get(id, None)
vb = bmap.get(id, None)
if va is None or vb is None:
result = fill_vec
else:
try:
result = op(va, vb)
except Exception:
... | Combines the values from two map objects using the indx values
using the op operator. In situations where there is a missing value
it will use the callable function handle_missing |
def update_allowed(self):
return self.update_action.allowed(self.column.table.request,
self.datum,
self) | Determines whether update of given cell is allowed.
Calls allowed action of defined UpdateAction of the Column. |
def get_delete_branch_command(self, branch_name, message, author):
tokens = ['hg update --rev=%s && hg commit' % quote(branch_name)]
if author:
tokens.append('--user=%s' % quote(author.combined))
tokens.append('--message=%s' % quote(message))
tokens.append('--close-branch')
... | Get the command to delete or close a branch in the local repository. |
def insert(self, index, value):
return super(Collection, self).insert(
index, self._ensure_value_is_valid(value)) | Insert an item at a given position. |
def cli(obj, ids, query, filters, tags):
client = obj['client']
if ids:
total = len(ids)
else:
if query:
query = [('q', query)]
else:
query = build_query(filters)
total, _, _ = client.get_count(query)
ids = [a.id for a in client.get_alerts(quer... | Remove tags from alerts. |
def clean(self):
    """Validate that ``self.event`` is a recognized hook event.

    :raises ValidationError: if the event is not a key of HOOK_EVENTS.
    """
    # Membership test directly on the dict is equivalent to (and
    # cheaper than) building a keys view first.
    if self.event not in HOOK_EVENTS:
        raise ValidationError(
            "Invalid hook event {evt}.".format(evt=self.event)
        )
def download_and_bootstrap(src, name, prereq=None):
if prereq:
prereq_cmd = '{0} -c "{1}"'.format(PY_EXE, prereq)
rv = os.system(prereq_cmd)
if rv == 0:
return
ulp = urllib2.urlopen(src)
fp = open(name, "wb")
fp.write(ulp.read())
fp.close()
cmdline = "{0} {1}"... | Download and install something if 'prerequisite' fails |
def run(self, start_command_srv):
    """Setup daemon process, start child forks, and sleep until
    events are signalled.

    `start_command_srv`
        Set to ``True`` if command server should be started.
    """
    if start_command_srv:
        self._command_server.start()
    # NOTE(review): privileges are dropped only after the command
    # server is started -- presumably it needs elevated rights to set
    # up; confirm before reordering.
    self._drop_privs()
    self._task_runner.start()
    self._reg_sighandlers()
    # Main thread idles; a signal handler registered above is
    # presumably what clears ``self.running`` -- TODO confirm.
    while self.running:
        time.sleep(self._sleep_period)
    self.shutdown()
def _update_physical_disk_details(raid_config, server):
raid_config['physical_disks'] = []
physical_drives = server.get_physical_drives()
for physical_drive in physical_drives:
physical_drive_dict = physical_drive.get_physical_drive_dict()
raid_config['physical_disks'].append(physical_drive_... | Adds the physical disk details to the RAID configuration passed. |
def _load(self, scale=1.0):
    """Load the SLSTR relative spectral responses.

    :param scale: multiplicative factor applied to the wavelength
        axis (e.g. for unit conversion).
    """
    LOG.debug("File: %s", str(self.requested_band_filename))
    ncf = Dataset(self.requested_band_filename, 'r')
    try:
        wvl = ncf.variables['wavelength'][:] * scale
        resp = ncf.variables['response'][:]
    finally:
        # The original leaked the open netCDF handle; always close it.
        ncf.close()
    self.rsr = {'wavelength': wvl, 'response': resp}
def signRequest(self,
req: Request,
identifier: Identifier=None) -> Request:
idr = self.requiredIdr(idr=identifier or req._identifier)
req._identifier = idr
req.reqId = req.gen_req_id()
req.signature = self.signMsg(msg=req.signingPayloadState(ident... | Signs request. Modifies reqId and signature. May modify identifier.
:param req: request
:param requestIdStore: request id generator
:param identifier: signer identifier
:return: signed request |
def matrixToMathTransform(matrix):
    """Convert a 6-tuple affine matrix into a ShallowTransform.

    A value that is already a ShallowTransform is returned unchanged.
    """
    if isinstance(matrix, ShallowTransform):
        return matrix
    offset, scale, rotation = MathTransform(matrix).decompose()
    return ShallowTransform(offset, scale, rotation)
def run_and_print_log(workflow, highlight=None):
from noodles.run.threading.sqlite3 import run_parallel
from noodles import serial
import io
import logging
log = io.StringIO()
log_handler = logging.StreamHandler(log)
formatter = logging.Formatter('%(asctime)s - %(message)s')
log_handler.... | Run workflow on multi-threaded worker cached with Sqlite3.
:param workflow: workflow to evaluate.
:param highlight: highlight these lines. |
def process_management_config_section(config, management_config):
    """Processes the management section from a configuration data dict.

    :param config: The config reference of the object that will hold
        the configuration data; its ``management['commands']`` list
        receives the commands in order.
    :param management_config: Management section from a config data
        dict.
    """
    # extend() appends each command in order, exactly like the
    # original per-item loop, and is a no-op when the key is absent.
    config.management['commands'].extend(
        management_config.get('commands', ()))
def tangent(obj, params, **kwargs):
normalize = kwargs.get('normalize', True)
if isinstance(obj, abstract.Curve):
if isinstance(params, (list, tuple)):
return ops.tangent_curve_single_list(obj, params, normalize)
else:
return ops.tangent_curve_single(obj, params, normaliz... | Evaluates the tangent vector of the curves or surfaces at the input parameter values.
This function is designed to evaluate tangent vectors of the B-Spline and NURBS shapes at single or
multiple parameter positions.
:param obj: input shape
:type obj: abstract.Curve or abstract.Surface
:param param... |
def get(self, key):
    """Get a value from redis by key.

    :param key: the redis key to look up.
    :return: whatever the underlying redis connection returns.
    """
    # The bare print(res) in the original was debug residue echoing
    # every lookup to stdout; removed.
    return self.connection.get(key)
def to_timezone(dt, tzinfo=None):
    """Convert a datetime to a timezone.

    Falsy *dt* values are returned untouched.  The target zone is
    resolved via ``pick_timezone(tzinfo, __timezone__)``; when none
    resolves, *dt* is returned as-is.  A naive datetime has the zone
    attached; an aware one is converted with ``astimezone``.
    """
    if not dt:
        return dt
    target = pick_timezone(tzinfo, __timezone__)
    if not target:
        return dt
    if not getattr(dt, 'tzinfo', None):
        return dt.replace(tzinfo=target)
    return dt.astimezone(target)
def use(module=None, decode=None, encode=None):
global _decode, _encode, _initialized, _using
if module is not None:
if not isinstance(module, basestring):
module = module.__name__
if module not in ('cjson', 'json', 'simplejson'):
raise ValueError('Unsupported JSON module... | Set the JSON library that should be used, either by specifying a known
module name, or by providing a decode and encode function.
The modules "simplejson", "cjson", and "json" are currently supported for
the ``module`` parameter.
If provided, the ``decode`` parameter must be a callable that accepts a
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.