code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def _init(self):
    """Read the line number from the underlying file stream."""
    last_requested = self._communication.last_requested_line_number
    first_byte = self._file.read(1)[0]
    self._line_number = next_line(last_requested, first_byte)
def list(ctx):
    """List all config values."""
    log.debug('chemdataextractor.config.list')
    for key in config:
        value = config[key]
        click.echo('%s : %s' % (key, value))
def equalize_terminal_double_bond(mol):
    """Show equalized double bond if it is connected to a terminal atom."""
    for idx, _atom in mol.atoms_iter():
        # Only atoms with a single neighbor are terminal.
        if mol.neighbor_count(idx) != 1:
            continue
        bond = list(mol.neighbors(idx).values())[0]
        if bond.order == 2:
            bond.type = 2
def visit_Bytes(self, node: ast.Bytes) -> bytes:
    """Recompute the value as the bytes at the node."""
    value = node.s
    self.recomputed_values[node] = value
    return value
def process_message(message, notification):
if not set(VITAL_MESSAGE_FIELDS) <= set(message):
logger.info('JSON Message Missing Vital Fields')
return HttpResponse('Missing Vital Fields')
if message['notificationType'] == 'Complaint':
return process_complaint(message, notification)
if... | Function to process a JSON message delivered from Amazon |
def wipe_table(self, table: str) -> int:
    """Delete all records from a table. Use caution!"""
    statement = "DELETE FROM " + self.delimit(table)
    return self.db_exec(statement)
def getall(self, table):
try:
self._check_db()
except Exception as e:
self.err(e, "Can not connect to database")
return
if table not in self.db.tables:
self.warning("The table " + table + " does not exists")
return
try:
... | Get all rows values for a table |
def build_day(self, dt):
self.month = str(dt.month)
self.year = str(dt.year)
self.day = str(dt.day)
logger.debug("Building %s-%s-%s" % (self.year, self.month, self.day))
self.request = self.create_request(self.get_url())
path = self.get_build_path()
self.build_fil... | Build the page for the provided day. |
def run_add_system(name, token, org, system, prompt):
repo = get_repo(token=token, org=org, name=name)
try:
repo.create_label(name=system.strip(), color=SYSTEM_LABEL_COLOR)
click.secho("Successfully added new system {}".format(system), fg="green")
if prompt and click.confirm("Run update ... | Adds a new system to the repo. |
def R_isrk(self, k):
ind = int(self.index[self.R_time_var_index, k])
R = self.R[:, :, ind]
if (R.shape[0] == 1):
inv_square_root = np.sqrt(1.0/R)
else:
if self.svd_each_time:
(U, S, Vh) = sp.linalg.svd(R, full_matrices=False,
... | Function returns the inverse square root of R matrix on step k. |
def getfields(comm):
    """Get all the fields that have the key 'field'."""
    return [field for field in comm if 'field' in field]
def _no_ntplt(self, ntplt):
    """Print a message about the GO DAG plot we are NOT plotting."""
    # Writes directly to stdout; {GO_USR:>6,}/{GO_ALL:>6,} right-align the
    # counts with thousands separators.
    sys.stdout.write(" {GO_USR:>6,} usr {GO_ALL:>6,} GOs DID NOT WRITE: {B} {D}\n".format(
        B=self.grprobj.get_fout_base(ntplt.hdrgo),
        D=ntplt.desc,
        GO_USR=len(ntplt.gosubdag.go_sources),
        GO_ALL=len(ntplt.gosubdag.go2obj)))
def hash160(self, is_compressed=None):
if is_compressed is None:
is_compressed = self.is_compressed()
if is_compressed:
if self._hash160_compressed is None:
self._hash160_compressed = hash160(self.sec(is_compressed=is_compressed))
return self._hash160_... | Return the hash160 representation of this key, if available. |
def transloadsForPeer(self, peer):
    """Returns an iterator of transloads that apply to a particular peer."""
    # Generator function (Python 2 style dict iteration via itervalues).
    for transload in self.transloads.itervalues():
        if peer not in transload.peers:
            continue
        yield transload
def transmit_tc_bpdu(self):
    """Set send_tc_flg to send Topology Change BPDU."""
    if self.send_tc_flg:
        return
    hold_seconds = self.port_times.max_age + self.port_times.forward_delay
    self.send_tc_timer = (datetime.datetime.today()
                          + datetime.timedelta(seconds=hold_seconds))
    self.send_tc_flg = True
def distances(self):
    """The matrix with the all-pairs shortest path lengths.

    Builds an integer adjacency matrix (1 in both directions for every
    edge) and runs the compiled Floyd-Warshall routine over it.
    """
    from molmod.ext import graphs_floyd_warshall
    distances = np.zeros((self.num_vertices,)*2, dtype=int)
    for i, j in self.edges:
        distances[i, j] = 1
        distances[j, i] = 1
    # NOTE(review): graphs_floyd_warshall appears to update `distances`
    # in place, since its return value is ignored — confirm against
    # molmod.ext's signature.
    graphs_floyd_warshall(distances)
    return distances
def load_cfg(self):
    """Load our config object, accessible via self.cfg."""
    with open(self.cfg_file) as handle:
        if self.cfg_mode == 'json':
            return json.load(handle)
        return yaml.safe_load(handle)
def refactor_ifs(stmnt, ifs):
if isinstance(stmnt, _ast.BoolOp):
test, right = stmnt.values
if isinstance(stmnt.op, _ast.Or):
test = _ast.UnaryOp(op=_ast.Not(), operand=test, lineno=0, col_offset=0)
ifs.append(test)
return refactor_ifs(right, ifs)
return stmnt | for if statements in list comprehension |
def _linear_seaborn_(self, label=None, style=None, opts=None):
xticks, yticks = self._get_ticks(opts)
try:
fig = sns.lmplot(self.x, self.y, data=self.df)
fig = self._set_with_height(fig, opts)
return fig
except Exception as e:
self.err(e, self.line... | Returns a Seaborn linear regression plot |
def _parse_feature_names(feature_names, new_names):
if isinstance(feature_names, set):
return FeatureParser._parse_names_set(feature_names)
if isinstance(feature_names, dict):
return FeatureParser._parse_names_dict(feature_names)
if isinstance(feature_names, (tuple, ... | Helping function of `_parse_features` that parses a collection of feature names. |
def _reset(self):
    """Called when the filter has been changed.

    Restarts the component under the lock, then unbinds every bound
    service that no longer matches the updated requirement filter.
    """
    with self._lock:
        # Restart so new lookups use the updated filter.
        self.stop()
        self.start()
        # Drop bindings the new filter rejects.
        for svc_ref in self.get_bindings():
            if not self.requirement.filter.matches(
                svc_ref.get_properties()
            ):
                self.on_service_departure(svc_ref)
def run(self, args):
args = vars(args)
positionals = []
keywords = {}
for action in self.argparser._actions:
if not hasattr(action, 'label'):
continue
if action.label == 'positional':
positionals.append(args[action.dest])
... | Convert the unordered args into function arguments. |
def clip_grad(learn:Learner, clip:float=0.1)->Learner:
    "Add gradient clipping of `clip` during training."
    callback_factory = partial(GradientClipping, clip=clip)
    learn.callback_fns.append(callback_factory)
    return learn
def unlock(name,
zk_hosts=None,
identifier=None,
max_concurrency=1,
ephemeral_lease=False,
profile=None,
scheme=None,
username=None,
password=None,
default_acl=None):
ret = {'name': name,
'changes': {},
... | Remove lease from semaphore. |
def _init_socket(self):
if self.ddpsocket:
self.ddpsocket.remove_all_listeners('received_message')
self.ddpsocket.remove_all_listeners('closed')
self.ddpsocket.remove_all_listeners('opened')
self.ddpsocket.close_connection()
self.ddpsocket = None
... | Initialize the ddp socket |
def check_extension(conn, extension: str) -> bool:
query = 'SELECT installed_version FROM pg_available_extensions WHERE name=%s;'
with conn.cursor() as cursor:
cursor.execute(query, (extension,))
result = cursor.fetchone()
if result is None:
raise psycopg2.ProgrammingError(
... | Check to see if an extension is installed. |
def close(self):
self._input.close()
self._call_parse()
root = self._pop_message()
assert not self._msgstack
if root.get_content_maintype() == 'multipart' \
and not root.is_multipart():
defect = errors.MultipartInvariantViolationDefect()
sel... | Parse all remaining data and return the root message object. |
def cli(env):
mgr = SoftLayer.LoadBalancerManager(env.client)
table = formatting.Table(['price_id', 'capacity', 'description', 'price'])
table.sortby = 'price'
table.align['price'] = 'r'
table.align['capacity'] = 'r'
table.align['id'] = 'r'
packages = mgr.get_lb_pkgs()
for package in pac... | Get price options to create a load balancer with. |
def relabel(self, qubits: Qubits) -> 'Gate':
    """Return a copy of this Gate with new qubits."""
    clone = copy(self)
    clone.vec = clone.vec.relabel(qubits)
    return clone
def setData(self, index, value, role=Qt.EditRole):
item = self.itemAt(index)
if not item:
return False
d = item.declaration
if role == Qt.CheckStateRole:
checked = value == Qt.Checked
if checked != d.checked:
d.checked = checked
... | Set the data for the item at the given index to the given value. |
def wiki(searchterm):
searchterm = quote(searchterm)
url = "https://en.wikipedia.org/w/api.php?action=query&list=search&srsearch={0}&format=json"
url = url.format(searchterm)
result = requests.get(url).json()
pages = result["query"]["search"]
pages = [p for p in pages if 'may refer to' not in p[... | return the top wiki search result for the term |
def __inner_predict(self, data_idx):
if data_idx >= self.__num_dataset:
raise ValueError("Data_idx should be smaller than number of dataset")
if self.__inner_predict_buffer[data_idx] is None:
if data_idx == 0:
n_preds = self.train_set.num_data() * self.__num_class... | Predict for training and validation dataset. |
def view_required_params_per_trt(token, dstore):
csm_info = dstore['csm_info']
tbl = []
for grp_id, trt in sorted(csm_info.grp_by("trt").items()):
gsims = csm_info.gsim_lt.get_gsims(trt)
maker = ContextMaker(trt, gsims)
distances = sorted(maker.REQUIRES_DISTANCES)
siteparams ... | Display the parameters needed by each tectonic region type |
def _run(name,
cmd,
exec_driver=None,
output=None,
stdin=None,
python_shell=True,
output_loglevel='debug',
ignore_retcode=False,
use_vt=False,
keep_env=None):
if exec_driver is None:
exec_driver = _get_exec_driver()
ret = _... | Common logic for docker.run functions |
def str2float(text):
    """Remove uncertainty brackets from strings and return the float.

    Accepts a string such as ``"3.14(5)"`` (or a single-element list
    containing one) and strips the parenthesised uncertainty before
    converting to ``float``.

    Returns:
        float: the parsed value; ``0.0`` when the text is the bare
        ``"."`` missing-value placeholder.

    Raises:
        ValueError: if the text cannot be parsed as a number.
    """
    try:
        return float(re.sub(r"\(.+\)*", "", text))
    except TypeError:
        # Not a string: tolerate a single-element list of one string.
        if isinstance(text, list) and len(text) == 1:
            return float(re.sub(r"\(.+\)*", "", text[0]))
    except ValueError:
        # A lone "." is used as a missing-value placeholder.
        if text.strip() == ".":
            return 0.0  # was `return 0` — keep the return type consistent
        # Bare re-raise preserves the original traceback (was `raise ex`).
        raise
def _convert_label(label):
    """Convert a label from 1.X to the new format."""
    style = qt_font_to_style(label.get("font"), label.get("color"))
    converted = {
        "text": html.escape(label["text"]),
        "rotation": 0,
        "style": style,
        "x": int(label["x"]),
        "y": int(label["y"]),
    }
    return converted
def _renameClasses(classes, prefix):
renameMap = {}
for classID, glyphList in classes.items():
if len(glyphList) == 0:
groupName = "%s_empty_lu.%d_st.%d_cl.%d" % (prefix, classID[0], classID[1], classID[2])
elif len(glyphList) == 1:
groupName = list(glyphList)[0]
... | Replace class IDs with nice strings. |
def graceful_ctrlc(func):
    """Make the decorated function exit with code 1 on CTRL+C."""
    def _shielded(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyboardInterrupt:
            exit(1)
    return wraps(func)(_shielded)
def from_fqdn(cls, fqdn):
    """Retrieve the domain id associated to a FQDN.

    Returns:
        The id of the first matching domain, or ``None`` when no domain
        matches ``fqdn``.
    """
    result = cls.list({'fqdn': fqdn})
    if result:  # truthiness instead of len(...) > 0
        return result[0]['id']
    return None  # make the no-match outcome explicit
def async_get_measurements(self, uid, fields='*'):
    """Get measurements of a device.

    Generator-based coroutine (pre-async/await style): delegates to the
    client's ``_get`` and returns the first element of the response.
    """
    # NOTE(review): assumes `_get` yields a non-empty sequence — an empty
    # response would raise IndexError here; confirm against the API.
    return (yield from self._get('/pods/{}/measurements'.format(uid),
                                 fields=fields))[0]
def _read_mz(self, mz_offset, mz_len, mz_enc_len):
self.ibd.seek(mz_offset)
data = self.ibd.read(mz_enc_len)
self.ibd.seek(0, 2)
data = self.mz_compression.decompress(data)
return tuple(np.fromstring(data, dtype=self.mz_dtype)) | reads a mz array from the currently open ibd file |
def list_ebs(region, filter_by_kwargs):
    """List running ebs volumes."""
    connection = boto.ec2.connect_to_region(region)
    volumes = connection.get_all_volumes()
    return lookup(volumes, filter_by=filter_by_kwargs)
def _align_from_fastq(fastq1, fastq2, aligner, align_ref, sam_ref, names,
align_dir, data):
config = data["config"]
align_fn = TOOLS[aligner].align_fn
out = align_fn(fastq1, fastq2, align_ref, names, align_dir, data)
if isinstance(out, dict):
assert out.get("work_bam"), (dd... | Align from fastq inputs, producing sorted BAM output. |
def register_ddk_task(self, *args, **kwargs):
    """Register a ddk task."""
    # Force the task class, then delegate to the generic registration.
    return self.register_task(*args, **dict(kwargs, task_class=DdkTask))
def save_user_and_user_email(self, user, user_email):
    """Save the User and UserEmail object."""
    if not self.UserEmailClass:
        return
    adapter = self.db_adapter
    adapter.save_object(user_email)
    adapter.save_object(user)
def handle_rule_versions(self, filename, rule_type, rule):
if 'versions' in rule:
versions = rule.pop('versions')
for version_key_suffix in versions:
version = versions[version_key_suffix]
version['key_suffix'] = version_key_suffix
tmp_rule... | For each version of a rule found in the ruleset, append a new Rule object |
def wrap(value):
    """Wrap the given value in a Document or DocumentList as applicable."""
    if isinstance(value, (Document, DocumentList)):
        return value
    if isinstance(value, dict):
        return Document(value)
    if isinstance(value, list):
        return DocumentList(value)
    return value
def parse_TKIP_hdr(pkt):
assert pkt.FCfield.protected
tkip_layer = pkt[Dot11TKIP]
payload = tkip_layer.data
if not tkip_layer.ext_iv:
raise ValueError("Extended IV must be set for TKIP")
TSC0 = tkip_layer.TSC0
TSC1 = tkip_layer.TSC1
WEPseed = tkip_layer.WEPSeed
TSC2 = tkip_layer.... | Extract TSCs, TA and encoded-data from a packet @pkt |
def guess_input_handler(seqs, add_seq_names=False):
if isinstance(seqs, str):
if '\n' in seqs:
return '_input_as_multiline_string'
else:
return '_input_as_string'
if isinstance(seqs, list) and len(seqs) and isinstance(seqs[0], tuple):
return '_input_as_seq_id_seq_... | Returns the name of the input handler for seqs. |
def _select_default_algorithm(analysis):
if not analysis or analysis == "Standard":
return "Standard", {"aligner": "bwa", "platform": "illumina", "quality_format": "Standard",
"recalibrate": False, "realign": False, "mark_duplicates": True,
"variantcal... | Provide default algorithm sections from templates or standard |
def spatial_slice_zeros(x):
    """Experimental summary that shows how many planes are unused for a batch.

    Reduces over the first three axes, yielding 1.0 wherever every reduced
    element is <= 0, else 0.0.
    """
    # assumes x is rank-4 with the channel/plane axis last — TODO confirm
    return tf.cast(tf.reduce_all(tf.less_equal(x, 0.0), [0, 1, 2]),
                   tf.float32)
def _spawn(self):
self.queue = Queue(maxsize=self.num_threads * 10)
for i in range(self.num_threads):
t = Thread(target=self._consume)
t.daemon = True
t.start() | Initialize the queue and the threads. |
def del_node(self, char, node):
del self._real.character[char].node[node]
for cache in (
self._char_nodes_rulebooks_cache,
self._node_stat_cache,
self._node_successors_cache
):
try:
del cache[char][node]
exce... | Remove a node from a character. |
def adapt_sum(line, cfg, filter_obj):
    """Determine the best filter by the sum of all row values."""
    candidates = filter_obj.filter_all(line)
    totals = [sum(row) for row in candidates]
    best = totals.index(min(totals))
    return candidates[best]
def pupatizeElements(self):
    """Transform all raba objects into pupas, in place."""
    for idx, element in enumerate(self):
        self[idx] = element.pupa()
def clear_last_check(self):
    """Clear the last check result of the file.

    Resets ``last_check`` inside a nested DB transaction and stamps
    ``last_check_at`` with the current UTC time.

    Returns:
        self, to allow call chaining.
    """
    with db.session.begin_nested():
        self.last_check = None
        self.last_check_at = datetime.utcnow()
    return self
def extract(json_object, args, csv_writer):
found = [[]]
for attribute in args.attributes:
item = attribute.getElement(json_object)
if len(item) == 0:
for row in found:
row.append("NA")
else:
found1 = []
for value in item:
... | Extract and write found attributes. |
def _CheckCollation(cursor):
cur_collation_connection = _ReadVariable("collation_connection", cursor)
if cur_collation_connection != COLLATION:
logging.warning("Require MySQL collation_connection of %s, got %s.",
COLLATION, cur_collation_connection)
cur_collation_database = _ReadVariable("... | Checks MySQL collation and warns if misconfigured. |
def _text_attr(self, attr):
attr = text[attr]
if attr == "reset":
self.cursor_attributes = self.default_attributes
elif attr == "underline-off":
self.cursor_attributes = self._remove_text_attr("underline")
elif attr == "blink-off":
self.cursor_attribut... | Given a text attribute, set the current cursor appropriately. |
def _get(self, operation, field):
    """Get tracked position for a given operation and field."""
    self._check_exists()
    criteria = {
        Mark.FLD_OP: operation.name,
        Mark.FLD_MARK + "." + field: {"$exists": True},
    }
    return self._track.find_one(criteria)
def _send_method(self, method_sig, args=bytes(), content=None):
    """Send a method for our channel."""
    payload = args.getvalue() if isinstance(args, AMQPWriter) else args
    self.connection.method_writer.write_method(
        self.channel_id, method_sig, payload, content)
def cleanup(test_data, udfs, tmp_data, tmp_db):
con = make_ibis_client(ENV)
if udfs:
con.hdfs.rmdir(os.path.join(ENV.test_data_dir, 'udf'))
if test_data:
con.drop_database(ENV.test_data_db, force=True)
con.hdfs.rmdir(ENV.test_data_dir)
if tmp_data:
con.hdfs.rmdir(ENV.tmp_... | Cleanup Ibis test data and UDFs |
def _is_somatic(rec):
if _has_somatic_flag(rec):
return True
if _is_mutect2_somatic(rec):
return True
ss_flag = rec.INFO.get("SS")
if ss_flag is not None:
if str(ss_flag) == "2":
return True
status_flag = rec.INFO.get("STATUS")
if status_flag is not None:
... | Handle somatic classifications from MuTect, MuTect2, VarDict and VarScan |
def copyfile(self, target):
    """Copies this file to the given `target` location."""
    destination = self._to_backend(target)
    shutil.copyfile(self.path, destination)
def serveUpcoming(self, request):
myurl = self.get_url(request)
today = timezone.localdate()
monthlyUrl = myurl + self.reverse_subpage('serveMonth',
args=[today.year, today.month])
weekNum = gregorian_to_week_date(today)[1]
weekly... | Upcoming events list view. |
def transmit_agnocomplete_context(self):
    """Reset the current queryset only if the user is set.

    Delegates to the parent mixin to fetch the context user; when one is
    returned, swaps ``self.queryset`` for the agnocomplete queryset.
    """
    user = super(AgnocompleteContextQuerysetMixin, self) \
        .transmit_agnocomplete_context()
    if user:
        self.queryset = self.agnocomplete.get_queryset()
    return user
def _add_timeout(self, request, future):
io_loop = IOLoop.current()
t = io_loop.call_later(
request.ttl,
self._request_timed_out,
request.id,
request.service,
request.ttl,
future,
)
io_loop.add_future(future, lambda ... | Adds a timeout for the given request to the given future. |
def to_base_variable(self):
    """Return this variable as a base xarray.Variable."""
    return Variable(
        self.dims,
        self._data,
        self._attrs,
        encoding=self._encoding,
        fastpath=True,
    )
def verify(path):
path = pathlib.Path(path)
valid = False
if path.suffix == ".npy":
try:
nf = np.load(str(path), mmap_mode="r", allow_pickle=False)
except (OSError, ValueError, IsADirectoryError):
pass
else:
if l... | Verify that `path` has a supported numpy file format |
def start(st_reg_number):
weights = [9, 8, 7, 6, 5, 4, 3, 2]
digit_state_registration = st_reg_number[-1]
if len(st_reg_number) != 9:
return False
sum_total = 0
for i in range(0, 8):
sum_total = sum_total + weights[i] * int(st_reg_number[i])
if sum_total % 11 == 0:
return... | Checks the number valiaty for the Paraiba state |
def app_state(self, app):
    """Informs if application is running."""
    running = (
        self.available
        and self.screen_on
        and self.current_app["package"] == app
    )
    return STATE_ON if running else STATE_OFF
def load_user_catalog():
    """Return a catalog for the platform-specific user Intake directory."""
    cat_dir = user_data_dir()
    if os.path.isdir(cat_dir):
        return YAMLFilesCatalog(cat_dir)
    return Catalog()
def acceptable(value, capitalize=False):
name = regexes['punctuation'].sub("", regexes['joins'].sub("_", value))
name = regexes['repeated_underscore'].sub("_", name.strip('_'))
if capitalize:
name_parts = []
for word in name.split('_'):
name_parts.append(word[0].upper())
... | Convert a string into something that can be used as a valid python variable name |
def round(self, digits=0):
    """Round each rectangle dimension to `digits` decimal places, in place."""
    # Bare `round` below resolves to the builtin (method names are not in
    # scope inside method bodies).
    for name in ("_left", "_bottom", "_width", "_height"):
        setattr(self, name, round(getattr(self, name), digits))
def _build_session(username, password, trans_label=None):
bigip = requests.session()
bigip.auth = (username, password)
bigip.verify = False
bigip.headers.update({'Content-Type': 'application/json'})
if trans_label:
trans_id = __salt__['grains.get']('bigip_f5_trans:{label}'.format(label=trans... | Create a session to be used when connecting to iControl REST. |
def T(self, ID, sign):
    """Returns the term of an object in a sign."""
    longitude = self.terms[sign][ID]
    term_id = 'T_%s_%s' % (ID, sign)
    return self.G(term_id, 0, longitude)
def to_json(self):
res_dict = {}
def gen_dep_edge(node, edge, dep_tgt, aliases):
return {
'target': dep_tgt.address.spec,
'dependency_type': self._edge_type(node.concrete_target, edge, dep_tgt),
'products_used': len(edge.products_used),
'products_used_ratio': self._used_rat... | Outputs the entire graph. |
def upgrade(cfg):
db_node = cfg["db"]
old_db_elems = ["host", "name", "port", "pass", "user", "dialect"]
has_old_db_elems = [x in db_node for x in old_db_elems]
if any(has_old_db_elems):
print("Old database configuration found. "
"Converting to new connect_string. "
"... | Provide forward migration for configuration files. |
def envs(self):
    """Return a list of available environments.

    Sends the ``_file_envs`` command over the channel; on Python 2 the
    response is additionally run through salt's decoder.
    """
    load = {'cmd': '_file_envs'}
    return salt.utils.data.decode(self.channel.send(load)) if six.PY2 \
        else self.channel.send(load)
def check_stf_agent(adbprefix=None, kill=False):
if adbprefix is None:
adbprefix = ['adb']
command = adbprefix + ['shell', 'ps']
out = subprocess.check_output(command).strip()
out = out.splitlines()
if len(out) > 1:
first, out = out[0], out[1:]
idx = first.split().index('PID... | return True if agent is alive. |
def write_bool(self, flag):
    """Writes a boolean to the underlying output file as a 1-byte value."""
    self.write(b"\x01" if flag else b"\x00")
def _push(self):
    """Offer to push changes, if needed."""
    push_cmds = self.vcs.push_commands()
    if not push_cmds:
        return
    if not utils.ask("OK to push commits to the server?"):
        return
    for push_cmd in push_cmds:
        logger.info(utils.system(push_cmd))
def focusWindow(self, hwnd):
    """Brings the specified window to the front.

    Restores the window first if it is minimized (iconic), then makes it
    the foreground window via the Win32 user32 API.
    """
    Debug.log(3, "Focusing window: " + str(hwnd))
    SW_RESTORE = 9  # winuser.h ShowWindow command: restore from minimized
    if ctypes.windll.user32.IsIconic(hwnd):
        ctypes.windll.user32.ShowWindow(hwnd, SW_RESTORE)
    ctypes.windll.user32.SetForegroundWindow(hwnd)
def calc_buffered_bounds(
format, bounds, meters_per_pixel_dim, layer_name, geometry_type,
buffer_cfg):
if not buffer_cfg:
return bounds
format_buffer_cfg = buffer_cfg.get(format.extension)
if format_buffer_cfg is None:
return bounds
geometry_type = normalize_geometry_typ... | Calculate the buffered bounds per format per layer based on config. |
def _from_dict(cls, _dict):
args = {}
if 'age' in _dict:
args['age'] = FaceAge._from_dict(_dict.get('age'))
if 'gender' in _dict:
args['gender'] = FaceGender._from_dict(_dict.get('gender'))
if 'face_location' in _dict:
args['face_location'] = FaceLocat... | Initialize a Face object from a json dictionary. |
def down(self, point):
self._vdown = arcball_map_to_sphere(point, self._center, self._radius)
self._qdown = self._qpre = self._qnow
if self._constrain and self._axes is not None:
self._axis = arcball_nearest_axis(self._vdown, self._axes)
self._vdown = arcball_constrain_to... | Set initial cursor window coordinates and pick constrain-axis. |
def handle(self, *args, **options):
self._connection = Auth()._get_connection()
if len(args) == 0:
containers = self._connection.list_containers()
if not containers:
print("No containers were found for this account.")
elif len(args) == 1:
conta... | Lists all the items in a container to stdout. |
def solve(self, solver_klass=None):
t0 = time()
om = self._construct_opf_model(self.case)
if om is None:
return {"converged": False, "output": {"message": "No Ref Bus."}}
if solver_klass is not None:
result = solver_klass(om, opt=self.opt).solve()
elif sel... | Solves an optimal power flow and returns a results dictionary. |
def _pop_index(self, index, has_default):
try:
if index is NOT_SET:
index = len(self._list) - 1
key, value = self._list.pop()
else:
key, value = self._list.pop(index)
if index < 0:
index += len(self._list) + 1
except IndexError:
if has_default:
return None, None, None
else:
... | Remove an element by index, or last element. |
def html(self, label, *msg):
    """Print html in a notebook, or fall back to plain ``print``.

    Args:
        label: tag placed in square brackets before the message.
        *msg: message fragments, joined with spaces.
    """
    lbl = "[" + label + "] "
    txt = lbl + " " + " ".join(list(msg))
    if self.notebook is True:
        # Display the HTML object itself: the previous `lbl + html`
        # concatenated a str with an HTML object and raised TypeError.
        display(HTML(txt))
    else:
        # `txt` already starts with the label — the previous
        # `print(lbl + txt)` emitted the label twice.
        print(txt)
def variant(self, case_id, variant_id):
    """Fetch a single variant from the variant source."""
    plugin, resolved_case_id = self.select_plugin(self.case(case_id))
    return plugin.variant(resolved_case_id, variant_id)
def on_episode_end(self, episode, logs):
duration = timeit.default_timer() - self.starts[episode]
metrics = self.metrics[episode]
if np.isnan(metrics).all():
mean_metrics = np.array([np.nan for _ in self.metrics_names])
else:
mean_metrics = np.nanmean(metrics, axi... | Compute and print metrics at the end of each episode |
def get(self, entry):
if self.apiVersion == 1:
path = self.secretsmount + '/' + entry
else:
path = self.secretsmount + '/data/' + entry
proj = yield self._http.get('/v1/{0}'.format(path))
code = yield proj.code
if code != 200:
raise KeyError("T... | get the value from vault secret backend |
def open(cls, blob, username, password):
    """Creates a vault from a blob object."""
    key = blob.encryption_key(username, password)
    return cls(blob, key)
def save(self):
active_language = get_language()
for (name, value) in self.cleaned_data.items():
if name not in registry:
name, code = name.rsplit('_modeltranslation_', 1)
else:
code = None
setting_obj, created = Setting.objects.get_or_... | Save each of the settings to the DB. |
def next_frame_pixel_noise():
    """Basic 2-frame conv model with pixel noise.

    Starts from the deterministic hparams and swaps in the pixel-noise
    input bottom with a 0.05 noise level.
    """
    hparams = next_frame_basic_deterministic()
    hparams.add_hparam("video_modality_input_noise", 0.05)
    hparams.bottom["inputs"] = modalities.video_pixel_noise_bottom
    hparams.top["inputs"] = modalities.video_top
    return hparams
async def on_raw_authenticate(self, message):
if self._sasl_timer:
self._sasl_timer.cancel()
self._sasl_timer = None
response = ' '.join(message.params)
if response != EMPTY_MESSAGE:
self._sasl_challenge += base64.b64decode(response)
if len(response) %... | Received part of the authentication challenge. |
def get(self, request, *args, **kwargs):
self.object = self.get_object()
can_delete = True
protected_objects = []
collector_message = None
collector = Collector(using="default")
try:
collector.collect([self.object])
except ProtectedError as e:
... | Catch protected relations and show to user. |
def cor(y_true, y_pred):
    """Compute the Pearson correlation coefficient."""
    masked_true, masked_pred = _mask_nan(y_true, y_pred)
    return np.corrcoef(masked_true, masked_pred)[0, 1]
def clock_in(request):
user = request.user
active_entry = utils.get_active_entry(user, select_for_update=True)
initial = dict([(k, v) for k, v in request.GET.items()])
data = request.POST or None
form = ClockInForm(data, initial=initial, user=user, active=active_entry)
if form.is_valid():
... | For clocking the user into a project. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.