content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def get_locks(gdb):
"""Generates a list of current locks in a gdb."""
# TODO: change to `glob(os.path.join(gdb, "*.lock"))`
locks = [f for f in os.listdir(gdb) if ".lock" in f]
for lock in locks:
try:
with open(gdb, "w") as f:
pass
except IOError:
... | 700 |
def test_encode_json_strings(tmpdir):
"""Ensure that JSON values are preserved beteen NLJ and CSV."""
infile = str(tmpdir.mkdir('test-in').join('in.json'))
outfile = str(tmpdir.mkdir('test-out').join('out.json'))
roundtrip_file = str(tmpdir.mkdir('test-roundtrip').join('roundtrip.json'))
# Write NL... | 701 |
def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=()):
"""Initialize a network: 1. register CPU/GPU device (with multi-GPU support); 2. initialize the network weights
Parameters:
net (network) -- the network to be initialized
init_type (str) -- the name of an initialization m... | 702 |
def remove_from_group(group_name, nodes=None, nodes_by_col='SUID', edges=None, edges_by_col='SUID', network=None,
base_url=DEFAULT_BASE_URL):
"""Remove the specified nodes and edges from the specified group.
Args:
group_name (str): Specifies the name used to identify the group
... | 703 |
def filter_bank_2high(t, Nj, Nj_1, ac=2.0, bc=2.0):
"""
computes the filter bank for control points N_j, Nj_1 given the variable t
:param t: data points on the real line R arranged in numpy array
:param Nj: control point, Nj > Nj_1, integer
:param Nj_1: control point, Nj > Nj_1, integer
:param ... | 704 |
def to_undirected(graph, copy_node_feat=True, copy_edge_feat=False):
"""Convert a graph to an undirected graph.
Args:
graph (pgl.Graph): The input graph, should be in numpy format.
copy_node_feat (bool): Whether to copy node feature in return graph. Default: True.
copy_edge_feat... | 705 |
def as_public():
    """Return an unauthenticated requests session."""
    session = BaseUrlSession()
    return session
def transform_real_2_sim(real_position):
"""
Transforms a position from the 'real' coordinate system to the 'sim' coordinate system.
:param real_position: dictionary with 'x', 'y' and 'z' keys to floating point values
:return: position in sim space as dictionary with 'x', 'y' and 'z' keys to floating po... | 707 |
def generate_tgt_mask(sz):
"""Generate a square mask for the sequence. The masked positions
are filled with float('-inf'). Unmasked positions are filled with
float(0.0).
This function is a slight modification of the version in the PyTorch
repository.
Parameters
----------
sz : int
... | 708 |
def SceneAddPipeline(builder, pipeline):
    """This method is deprecated. Please switch to AddPipeline."""
    # Backward-compatible shim: delegate unchanged to the replacement API.
    result = AddPipeline(builder, pipeline)
    return result
def SynthesizeUserId(email):
"""Return a synthetic user ID from an email address.
Note that this is not the same user ID found in the production system.
Args:
email: An email address.
Returns:
A string userid derived from the email address.
"""
user_id_digest = _MD5_FUNC(email.lower()).digest()
... | 710 |
def store_to_file(file_name, series, col_name, replace=False):
"""Add series to file."""
path = config.DATADIR
filepath = os.path.join(path, file_name)
df = pd.read_csv(filepath)
if (col_name in df) and (not replace):
return f'{col_name} already in {file_name}. Not added.'
df[col_name] =... | 711 |
def compute_jacobian(fn, x0: torch.Tensor, bs: int):
"""
Computes the Jacobian matrix of the given function at x0, using vector-Jacobian products
"""
input_shape = x0.shape
assert len(input_shape) == 3
dim = x0.numel()
eye = torch.eye(dim, dtype=x0.dtype, device=x0.device)
# Forward pas... | 712 |
def pixel_distance(A, B):
"""
In 9th grade I sat in geometry class wondering "when then hell am I
ever going to use this?"...today is that day.
Return the distance between two pixels
"""
(col_A, row_A) = A
(col_B, row_B) = B
return math.sqrt(math.pow(col_B - col_A, 2) + math.pow(row_B ... | 713 |
def test_out_of_scope_passed_error(logfile):
"""
If an error is thrown out of band ensure there's no error data
Unless we pass a value to the `error` field, in which case stuff it
in `error_message`
"""
configure_logging()
log = get_logger()
log.error("0 test", exc_info=sys.exc_info())... | 714 |
def word_ngrams(s, n=3, token_fn=tokens.on_whitespace):
"""
Word-level n-grams in a string
By default, whitespace is assumed to be a word boundary.
>>> ng.word_ngrams('This is not a test!')
[('This', 'is', 'not'), ('is', 'not', 'a'), ('not', 'a', 'test!')]
If the sequence'... | 715 |
def phosites_detail(text):
"""
create detail view output of phosphosites by accession.
:param text: string of phos group ID
:return: template
"""
results = browse_queries.browse_detail(text,'Phosphosite')
table = browse_queries.phos_kin_query(text)
# pass tables, results and style indic... | 716 |
def convert_to_np_arrays(X):
    """Convert input data to a dense numpy array.

    Sparse inputs exposing ``todense()`` are densified first; arrays with
    more than two dimensions are flattened via ``reduce_shape``.

    Args:
        X: array-like input, possibly a scipy sparse matrix.

    Returns:
        np.ndarray: dense array of at most two dimensions.
    """
    try:
        # scipy sparse matrices expose todense(); dense inputs do not.
        # Catch only AttributeError -- the previous bare except hid real errors.
        X = X.todense()
    except AttributeError:
        pass
    X = np.array(X)
    if len(X.shape) > 2:
        X = reduce_shape(X)
    return X
def cleanup_handler(signal_received, frame):
"""Handle cleanup when exiting with Ctrl-C.
Args:
signal_received: The signal number received.
frame: The current stack frame.
"""
global force_non_graceful_cleanup
if not force_non_graceful_cleanup:
print(f"{bcolor.FAIL}SIGINT ... | 718 |
def main(wf):
"""Run the workflow.
Args:
wf (workflow.Workflow): Active Workflow object.
"""
from docopt import docopt
# Parse command-line arguments and call appropriate
# command function.
args = docopt(__doc__, wf.args, version=wf.version)
log.debug('args=%r', args)
if... | 719 |
def remove(c, containers=False, images=False):
    """Clean up docker resources.

    Args:
        c: invoke/fabric context used to run shell commands.
        containers (bool): remove all containers when True.
        images (bool): remove all images when True.
    """
    cleanup_commands = []
    if containers:
        cleanup_commands.append("sudo docker rm $(docker ps -a -q)")
    if images:
        cleanup_commands.append("sudo docker rmi $(docker images -q)")
    for command in cleanup_commands:
        c.run(command)
def run_sorting():
"""
This is just a test function, to avoid run the GUI every time.
"""
import csv
import itertools
"""
##To run fibers/cells/fmd/dtd/...
folders = ['/Users/romuere/Dropbox/CBIR/fibers/database/no_fibers/*','/Users/romuere/Dropbox/CBIR/fibers/database... | 721 |
def reduce_arr(arr):
"""
Return which elements on which axis are unique
Args:
arr (np.ndarray) : input array which to reduce to unique value
Returns:
reduced array(np.ndarray) : array with reduced data.
data_axis (list) : the axises that have changing data.
"""
ndim = l... | 722 |
def nll_lorentzian(preds, target, gamma):
"""
Isotropic lorentzian loss function
:param preds: prediction values from NN of size [batch, particles, timesteps, (x,y,v_x,v_y)]
:param target: target data of size [batch, particles, timesteps, (x,y,v_x,v_y)]
:param gamma: The tensor for the FWHM of the ... | 723 |
def get_archive():
"""Ensure that the archive file exists and return its path.
This is a function so the path can be made configurable in the future.
Returns:
:obj:`str`: The full local path to the archive file.
"""
filename = '/config/archive.txt'
archfile = Path(filename)
... | 724 |
def _filter_unique_configs(
configs: Sequence[ProblemConfig],
filter_fn: Callable[[ProblemConfig], bool] = lambda _: True,
) -> List[ProblemConfig]: # pytype: disable=annotation-type-mismatch
"""Filters a list of problem_config to their unique occurrences for testing.
Args:
configs: list of ProblemCon... | 725 |
def convert_units(str):
""" Convert some string with binary prefix to int bytes"""
unit = ''.join(ele for ele in str if not ele.isdigit()).strip().lower()
return int(''.join(ele for ele in str if ele.isdigit()))*{
"b": 1,
"B": 1,
"k": 2**10,
"kb": 2**10,
"m": 2**20,
... | 726 |
def match_piecewise(candidates: set, symbol: str, sep: str='::') -> set:
"""
Match the requested symbol reverse piecewise (split on ``::``) against the candidates.
This allows you to under-specify the base namespace so that ``"MyClass"`` can match ``my_namespace::MyClass``
Args:
candidates: set... | 727 |
def is_kube_version_supported(kube_version, min_version=None, max_version=None):
"""Check if the k8s version is supported by the application.
:param kube_version: the running or target k8s version
:param min_version (optional): minimum k8s version supported by the app
:param max_version (optional): max... | 728 |
def chebi(name=None, identifier=None):
    """Build a ChEBI abundance node.

    :rtype: Abundance
    """
    node = Abundance(namespace='CHEBI', name=name, identifier=identifier)
    return node
def plot_slice(sliceX, color, ax=None, s=100):
"""
Plots slice spatial coordinates.
param: sliceX - AnnData Object of slice
param: color - scatterplot color
param: ax - Pre-existing axes for the plot. Otherwise, call matplotlib.pyplot.gca() internally.
param: s - size of spots
"""
s... | 730 |
def get_group(request):
"""returns all the groups in database
"""
group_id = request.matchdict.get('id', -1)
group = Group.query.filter_by(id=group_id).first()
return [
{
'id': group.id,
'name': group.name,
'thumbnail_full_path':
group.thumbna... | 731 |
def create_histogram(path_to_image, target_path=''):
"""
creates a histogram of a given image and either shows or saves a plot
Args:
path_to_image: path to the image
target_path: if given, saves a plot, otherwise (if empty) shows the plot
Returns:
the histogram plot
"""
... | 732 |
def task_6_list_all_supplier_countries(cur) -> list:
    """
    List all supplier countries

    Args:
        cur: psycopg cursor

    Returns: 29 records
    """
    query = """SELECT country FROM suppliers"""
    cur.execute(query)
    return cur.fetchall()
def _energy_to_length_factor(e_unit, l_unit):
"""
Convert the units of Planck's constant and speed of light
:param e_unit:
:type e_unit: str
:param l_unit:
:type l_unit: str
:return: c,h
"""
dest_h_u = ug.parse_units('%s s' % e_unit)
dest_c_u = ug.parse_units('%s/s' % l_unit)
... | 734 |
def kev_to_wavelength(kev):
    """Calculate the wavelength from kev"""
    # E [keV] * lambda [Angstrom] ~= 12.3984 (hc in keV*Angstrom units).
    return 12.3984 / kev
def test_vault_kv_poll_refetch():
"""
Test the KV v2 token refetch operation
"""
with run_vault() as [vault_client, get_audit_events]:
vault_client.write("secret/data/app", data={"env": "dev"})
with Agent.run(
dedent(
f"""
intervalSeconds: 2
... | 736 |
def _energy_and_power_to_wave_vector(
energy_cap, base_wave_vector_path, target_wave_vector_path):
"""Add captured wave energy value from energy_cap to a field in wave_vector.
The values are set corresponding to the same I,J values which is the key of
the dictionary and used as the unique identifie... | 737 |
def _convert_rde_to_1_0_format(rde_data: dict) -> dict:
"""Convert defined entity to RDE 1.0.
:param DefEntity rde_data: Defined entity dictionary
:return: converted defined entity
:rtype: dict
"""
new_rde = common_models.DefEntity(**rde_data)
new_native_entity: AbstractNativeEntity = rde_u... | 738 |
def additive_symbols(tokens, base_url):
"""``additive-symbols`` descriptor validation."""
results = []
for part in split_on_comma(tokens):
result = pad(remove_whitespace(part), base_url)
if result is None:
return
if results and results[-1][0] <= result[0]:
ret... | 739 |
def check_detection(frame, yx_exp, fwhm, snr_thresh, deltapix=3):
"""
Verify if injected companion is recovered.
Parameters
----------
frame : 2d ndarray
yx_exp : tuple(y, x)
Expected position of the fake companion (= injected position).
fwhm : int or float
FWHM.
snr_thr... | 740 |
def plot_decision_boundary(h, X, Y,step=0.1,x1_range=None,x2_range=None,title=""):
"""
Args:
h(class:'function'): hypothesis (Model)
X: input dataset (Also Used for determining ranges if xi_range=None)
Y: output dataset (Shoud have only 1 and -1 as element values)
step: step size... | 741 |
def _index_list(key_or_list, direction=None):
"""Helper to generate a list of (key, direction) pairs.
Takes such a list, or a single key, or a single key and direction.
"""
if direction is not None:
return [(key_or_list, direction)]
else:
if isinstance(key_or_list, string_type):
... | 742 |
def load_only_test(cfg):
"""Load and process test data only
Args:
cfg (dict): configuration file
Returns:
DataLoader: test DataLoader
"""
# Set test path
path_to_test = os.path.join(cfg["DATA_DIR"], "test/")
# Load the test set
test_dataset = TestLoader(path_to_test)
... | 743 |
def update_covid(update_name: str) -> None:
"""Updates the global webpage_covid_data list, this is in main.py and is what gets passed to the web page"""
logging.info(f"Updating COVID data due to update '{update_name}'")
global webpage_covid_data
location = get_config_data()["local_location"]
locat... | 744 |
def test_row_drop_bad_condition_in_dict():
    """Test that RowDrop rejects an invalid condition value in its dict."""
    # 'bad' is not a callable/recognized condition, so construction must raise.
    with pytest.raises(ValueError):
        RowDrop({'a': 'bad'})
def create_camvid_dataset(path_from, path_to, split_train=0.8):
"""
Reads each `.mat` file in the `path_from` dir and creates segmentation dataset in the `path_to` dir.
Assumes that `path_from` contains only `.mat` files.
:path_from: str or PosixPath to folder with `.mat` files
:path_to: str or Posi... | 746 |
def CalculateOSNames(os_name, os_variants):
"""Calculates all the names an OS can be called, according to its variants.
@type os_name: string
@param os_name: base name of the os
@type os_variants: list or None
@param os_variants: list of supported variants
@rtype: list
@return: list of valid names
"""... | 747 |
def generate_schema(schema_json, use_logical_types=False, custom_imports=None, avro_json_converter=None):
"""
Generate file containing concrete classes for RecordSchemas in given avro schema json
:param str schema_json: JSON representing avro schema
:param list[str] custom_imports: Add additional import... | 748 |
def c4x(c: Circuit, c0: int, c1: int, c2: int, c3: int, t: int) -> Circuit:
    """A macro of 4-controlled X gate"""
    # X = H . Z . H on the target, so wrap a 4-controlled Z with Hadamards.
    circuit = c.h[t]
    circuit = circuit.c4z(c0, c1, c2, c3, t)
    return circuit.h[t]
def pad(data, pad_id):
    """Pad all lists in data to the same length.

    Args:
        data: sequence of lists to pad.
        pad_id: value appended to shorter lists.

    Returns:
        list: new lists, each right-padded with pad_id to the length of the
        longest input list. Empty input yields an empty list.
    """
    # default=0 keeps an empty data sequence from raising ValueError.
    width = max((len(d) for d in data), default=0)
    return [d + [pad_id] * (width - len(d)) for d in data]
def draw_boxes_on_image(img, boxes, color='blue', thickness=1,
box_format=None):
"""
Example:
>>> from netharn import util
>>> img = np.zeros((10, 10, 3), dtype=np.uint8)
>>> color = 'blue'
>>> thickness = 1
>>> boxes = util.Boxes([[1, 1, 8, 8]], ... | 751 |
def test_initism(simulation):
    """
    Test init_ism function.
    """
    # Smoke test: passes as long as init_ism() raises no exception on the
    # simulation fixture supplied by pytest.
    simulation.init_ism()
def _select_random_features(feature_list, amount):
"""Selects a given amount of random features from the feature list"""
set_size = len(feature_list) -1
random_features = []
for i in range(amount):
while(True):
random_feature = feature_list[randint(0, set_size)]
if... | 753 |
def matrixop_inp_matr():
"""
Функция возвращает матрицу, введённую пользователем с клавиатуры.
Returns
-------
a : [[float, float, ...],
[float, float, ...],
...]
Матрица, введенная пользователем
"""
while True:
try:
m = int(input('Сколько буде... | 754 |
def gen_info(run_event_files):
"""Generate subject_info structure from a list of event files
"""
info = []
for i, event_files in enumerate(run_event_files):
runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[])
for event_file in event_files:
_, name = os.path... | 755 |
def _SectionNameToSymbols(section_name, section_to_symbols_map):
"""Yields all symbols which could be referred to by section_name.
If the section name is present in the map, the names in the map are returned.
Otherwise, any clone annotations and prefixes are stripped from the section
name and the remainder is ... | 756 |
def dropannotation(annotation_list):
    """
    Drop out the annotation contained in annotation_list

    Returns the concatenation of elements up to (not including) the first
    "#"; if no "#" is present, all elements are concatenated.
    """
    kept = []
    for ch in annotation_list:
        if ch == "#":
            # Everything from the first '#' onward is the annotation; stop here.
            break
        kept.append(ch)
    # join once instead of repeated string += (avoids quadratic behavior).
    return "".join(kept)
def TIF_to_jpg_all(path):
    """run TIF_to_jpg() on every TIF of a folder."""
    tif_files = sorted(glob.glob(path + "/*.tif"))
    for tif_file in tif_files:
        print(tif_file)
        TIF_to_jpg(tif_file)
def iter_folders(parent_dir, outdir, target):
"""iterates through subfolders"""
for dir in os.scandir(parent_dir):
if dir.is_dir():
path = pathlib.Path(dir).absolute()
ident = '.'.join(path.parts[len(path.parts)-3:])
i_path = path / 'TIF'
if i_path.exists(... | 759 |
def test_collection_detail(app, client):
"""Test collection detail endpoint"""
with app.app_context():
response = client.get("/api/collections/1", content_type="text/json")
data = json.loads(response.data)
assert response.status_code == 200
col1 = Collection.query.filter(Collecti... | 760 |
def do_associate_latest_edit(parser, token):
    """
    AssociateLatestEdit

    Template-tag compile function: expects exactly one argument (the node).

    Raises:
        template.TemplateSyntaxError: if the tag is not given exactly one
            argument.
    """
    try:
        tag, node = token.split_contents()
    except ValueError:
        # Python 3 raise syntax; the legacy `raise Cls, msg` form is a
        # SyntaxError on Python 3.
        raise template.TemplateSyntaxError(
            "%r tag requires one argument" % token.contents.split()[0]
        )
    return AssociateLatestEdit(node)
def read_tics_output():
"""Read all the TICS register values from all the txt files.
Reading all the configurations from the current directory. We assume the
file has a format `CHIPNAME_frequency.txt`.
"""
dir_path = os.path.dirname(os.path.realpath(__file__))
all_txt = glob.glob(os.path.join(... | 762 |
def sample_flips_without_replacement() -> None:
"""Samples the coin flips without replacement, printing out the results."""
randomizer = ur.UniqueRandomizer()
# Sample pairs of coin flips until all possible results have been sampled.
while not randomizer.exhausted():
sample = flip_two_weighted_coins(random... | 763 |
def ci_test(c, python=""):
    """
    Test suite for continuous integration testing.

    Installs with pip, tests with pytest and checks coverage with coverage.
    """
    if len(python) == 0:
        python_version = ""
    else:
        python_version = f"-p {python}"
    c.run(f"nox --session tests_pip {python_version}")
def open_monitoring_db(dbhost, dbuser, dbpass, database):
"""
Open MySQL monitoring DB
"""
try:
conn = MySQLdb.connect(host=dbhost, user=dbuser,
passwd=dbpass, db=database)
except MySQLdb.Error, err:
print "Error %d: %s" % (err.args[0], err.args[1])
... | 765 |
def matdiff(matrix1,matrix2,figsize=None,cmap=None):
"""
display the difference between two real matrices, alongside this plot this difference
on a log- colour scale (if diff!=0)
"""
if not figsize:
figsize = defaults['figsize']
if not cmap:
cmap = defaults['cmap']
_matdiff = matrix1-matrix2
f, (ax1, ax2)... | 766 |
def _tokens_by_class_of(tokens):
"""Generates lookup table of tokens in each class."""
out = defaultdict(set)
for token, token_classes in tokens.items():
for token_class in token_classes:
out[token_class].add(token)
return out | 767 |
def test_fileformattoml_pass_with_substitutions():
"""Relative path to file should succeed.
Strictly speaking not a unit test.
"""
context = Context({
'k1': 'v1',
'k2': 'v2',
'k3': 'v3',
'k4': 'v4',
'k5': 'v5',
'fileFormatToml': {'in': './tests/testfiles/... | 768 |
def load_mnist_dataset(shape=(-1, 784), path='data'):
"""Load the original mnist.
Automatically download MNIST dataset and return the training, validation and test set with 50000, 10000 and 10000 digit images respectively.
Parameters
----------
shape : tuple
The shape of digit images (the ... | 769 |
def handle_move_arm_to_position(arm_position_entry, mqtt_sender):
"""
Tells the robot to move its Arm to the position in the given Entry box.
The robot must have previously calibrated its Arm.
:type arm_position_entry ttk.Entry
:type mqtt_sender: com.MqttClient
"""
print('move_... | 770 |
def main(*args, **kwargs):
"""Defines the behavior of the app if run with flags and/or other parameters
"""
# If no command line arguments or only '-gui' flag provided, run blank GUI
if (len(args) == 0 and len(kwargs) == 0) or \
(len(args) == 1 and args[0] == "-gui" and len(kwargs) == 0):
... | 771 |
def entropy_of_states(probabilities, output_path, n_clusters):
"""
Computes the entropy of probabilities of states
:param probabilities: array with states probabilities
:type probabilities: np.ndarray
:param output_path: path to output directory
:type output_path: str
:param n_clusters: num... | 772 |
def test_pv_creation(client, core_api): # NOQA
"""
Test creating PV using Longhorn API
1. Create volume
2. Create PV for the volume
3. Try to create another PV for the same volume. It should fail.
4. Check Kubernetes Status for the volume since PV is created.
"""
volume_name = "test-pv... | 773 |
def EntryToSlaveName(entry):
    """Produces slave name from the slaves config dict."""
    # Fall back to 'hostname' when 'slavename' is missing or empty/falsy.
    base_name = entry.get('slavename') or entry.get('hostname')
    if 'subdir' in entry:
        return f"{base_name}#{entry['subdir']}"
    return base_name
def create_app(config_class=Config):
"""
Constructs a Flask application instance.
Parameters
----------
config_class: class that stores the configuration variables.
Returns
-------
app : Flask application
"""
app = Flask(__name__)
app.config.from_object(config_class)
bo... | 775 |
def is_symmetric_re(root: TreeNode) -> bool:
"""Check if a binary tree is a mirror of itself (symmetric around its center)."""
if not root:
return False
def is_mirror(t1, t2):
if not t1 and not t2:
return True
if not t1 or not t2:
return False
return... | 776 |
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up paperless from a config entry.

    Forwards the entry to every platform listed in PLATFORMS and reports
    success back to Home Assistant by returning True.
    """
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True
def azimuthal_average(image, center=None, stddev=True, binsize=0.5, interpnan=False):
"""
Modified based on https://github.com/keflavich/image_tools/blob/master/image_tools/radialprofile.py
Calculate the azimuthally averaged radial profile.
Parameters:
imgae (numpy ndarray): 2-D image
... | 778 |
def assign_probe_int(probe: SimHandleBase, val: int):
    """Assign int val to int var. Use for debug to display python int in waveforms"""
    # Silently ignore anything that is not a simulator handle.
    if isinstance(probe, SimHandleBase):
        probe.value = val
def find_assign(data, varname):
"""Finds a substring that looks like an assignment.
:param data: Source to search in.
:param varname: Name of the variable for which an assignment should be
found.
"""
ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname))
if len(ASSIGN_... | 780 |
def test_uninstalled_non_existing(kb):
"""
test wusa.uninstalled when the kb is not installed
"""
mock_installed = MagicMock(return_value=False)
with patch.dict(wusa.__salt__, {"wusa.is_installed": mock_installed}):
returned = wusa.uninstalled(name=kb)
expected = {
"chang... | 781 |
def remove(token: str, server: str="http://localhost:8080/remove", params: dict=None) -> int:
"""
Removes the data associated with the token.
:param token: the token to download the data for
:type token: str
:param server: the URL of the server to upload to
:type server: str
:param params: ... | 782 |
def get_pending_surveys_batch_number(batch_no):
"""
Gets batch number for the shared survey
:param batch_no: Shared survey batch number
:type batch_no: str
:raises ApiError: Raised when party returns api error
:return: list share surveys
"""
bound_logger = logger.bind(batch_no=batch_no)... | 783 |
def before_after_to_box(element, pseudo_type, state, style_for,
get_image_from_uri, target_collector):
"""Return the boxes for ::before or ::after pseudo-element."""
style = style_for(element, pseudo_type)
if pseudo_type and style is None:
# Pseudo-elements with no style at a... | 784 |
def get_beads_MDA_atomgroups(ns):
"""For each CG bead, create atom groups for trajectory geoms calculation using mass and atom
weights across beads.
ns requires:
mapping_type
atom_w
aa_universe
ns creates:
mda_beads_atom_grps
mda_weights_atom_grps
"""
ns... | 785 |
def test_remove_one_child_left(test_bsts):
    """Test delete node one child, left."""
    test_bsts[4].delete(3)
    assert not test_bsts[4].contains(3)
    # Use '==' rather than 'is': identity comparison against an int literal
    # relies on CPython small-int caching and is not a correctness check.
    assert test_bsts[4].size() == 3
def pool_delete(transport, request, pool_name):
"""Deletes the pool `pool_name`
:param transport: Transport instance to use
:type transport: `transport.base.Transport`
:param request: Request instance ready to be sent.
:type request: `transport.request.Request`
:param pool_name: Pool reference ... | 787 |
def make_sign_initializer(random_sign_init):
    """Random sign intitializer for HyperBatchEnsemble layers."""
    if random_sign_init > 0:
        return ed.initializers.RandomSign(random_sign_init)
    # Non-positive values encode a RandomNormal stddev as their magnitude,
    # so negating yields a non-negative stddev.
    return tf.keras.initializers.RandomNormal(
        mean=1.0, stddev=-random_sign_init)
def _parallel_predict_proba(ensemble, X, idx, results):
"""
Compute predictions of SCM estimators
"""
for k in idx:
res = ensemble.estimators[k].predict(X[:, ensemble.estim_features[k]])
results = results + res
return results | 789 |
def compute_FP_TP_Probs(Ycorr, Xcorr, Probs, is_tumor, evaluation_mask, Isolated_Tumor_Cells, level):
"""Generates true positive and false positive stats for the analyzed image
Args:
Probs: list of the Probabilities of the detected lesions
Xcorr: list of X-coordinates of the lesions
... | 790 |
def main(token, language, account):
""" Get the account balances on Revolut """
if token is None:
print("You don't seem to have a Revolut token")
answer = input("Would you like to generate a token [yes/no]? ")
selection(answer)
while token is None:
try:
... | 791 |
def split_sushi_data(K):
"""
Needs to be run once on raw sushi data
before starting sushi experiments
Splits edges in set b of sushi data with cross validation
Makes sure no node is shared in train and test sets
Saves splits and scores
"""
print('Reading sushi data...')
home_path = ... | 792 |
def make_sentences(text, src):
"""
Builds a list of dictionaries, one for each sentence resulting from
the sentence parser. The dictionary schema is
{"src": src, "label": 0, "sentence": sent}
Substitutions are made for the identified tokens.
Args:
text (str): text to process
... | 793 |
def read_test_case(file_path):
"""
reads one test case from file.
returns contents of test case
Parameters
----------
file_path : str
the path of the test case file to read.
Returns
-------
list
a list of contents of the test case.
"""
file = open(file_path... | 794 |
def get_order_cart_product_options_output(cart_id: Optional[pulumi.Input[str]] = None,
catalog_name: Optional[pulumi.Input[Optional[str]]] = None,
plan_code: Optional[pulumi.Input[str]] = None,
... | 795 |
def add_checkbox_column(col_list, row_list, checkbox_pos=1):
"""Insert a new column into the list of column dictionaries so that it
is the second column dictionary found in the list. Also add the
checkbox column header to the list of row dictionaries and
subsequent checkbox value
'c... | 796 |
def save_make_lines(filename, make_lines):
    """ Save entries of make_lines as lines in filename

    Each entry is written followed by a newline.
    """
    with open(filename, mode="w") as out_file:
        out_file.writelines(entry + "\n" for entry in make_lines)
def accuracy4batch(model, testloader, criterion):
"""save a model checkpoint
INPUT:
model: pytorch nn model.
testloader: DataLoader. test data set
criterion: criterion. loss criterion
device: torch.device. device on which model/data is based
OUTPUT:
accuracy: float in [0:1]. percenct... | 798 |
def AmendmentLinks(amendment, users_by_id, project_name):
"""Produce a list of value/url pairs for an Amendment PB.
Args:
amendment: Amendment PB to display.
users_by_id: dict {user_id: user_view, ...} including all users
mentioned in amendment.
project_nme: Name of project the issue/comment/amen... | 799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.