content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def batch_post(
api_key: str, host: Optional[str] = None, gzip: bool = False, timeout: int = 15, **kwargs
) -> requests.Response:
"""Post the `kwargs` to the batch API endpoint for events"""
res = post(api_key, host, "/batch/", gzip, timeout, **kwargs)
return _process_response(res, success_message="data... | 3,400 |
def log_csv(msg, level='info', path='test.csv', format=None, name='csvlog'):
"""csv으로 log 메시지를 보냄
Args:
msg (str): 메시지
name (str, optional): log 이름
path (str, optional): csv 저장 경로path
"""
csvlog = CsvLog(path=path, format=format, name=name)
if level == 'debug':
csvl... | 3,401 |
def _raise_for_status(response: Response) -> None:
    """Validate an HTTP response, surfacing failures as ClickException.

    Delegates the status check to ``Response.raise_for_status`` and
    converts any resulting ``HTTPError`` into a CLI-friendly error,
    preserving the original exception chain.
    """
    try:
        response.raise_for_status()
    except HTTPError as err:
        # Re-raise as a click error so the CLI prints a clean message.
        raise click.ClickException("%s" % err) from err
def load_ste_data(task_name: str) -> List[pd.DataFrame]:
"""Loads the STE data corresponding to the given task name.
Args:
task_name (str): The name of the STE data file.
Returns:
List[pd.DataFrame]: The STE data if found, else empty list.
"""
# Variant-aware STE task names
st... | 3,403 |
def bsplclib_CacheD1(*args):
"""
* Perform the evaluation of the of the cache the parameter must be normalized between the 0 and 1 for the span. The Cache must be valid when calling this routine. Geom Package will insure that. and then multiplies by the weights this just evaluates the current point the CacheParam... | 3,404 |
def fileDialog2(bbo="int",cc="string",cap="string",ds="int",ff="string",fm="int",ftc="script",hne=1,okc="string",oca="script",ocm="script",oc2="script",ocr="script",oin="script",rf=1,sff="string",sc="script",spe=1,dir="string"):
"""
http://help.autodesk.com/cloudhelp/2019/ENU/Maya-Tech-Docs/CommandsPython/fileDialo... | 3,405 |
def velocity_filter(freq, corr_spectrum, interstation_distance, cmin=1.0,
cmax=5.0, p=0.05):
"""
Filters a frequency-domain cross-spectrum so as to remove all signal
corresponding to a specified velocity range.
In practice, the procedure (i) inverse-Fourier transforms the cros... | 3,406 |
def transcribe_file(path, language):
"""
Translate an PCM_16 encoded audio signal stored in a file using Google's STT API (Google Cloud Speech).
This implementation should be changed to transcribe audio-bytes directly.
:param path: path to audio file holding audio bytes
:param language: language of ... | 3,407 |
def make_aware(dt):
    """Return *dt* unchanged if timezone-aware; otherwise attach UTC tzinfo."""
    if dt.tzinfo:
        return dt
    # Naive datetime: assume UTC, as the callers of this helper expect.
    return dt.replace(tzinfo=timezone.utc)
def train(model, tokenizer, train_dataset, batch_size, lr, adam_epsilon,
epochs):
"""
:param model: Bert Model to train
:param tokenizer: Bert Tokenizer to train
:param train_dataset:
:param batch_size: Stick to 1 if not using using a high end GPU
:param lr: Suggested learning rate fr... | 3,409 |
def fastqcounter(infile):
"""
Returns the number of unique sequences in a fastq file
"""
#check if file is derep'd using DerepCheck()
derep = reptools.DerepCheck(infile)
n=0
if derep:
with open(infile) as fn:
for title,seq,qual in reptools.FASTQparser(fn):
... | 3,410 |
def raichuMoves(board,player):
""""Generate All raichu Successors"""
piece = "@" if player == "w" else "$"
possible_boards = []
raichu_locs=[(row_i,col_i) for col_i in range(len(board[0])) for row_i in range(len(board)) if board[row_i][col_i]==piece]
for each_raichu in raichu_locs:
... | 3,411 |
def check_supported():
    """Return whether this module is available (always True here)."""
    return True
def handle_request_parsing_error(err):
    """Abort with 400 Bad Request when request parsing fails.

    Used for errors generated, for example, by schema field validation;
    ``err.messages`` carries the per-field validation errors.
    """
    abort(HTTPStatus.BAD_REQUEST, errors=err.messages)
def tf_batch_propagate(hamiltonian, hks, signals, dt, batch_size):
"""
Propagate signal in batches
Parameters
----------
hamiltonian: tf.tensor
Drift Hamiltonian
hks: Union[tf.tensor, List[tf.tensor]]
List of control hamiltonians
signals: Union[tf.tensor, List[tf.tensor]]
... | 3,414 |
def test_filter_cancer_variants_wrong_params(app, institute_obj, case_obj):
"""test filter cancer SNV variants with filter form filled with parameters having the wrong format"""
# GIVEN an initialized app
with app.test_client() as client:
# GIVEN that the user could be logged in
resp = clie... | 3,415 |
def update_emoji(payload):
"""Process an emoji update event."""
print("Got an emoji added event")
event = payload.get("event", {})
user_id = event.get("user")
ts = event.get("event_ts")
user = User(user_id)
if float(ts) < user.last_update:
return
else:
user.last_update ... | 3,416 |
def noise_dither_bayer(img:np.ndarray) -> np.ndarray:
"""Adds colored bayer dithering noise to the image.
Args:
img: Image to be dithered.
Returns:
version of the image with dithering applied.
"""
imgtype = img.dtype
size = img.shape
#Note: these are very slow for large ... | 3,417 |
def get_match_rank(track, tagged_file):
"""
:param track:
:param files:
:type track: TrackMetadata
:return:
"""
filenames = [filter_filename(os.path.splitext(os.path.basename(filename.path))[0]) for filename in tagged_file]
rank1 = [0]*len(tagged_file)
# Alphabetically closest
... | 3,418 |
def execshell_withpipe_ex(cmd, b_printcmd=True):
"""
Deprecated. Recommand using ShellExec.
"""
strfile = '/tmp/%s.%d.%d' % (
'shell_env.py', int(os.getpid()), random.randint(100000, 999999)
)
os.mknod(strfile)
cmd = cmd + ' 1>' + strfile + ' 2>/dev/null'
os.system(cmd)
if Tr... | 3,419 |
def parseData(file_name, delimiter=None, header_size=0, col_types=None, ret_array=False):
""" Parse data form a text file
Arguments:
file_name: [str] Name of the input file.
Keyword arguments:
delimiter: [str] Data delimiter (often a comma of a semicolon). None by default, i.e. space/tab
delimited data
h... | 3,420 |
def file_command(client: Client, args: Dict[str, Any], params: Dict[str, Any]) -> List[CommandResults]:
"""
Returns file's reputation
"""
files = argToList(args.get('file'))
since = convert_string_to_epoch_time(args.get('since'), arg_name='since')
until = convert_string_to_epoch_time(args.get('u... | 3,421 |
def run_task(client, cmd, cwd, prerequire=[], shell=False, quiet=False):
""" run cmd, in cwd
cmd should be a list (*args), if shell is False
when wildcards are used, shell should be Ture, and cmd is just a string
prerequire is a list of futures that must be gathered before the cmd can run
... | 3,422 |
def Kane_2D_builder(N,dis,mu,B=0,
params={},crystal='zincblende',
mesh=0,
sparse='yes'):
"""
2D 8-band k.p Hamiltonian builder. It obtaines the Hamiltoninan for a 3D
wire which is infinite in one direction, decribed using 8-band k.p theory.
... | 3,423 |
def has_loop(edges, threshold=2):
""" check if a list of edges representing a directed graph contains a loop
args:
edges: list of edge sets representing a directed graph i.e. [(1, 2), (2, 1)]
threshold: min number of nodes contained in loop
returns:
bool
"""... | 3,424 |
async def test_supported_features(hass):
"""Test supported features reporting."""
pause_play_stop = SUPPORT_PAUSE | SUPPORT_PLAY | SUPPORT_STOP
play_media = SUPPORT_PLAY_MEDIA
volume = SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_STEP
await async_setup_component(
hass,
... | 3,425 |
def connect(host=None, dbname=None, user=None, password=None, minconn=1,
maxconn=4):
"""
Attempts to connect to Postgres.
"""
if not any((host, dbname, user, password)):
host, dbname, user, password = get_db_env()
if not any((host, dbname, user, password)):
raise Exceptio... | 3,426 |
def animate( data_cube, slit_data=None, slit_cmap="viridis", raster_pos=None, index_start=None, index_stop=None, interval_ms=50, gamma=0.4, figsize=(7,7), cutoff_percentile=99.9, save_path=None ):
"""
Creates an animation from the individual images of a data cube.
This function can be pretty slow and take 1... | 3,427 |
def build_vrt(in_vrts, out_vrt, pixel_function):
"""
in_vrts must be a list
out_vrt must be a path/to/filename.vrt
"""
projection, geotransform, raster_size_x, raster_size_y = get_info_vrt(in_vrts[0])
write_vrt(in_vrts, out_vrt, projection, geotransform, raster_size_x, raster_size_y, pixel_fun... | 3,428 |
def tau_profile(ncols,vshifts,vdop,which_line,wave_cut_off=2.0):
"""
Computes a Lyman-alpha Voigt profile for HI or DI given column density,
velocity centroid, and b parameter.
"""
## defining rest wavelength, oscillator strength, and damping parameter
if which_line == 'h1':
lam0s,fs... | 3,429 |
def main(args):
"""
chandl's entry point.
:param args: Command-line arguments, with the program in position 0.
"""
args = _parse_args(args)
# sort out logging output and level
level = util.log_level_from_vebosity(args.verbosity)
root = logging.getLogger()
root.setLevel(level)
... | 3,430 |
def welcome_page():
""" On-boarding page
"""
g.project.update_on_boarding_state()
if g.project.on_boarding['import']:
return redirect(url_for('data_manager_blueprint.tasks_page'))
return flask.render_template(
'welcome.html',
config=g.project.config,
project=g.project... | 3,431 |
def remove_missing_entries(dataset):
"""Remove missing entries.
Some of the datasets have missing entries that sneak in as zero'd out
feature vectors. Get rid of them.
"""
for i, (X, y, w, ids) in enumerate(dataset.itershards()):
available_rows = X.any(axis=1)
X = X[available_rows]
y = y[availabl... | 3,432 |
def brokerUrl(host):
    """Build the job-queue broker URL for the given host.

    Workers and the flask app connect to different hosts, so the host is
    passed in while scheme/credentials/port come from ``CONFIG_JOB_QUEUE``.
    """
    template = '{broker_scheme}://{username}:{password}@{host}:{port}//'
    return template.format(host=host, **CONFIG_JOB_QUEUE)
def displayStat(statDict):
"""
Display formated result into screen
args :
- statDict (dict): data to display
"""
totalOccur = sum(occur for word, occur in statDict["wordcount"])
print "\n"
print "{:<20} : {:>6}".format("Number of lines", statDict["nbLines"])
print "... | 3,434 |
def show_lat_lon_gps(
move_data,
kind='scatter',
figsize=(21, 9),
plot_start_and_end=True,
return_fig=True,
save_fig=False,
name='show_gps_points.png',
):
"""
Generate a visualization with points [lat, lon] of dataset.
Parameters
----------
move_data : pymove.core.MoveDa... | 3,435 |
def filter_seqlets(seqlet_acts, seqlet_intervals, genome_fasta_file, end_distance=100, verbose=True):
""" Filter seqlets by valid chromosome coordinates. """
# read chromosome lengths
chr_lengths = {}
genome_fasta_open = pysam.Fastafile(genome_fasta_file)
for chrom in genome_fasta_open.references:
... | 3,436 |
def load_RIMO(path, comm=None):
"""
Load and broadcast the reduced instrument model,
a.k.a. focal plane database.
"""
# Read database, parse and broadcast
if comm is not None:
comm.Barrier()
timer = Timer()
timer.start()
RIMO = {}
if comm is None or comm.rank == 0:
... | 3,437 |
def local_shuffle(bed, loc='500000'):
"""
Randomize the location of each interval in `bed` by moving its
start location to within `loc` bp of its current location or to
its containing interval in `loc`.
Arguments:
bed - input bed file
loc - shuffle intervals to within this distance ... | 3,438 |
def format_top(data):
"""
Format "top" output
:param data: dict
:return: list
"""
result = []
if data:
if 'Titles' in data:
result.append(data['Titles'])
if 'Processes' in data:
for process in data['Processes']:
result.append(process)
... | 3,439 |
def process_arguments(parser):
"""This function parses the input arguments."""
args = parser.parse_args()
# Distribute input arguments
request = args.request
if "num_tests" in args:
num_tests = int(args.num_tests)
else:
num_tests = None
# Test validity of input arguments
... | 3,440 |
def generate_sections(logdata: pd.DataFrame):
"""
Generates a list of SectionDescriptors based on iMotions packets
SlideStart and SlideEnd.
If the first Slide related packet is an End packet, the first
descriptor will include all timestamps up to that packet, else it
will drop the packets before.
The last desc... | 3,441 |
def etree2dict(element):
"""Convert an element tree into a dict imitating how Yahoo Pipes does it.
"""
i = dict(element.items())
i.update(_make_content(i, element.text, strip=True))
for child in element:
tag = child.tag
value = etree2dict(child)
i.update(_make_content(i, val... | 3,442 |
def write_input_file(input_file, molecule, parameters):
"""Write LAMMPS input file"""
write_every = 10000
num_timesteps = int(parameters['sim_length'] / parameters['ts'] * 1e6)
mol_ids, surface_ids = parameters['mol_ids'], parameters['surface_ids']
atom_names = sorted(list(set(molecule.atoms)))
... | 3,443 |
def full_chain():
    """
    Handle GET `/chain`: return the node's full blockchain.
    Returns:
        Dict with the node's full blockchain list and its length,
        suitable for a JSON response.
    """
    logger.info("Received GET request for the full chain")
    chain = blockchain.chain
    return {"chain": chain, "length": len(chain)}
def check_files(in_file, out_file, args):
""" Check files exist/don't exist.
Parameters
----------
in_file : str:
the input file
out_file : str
the output file
args : parser args
any additional arguments from the parser
Raises
------
FileNotFound
in ... | 3,445 |
def _patched_is_incomplete_option(all_args, cmd_param):
"""Patched version of is_complete_option.
Fixes issue testing a cmd param against the current list of
args. Upstream version does not consider combined short form args
and so a command like `guild check -nt <auto>` doesn't work. The
patched ve... | 3,446 |
def confirm_install() -> bool:
"""
Confirms that update should be performed on an empty install
"""
message = (
"The pack you are trying to update doesn't have a pack-manifest.json file. "
"Unless you are doing a first install, *THIS SHOULD NOT HAPPEN*. If you are doing a first install, ... | 3,447 |
def f_cv(x, dt):
""" state transition function for a
constant velocity aircraft"""
F = np.array([[1, dt, 0.5*dt*dt, 0, 0, 0],
[0, 1, dt, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, dt, 0.5*dt*... | 3,448 |
def p_definition(p):
    # PLY parser rule: the docstring below IS the grammar production and
    # must stay byte-identical — do not edit it.
    """definition : PARSER_NAME COLON expr NL"""
    # Build the AST node: p[1]=name token, p[3]=expr, p[2]=COLON token,
    # p[4]=NL token (argument order follows Definition's constructor).
    p[0] = Definition(p[1], p[3], p[2], p[4])
async def generate(args: argparse.Namespace, core: Voice2JsonCore) -> None:
"""Generate randomish examples from intent graph."""
import networkx as nx
import rhasspynlu
# Make sure profile has been trained
assert core.check_trained(), "Not trained"
# Load settings
intent_graph_path = core.... | 3,450 |
def prompt_for_password(args):
    """
    Ensure ``args.password`` is set; prompt interactively when it is not.
    Returns the same ``args`` object, mutated in place if a prompt occurred.
    """
    if args.password:
        return args
    # No password on the command line: ask for it without echoing.
    args.password = getpass.getpass(
        prompt='Enter password for host %s and user %s: ' %
        (args.host, args.user))
    return args
def update_hparams(hparams, new_hparams):
""" Update existing with new hyperparameters """
if new_hparams is None:
return hparams
if isinstance(new_hparams, str) and new_hparams.endswith('.json'):
tf.logging.info("Overriding default hparams from JSON")
with open(new_hparams) as fh:
... | 3,452 |
def relative_subpackage_import(path: str, package: str) -> Any:
"""[summary]
Args:
path (str): [description]
package (str): [description].
Returns:
Any: [description]
"""
if not path.startswith('.'):
path = '.' + path
return importlib.import_module(pat... | 3,453 |
def convertToNpArray(train,test):
"""
Converts the data into numpy arrays
:param train: training data csv path
:param test: test data csv path
:return: training data and labels, test data and labels
"""
train_data = pd.read_csv(train, delimiter=',', quotechar='"',
... | 3,454 |
def update_layers(**kwargs):
"""
Update all the layers when base has been updated.
"""
pecha_id = get_pecha_id(kwargs["pecha_number"])
src_pecha_path = download_pecha(pecha_id)
click.echo(INFO.format(f"Updating base of {pecha_id} ..."))
src_opf_path = src_pecha_path / f"{pecha_id}.opf"
... | 3,455 |
def attachment_to_multidim_measurement(attachment, name=None):
"""Convert an OpenHTF test record attachment to a multi-dim measurement.
This is a best effort attempt to reverse, as some data is lost in converting
from a multidim to an attachment.
Args:
attachment: an `openhtf.test_record.Attachment` from ... | 3,456 |
def save_xyz_file(fname, R, Z, comment=""):
"""Units for R are expected to be Bohr and will be translated to Angstrom in output"""
assert len(R) == len(Z)
PERIODIC_TABLE = 'H He Li Be B C N O F Ne'.split()
ANGSTROM_IN_BOHR = 1.88973
with open(fname, 'w') as f:
f.write(str(len(R)) + '\n')
... | 3,457 |
def similarity_iou_2d(pred_boxes, true_boxes):
"""
Return intersection-over-union (Jaccard index) of boxes.
Both sets of boxes are expected to be in (cx, cy, w, h) format.
Arguments:
pred_boxes (Tensor[B, 4, N])
true_boxes (Tensor[B, 4, M])
Returns:
iou (Tensor[N, M]): the Nx... | 3,458 |
def get_atom_coords_by_names(residue, atom_names):
"""Given a ProDy Residue and a list of atom names, this attempts to select and return
all the atoms.
If atoms are not present, it substitutes the pad character in lieu of their
coordinates.
"""
coords = []
pad_coord = np.asarray([GLOBAL_PAD... | 3,459 |
def test_record_get_dynamic_url_function():
"""Record get_dynamic_url function sends properly formated get request."""
response = record.get_dynamic_url('example.com', record_id=1234)
assert response.success
payload = response.payload
assert payload['url'] == 'https://api.cloudns.net/dns/get-dynami... | 3,460 |
def calculate_chord(radius, arc_degrees):
"""
Please see the wikipedia link for more information on how this works.
https://en.wikipedia.org/wiki/Chord_(geometry)
"""
# Calculate the arc_degrees in radians.
# We need this because sin() expects it.
arc_radians = radians(arc_degrees)
# C... | 3,461 |
def broadcast(right, left, left_fk=None, right_pk=None, keep_right_index=False):
"""
Re-indexes a series or data frame (right) to align with
another (left) series or data frame via foreign key relationship.
The index or keys on the right must be unique (i.e. this only supports
1:1 or 1:m relationhip... | 3,462 |
def _is_arraylike(arr):
"""Check if object is an array."""
return (
hasattr(arr, "shape")
and hasattr(arr, "dtype")
and hasattr(arr, "__array__")
and hasattr(arr, "ndim")
) | 3,463 |
def exportCandidatePairs(candidatePairs, output_file, log, numReads=None):
"""
Export candidate pairs to a file.
The type of file is determined on the provided filename for output_file.
Supported filetypes: txt, json, pickle (python) and csv.
"""
tim = time.clock()
# Output file extension
ext = output_file.rspl... | 3,464 |
def test_deep_agg_feat_chain(es):
"""
Agg feat of agg feat:
region.Mean(customer.Count(Log))
"""
customer_count_feat = ft.Feature(es['log']['id'], parent_entity=es['customers'], primitive=Count)
region_avg_feat = ft.Feature(customer_count_feat, parent_entity=es[u'régions'], primitive=Mean)
... | 3,465 |
def tokenize_docstring(text):
"""Tokenize docstrings.
Args:
text: A docstring to be tokenized.
Returns:
A list of strings representing the tokens in the docstring.
"""
en = spacy.load('en')
tokens = en.tokenizer(text.decode('utf8'))
return [token.text.lower() for token in tokens if not token.is_... | 3,466 |
def conv_output_length(input_length, filter_size,
border_mode, stride, dilation=1):
"""Determines output length of a convolution given input length.
# Arguments
input_length: integer.
filter_size: integer.
border_mode: one of "same", "valid", "full".
strid... | 3,467 |
def source_ccp4():
    """Return the bash command that sources the CCP4 setup script.

    Returns None on Windows, where sourcing a sh script is not applicable.
    Reads the install location from the ``CCP4`` environment variable.
    """
    if os.name == "nt":
        return None
    setup_script = os.path.join(os.environ["CCP4"], "bin", "ccp4.setup-sh")
    return "source {}".format(setup_script)
def dblHour():
    """(read-only) Array of doubles containing time value in hours for time-sampled monitor values; Empty if frequency-sampled values for harmonics solution (see dblFreq)"""
    # Thin wrapper over the native library accessor; presumably returns a
    # float64 array view — confirm against get_float64_array's contract.
    return get_float64_array(lib.Monitors_Get_dblHour)
def select_uuid_like_indexes_on_table(model, cursor):
"""
Gets a list of database index names for the given model for the
uuid-containing fields that have had a like-index created on them.
:param model: Django model
:param cursor: database connection cursor
:return: list of database rows; the f... | 3,470 |
def retrieve_jambalaya(request):
"""
Retrieve a jambalaya recipe by name or country of origin
---
serializer: JambalayaSerializer
parameters:
- name: name
description: name as found in recipe
type: string
paramType: query
required: false
- name... | 3,471 |
def get_pymatgen(optimade_structure: OptimadeStructure) -> Union[Structure, Molecule]:
"""Get pymatgen `Structure` or `Molecule` from OPTIMADE structure.
This function will return either a pymatgen `Structure` or `Molecule` based
on the periodicity or periodic dimensionality of OPTIMADE structure.
For... | 3,472 |
def get_meshgrid_samples(lower, upper, mesh_size: tuple, dtype) ->\
torch.Tensor:
"""
Often we want to get the mesh samples in a box lower <= x <= upper.
This returns a torch tensor of size (prod(mesh_size), sample_dim), where
each row is a sample in the meshgrid.
"""
sample_dim = len(me... | 3,473 |
def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes,
colors):
"""
Function to return the html-string of the node drawings for the
gantt chart
Parameters
----------
start : datetime.datetime obj
start time for first node
nodes_list : list
... | 3,474 |
def has_understood_request(
sys_nlu: dict, slot: str, domain: str, lowercase_slots: bool = True
) -> bool:
"""Check if the system has understood a user request in a particular domain."""
# assume perfect system if NLU not available
if not sys_nlu:
return True
sys_nlu_requested = get_turn_a... | 3,475 |
def lengthOfLongestSubstring(s):
"""
:type s: str
:rtype: int
"""
res = ""
n = 0
for i in s:
if i not in res:
res = res + i
else:
indexofi = res.find(i)
res = res[indexofi+1::] + i
k = len(res)
if k > n:
n = k
... | 3,476 |
async def get_sequence_metadata(checksum: str, accept: str = ""):
"""Return Refget sequence metadata based on checksum value."""
headers = Headers()
url_path = "sequence/" + checksum + "/metadata"
try:
result = await create_request_coroutine(
url_list=metadata_url_list(checksum),
... | 3,477 |
def getattrs(o, *attrs, **kwargs):
"""
>>> getattrs((), '__iter__', '__name__', 'strip')('_')
'iter'
>>> getattrs((), 'foo', 'bar', default=0)
0
"""
if 'default' in kwargs:
default = kwargs['default']
c = o
for attr in attrs:
try:
c = getat... | 3,478 |
def maria_create_account(params):
"""root user and dbuser are created at startup.
grant all to dbuser is all we need to do after the DB starts
:type params: dict
"""
error_msg = 'ERROR: mariadb_util; maria_create_account; '
error_msg += 'action: %s user: %s error: %s'
password = Config.accou... | 3,479 |
def compute_coef_xz(y_val, coef_3d):
"""
compute the 2D polynoimal coefficients for a given x
:param x_val: value of x
:param coef_3d: the original 3D polynomials
:return:
"""
coef_xz = np.zeros((coef_3d.shape[1], coef_3d.shape[2]), dtype=coef_3d.dtype)
max_degree_y = coef_3d.shape[0] - ... | 3,480 |
def bbox_overlaps(bboxes1, bboxes2, mode='iou'):
"""Calculate the ious between each bbox of bboxes1 and bboxes2.
Args:
bboxes1(ndarray): shape (n, 4)
bboxes2(ndarray): shape (k, 4)
mode(str): iou (intersection over union) or iof (intersection
over foreground)
Returns:
... | 3,481 |
def me_length_filter(me_iv_pairs, min_length=100):
"""Returns list of (InsertionVertices, InsertionVertices) tuples
with those containing paths going backwards through the ME sequence
filtered out
"""
filtered = []
for iv_pair in me_iv_pairs:
enter_iv, exit_iv = iv_pair
me_seq_l... | 3,482 |
def main_cli(ctx, args=None, init=False, tables=False, refresh=False, prompt=False):
"""Helper to print GBQ schema information. Pass '.' to show all fields.
$ dimschema <OPT:table-name> <field-pattern>
"""
click.secho("Dimensions GBQ schema-helper (" + VERSION + ")", dim=True)
config = Config... | 3,483 |
def test_updated_large_investor_profile_synced(es_with_signals):
"""Test that when an large investor profile is updated it is synced to ES."""
large_investor_profile = LargeCapitalInvestorProfileFactory()
large_investor_profile.investable_capital = 12345
large_investor_profile.save()
es_with_signals... | 3,484 |
def build_big_map_schema(data, schema: Schema) -> BigMapSchema:
""" Generate Big_map schema from the contract storage
:param data: Raw storage (Micheline expression)
:param schema: Storage schema
:returns: Mappings: Big_map id to JSON path and vice versa
:rtype: BigMapSchema
"""
bin_to_id =... | 3,485 |
def test_initial_tokens():
"""Checks the procedure for finding the initial tokens"""
csnet = example.control.cs_network()
initial_step = Fraction(1, 5)
slaves, connections = csnet
step_sizes: cs.StepSizes = {
name: (idx + 5) * initial_step for idx, name in enumerate(slaves.keys())
}
... | 3,486 |
def _get(session, urlTail):
# type: (Session, str) -> Dict
"""Make an HTTP(s) GET request to Batfish coordinator.
:raises SSLError if SSL connection failed
:raises ConnectionError if the coordinator is not available
"""
headers = {CoordConsts.HTTP_HEADER_BATFISH_APIKEY: session.apiKey,
... | 3,487 |
def compose(fns):
    """Compose *fns* left to right into a single callable.

    The first function receives the call's positional arguments; each
    subsequent function receives the previous result unpacked as
    positional arguments, so intermediate results must be iterable.
    """
    def _chained(*args, fns_):
        result = fns_[0](*args)
        for fn in fns_[1:]:
            result = fn(*result)
        return result
    # Bind the function list via partial so the composition is picklable
    # with respect to its argument, mirroring the original closure-free form.
    return functools.partial(_chained, fns_=fns)
def load_participants_file():
"""
Load participants.tsv file and build pandas DF of participants
This function assumes that the file participants.tsv is present in the -path-results
:return: participants: pandas dataframe
"""
participants = pd.read_csv(os.path.join('participants.tsv'), sep="\t")... | 3,489 |
def test_word3():
    """Run test3: feed 'foobarbaz' to the command and check the cumulative-prefix output."""
    result = getoutput(open + ' foobarbaz')
    assert result.rstrip() == 'ffofoofoobfoobafoobarfoobarbfoobarbafoobarbaz'
def clr_tilcmt(*args):
    """
    clr_tilcmt(ea)
    """
    # SWIG-style thin wrapper: forwards directly to the native module.
    # NOTE(review): *ea* is presumably an effective address — confirm
    # against the IDA SDK documentation for nalt.hpp.
    return _ida_nalt.clr_tilcmt(*args)
def canarize_service(args, input_yaml, labels={}):
"""
Create a canary for an existing Service.
We do this by:
- adding a '-canary' suffix to the name of the Service
- adding a '-canary' suffix to all the labels in the Service selector
"""
res = []
# append the -canary to the Service na... | 3,492 |
def get_query_results(query):
"""
Get the data with common fields from the Close using the provided query.
:param query: Any Close search query eg. 'lead_status:Potential has:emails'
:return: 2D array with a header and results
"""
api = Client(CLOSE_API_KEY)
leads = api.get('lead', params={... | 3,493 |
def width_pcc_dmera_2d(n, D, supp):
"""
Optimal width of the circuit for the pcc after compression
Args:
n(int): Number of scales
D(int): Number of cycles per scale
supp(list): List of integers
Returns:
int: Optimal width
"""
supp_con = [convert_2d_to_1d(c,n)... | 3,494 |
def global_delete(key):
    """Delete an entity from the global cache.

    Args:
        key (bytes): The key to delete.

    Returns:
        tasklets.Future: Eventual result will be ``None``.
    """
    # Deletions are funneled through a shared batch for efficiency.
    delete_batch = _batch.get_batch(_GlobalCacheDeleteBatch)
    return delete_batch.add(key)
def get_drawing_x(image: Image = None) -> float:
"""
Get the x coordinate value of the current drawing position (x,y).
Some drawing functions will use the current pos to draw.(see line_to(),line_rel(),move_to(),move_rel()).
:param image: the target image whose drawing pos is to be gotten. None means i... | 3,496 |
async def get_by_name(username: str) -> Dict[str, Any]:
"""
Retrieve one row based by its name. Return object is a dict.
Raises if the record was not found.
"""
username = username.lower()
for user in Database:
if(user["username"] == username):
return user
raise Recor... | 3,497 |
def semantics(address: str) -> "show_semantics_page":
    """Render the semantics page for *address*.

    Fetches semantics for the default chain from the app's own API,
    authenticating with the configured API key.
    """
    url = f"{request.url_root}api/semantics/{EthConfig.DEFAULT_CHAIN}/{address}"
    headers = {"x-api-key": current_app.config["API_KEY"]}
    api_response = requests.get(url, headers=headers)
    return show_semantics_page(api_response)
def update_trail(clt, trail_name, log_group_arn, role_arn):
"""
Update Trail to integrate with CloudWatch Logs
"""
try:
result = clt.update_trail(
Name = trail_name,
CloudWatchLogsLogGroupArn = log_group_arn,
CloudWatchLogsRoleArn = role_arn,
)
except ClientError as e:
print(e.r... | 3,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.