content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def serve_application(
kb_model_dir: Text = KB_DEFAULT_MODEL_DIR,
kb_data_dir: Text = KB_DEFAULT_DATA_DIR,
qa_model_dir: Text = QA_MODEL_DIR,
es_url: Text = ELASTICSEARCH_URL,
index: Text = QA_INDEX,
interface: Optional[Text] = DEFAULT_SERVER_INTERFACE,
port: int = DEFAULT_SERVER_PORT,
c... | 500 |
def decimal_to_boolean_list(num: int, padding: int = 0) -> Tuple[bool, ...]:
"""
Convert a decimal number into a tuple of booleans, representing its binary value.
"""
# Convert the decimal into binary
binary = bin(num).replace('0b', '').zfill(padding)
# Return a tuple of booleans, one for each ... | 501 |
def test_initialize_database(pristine_db_engine: Engine):
"""Test that the database can be correctly initialized and that the
expected tables and views exist.
"""
config = PacsaniniConfig(
storage=StorageConfig(resources=str(pristine_db_engine.url), directory="./")
)
utils.initialize_dat... | 502 |
def get_credentials(fn, url, username, allowed):
"""Call fn and return the credentials object"""
url_str = maybe_string(url)
username_str = maybe_string(username)
creds = fn(url_str, username_str, allowed)
credential_type = getattr(creds, 'credential_type', None)
credential_tuple = getattr(cr... | 503 |
def tpack(text, width=100):
"""Pack a list of words into lines, so long as each line (including
intervening spaces) is no longer than _width_"""
lines = [text[0]]
for word in text[1:]:
if len(lines[-1]) + 1 + len(word) <= width:
lines[-1] += (' ' + word)
else:
lin... | 504 |
async def on_ready():
    """Announce on stdout that the bot has finished logging in."""
    # NOTE(review): `bot` is a module-level Discord client — assumed configured elsewhere.
    print(f"Logged in as: {bot.user.name}\n")
def _log_evidence_func(arr):
"""Returns an estimate of the log evidence from a set of log importance wegiths
in arr. arr has shape TxN where T is the number of trials and N is the number
of samples for estimation.
Args:
arr (torch.FloatTensor of shape TxN): log importance weights
Returns:
... | 506 |
def _accumulated_moments_for_inference(mean, variance, is_training):
"""Use accumulated statistics for moments during inference.
After training the user is responsible for filling the accumulators with the
actual values. See _UpdateBnAccumulators() in eval_gan_lib.py for an example.
Args:
mean: Tensor of ... | 507 |
def parse_bgp_attr(atype, aval_buf):
"""Given a type and value buffer, parses a BGP attribute and returns the value
parsed"""
if atype == BGP_ATYPE_ORIGIN:
attr = 'ORIGIN'
if len(aval_buf) != 1:
return None, None, -1
aval = struct.unpack('B', aval_buf)[0]
aval = B... | 508 |
def _get_embedding_filename(base_dir, split_name, step):
"""Create the filename for embeddings."""
return os.path.join(base_dir, str(step), f'{split_name}-embeddings.tfrecord') | 509 |
def getUsage():
""" Get usage information about running APBS via Python
Returns (usage)
usage: Text about running APBS via Python
"""
usage = "\n\n\
----------------------------------------------------------------------\n\
This driver program calculates electrostatic potentials,... | 510 |
def run_length_div_decode(x, n, divisor):
"""Decodes a run length encoded array and scales/converts integer values to float
Parameters
----------
x : encoded array of integers (value, repeat pairs)
n : number of element in decoded array
"""
y = np.empty(n, dtype=np.float32)
start = 0
... | 511 |
def parse_args():
"""Build file label list"""
parser = argparse.ArgumentParser(description='Build file label list')
parser.add_argument('data_path', type=str,
help='root directory for the dataset')
parser.add_argument('dataset', type=str, choices=[
'ucf101... | 512 |
def modelFnBuilder(config):
"""Returns 'model_fn' closure for Estimator."""
def model_fn(features, labels, mode, params):
print('*** Features ***')
for name in sorted(features.keys()):
tf.logging.info(' name = {}, shape = {}'.format(name, features[name].shape))
is_training = (mode == tf.estimator.... | 513 |
def jack_is_dull():
    """Print the sentence 'All work and no play makes Jack a dull boy.' on one line.

    The original used Python 2 print statements (``print i,``), which are a
    syntax error under Python 3; joining the words reproduces the same
    single-line, space-separated output followed by one newline.
    """
    words = ["All", "work", "and", "no", "play", "makes", "Jack", "a", "dull", "boy."]
    print(" ".join(words))
def accreds_validate(request, pk):
"""Validate an accred"""
accreds = [get_object_or_404(Accreditation, pk=pk_, end_date=None) for pk_ in filter(lambda x: x, pk.split(','))]
multi_obj = len(accreds) > 1
for accred in accreds:
if not accred.rights_can('VALIDATE', request.user):
rai... | 515 |
def need_verified_email(request, *args, **kwargs):  # pylint: disable=unused-argument
    """Render the 401 error page shown when the user's edX email is unverified."""
    template = "verify_email.html"
    return standard_error_page(request, 401, template)
def random_masking(token_ids_all):
"""对输入进行随机mask,增加泛化能力
"""
result = []
for token_ids in token_ids_all:
rands = np.random.random(len(token_ids))
result.append([
t if r > 0.15 else np.random.choice(token_ids)
for r, t in zip(rands, token_ids)
])
return... | 517 |
def is_directory(dir_path):
    """Validates that the argument passed into 'argparse' is a directory.

    Returns the path unchanged when it names a directory; raises ValueError
    otherwise.
    """
    if os.path.isdir(dir_path):
        return dir_path
    raise ValueError('Path is not a directory: %s' % dir_path)
def test042():
    """
    check that a modified loaded version of p isn't equal
    """
    assert isinstance(p, Pod)
    source = get_python_source(p, style="black")
    rebuilt = eval(source, globals(), locals())
    assert isinstance(rebuilt, Pod)
    # Mutate one nested field so the round-tripped copy must compare unequal to p.
    rebuilt.spec.containers[1].lifecycle.postStart.httpGet.port = 4
    assert rebuilt != p
def used_caches_and_sources(layers, caches, sources):
"""
Find used cache and source names in layers and caches configuration.
"""
used_layer_sources = find_layer_sources(layers)
used_cache_sources = find_cache_sources(caches)
all_used_sources = used_layer_sources.union(used_cache_sources)
... | 520 |
def encode_labels(
labels: Union[list, np.ndarray, pd.Series],
multi_label: bool = False,
sep: str = '|'
):
"""Encode labels
Return coded labels, encoder, and decoder.
Examples:
>>> # multi-class problem
>>> labels = ['OK', 'OK', 'NG1', 'NG2', 'OK']
>>> encode_labels(la... | 521 |
def test_perspective_transform():
    """Calling perspectiveTransformation() with no arguments must raise TypeError."""
    manager = SimplificationManager()
    with pytest.raises(TypeError):
        manager.perspectiveTransformation()
def sigmoid_xent(*, logits, labels, reduction=True):
"""Computes a sigmoid cross-entropy (Bernoulli NLL) loss over examples."""
log_p = jax.nn.log_sigmoid(logits)
log_not_p = jax.nn.log_sigmoid(-logits)
nll = -jnp.sum(labels * log_p + (1. - labels) * log_not_p, axis=-1)
return jnp.mean(nll) if reduction else ... | 523 |
def is_palindrome(s: str) -> bool:
"""Return whether a string is a palindrome
This is as efficient as you can get when computing whether a string is a
palindrome. It runs in O(n) time and O(1) space.
"""
if len(s) <= 1:
return True
i = 0
j = len(s) - 1
while i < j:
if ... | 524 |
def accuracy(pred_cls, true_cls, nclass=3):
"""
compute per-node classification accuracy
"""
accu = []
for i in range(nclass):
intersect = ((pred_cls == i) + (true_cls == i)).eq(2).sum().item()
thiscls = (true_cls == i).sum().item()
accu.append(intersect / thiscls)
return... | 525 |
def my_hostogram(gray, bins):
""" pixel values has to be within bins range, otherwise index out of range, for example
if pixel 400th has value 70, but bins are -> [0...40], then histogram[70] yields IOR
"""
histogram = [0 for i in bins]
for i in range(gray.shape[0]):
for j in range(gray.... | 526 |
def reverse(password, position_x, position_y):
    """Reverse from position_x to position_y in password.

    Mutates *password* in place (it must support slice assignment, e.g. a
    list) and also returns it for convenience.
    """
    stop = position_y + 1
    segment = password[position_x:stop]
    password[position_x:stop] = segment[::-1]
    return password
def test_encode_mixed_items():
    """Doctest: encode() accepts bare tokens and (key, value) pairs mixed in one
    list, preserving input order; an empty key renders its value as a bare token.

    >>> encode(['a', ('b', 'c')])
    'a; b=c;'
    >>> encode([('', 'a'), ('b', 'c')])
    'a; b=c;'
    >>> encode([('b', 'c'), 'a'])
    'b=c; a;'
    >>> encode([('b', 'c'), ('', 'a')])
    'b=c; a;'
    """
def gen_sentence(
start_seq: str = None,
N: int = 4,
prob: float = 0.001,
output_str: bool = True
) -> Union[List[str], str]:
"""
Text generator using Thai2fit
:param str start_seq: word for begin word.
:param int N: number of word.
:param bool output_str: output is str
:param bool dupl... | 529 |
def GetBoolValueFromString(s):
    """Returns True for true/1 strings, and False for false/0, None otherwise."""
    if s:
        lowered = s.lower()
        if lowered == 'true':
            return True
        if lowered == 'false':
            return False
    if s == '1':
        return True
    if s == '0':
        return False
    # Anything else (including None and empty strings) is unrecognised.
    return None
def genomic_del6_abs_37(genomic_del6_37_loc):
    """Create test fixture absolute copy number variation"""
    fixture = {"type": "AbsoluteCopyNumber"}
    fixture["_id"] = "ga4gh:VAC.60XjT6dzYKX8rn6ocG4AVAxCoUFfdjI6"
    fixture["subject"] = genomic_del6_37_loc
    fixture["copies"] = {"type": "Number", "value": 1}
    return fixture
def bar_chart_classification_report(classification_report, title, folder):
"""
Plot a bar graph which sums up the classification report of the scikit
learn tool.
:param classification_report: Sliced classification report : classes,
toPlot, support. toPlot must be a tuple (precision, recall, f1-score... | 532 |
def get_typical_qualifications(cfg):
"""
create qualification list to filter just workers with:
- + 98% approval rate
- + 500 or more accepted HIT
- Location USA
:param cfg:
:return:
"""
if not cfg['hit_type'].getboolean('apply_qualification'):
return []
quali... | 533 |
def remove_files(pattern=None, paths=None, ensure_success=True):
"""
Removes a file from disk.
Parameters
----------
pattern : str, optional
paths : iterable of str, optional
ensure_success : bool, optional
"""
assert [pattern, paths].count(None) == 1
if (paths is None):
paths = glob.glob(patte... | 534 |
def _gen_codegen_dev_to_reg(
nybble: int,
) -> Callable[[Context, Op], Tuple[Context, Op]]:
"""'Code generator generator' for device-to-register instructions."""
def codegen_dev_to_reg(context: Context, op: Op) -> Tuple[Context, Op]:
op = op._replace(args=parse_args_if_able(
_PARSE_OPTIONS, context... | 535 |
def _shell_wrap_inner(command, shell=True, sudo_prefix=None):
"""
Conditionally wrap given command in env.shell (while honoring sudo.)
(Modified from fabric.operations._shell_wrap to avoid double escaping,
as the wrapping host command would also get shell escaped.)
"""
# Honor env.shell, while ... | 536 |
def events_until(events: List[ScheduleEvent],
until: time, *, after: time = None) \
-> List[ScheduleEvent]:
"""
Return events up to and including the given time.
Keyword arguments:
after -- if specified, only events after this time will be included.
"""
if afte... | 537 |
def get_ip():
    """
    Get local ip from socket connection
    :return: IP Addr string
    """
    # A UDP connect() sends no packets; it only makes the OS pick the outbound
    # interface. The context manager closes the socket (the original leaked it).
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        s.connect(('bing.com', 80))
        return s.getsockname()[0]
def test_compute_projection(shape):
"""Test PCA projection of X vs X.T @ X"""
X = jax.random.uniform(random.generate_key(), shape=shape)
XTX = X.T @ X
k = 1 if X.ndim == 1 else min(X.shape)
p1 = compute_projection(X, k)
p2 = compute_projection(XTX, k)
np.testing.assert_array_almost_equal(a... | 539 |
def check_trash_path(filename):
    """Check trash directory exist for given filename, create it otherwise"""
    trash_path = get_trash_path(filename)
    # exist_ok avoids the TOCTOU race between a separate exists() check and
    # makedirs(): another process creating the directory in between no longer
    # raises FileExistsError.
    os.makedirs(trash_path, exist_ok=True)
def shave_marks(txt):
    """Strip all diacritic (combining) marks from *txt*."""
    # Decompose each character into its base character plus combining marks (NFD),
    # drop every combining mark, then recompose to canonical form (NFC).
    decomposed = unicodedata.normalize('NFD', txt)
    base_only = ''.join(ch for ch in decomposed
                        if not unicodedata.combining(ch))
    return unicodedata.normalize('NFC', base_only)
def test_demag_2d_pbc():
"""
Attempt to check that demag with 2d_pbc option does
not give a nonsensical answer.
"""
A=1.3e-11
Ms=8.6e5
n = 40
d = 2.5
mesh = fidimag.common.CuboidMesh(nx=n, ny=n, nz=1, dx=d, dy=d, dz=d, unit_length=1e-9, periodicity=(True, True, False))
sim =... | 542 |
def _kld_gamma(p_data, q_data):
"""
Computes the Kullback-Leibler divergence between two gamma PDFs
Parameters
----------
p_data: np.array
Data of the first process
q_data: np.array
Data of the first process
Returns
-------
r_kld_gamma: numeric
Kullba... | 543 |
def pickAColor() -> colors.Color:  # pylint: disable=invalid-name
    """ Not Implemented
    :rtype colors.Color:
    """
    suffix = " is not implemented in the MediaComp.jes module"
    raise NotImplementedError("pickAColor()" + suffix)
def interpolate_rat(nodes, values, use_mp=False):
"""Compute a rational function which interpolates the given nodes/values.
Args:
nodes (array): the interpolation nodes; must have odd length and
be passed in strictly increasing or decreasing order
values (array): the values at the i... | 545 |
def binder_update_page_range(payload):
"""Parser for `binder_update_page_range`"""
try:
match = re.match(binder_update_page_range_pattern, payload)
if match:
match_group_dict = match.groupdict()
return BinderUpdatePageRange(int(match.group(1)), int(match.group(2)), int(ma... | 546 |
def generate_breadcrumb(url: str, separator: str) -> str:
"""
Fungsi yang menerima input berupa string url dan separator
dan mengembalikan string yang berisi navigasi breadcrumb.
Halaman Wikipedia tentang navigasi breadcrumb:
https://en.wikipedia.org/wiki/Breadcrumb_navigation
Contoh:
>>> ... | 547 |
def cleanup_files(session):
"""Deletes all entries in table File in session without any entry
in table Match.
Parameters:
session: An SQLAlchemy database session.
Side-effects:
All entries in table File whose id do not exist in Match.file_id
deleted.
"""
... | 548 |
def publish_events():
    """
    Publish a Py_ps_event_msg
    """
    # Build a sample event message and hand it to the events bus.
    msg = Py_ps_event_msg()
    msg.dest_guid = 1234
    msg.id = 9876
    msg.data.kind = msg.data.kinds['PARAMETER_VALUE_STRING']
    msg.data.value = b"Hello, world!"
    events.publish(msg)
def get_work_path():
    """Return a timestamped working directory path, creating it if it does not exist.

    :return: work_path str
    """
    work_dir = config.WORK_DIR
    # One directory per invocation, named by the current timestamp.
    work_path = f'{work_dir}/{time.strftime("%Y%m%d%H%M%S")}'
    print(f'work path: {work_path}')
    # exist_ok avoids the race between a separate exists() check and creation.
    os.makedirs(work_path, exist_ok=True)
    return work_path
def extract_all_sentences(dataset_path, features_outfile=None):
""" Extract features from sentences using pretrained universal sentence embeddings and save them in a pickle file
:param dataset_path: the path of the dataset to use
:param features_outfile: file used to store the extracted features
:retur... | 551 |
def _load_yaml(blueoil_config_filename):
"""load blueoil config yaml
Args:
blueoil_config_filename(str): File path of blueoil config yaml file.
Returns:
blueoil_config(dict): dict of blueoil config.
"""
if not os.path.exists(blueoil_config_filename):
FileNotFoundError("File... | 552 |
def bat_activity(request):
""" """
# TODO:
wave_file_name = 'WURB-2_20160908T220024+0200_N57.6627E12.6393_TE-384.wav'
# Pandas data frame
peak_df = None
try:
# Prod:
peak_df = pd.read_csv('/srv/django/cloudedbats/src/test_data/peak_file.txt',
s... | 553 |
def search_sorted(array, value):
"""
Searches the given sorted array for the given value using a
BinarySearch which should execute in O(log N).
array a 1D sorted numerical array
value the numerical value to search for
returns index of array closest to value
returns None if valu... | 554 |
def to_doc(d: DatasetDoc) -> Dict:
"""
Serialise a DatasetDoc to a dict
If you plan to write this out as a yaml file on disk, you're
better off with `to_formatted_doc()`.
"""
doc = attr.asdict(
d,
recurse=True,
dict_factory=dict,
# Exclude fields that are the def... | 555 |
def test_search_iter():
    """Test iteration of search results."""
    results = cs.search('glucose')
    for item in results:
        assert isinstance(item.csid, int)
def annealing_exp(start, end, pct):
    """Exponentially anneal from start to end as pct goes from 0.0 to 1.0."""
    # Geometric interpolation: at pct=0 the ratio term is 1, at pct=1 it is end/start.
    ratio = end / start
    return start * ratio ** pct
def _compile_theano_function(param, vars, givens=None):
"""Compile theano function for a given parameter and input variables.
This function is memoized to avoid repeating costly theano compilations
when repeatedly drawing values, which is done when generating posterior
predictive samples.
Paramete... | 558 |
def do_fertilization(cookies):
"""
施肥
"""
global totalLevelApplyFertilizerAmount
global totalLevelNeedFertilizerAmount
global totalRewardAmount
global can_steal_list
headers = {
'Host': 'farm.dmall.com',
'Connection': 'keep-alive',
'Pragma': 'no-cache',
'... | 559 |
def load_data():
""" Helper function to load and initialize data
"""
global input_shape, X_train, y_train_labels, y_train, X_test, y_test_labels, y_test
(X_train, y_train_labels), (X_test, y_test_labels) = mnist.load_data()
X_train, X_test, input_shape = preprocess_image_data(X_train, X_test, img_ro... | 560 |
def redirect(url):
    """Create a response object representing redirection.
    :param url: a URL
    :return: a Response
    """
    # 302 Found with the target URL in the Location header.
    return Response(headers={"Location": url}, code=HTTPStatus.FOUND)
def test_is_valid_password_v2_false1():
    """
    Test of is_valid_password_v2() with a false example, take 1
    """
    policy = {"low": 1, "high": 2, "letter": "w", "password": "ww"}
    assert not is_valid_password_v2(policy)
def remove_ordereddict(data, dangerous=True):
    """turns a nested OrderedDict dict into a regular dictionary.
    dangerous=True will replace unserializable values with the string '[unserializable]' """
    # Round-trip through JSON: serialisation discards the OrderedDict type,
    # so parsing back yields plain dicts all the way down.
    serialized = json_dumps(data, dangerous)
    return json.loads(serialized)
def test_list_base64_binary_max_length_2_nistxml_sv_iv_list_base64_binary_max_length_3_5(mode, save_output, output_format):
"""
Type list/base64Binary is restricted by facet maxLength with value 7.
"""
assert_bindings(
schema="nistData/list/base64Binary/Schema+Instance/NISTSchema-SV-IV-list-base... | 564 |
def evaluate_v1(tokens: List[str]) -> Number:
    """Evaluate a tokenized expression by folding each token into the stack,
    then reading the final result off the stack."""
    stack: List = []
    for tok in tokens:
        stack = consume_token(tok, stack)
    return get_result_from_stack(stack)
def load_gromacs_reaction_coord_files(us_path, n_wins, step=10, verbose=False):
"""
Parameters
----------
us_path: string
Path to the xvg files with sampled reaction coordinate values
n_wins: integer
Number of umbrella runs
step: integer
Time interval for analysis
ver... | 566 |
def test_get_mount_target_in_az_no_az_id_match_to_az_name(mocker):
"""
When the az_name provided does not have a valid az_id
"""
get_mount_targets_info_response = [
MOUNT_TARGET_INFO,
{
"MountTargetId": "fsmt-ijklmnop",
"AvailabilityZoneId": "use2-az3",
... | 567 |
def h_customer_role_playing(
process_configuration: Dict[str, str], h_customer: Hub, staging_table: StagingTable
) -> RolePlayingHub:
"""Define h_customer_role_playing test hub.
Args:
process_configuration: Process configuration fixture value.
h_customer: Hub customer fixture value.
... | 568 |
def call_port(command, arguments):
"""
This function calls the port executable with the specified parameters,
printing the output to stdout.
"""
command = ["port", command] + arguments
if (os.getuid != 0):
print("Using sudo to execute port.")
return subprocess.call(["sudo"] + com... | 569 |
def _classify(text:str, name:str=None, service:str=None, language:Language=None):
"""Takes the input text (and optional filename) and makes a best effort to extract/label the code content needed for classification.
E.g. a markdown file has codeblocks extracted and labeled with language, and a code file is ... | 570 |
def parse_command_line_arguments():
"""
Parse the command-line arguments being passed to RMG Py. This uses the
:mod:`argparse` module, which ensures that the command-line arguments are
sensible, parses them, and returns them.
"""
parser = argparse.ArgumentParser(formatter_class=argparse.Argumen... | 571 |
def _js_requires(offline: bool = False) -> str:
"""Format JS requires for Plotly dependency.
Args:
offline: if True, inject entire Plotly library for offline use.
Returns:
str: <script> block with Plotly dependency.
"""
helper_fxns = _load_js_resource(_AxPlotJSResources.HELPER_FXN... | 572 |
def resnet_retinanet(num_classes, backbone='resnet50', inputs=None, modifier=None, **kwargs):
""" Constructs a retinanet model using a resnet backbone.
Args
num_classes: Number of classes to predict.
backbone: Which backbone to use (one of ('resnet50', 'resnet101', 'resnet152')).
inputs... | 573 |
def trans_full_matrix_projection(input, size=0, param_attr=None):
"""
Different from full_matrix_projection, this projection performs matrix
multiplication, using transpose of weight.
.. math::
out.row[i] += in.row[i] * w^\mathrm{T}
:math:`w^\mathrm{T}` means transpose of weight.
The ... | 574 |
def encrypt_module(module_path, output_path, key, removal_expression=None):
"""Encrypts python file with into output path"""
with open(module_path, "r") as module_file:
module_content = module_file.read()
if removal_expression is not None:
module_content = _remove_tagged_source(module_cont... | 575 |
def create_permissions_and_grant_privileges(*args, **kwargs):
"""
Creates database permissions to assign to a user.
Creates django permissions that reflect what a corresponding database user is
allowed to do when directly logged into the database. These permissions are
translated into database priv... | 576 |
def http_head_deck_etag(gist_url):
"""Perform a HEAD against gist_url and return the etag."""
class HeadRequest(Request):
def get_method(self):
return 'HEAD'
head_request = HeadRequest(gist_url + '/raw')
response = urlopen(head_request)
headers = response.headers
etag = h... | 577 |
def updateContourProbabilities(contoursGroupedByImage, probabilityFunction):
"""Set probability that contour is salient based on a probability function."""
contourList = nonnullObjects(contoursGroupedByImage)
print "updateContourProbabilities"
for contour in contourList:
p = probabilityFuncti... | 578 |
def _get_fluxes(sol, reactions):
"""Get the primal values for a set of variables."""
fluxes = {
r.id: sol.fluxes.loc[r.community_id, r.global_id] for r in reactions
}
return pd.Series(fluxes) | 579 |
def compatible_elfs(elf1, elf2):
"""See if two ELFs are compatible
This compares the aspects of the ELF to see if they're compatible:
bit size, endianness, machine type, and operating system.
Parameters
----------
elf1 : ELFFile
elf2 : ELFFile
Returns
-------
True if compatibl... | 580 |
def initialize():
"""
Initializes the figure with reasonably sane settings--the most
important of which is telling matplotlib to use Type1 fonts (the ACM
paper format checker will complain endlessly if using Type3).
"""
matplotlib.rcParams.update({
'pdf.fonttype' : 42 # use Type1 fonts instead... | 581 |
def load_source(source, delete_on_exit):
"""Loads the given source code as a Python module."""
with tempfile.NamedTemporaryFile(
mode='w',
suffix='.py',
prefix='__autograph_generated_file',
delete=False,
encoding='utf-8') as f:
module_name = os.path.basename(f.name[:-3])
file_n... | 582 |
def _tear_down_response(data):
"""Helper function to extract header, payload and end from received response
data."""
response_header = data[2:17]
# Below is actually not used
response_payload_size = data[18]
response_payload = data[19:-2]
response_end = data[-2:]
return response_header, ... | 583 |
def read_data_from_bd(query,
host,
user,
port,
database,
password):
""" get data from abc database
arg:
query: sql
username: database username
password: database password
... | 584 |
def stack_atomic_call_middleware(q_dict, q_queryset, logger, middleware):
""" Calls the middleware function atomically.
* Returns cached queue on error or None """
cached_q_dict = q_dict[:]
cached_q_query = q_queryset.all()
try:
middleware(q_dict, q_queryset, logger)
except:
... | 585 |
def collate_with_neg_fn(generator):
    """Collate a list of datapoints into a batch, with negative samples in last half of batch."""
    users, items, item_attr, num_attr = collate_fn(generator)
    half = len(users) // 2
    # Mirror the first half of users into the second half to form negative pairs.
    users[half:] = users[:half]
    return users, items, item_attr, num_attr
def kudzify_logger(logger=None, format=BASIC_FORMAT):
"""Extends format string of a logger by request context placeholders.
It calls `kudzify_handler` on each handler registered to the given
logger. So this function must be called after handlers are configured.
"""
if not isinstance(logger, logging... | 587 |
def build_scenario_3(FW, verbosity=None):
"""
Tests if override is cleared when all switch behaviours go out of scope.
And tests switch command with opaque value.
Returns a list of 2-lists: [time, 0ary function] that describes exactly
what needs to be executed when. The 0ary functions return a fals... | 588 |
def main(environment_name, agent_cfg_file):
"""
Load, train and evaluate a Reinforcment Learning agent.
Parameters
----------
environment_name : str
agent_cfg_file : str
"""
cfg = load_cfg(agent_cfg_file)
# Set up environment and agent
env = gym.make(environment_name)
cfg['... | 589 |
def preorder(root):
    """Print a preorder traversal (root, left, right), space-separated."""
    if root is None:
        return
    print(root.data, end=" ")
    preorder(root.left)
    preorder(root.right)
def test_faceinfo_v4(photosdb4, uuid_dict):
""" Test FaceInfo object """
import json
for uuid in uuid_dict:
photo = photosdb4.get_photo(uuid)
faces = photo.face_info
assert len(faces) == len(uuid_dict[uuid])
for face in faces:
assert face.uuid in uuid_dict[uuid]
... | 591 |
def read(input):
    """Read an entire zonefile, returning an AST for it which contains formatting information."""
    actions = Actions()
    return _parse(input, actions=actions)
def generate_conditionally(text='welcome', random_seed=1, **kwargs):
"""
Input:
text - str
random_seed - integer
Output:
stroke - numpy 2D-array (T x 3)
"""
model = ConditionalStrokeModel.load(
str(MODEL_DIR / 'conditional-stroke-model'),
batch_size=1, rnn_steps=1,... | 593 |
def parse_field(
parser: argparse.ArgumentParser,
field: pydantic.fields.ModelField,
) -> None:
"""Adds standard pydantic field to argument parser.
Args:
parser (argparse.ArgumentParser): Argument parser to add to.
field (pydantic.fields.ModelField): Field to be added to parser.
... | 594 |
def test_properties_dataframe():
"""test if properties can be provided as a DataFrame"""
shape = (10, 2)
np.random.seed(0)
shape = (10, 2, 2)
data = np.random.random(shape)
data[:, 0, :] = 20 * data[:, 0, :]
properties = {'vector_type': np.array(['A', 'B'] * int(shape[0] / 2))}
propertie... | 595 |
def cross_entropy(pred, soft_targets):
    """ pred: unscaled logits
        soft_targets: target-distributions (i.e., sum to 1)

        Returns the mean (over the batch) soft-label cross-entropy.
    """
    log_probs = nn.LogSoftmax(dim=1)(pred)
    per_sample = torch.sum(-soft_targets * log_probs, dim=1)
    return torch.mean(per_sample)
def test_separations():
"""Test if sky separations are the same in all spherical coordinate systems.
This is a simple consistency check.
Sky separations computed between consecutive event positions should
be the same in any spherical coordinate system.
"""
table = Table.read('hess_event_list_2.... | 597 |
def turnAwayFrom(speed = SPEED, rotation = 0):
""" Turn robot away from a hazard """
if rotation > 20: # If hazard is >20° to the right:
turn(speed, -1) # - turn right.
elif rotation < -20: # If hazard is >20° to the left:
turn(speed, 1) # - turn left.
else: #... | 598 |
def config(path) -> None:
"""Read the default config"""
logger.debug("Reading config from %s", path)
try:
with open(path, encoding="utf-8") as config_file_object:
# Read into dict
config_json = json.load(config_file_object)
logger.info("Loaded config into dict")
... | 599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.