| content (string, lengths 22–815k) | id (int64, 0–4.91M) |
|---|---|
def get_credentials(_globals: dict):
"""
Gets Credentials from Globals
Structure may be found in modules/ducktests/tests/checks/utils/check_get_credentials.py
This function return default username and password, defaults may be overriden throw globals
"""
if USERNAME_KEY in _globals[AUTHENTICATIO... | 600 |
def hideablerevs(repo):
"""Revision candidates to be hidden
This is a standalone function to allow extensions to wrap it.
Because we use the set of immutable changesets as a fallback subset in
branchmap (see mercurial.branchmap.subsettable), you cannot set "public"
changesets as "hideable". Doing ... | 601 |
def _find_matches(pattern_pieces, directory):
"""
Used by eglob.
"""
import glob
result = []
if not _os.path.isdir(directory):
return
piece = pattern_pieces[0]
last = len(pattern_pieces) == 1
remaining_pieces = []
if piece == '**':
if not last:
remai... | 602 |
def eject(force=False, unless_exists=False, verbose=False):
    """Write the generated documentation files, omitting header warnings."""
    target_dir = Path('./docs')
    write_template_files(
        target_dir,
        force=force,
        include_generated_warning=False,
        unless_exists=unless_exists,
        verbose=verbose,
    )
def start(queue, height):
""" Start finding pending intents """
results = []
global tree
global current_file
count = 0
# TODO - Look for use of fillIn method which can make this a much more exploitable condition
for j in common.java_files:
count = count + 1
pub.sendMessage('... | 604 |
def squeeze_features(protein):
"""Remove singleton and repeated dimensions in protein features."""
protein["aatype"] = torch.argmax(protein["aatype"], dim=-1)
for k in [
"domain_name",
"msa",
"num_alignments",
"seq_length",
"sequence",
"superfamily",
"... | 605 |
def get_cache_dir(app_name: str, suffix: str = None, create: bool = True):
"""Get a local cache directory for a given application name.
Args:
app_name: The name of the application.
suffix: A subdirectory appended to the cache dir.
create: Whether to create the directory and its parents ... | 606 |
def _dump_test_data(filename, num_per_type=10):
"""Get corpus of statements for testing that has a range of stmt types."""
sp = signor.process_from_web()
# Group statements by type
stmts_by_type = defaultdict(list)
for stmt in sp.statements:
stmts_by_type[stmt.__class__].append(stmt)
# S... | 607 |
def drawBoundingBoxes(imageData, boxes: Union[List[Section], List[Line]], color = (0, 120, 0, 120)):
"""Draw bounding boxes on an image.
imageData: image data in numpy array format
inferenceResults: inference results array off object (l,t,w,h)
colorMap: Bounding box color candidates, list of RGB tuples.... | 608 |
def gin_dict_parser(coll):
"""
Use for parsing collections that may contain a 'gin' key.
The 'gin' key is assumed to map to either a dict or str value that contains gin bindings.
e.g.
{'gin': {'Classifier.n_layers': 2, 'Classifier.width': 3}}
or
{'gin': 'Classifier.n_layers = 2\nClassifier.w... | 609 |
def ones(shape, dtype):
"""
Declare a new worker-local tensor with all elements initialized to one.
:param shape: the tensor shape
:param dtype: the tensor data type
:return: the tensor expression
"""
np_dtype = DType(dtype).as_numpy()
init = _ConstTensor(np.ones(shape, dtype=np_dtype))... | 610 |
def _valid_url(url):
"""Checks that the given URL is Discord embed friendly. Or at least, it tries."""
def _valid_string(segment, main=True):
if not len(segment):
return False
for c in [ord(it.lower()) for it in segment]:
if not (97 <= c <= 122 or (main and (48 <= c <= 5... | 611 |
def generate_initialization_perturbation(
blk, bound_push=1e-2, bound_frac=1e-2, bound_relax_factor=1e-8, user_scaling=False
):
"""
Generate the initialization perturbations performed by IPOPT for a given Block
Args:
blk: Pyomo block
bound_push: bound_push to evaluate (same as IPOPT opt... | 612 |
def isText(node):
    """Tell whether the supplied DOM node is a free-text node.

    Returns True exactly when the node's type equals its TEXT_NODE constant.
    """
    is_text_node = node.nodeType == node.TEXT_NODE
    return is_text_node
def test_zscore(dir_seq, dir_targets):
    """Check TETRA Z-scores against the stored target JSON."""
    zscores = calculate_tetra_zscore(dir_seq / "NC_002696.fna")
    target_path = dir_targets / "tetra" / "zscore.json"
    with target_path.open("r") as ifh:
        expected = json.load(ifh)
    assert ordered(zscores) == ordered(expected)
def _parse_descriptor(desc: str, ctx: '_ParseDescriptorContext') -> 'Descriptor':
"""
:meta private:
Parse a descriptor given the context level we are in.
Used recursively to parse subdescriptors
:param desc: The descriptor string to parse
:param ctx: The :class:`_ParseDescriptorContext` indic... | 615 |
def get_editable_fields(cc_content, context):
"""
Return the set of fields that the requester can edit on the given content
"""
# For closed thread:
# no edits, except 'abuse_flagged' and 'read' are allowed for thread
# no edits, except 'abuse_flagged' is allowed for comment
ret = {"abuse_f... | 616 |
def rate_of_matrix_function(A, Adot, f, fprime):
"""Find the rate of the tensor A
Parameters
----------
A : ndarray (3,3)
A diagonalizable tensor
Adot : ndarray (3,3)
Rate of A
f : callable
fprime : callable
Derivative of f
Returns
-------
Ydot : ndarray... | 617 |
def enumerate_changes(levels):
    """Assign a unique integer to each consecutive run of identical values.

    Every change of value bumps the running counter, so repeated but
    non-consecutive values still receive different integers.
    """
    deltas = levels.diff().fillna(0)
    return deltas.abs().cumsum().astype(int)
def TDataStd_ByteArray_Set(*args):
"""
* Finds or creates an attribute with the array. If <isDelta> == False, DefaultDeltaOnModification is used. If attribute is already set, all input parameters are refused and the found attribute is returned.
:param label:
:type label: TDF_Label &
:param lower:
... | 619 |
def number_generator(doc):
"""Searches for occurrences of number patterns (cardinal, ordinal, quantity or percent) in text"""
i = 0
while i < len(doc):
tok = doc[i]
if tok.lower_ in ORDINALS:
yield i, i + 1, "ORDINAL"
elif re.search("\\d", tok.text):
j = i ... | 620 |
def vis_verts(mean_shape, verts, face, mvs=None, textures=None):
"""
mean_shape: N x 3
verts: B x N x 3
face: numpy F x 3
textures: B x F x T x T (x T) x 3
"""
from psbody.mesh.mesh import Mesh
from psbody.mesh.meshviewer import MeshViewers
if mvs is None:
mvs = MeshViewers((... | 621 |
def _get_total_elements(viewer) -> int:
"""
We need to fetch a workflows listing to figure out how many entries we
have in the database, since the API does not contain a method to count
the DB entries.
:param viewer: CWL Viewer instance URL
:return: number of total elements in the CWL Viewer in... | 622 |
def entrepreneursIncubated(dateFrom=None, dateTo=None):
"""
Returns all entrepreneurs ages count between a set of ranges
"""
queryset = Stage.objects
output = {
'queryset': None,
'fields': [],
'values': [],
'fieldLabels': [],
}
queryset = queryset.filter(stage... | 623 |
def get_steps(x, shape):
"""
Convert a (vocab_size, steps * batch_size) array
into a [(vocab_size, batch_size)] * steps list of views
"""
steps = shape[1]
if x is None:
return [None for step in range(steps)]
xs = x.reshape(shape + (-1,))
return [xs[:, step, :] for step in range(s... | 624 |
def LHS(
a: int,
operation1: str,
b: int,
operation2: str,
c: float
):
"""
E.g. LHS(a, 'plus', b, 'times', c) does
(a + b) * c
params:
a: int. First number in equation
operation1: str. Must be 'plus', 'minus', 'times', 'divide'
b : int. Second num... | 625 |
def _validate(api_indicator_matype, option, parameters:dict, **kwargs): # -> dict
"""Validates kwargs and attaches them to parameters."""
# APO, PPO, BBANDS
matype = int(math.fabs(kwargs["matype"])) if "matype" in kwargs else None
if option == "matype" and matype is not None and matype in api_indicator... | 626 |
def scale_y_values(y_data, y_reference, y_max):
"""
Scale the plot in y direction, to prevent extreme values.
:param y_data: the y data of the plot
:param y_reference: the maximum value of the plot series (e.g. Normal force), which will be scaled to y_max
:param y_max: the maximum y value... | 627 |
def set_heating_contribution(agent, pv_power):
""" If the water tank is currently in use, compute and return the part of the pv_power used for heating the water"""
pv_power_to_heating = 0
if agent.water_tank.is_active():
pv_power_to_heating = pv_power * agent.pv_panel.heating_contribution
return... | 628 |
def test_melt_columns() -> None:
"""Melt selected columns to rows."""
before_melt = pd.DataFrame(
{
"GEOGID": ["SA2017_017001001"],
"Pre 1919 (No. of households)": [10],
"Pre 1919 (No. of persons)": [25],
},
)
expected_output = pd.DataFrame(
{
... | 629 |
def test_put_vector_mixed_dtypes():
"""
Passing a numpy array of mixed dtypes to a dataset.
See https://github.com/GenericMappingTools/pygmt/issues/255
"""
dtypes = "float32 float64 int32 int64 uint32 uint64".split()
for dtypex, dtypey in itertools.permutations(dtypes, r=2):
with clib.S... | 630 |
def predict_direction(clf, tickers, **kwargs):
"""
Use clf (an untrained classifier) to predict direction of change for validation
data for each stock in 'tickers'. Pass additional keyword arguments to be
used in building the stock datasets.
Args:
--clf: An untrained sklearn classifier
--ti... | 631 |
def rename(node_name, new_name):
"""Change the name of a storage NODE to NEW-NAME."""
config_connect()
try:
node = st.StorageNode.get(name=node_name)
try:
st.StorageNode.get(name=new_name)
print('Node "%s" already exists.' % new_name)
exit(1)
exce... | 632 |
def plot_simulation_results(df_plot, week, year):
"""Make wildcard and division winner plots by simulation number
:param df_plot: data frame with summarised simulation information
:param week: current week
:param year: current season
:return: None
"""
# Calculate label positions
df_plot_label_pos = (
... | 633 |
def load_from_csv():
""" Loads a list of Currency objects from CSV """
file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'currencies.csv')
currencies = []
with open(file) as csvfile:
reader = csv.reader(csvfile)
headers = next(reader)
for row in reader:
... | 634 |
def parse_primary(index):
"""Parse primary expression."""
if token_is(index, token_kinds.open_paren):
node, index = parse_expression(index + 1)
index = match_token(index, token_kinds.close_paren, ParserError.GOT)
return expr_nodes.ParenExpr(node), index
elif token_is(index, token_kin... | 635 |
def chopper_pulses_of_mode(i):
"""How many single pulses the chopper transmits per opening,
or in hybrid mode, how many single bunches the tranmitted intensity
corresponds to, based on the current settings of the chopper.
i: 0-based integer"""
if isnan(i) or i<0 or i>=len(chopper.pulses): return nan... | 636 |
def _get_content_from_tag(tag):
"""Gets the content from tag till before a new section."""
contents = []
next_tag = tag
while next_tag and not _is_section(next_tag):
content = parse_content(next_tag.text())
if content:
contents.append(content)
next_tag = next_tag.next... | 637 |
def get_ngrams(corpus, n):
"""
Get ngrams from provided corpus according to provided value of n.
"""
words = []
ngrams = {}
for word_list in [elt.split(' ') for elt in corpus]:
# print(word_list)
# for word in word_list:
# words.append(word)
words = ['... | 638 |
async def get_connections(request: data_models.ConnectionsRequest):
"""Get connections *from* and *to* each entity in the request.
Connections *to* are all the subject-predicate pairs where the entity is the object, and connections *from* are all the predicate-object pairs where the entity is the subject."""
... | 639 |
def partida_26():
"""partida_26"""
check50.run("python3 volleyball.py").stdin("B\nA\nB\nB\nB\nA\nA\nA\nB\nA\nB\nA\nA\nB\nB\nB\nA\nB\nB", prompt=False).stdout("EMPIEZA\nSACA A\nGANA B\nA 0 B 0\nSACA B\nGANA A\nA 0 B 0\nSACA A\nGANA B\nA 0 B 0\nSACA B\nGANA B\nA 0 B 1\nSACA B\nGANA B\nA 0 B 2\nSACA B\nGANA A\nA 0... | 640 |
def sunlight_duration(hour_angle_sunrise):
    """Return the duration of sunlight, in minutes.

    Args:
        hour_angle_sunrise: hour angle at sunrise, in degrees.

    Returns:
        Sunlight duration in minutes.  This follows the NOAA solar-calculation
        formula ``8 * HA``: the sun moves 0.25 degrees per minute
        (360 deg / 1440 min), and daylight spans twice the sunrise hour angle,
        so minutes = 2 * HA / 0.25 = 8 * HA.  The original "this seems like
        the wrong output" doubt is unfounded — the multiplier is correct.
    """
    return 8 * hour_angle_sunrise
def add_particle_bunch_gaussian(sim, q, m, sig_r, sig_z, n_emit, gamma0,
sig_gamma, n_physical_particles,
n_macroparticles, tf=0., zf=0., boost=None,
save_beam=None, z_injection_plane=None,
in... | 642 |
def test_frame_seq_caching(frame_sequence: FrameSequence):
"""Test that we only interpolate on demand, and cache results."""
fs = frame_sequence
# index into the sequence and watch whether interpolate is called
with patch.object(
fs, "_interpolate_state", wraps=fs._interpolate_state
) as moc... | 643 |
def send_tweets_to_twitter(tweets: List[str], reference: str, api: tweepy.API):
"""Post tweets to the twitter account. If more than one tweet divides into
a tweet string that is enumerated.
Args:
tweets (List[str]): list containing tweet length text strings.
reference (str): reference to... | 644 |
def sell_shares_nb(cash_now, shares_now, size, direction, price, fees, fixed_fees, slippage,
min_size, allow_partial, raise_reject, log_record, log):
"""Sell shares."""
# Get optimal order size
if direction == Direction.LongOnly:
final_size = min(shares_now, size)
else:
... | 645 |
def radec2altaz(ra, dec, obstime, lat=None, lon=None, debug=False):
"""
calculates the altitude and azimuth, given an ra, dec, time, and observatory location
Parameters:
===========
- ra: float
The right ascension of the target (in degrees)
- dec: float
... | 646 |
def handler_no_answer(f):
"""Decorator that creates message handlers that don't reply."""
def handle_wrapper(*args, **kwds):
answer = None
try:
f(*args, **kwds)
except Exception:
return MSG_STATUS_ERROR, [
'Calling the cmd handler caused an error:... | 647 |
def wrapper_handle_attrs(func):
"""转化html的标签属性为字典"""
# 这是一个装饰Parsing.handle_attrs_tmp、Parsing.handle_attrs_tag的装饰器
def handle_attrs(self, attrs_str):
attrs = dict()
if attrs_str == '/':
return attrs
attrs_list = re.findall(self.attr_reg, attrs_str)
for attr in at... | 648 |
def transfer_weights(model, weights=None):
    """Return *model* unchanged; weight transfer is intentionally disabled.

    :param model: the model instance to (not) load weights into
    :param weights: accepted for interface compatibility, ignored
    :return: the model, untouched
    """
    print('ENet has found no compatible pretrained weights! Skipping weight transfer...')
    return model
def status():
"""Print downloader's status to screen.
"""
used = get_space_used()
avail = get_space_available()
allowed = config.download.space_to_use
print "Space used by downloaded files: %.2f GB of %.2f GB (%.2f%%)" % \
(used/1024.0**3, allowed/1024.0**3, 100.0*used/allowed)
p... | 650 |
def collapse_json(text, indent=4):
"""Compacts a string of json data by collapsing whitespace after the
specified indent level
NOTE: will not produce correct results when indent level is not a multiple
of the json indent level
"""
initial = " " * indent
out = [] # final json output
sub... | 651 |
def get_skills_v1():
    """Load and return the first skillset from ``skills_v1.json``.

    Each line of the file is parsed with ``ast.literal_eval``; the value
    from the last line wins (preserving the original line-by-line logic).

    Returns:
        The Python object parsed from the file.
    """
    # Open in text mode: ast.literal_eval needs str, not the bytes that
    # mode 'rb' yields on Python 3.  Use a context manager so the file is
    # closed even if parsing raises.
    with open('skills_v1.json', 'r') as f:
        for line in f:
            skills_v1 = ast.literal_eval(line)
    return skills_v1
def create_mask(board: np.ndarray, dimensions: Tuple[int, int]) -> List[List[int]]:
""" Function to create Mask of possible valid values based on the initial sudoku Board. """
mask = list(board.tolist())
counts = Counter(board.flatten())
del counts[0]
counts = [number[0] for number in counts.most_c... | 653 |
def telegram(text: str, token: str, chat_id: int) -> str:
    """Send a Telegram message via the Bot API.

    Args:
        text: message text to send.
        token: bot API token.
        chat_id: target chat identifier.

    Returns:
        The HTTP response body, decoded as UTF-8.
    """
    webhookAddress = f"https://api.telegram.org/bot{token}/sendMessage?" + urlencode({"text":text, "chat_id":chat_id})
    # Context manager closes the connection deterministically instead of
    # leaking it until garbage collection.
    with urlopen(webhookAddress) as handler:
        return handler.read().decode('utf-8')
def match_term(term, dictionary, case_sensitive, lemmatize=True):
"""
Parameters
----------
term
dictionary
case_sensitive
lemmatize Including lemmas improves performance slightly
Returns
-------
"""
if (not case_sensitive and term.lower() in dictionary) or term in dicti... | 655 |
def fill_user(user_ids, filename='user', write=True):
"""
Input: user_ids dictionary (user ids: task values)
Output: csv file with user id, name, email
"""
emails = {}
for user in user_ids:
r = requests.get('https://pe.goodlylabs.org'
'/api/user/{}?api_key={}&lim... | 656 |
def test_update_transaction(
lunch_money_obj: LunchMoney, test_transactions: List[TransactionObject]
):
"""
Update a Transaction in Lunch Money
"""
transaction_note = f"Updated on {datetime.datetime.now()}"
transaction_update_obj = TransactionUpdateObject(notes=transaction_note)
response = l... | 657 |
def project_along_flow(dX_raw,dY_raw,dX_prio,dY_prio,e_perp):
"""
Parameters
----------
dX_raw : np.array, size=(m,n), dtype=float
raw horizontal displacement with mixed signal
dY_raw : np.array, size=(m,n), dtype=float
raw vertical displacement with mixed signal
dX_prio : np.ar... | 658 |
def get_existing_pks(engine: Engine, table: Table) -> Mapping[int, dict]:
"""
Creates an index of hashes of the values of the primary keys in the table provided.
:param engine:
:param table:
:return:
"""
with engine.connect() as conn:
pk_cols = [table.c[col.name] for col in table.col... | 659 |
def create_transform_parameters(
fill_mode = 'nearest',
interpolation = 'linear',
cval = 0,
data_format = None,
relative_translation = True,
):
""" Creates a dictionary to store parameters containing information on
method to apply transformation to ... | 660 |
def main():
"""Test sampler."""
nx = 40
ny = 40
nchan = 8
C = np.random.rand(ny,nx,nchan)
csum = np.sum(C,2);
for ix in range(nx):
for iy in range(ny):
C[iy,ix,:] /= csum[iy,ix]
print("C = ",C)
iterlist = [10 , 100, 1000, 10000, 100000]
#iterlist = [10000 ... | 661 |
def error_rate(model, dataset):
    """Return the error rate of a Keras *model* on *dataset*.

    The model scores item pairs; a pair counts as an error when the first
    item's score does not exceed the second's.
    """
    dim = dataset['dimension']
    features = dataset['features'][:, :, 0:dim]
    scores = np.squeeze(model.predict(features), axis=-1)
    margins = scores[:, 0] - scores[:, 1]
    return np.mean(margins.reshape((-1)) <= 0)
def diffs(**kwargs):
    """Log Datadog resources diffs.

    Builds a configuration from the supplied keyword arguments, runs the
    diff check, and exits with status 1 when any exception was logged
    during the check.
    """
    cfg = build_config(**kwargs)
    check_diffs(cfg)
    # Non-zero exit so scripts/CI can detect that diffing hit an error.
    if cfg.logger.exception_logged:
        exit(1)
def test_kovasznay_0():
"""
test kovasznay
"""
standard_value = np.load("./standard/kovasznay.npz", allow_pickle=True)
solution = standard_value['solution'].tolist()
dynamic_rslt = kovasznay(static=False)
static_rslt = kovasznay()
compare(dynamic_rslt, static_rslt)
compare(solution, ... | 664 |
def k8s_cr_callback(func: Callable) -> Callable:
"""
Decorate a method as a K8s CR callback.
Is working only for K8sCRHandler and child classes.
"""
@functools.wraps(func)
def decorated_func(self, *args, **kwargs):
"""Provide automatic locking of CRs in process by this method."""
... | 665 |
def account():
    """Render the account page for the current user.

    NOTE(review): despite the original "Update the user's account"
    summary, this handler only renders the account template for the
    authenticated user — presumably the actual update happens elsewhere;
    confirm against callers.
    """
    return _templates.account(UserContext.user())
def run_synchronously(computation: Awaitable[TSource]) -> TSource:
    """Execute the asynchronous *computation* to completion and return its result.

    Drives the awaitable on a fresh event loop via ``asyncio.run``.
    """
    result = asyncio.run(computation)
    return result
def create_running_command(
command_id: str = "command-id",
command_key: str = "command-key",
command_type: str = "command-type",
created_at: datetime = datetime(year=2021, month=1, day=1),
params: Optional[BaseModel] = None,
) -> cmd.Command:
"""Given command data, build a running command model... | 668 |
def x11_linux_stop_record():
    """Stop the running record action.

    Thin wrapper that delegates to ``xwindows_listener.stop_record()``
    and returns whatever it returns.
    """
    return xwindows_listener.stop_record()
def yxy_intrinsic(mat: np.ndarray) -> np.ndarray:
"""Return yxy intrinsic Euler angle decomposition of mat (.., 4, 4))"""
# extract components
not_nan, r00, r01, r02, r10, r11, r12, _, r21, _ = extract_mat_components(mat)
# pre-initialize results
theta_y0 = np.full(not_nan.shape, np.nan)
theta_... | 670 |
def mnemonic_and_path_to_key(*, mnemonic: str, path: str, password: str) -> int:
"""
Return the SK at position `path`, derived from `mnemonic`. The password is to be
compliant with BIP39 mnemonics that use passwords, but is not used by this CLI outside of tests.
"""
seed = get_seed(mnemonic=mnemonic... | 671 |
def test_function_with_annotations():
"""Parse a function docstring with signature annotations."""
def f(x: int, y: int, *, z: int) -> int:
"""
This function has annotations.
Parameters:
x: X value.
y: Y value.
Keyword Arguments:
z: Z value.... | 672 |
def acceleration(bodies, i, j):
"""
Calculer l'acceleration relative à un objet bodies[i]
bodies: tous les objets
i: index of concerned body which undergoes the gravitation of other objects.
j: index of the step
"""
N = len(bodies)
ax = 0; ay = 0; az = 0 #L'acceleration
for ip in range(N):
#Chaque objet bo... | 673 |
def formatted(s):
"""If s contains substrings of form '#'<txt>'#', '(('<txt>'))',
"''"<txt>"''", returns list of tuples (FORMAT_x, txt).
Otherwise, returns s.
"""
matches = re.findall(_format_re, normalize(s))
if len(matches) == 1 and matches[0][0] != '':
return matches[0][0]
def to_... | 674 |
def get_ftp_creds(repo, options):
"""
Retrieves the data to connect to the FTP from .git/ftpdata
or interactively.
ftpdata format example:
[branch]
username=me
password=s00perP4zzw0rd
hostname=ftp.hostname.com
remotepath=/htdocs
ssl=yes
gitftpign... | 675 |
def deep_len(lnk):
""" Returns the deep length of a possibly deep linked list.
>>> deep_len(Link(1, Link(2, Link(3))))
3
>>> deep_len(Link(Link(1, Link(2)), Link(3, Link(4))))
4
>>> levels = Link(Link(Link(1, Link(2)), \
Link(3)), Link(Link(4), Link(5)))
>>> print(levels)
<<... | 676 |
def gsearch_node(command,comm):
"""
This function is used to search the string that is obtained from the user's
comment. This function requires the 'googlesearch' module.
"""
print "Currently at the gsearch node"
print command
m = re.search('!gsearch(.+)',command)
print m
if m:
... | 677 |
def union(A, B):
""" Add two subspaces (A, B) together.
Args:
- A: a matrix whose columns span subspace A [ndarray].
- B: a matrix whose columns span subspace B [ndarray].
Returns:
- union: a matrix whose columns form the orthogonal basis for subspace
addition A+B [... | 678 |
def ldns_create_nsec(*args):
    """Create an NSEC record via the low-level binding.

    Thin wrapper around ``_ldns.ldns_create_nsec``; all positional
    arguments are forwarded unchanged.
    """
    return _ldns.ldns_create_nsec(*args)
def str_to_seconds(time):
"""
Returns the number of seconds since midnight in the string time (as an int).
The value time is a string in extended ISO 8601 format. That is, it has the form
'hh:mm:ss' where h, m, and s are digits. There must be exactly two digits each for
hours, minutes, and ... | 680 |
def conv_variance_scaling_initializer(in_channel, out_channel, kernel_size):
"""conv init"""
fan_in = in_channel * kernel_size * kernel_size
scale = 1.0
scale /= max(1., fan_in)
stddev = (scale ** 0.5) / .87962566103423978
mu, sigma = 0, stddev
weight = truncnorm(-2, 2, loc=mu, scale=sigma).... | 681 |
def vgg8(**kwargs):
    """Construct a VGG 8-layer model (configuration "S").

    Args:
        **kwargs: forwarded verbatim to the ``VGG`` constructor.
            Note: no pretrained-weight loading happens here, despite the
            ``pretrained`` mention in sibling factory docstrings.
    """
    return VGG(cfg['S'], **kwargs)
def get_char_from_ascii(key_num):
    """Convert an ASCII code to its character.

    (The original docstring claimed the inverse — character to code —
    but ``chr`` maps an integer code point to its character.)

    Parameters
    ----------
    key_num : int
        ASCII (code point) value of the character.

    Returns
    -------
    char : str
        Character corresponding to ``key_num``.
    """
    return chr(key_num)
def testTrade():
"""测试交易"""
try:
f = file('vnpy/trader/gateway/huobiGateway/HUOBI_connect.json')
except IOError:
return
# 解析json文件
setting = json.load(f)
try:
accessKey = str(setting['accessKey'])
secretKey = str(setting['secretKey'])
accountId = ... | 684 |
def get_files_path(file_path: str) -> list:
"""Get all files path
Args:
file_path: root folder path
Returns:
list: list of string containing all files paths
"""
filepath='data'
all_files = []
for root, dirs, files in os.walk(filepath):
files = glob.glob(os.path.join(... | 685 |
def start():
    """Render the optimisation data-entry view.

    On a valid form submission runs the optimisation; otherwise renders
    the entry form.
    """
    form = LocationForm()
    if not form.validate_on_submit():
        return flask.render_template("start.html",
                                     title="Start", form=form)
    return optimise(form.data)
def add_lldp_filter_by_host(query, hostid):
"""Adds a lldp-specific ihost filter to a query.
Filters results by host id if supplied value is an integer,
otherwise attempts to filter results by host uuid.
:param query: Initial query to add filter to.
:param hostid: host id or uuid to filter results... | 687 |
def read_shakemap_data_from_str(grid_data_text):
"""
Helper to work with the tokens.
Can work with both strings and floats.
"""
# it must be tokenized (because of xml processing the newlines
# may not be consistent)
tokens = tokenize.tokenize(
io.BytesIO(grid_data_text.encode("utf-8"... | 688 |
def c_flag(opt, test_not=False):
    """Convert a test parameter into the Fortran build system's flag string.

    Returns "TRUE" or "FALSE" according to the truthiness of *opt*;
    when *test_not* is set, the sense is inverted.
    """
    truthy = (not opt) if test_not else bool(opt)
    return "TRUE" if truthy else "FALSE"
def create_menu(*args):
    """Add a "Kitsu" menu with a launch item to the Maya main window."""
    window = get_maya_main_window()
    kitsu_menu = mc.menu("Kitsu", parent=window)
    mc.menuItem(label="Launch Kitsu", command=launch_kitsu, parent=kitsu_menu)
def test_uniform_simplex_homotopy(sarr):
    """Check that the uniform mixture maps to the simplex barycenter and back."""
    mix = sarr.uniform_mixture()
    simplex_point = sarr.mixture_to_simplex(mix)
    # Barycenter: every coordinate equals the first
    assert np.allclose(simplex_point[0], simplex_point[1:])
    assert np.allclose(mix, sarr.mixture_from_simplex(simplex_point))
def false_function():
    """Trivial helper used to exercise unit testing; always returns False."""
    result = False
    return result
def broker_task_send(task_uuid, request, broker_point, reply_to=None):
"""Command to publish `primitives.Request` to customer
Args:
task_uuid(str): task identification
request: Serialized request
broker_point(gromozeka.BrokerPoint):
reply_to(gromozeka.BrokerPoint):
Returns:... | 693 |
def process_axis_labels(datadesc, blobs, offset=0):
"""Convert the raw axis label descriptions.
Similar to LiveDataPanel._process_axis_labels, but is flexible in datadesc.
"""
CLASSIC = {'define': 'classic'}
labels = {}
titles = {}
for size, axis in zip(reversed(datadesc['shape']), AXES):
... | 694 |
def _to_ranks_by_group(dat, group, formula, exclude_cols=[]):
"""
Covert predictors to ranks separately for each group for use in rank Lmer. Any columns not in the model formula or in exclude_cols will not be converted to ranks. Used by models.Lmer
Args:
dat (pd.DataFrame): dataframe of data
... | 695 |
def build_input_data(sentences, labels, vocabulary):
"""
Maps sentencs and labels to vectors based on a vocabulary.
"""
# With capped vocab, need to account for word not present in
# vocab. Using the padding word.
# TODO -- pass padding word in as an arg
padding_word = "<PAD/>"
pad_idx ... | 696 |
def list_scans():
"""
:return: A JSON containing a list of:
- Scan resource URL (eg. /scans/1)
- Scan target
- Scan status
"""
data = []
for scan_id, scan_info in SCANS.iteritems():
if scan_info is None:
continue
target_urls = scan_info.target_u... | 697 |
def factor_list(f, *gens, **args):
"""
Compute a list of irreducible factors of ``f``.
**Examples**
>>> from sympy import factor_list
>>> from sympy.abc import x, y
>>> factor_list(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
(2, [(x + y, 1), (1 + x**2, 2)])
"""
return _gen... | 698 |
def all_gather(data):
"""
Run all_gather on arbitrary picklable data (not necessarily tensors)
Args:
data: any picklable object
Returns:
list[data]: list of data gathered from each rank
"""
world_size = dist.get_world_size()
if world_size == 1:
return [data]
# se... | 699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.