Dataset Viewer
Auto-converted to Parquet Duplicate
code
stringlengths
81
3.79k
def apply_option(self, cmd, option, active=True):
    """Expand or strip an ``{option:text}`` placeholder inside *cmd*.

    When *active* is true the placeholder is replaced by its text; otherwise
    the whole placeholder is removed.
    """
    # r'\g<option>' fixes the invalid escape sequence '\g' that the original
    # plain string literal relied on (behavior is otherwise identical).
    pattern = r'{{{}\:(?P<option>[^}}]*)}}'.format(option)
    return re.sub(pattern, r'\g<option>' if active else '', cmd)
def make_local_static_report_files(self): for static, pkgdir in self.STATIC_FILES: shutil.copyfile( data_filename(static, pkgdir), os.path.join(self.directory, static) ) if self.extra_css: shutil.copyfile( self.confi...
def format_filesize(size):
    """Return *size* (in bytes) as a human-readable string, e.g. '2.0 KB'.

    GB/TB/PB use two decimal places, smaller units one.
    """
    for suffix in ("bytes", "KB", "MB", "GB", "TB"):
        if size < 1024.0:
            if suffix in ("GB", "TB"):
                return "{0:3.2f} {1}".format(size, suffix)
            return "{0:3.1f} {1}".format(size, suffix)
        size /= 1024.0
    # The original silently returned None for sizes >= 1024 TB; fall back to PB.
    return "{0:3.2f} {1}".format(size, "PB")
def add_lexicon_ref(self, lrid, name, lrtype, url, lexicon_id, lexicon_name, datcat_id=None, datcat_name=None): self.lexicon_refs[lrid] = { 'LEX_REF_ID': lrid, 'NAME': name, 'TYPE': lrtype, 'URL': url, 'LEXICON_ID': lexicon_id, ...
def invert_hash(self, tok_hash):
    """Return the UTF-8 decoded tokens stored under *tok_hash* in the keyword index."""
    key_range = ((tok_hash,), (tok_hash,))
    tokens = []
    for _, encoded in self.client.scan_keys(HASH_KEYWORD_INDEX_TABLE, key_range):
        tokens.append(encoded.decode('utf8'))
    return tokens
def find_unique_points(explored_parameters): ranges = [param.f_get_range(copy=False) for param in explored_parameters] zipped_tuples = list(zip(*ranges)) try: unique_elements = OrderedDict() for idx, val_tuple in enumerate(zipped_tuples): if val_tuple not in unique_elements: ...
def unfinished(cls):
    """Return every state that is not yet terminal, in canonical order."""
    states = [cls.NONE, cls.SCHEDULED, cls.QUEUED, cls.RUNNING]
    states += [cls.SHUTDOWN, cls.UP_FOR_RETRY, cls.UP_FOR_RESCHEDULE]
    return states
def merge_left(field, local_task, remote_issue, hamming=False): local_field = local_task.get(field, []) remote_field = remote_issue.get(field, []) if field not in local_task: local_task[field] = [] new_count = 0 for remote in remote_field: for local in local_field: if ( ...
def images(self, query=None): from sregistry.database.models import Collection, Container rows = [] if query is not None: like = "%" + query + "%" containers = Container.query.filter(or_(Container.name == query, Container.tag.like(like), ...
def set_max_in_flight(self, max_in_flight): assert isinstance(max_in_flight, int) self.max_in_flight = max_in_flight if max_in_flight == 0: for conn in itervalues(self.conns): if conn.rdy > 0: logger.debug('[%s:%s] rdy: %d -> 0', conn.id, self.name...
def _store(self, lines, buffer=None, store='source'): if buffer is None: buffer = self._buffer if lines.endswith('\n'): buffer.append(lines) else: buffer.append(lines+'\n') setattr(self, store, self._set_source(buffer))
def _ast_option_group_to_code(self, option_group, **kwargs): lines = ["option("] lines.extend(self._indent(self._ast_to_code(option_group.expression))) lines.append(")") return lines
def add(symbol: str, date, value, currency: str): symbol = symbol.upper() currency = currency.upper() app = PriceDbApplication() price = PriceModel() price.symbol.parse(symbol) price.datum.from_iso_date_string(date) price.value = Decimal(value) price.currency = currency app.add_price...
def _close(self): if self.connection: with self.wrap_database_errors: self.connection.client.close()
def _get_byte_parser(self): if not self._byte_parser: self._byte_parser = \ ByteParser(text=self.text, filename=self.filename) return self._byte_parser
def _configureShortcuts(self):
    """Bind Backspace to the navigate-up action (auto-repeat disabled)."""
    shortcut = QtGui.QShortcut(QtGui.QKeySequence('Backspace'), self)
    shortcut.setAutoRepeat(False)
    shortcut.activated.connect(self._onNavigateUpButtonClicked)
    self._upShortcut = shortcut
def update_message_dict(message_dict,action): global g_ok_java_messages allKeys = g_ok_java_messages.keys() for key in message_dict.keys(): if key in allKeys: for message in message_dict[key]: if action == 1: if message not in g_ok_java_messages[key]: ...
def _addHdlProcToRun(self, trigger: SimSignal, proc) -> None: if not self._applyValPlaned: self._scheduleApplyValues() if isEvDependentOn(trigger, proc): if self.now == 0: return self._seqProcsToRun.append(proc) else: self._combProc...
def resolve_backend_name(name, backends, deprecated, aliased): available = [backend.name() for backend in backends] resolved_name = deprecated.get(name, aliased.get(name, name)) if isinstance(resolved_name, list): resolved_name = next((b for b in resolved_name if b in available), "") if resolved...
def _rewrite_insert_nothing(self, sql, params, returning): conflict_target = self._build_conflict_target() where_clause = ' AND '.join([ '{0} = %s'.format(self._format_field_name(field_name)) for field_name in self.query.conflict_target ]) where_clause_params = [ ...
def get_stores(self, search_term):
    """Query the store-finder endpoint and wrap the JSON reply in Stores."""
    response = self.__get(
        '/storefindermap/storesearch', params={'SearchText': search_term}
    )
    return Stores(response.json())
def list_recommendations( self, keywords=None, max_domain_recommendations=None, custom_headers=None, raw=False, **operation_config): parameters = models.DomainRecommendationSearchParameters(keywords=keywords, max_domain_recommendations=max_domain_recommendations) def internal_paging(next_lin...
def query_string(self, **params):
    """Run a search with *params* and wrap the API reply in a SearchResult."""
    response = self._api.get(self._href, **params)
    return SearchResult(self, response)
def add_bias(self, name, size, mean=0, std=1):
    """Register a shared bias vector, honoring per-name mean/std kwargs overrides."""
    mean = self.kwargs.get('mean_{}'.format(name), mean)
    std = self.kwargs.get('std_{}'.format(name), std)
    vector = util.random_vector(size, mean, std, rng=self.rng)
    self._params.append(theano.shared(vector, name=self._fmt(name)))
def ancestors(self, lhs, rhs):
    """Return the nodes of *rhs* whose ancestor chain (self included) reaches *lhs*."""
    def reaches(node):
        # Walk up via .parent; membership in lhs at any level wins.
        current = node
        while True:
            if current in lhs:
                return True
            if not current.parent:
                return False
            current = current.parent
    return [node for node in rhs if reaches(node)]
def get_prices_on(self, on_date: str, namespace: str, symbol: str): repo = self.get_price_repository() query = ( repo.query.filter(dal.Price.namespace == namespace) .filter(dal.Price.symbol == symbol) .filter(dal.Price.date == on_date) .order_by(dal.Price....
def getEvents(self):
    """Poll the 'self' endpoint and wrap each raw event as a SkypeEvent."""
    raw_events = self.conn.endpoints["self"].getEvents()
    return [SkypeEvent.fromRaw(self, json) for json in raw_events]
def prepare(self): self.output_dim = 10 self.encoder = Chain(self.input_dim).stack(Dense(self.internal_layer_size, 'tanh')) self.decoder = Chain(self.internal_layer_size).stack(Dense(self.input_dim)) self.classifier = Chain(self.internal_layer_size).stack(Dense(50, 'tanh'), ...
def _set_configurations(self): logger.debug("======================") logger.debug("Setting configurations") logger.debug("======================") resources = "" containers = "" params = "" manifest = "" if self.merge_params: params += self._g...
def add_range(self, sequence, begin, end):
    """Attach a character-range parser (surrounding quotes stripped) to *sequence*."""
    low = self.value(begin).strip("'")
    high = self.value(end).strip("'")
    sequence.parser_tree = parsing.Range(low, high)
    return True
def streams(self):
    """List child streams, attaching each one's metadata from the 'ls' query.

    Returns [] when the query fails or yields no JSON body.
    """
    result = self.db.read(self.path, {"q": "ls"})
    if result is None or result.json() is None:
        return []
    out = []
    for entry in result.json():
        stream = self[entry["name"]]
        stream.metadata = entry
        out.append(stream)
    return out
def truncate_string(data, headers, max_field_width=None, **_):
    """Truncate every cell and header to *max_field_width*.

    Rows stay lazy (generator of lists); headers are returned as a list.
    """
    truncated_rows = (
        [utils.truncate_string(cell, max_field_width) for cell in row]
        for row in data
    )
    truncated_headers = [
        utils.truncate_string(header, max_field_width) for header in headers
    ]
    return truncated_rows, truncated_headers
def login(self):
    """Create the HTTP session on first use (random User-agent), then log in."""
    if self._session is None:
        session = requests.session()
        session.headers.update({'User-agent': str(UserAgent().random)})
        self._session = session
    return self._post_login_page()
def file_read(filename):
    """Return the full contents of *filename* as text.

    Uses a context manager so the handle is closed even if read() raises;
    the original open/read/close sequence leaked the handle on error.
    """
    with open(filename, 'r') as fobj:
        return fobj.read()
def einsum_vecmul_index(gate_indices, number_of_qubits): mat_l, mat_r, tens_lin, tens_lout = _einsum_matmul_index_helper(gate_indices, number_of_qubits) return "{mat_l}{mat_r}, ".format(mat_l=mat_l, mat_r=mat_r) + \ "{tens_lin}->{ten...
def validate_widget(widget): if not has_valid_id(widget): raise InvalidWidget("%s must contain a valid 'id' attribute" % widget.__name__) if not has_valid_name(widget): raise InvalidWidget("%s must contain a valid 'name' attribute" % widget.__name__) if not has_valid_template(widget): ...
def institute(context, institute_id, sanger_recipient, coverage_cutoff, frequency_cutoff, display_name, remove_sanger): adapter = context.obj['adapter'] LOG.info("Running scout update institute") try: adapter.update_institute( internal_id=institute_id, sanger_...
def get_agency_id(relation):
    """Derive a stable 8-digit id from the relation's operator tag, or -1 if absent."""
    operator = relation.tags.get('operator')
    if not operator:
        return -1
    digest = hashlib.sha256(operator.encode('utf-8')).hexdigest()
    return int(digest, 16) % 10 ** 8
def writes(nb, format, **kwargs):
    """Serialize notebook *nb* to a string in the requested format.

    Accepts 'json'/'ipynb' (JSON output) or 'py' (script output); anything
    else raises NBFormatError. (Python 2 code: uses ``unicode``.)
    """
    format = unicode(format)
    if format in (u'json', u'ipynb'):
        return writes_json(nb, **kwargs)
    if format == u'py':
        return writes_py(nb, **kwargs)
    raise NBFormatError('Unsupported format: %s' % format)
def f_add_config_group(self, *args, **kwargs):
    """Add a new config group node via the naming interface."""
    return self._nn_interface._add_generic(
        self,
        type_name=CONFIG_GROUP,
        group_type_name=CONFIG_GROUP,
        args=args,
        kwargs=kwargs,
    )
def _get_authorization(self, request, httpclient): return 'WRAP access_token="' + \ self._get_token(request.host, request.path, httpclient) + '"'
def evaluate(self, expression, i1=None, i2=None, out=None, selection=None, delay=False):
    """Ask the server to evaluate *expression* over this dataset's rows.

    NOTE(review): ``out`` is accepted but not forwarded to the server call,
    mirroring the original behavior.
    """
    expression = _ensure_strings_from_expressions(expression)
    return self.server._call_dataset(
        "evaluate",
        self,
        expression=expression,
        i1=i1,
        i2=i2,
        selection=selection,
        delay=delay,
    )
def _get_value(self, key, func=None, split_val=None, as_boolean=False, exception_default=None): try: if as_boolean: return self.config.getboolean(key[0], key[1]) value = self.config.get(key[0], key[1]) if split_val is not None: value = value.split(split_val) if func is not None: return func(...
def get_system_cpu_times():
    """Aggregate per-CPU (user, system, idle) times into a single namedtuple."""
    totals = [0, 0, 0]
    for cpu_time in _psutil_mswindows.get_system_cpu_times():
        for i in range(3):
            totals[i] += cpu_time[i]
    return _cputimes_ntuple(*totals)
def _spark_fit(self, cls, Z, *args, **kwargs):
    # Fit one model per (X, y) element of the RDD Z, then average the fitted
    # models' state into self.
    # NOTE(review): super(cls, self).fit requires cls to be self's class (or a
    # base of it) in the MRO -- confirm callers always pass the wrapping class.
    mapper = lambda X_y: super(cls, self).fit(
        X_y[0], X_y[1], *args, **kwargs
    )
    models = Z.map(mapper)
    # Averaging assumes the fitted estimator supports __add__ and division;
    # the averaged model's attributes are copied wholesale onto self.
    avg = models.reduce(operator.add) / models.count()
    self.__dict__.update(avg.__dict__)
    return self
def main(sample_id, assembly_file, coverage_file, coverage_bp_file, bam_file, opts, gsize): min_assembly_coverage, max_contigs = opts logger.info("Starting assembly mapping processing") logger.info("Parsing coverage table") coverage_info, a_cov = parse_coverage_table(coverage_file) a_size, ...
def fetchmany(self, size=None):
    """Fetch up to *size* rows (default: arraysize), advancing the row counter."""
    self._check_executed()
    rows = self._fetch_row(size or self.arraysize)
    self.rownumber += len(rows)
    if not rows:
        # End of result set: surface any pending server warnings.
        self._warning_check()
    return rows
def find_source(self, filename): source = None base, ext = os.path.splitext(filename) TRY_EXTS = { '.py': ['.py', '.pyw'], '.pyw': ['.pyw'], } try_exts = TRY_EXTS.get(ext) if not try_exts: return filename, None for try_ext in t...
def get_document(self, document_id, database_name=None, collection_name=None): if document_id is None: raise AirflowBadRequest("Cannot get a document without an id") try: return self.get_conn().ReadItem( get_document_link( self.__get_database_n...
def execute_actions(self, cwd):
    """Run the configured shell actions sequentially in *cwd*."""
    self._execute_globals(cwd)
    for action in self.actions:
        logger.info("executing {}".format(action))
        # NOTE(review): shell=True hands the action string to the shell --
        # acceptable only while actions come from trusted configuration.
        process = subprocess.Popen(action, shell=True, cwd=cwd)
        process.wait()
def __var_find_to_py_ast( var_name: str, ns_name: str, py_var_ctx: ast.AST ) -> GeneratedPyAST: return GeneratedPyAST( node=ast.Attribute( value=ast.Call( func=_FIND_VAR_FN_NAME, args=[ ast.Call( func=_NEW_SYM_FN_NAM...
def create_storage_account(self, service_name, description, label, affinity_group=None, location=None, geo_replication_enabled=None, extended_properties=None, account_type='Standard_GRS'): ...
def set_selection(self, selection, name="default", executor=None):
    """Install *selection* under *name*, replacing whatever is current."""
    make_selection = lambda current: selection
    self._selection(make_selection, name, executor=executor, execute_fully=True)
def set_resolved_name(self, ref: dict, type_name2solve: TypeName, type_name_ref: TypeName):
    """Resolve *type_name2solve* from *ref* only if it has no resolution yet."""
    key = type_name2solve.value
    if self.resolution[key] is None:
        self.resolution[key] = ref[type_name_ref.value]
def format_data(self, data, scale=True): if len(self.analytes) == 1: d = nominal_values(data[self.analytes[0]]) ds = np.array(list(zip(d, np.zeros(len(d))))) else: d = [nominal_values(data[a]) for a in self.analytes] ds = np.vstack(d).T finite = np...
def _referer(self, extension): iana_record = self.lookup.whois( PyFunceble.CONFIGURATION["iana_whois_server"], "hello.%s" % extension ) if iana_record and "refer" in iana_record: regex_referer = r"(?s)refer\:\s+([a-zA-Z0-9._-]+)\n" matched = Regex( ...
def shape_rb_data(raw_rb):
    """Return [mean, std] of the raw RB results taken across the first axis."""
    mean = np.mean(raw_rb, 0)
    std = np.std(raw_rb, 0)
    return [mean, std]
def update_function(self, name, body, update_mask): response = self.get_conn().projects().locations().functions().patch( updateMask=",".join(update_mask), name=name, body=body ).execute(num_retries=self.num_retries) operation_name = response["name"] se...
def boolean(ctx, obj):
    """Yield the boolean value of *obj*, computing it against *ctx* when possible.

    Fixes a NameError in the original, which referenced an undefined name
    ``seq`` instead of the ``obj`` parameter in both branches.
    """
    if hasattr(obj, 'compute'):
        obj = next(obj.compute(ctx), '')
    yield next(to_boolean(obj), '')
def expects_none(options):
    """True when the count options imply that zero matches are expected."""
    count_keys = ("count", "maximum", "minimum", "between")
    has_count_option = any(options.get(k) is not None for k in count_keys)
    if not has_count_option:
        return False
    return matches_count(0, options)
def __early_downsample(y, sr, hop_length, res_type, n_octaves, nyquist, filter_cutoff, scale): downsample_count = __early_downsample_count(nyquist, filter_cutoff, hop_length, n_octaves) if downsample_count > 0 and res_type == 'kaiser_fast': ...
def log_parser(self): size_stamp = os.path.getsize(self.log_file) self.log_retry = 0 if size_stamp and size_stamp == self.log_sizestamp: return else: logger.debug("Updating log size stamp to: {}".format(size_stamp)) self.log_sizestamp = size_stamp ...
def _remove_exploration(self): for param in self._explored_parameters.values(): if param._stored: try: self.f_delete_item(param) except Exception: self._logger.exception('Could not delete expanded parameter `%s` ' ...
def setup_platform(hass, config, add_entities, discovery_info=None): host = config.get(CONF_HOST) token = config.get(CONF_ACCESS_TOKEN) name = config.get(CONF_NAME) volume_step = config.get(CONF_VOLUME_STEP) device_type = config.get(CONF_DEVICE_CLASS) device = VizioDevice(host, token, name, volu...
def handle_oauth2_response(self, args): client = self.make_client() remote_args = { 'code': args.get('code'), 'client_secret': self.consumer_secret, 'redirect_uri': session.get('%s_oauthredir' % self.name) } log.debug('Prepare oauth2 remote args %r', r...
def decode(self, val):
    """Decode *val*, preferring date decoding over plain JSON decoding."""
    decoded_date = self.decode_date(val)
    if decoded_date != val:
        return decoded_date
    return json.JSONDecoder.decode(self, val)
def extractPrintSaveIntermittens(): global g_summary_dict_intermittents localtz = time.tzname[0] for ind in range(len(g_summary_dict_all["TestName"])): if g_summary_dict_all["TestInfo"][ind]["FailureCount"] >= g_threshold_failure: addFailedTests(g_summary_dict_intermittents, g_summary_di...
def _get_rule_source(self, rule): p = len(self.input_source) + rule.position source = self.input_source[p:p + rule.consumed].rstrip() return self._indent(source, depth=self.indent + " ", skip_first_line=True)
def _resubscribe(self, soft=False): if self.bitfinex_config: self.send(**self.bitfinex_config) q_list = [] while True: try: identifier, q = self.channel_configs.popitem(last=True if soft else False) except KeyError: break ...
def _receive_data(self): while True: while len(self._buffer) < self.max_size and self.conn.poll(): data = self._read_chunks() if data is not None: self._buffer.append(data) if len(self._buffer) > 0: return self._buffer.p...
def _build_purchase_item(course_id, course_url, cost_in_cents, mode, course_data, sku): item = { 'id': "{}-{}".format(course_id, mode), 'url': course_url, 'price': cost_in_cents, 'qty': 1, } if 'title' in course_data: item['title'] = course_data['title'] else: ...
def _vector_matrix(vs, ms):
    """Batched vector-matrix product: broadcast vs against ms and contract the row axis."""
    expanded = vs[..., tf.newaxis] * ms
    return tf.reduce_sum(input_tensor=expanded, axis=-2)
def mix_over_posterior_draws(means, variances): with tf.compat.v1.name_scope( 'mix_over_posterior_draws', values=[means, variances]): num_posterior_draws = dist_util.prefer_static_value( tf.shape(input=means))[0] component_observations = tfd.Independent( distribution=tfd.Normal( ...
def DeleteItem(self, item):
    """Remove the item from the list and unset the related data."""
    wx_key = self.GetItemData(item)
    py_key = self._py_data_map[wx_key]
    # Drop both directions of the wx<->python data mapping before deleting.
    del self._py_data_map[wx_key]
    del self._wx_data_map[py_key]
    wx.ListCtrl.DeleteItem(self, item)
def add_route(self, command, adapter): if not isinstance(adapter, BaseAdapter): try: adapter = self.adapter_aliases[adapter] except KeyError: self.adapter_aliases[adapter] = adapter = resolve_adapter( adapter ) s...
def progress(iterator, prefix): if terminal_width(prefix) > 25: prefix = (".." + get_cut_prefix(prefix, 23)) speed_updated = start = time() speed_written = written = 0 speed_history = deque(maxlen=5) for data in iterator: yield data now = time() elapsed = now - start ...
def get_params(brightness, contrast, saturation, hue): transforms = [] if brightness is not None: brightness_factor = random.uniform(brightness[0], brightness[1]) transforms.append(Lambda(lambda img: F.adjust_brightness(img, brightness_factor))) if contrast is not None: ...
def panel(context, panel, version, update_date, update_version): adapter = context.obj['adapter'] panel_obj = adapter.gene_panel(panel, version=version) if not panel_obj: LOG.warning("Panel %s (version %s) could not be found" % (panel, version)) context.abort() date_obj = None if upd...
def random_ports(port, n):
    """Yield up to five sequential ports after *port*, then random nearby ports."""
    for offset in range(min(5, n)):
        yield port + offset
    # Remaining ports are drawn randomly within +/- 2n of the base port.
    for _ in range(n - 5):
        yield port + random.randint(-2 * n, 2 * n)
def PermissiveDict(fields=None): if fields: check_user_facing_fields_dict(fields, 'PermissiveDict') class _PermissiveDict(_ConfigComposite): def __init__(self): key = 'PermissiveDict.' + str(DictCounter.get_next_count()) super(_PermissiveDict, self).__init__( ...
def gravatar_url(user_or_email, size=GRAVATAR_DEFAULT_SIZE):
    """Return the escaped gravatar URL for a user or email, or '' on failure."""
    if hasattr(user_or_email, 'email'):
        email = user_or_email.email
    else:
        email = user_or_email
    try:
        return escape(get_gravatar_url(email=email, size=size))
    except Exception:
        # Best-effort: any lookup/escaping failure yields an empty URL. The
        # original bare ``except:`` also swallowed SystemExit/KeyboardInterrupt.
        return ''
def run_as_cmd(cmd, user, shell='bash'):
    """Build an argv that runs *cmd* under *shell* as *user* (sudo unless root)."""
    base = get_shell(shell) + [EXECUTE_SHELL_PARAM, cmd]
    if user == 'root':
        return base
    return ['sudo', '-s', '--set-home', '-u', user] + base
def profile_function(self): with _CodeHeatmapCalculator() as prof: result = self._run_object(*self._run_args, **self._run_kwargs) code_lines, start_line = inspect.getsourcelines(self._run_object) source_lines = [] for line in code_lines: source_lines.append(('line...
def dashboard(request): if not isinstance(mc_client, dict): cache_stats = _get_cache_stats() else: cache_stats = None if cache_stats: data = _context_data({ 'title': _('Memcache Dashboard'), 'cache_stats': cache_stats, 'can_get_slabs': hasattr(mc_c...
def _validate_initial_statevector(self): if self._initial_statevector is None: return length = len(self._initial_statevector) required_dim = 2 ** self._number_of_qubits if length != required_dim: raise BasicAerError('initial statevector is incorrect length: ' + ...
def chatToId(url):
    """Extract the conversation id from a Skype conversations URL, else None."""
    match = re.search(r"conversations/([0-9]+:[^/]+)", url)
    if match:
        return match.group(1)
    return None
def prepare_pids(self):
    """Collect truthy persistent identifiers from all registered PID fetchers."""
    self.pids = []
    latest_revision = self.revisions[-1][1]
    for fetcher in self.pid_fetchers:
        pid = fetcher(None, latest_revision)
        if pid:
            self.pids.append(pid)
def use(network=False): global _engine __engine = _engine activated = __engine.active if activated: __engine.disable() _engine = Engine(network=network) _engine.activate() yield _engine _engine.disable() if network: _engine.disable_network() _engine = __engine ...
def parse_yaml_linenumbers(data, filename): def compose_node(parent, index): line = loader.line node = Composer.compose_node(loader, parent, index) node.__line__ = line + 1 return node def construct_mapping(node, deep=False): if ANSIBLE_VERSION < 2: mapping = ...
def log_cdf_laplace(x, name="log_cdf_laplace"):
    """Return log(CDF(x)) of the standard Laplace distribution, numerically stably."""
    with tf.name_scope(name):
        x = tf.convert_to_tensor(value=x, name="x")
        # Branch for x >= 0: log(1 - 0.5*exp(-x)) computed via log1p.
        damped = tf.exp(-tf.abs(x))
        positive_branch = tf.math.log1p(-0.5 * damped)
        # Branch for x < 0: log(0.5*exp(x)) = x - log(2).
        negative_branch = -np.log(2.) + x
        return tf.where(x < 0., negative_branch, positive_branch)
def samefile(path1, path2):
    """True when both paths name the same file (volume serial + file index match)."""
    info1 = fs.getfileinfo(path1)
    info2 = fs.getfileinfo(path2)
    if info1.dwVolumeSerialNumber != info2.dwVolumeSerialNumber:
        return False
    return (info1.nFileIndexHigh == info2.nFileIndexHigh
            and info1.nFileIndexLow == info2.nFileIndexLow)
def add_netnode_plugin_name(plugin_name):
    """Persist *plugin_name* in the netnode-backed plugin list (idempotent)."""
    names = set(get_netnode_plugin_names())
    if plugin_name in names:
        return
    names.add(plugin_name)
    get_meta_netnode()[PLUGIN_NAMES_KEY] = json.dumps(list(names))
def set_serial(self, hex_str): bignum_serial = _ffi.gc(_lib.BN_new(), _lib.BN_free) bignum_ptr = _ffi.new("BIGNUM**") bignum_ptr[0] = bignum_serial bn_result = _lib.BN_hex2bn(bignum_ptr, hex_str) if not bn_result: raise ValueError("bad hex string") asn1_serial...
def run_samblaster(job, sam): work_dir = job.fileStore.getLocalTempDir() job.fileStore.readGlobalFile(sam, os.path.join(work_dir, 'input.sam')) command = ['/usr/local/bin/samblaster', '-i', '/data/input.sam', '-o', '/data/output.sam', '--ignoreUnmated'] start...
def handle_stranded_tasks(self, engine): lost = self.pending[engine] for msg_id in lost.keys(): if msg_id not in self.pending[engine]: continue raw_msg = lost[msg_id].raw_msg idents,msg = self.session.feed_identities(raw_msg, copy=False) pa...
def get_group_all(group, path=None): result = [] for config, distro in iter_files_distros(path=path): if group in config: for name, epstr in config[group].items(): with BadEntryPoint.err_to_warnings(): result.append(EntryPoint.from_string(epstr, name, dist...
def build_filters_and_sizers(self, ppoi_value, create_on_demand): name = self.name if not name and self.field.placeholder_image_name: name = self.field.placeholder_image_name self.filters = FilterLibrary( name, self.storage, versatileimagefield_reg...
def predict_logit(self, x, **kwargs):
    """Return the pre-activation ('pre') output of the final layer for input *x*."""
    outputs = self.feed_forward(x, **kwargs)
    final_layer = self.layers[-1]
    return outputs[final_layer.full_name('pre')]
def get_source_lane(fork_process, pipeline_list): fork_source = fork_process[-1] fork_sig = [x for x in fork_process if x != "__init__"] for position, p in enumerate(pipeline_list[::-1]): if p["output"]["process"] == fork_source: lane = p["output"]["lane"] logger.debug("Possi...
def article(self, msgid_article=None, decode=None): args = None if msgid_article is not None: args = utils.unparse_msgid_article(msgid_article) code, message = self.command("ARTICLE", args) if code != 220: raise NNTPReplyError(code, message) parts = messag...
End of preview. Expand in Data Studio

No dataset card yet

Downloads last month
11