code
stringlengths
81
3.79k
def publish( self, resource_group_name, automation_account_name, runbook_name, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._publish_initial( resource_group_name=resource_group_name, automation_account_name=automation_account_name, ...
def get_rules(self) -> parsing.Node: res = None try: res = self.eval_rule('bnf_dsl') if not res: self.diagnostic.notify( error.Severity.ERROR, "Parse error in '%s' in EBNF bnf" % self._lastRule, error.Loc...
def save(self, filename=None, deleteid3=False): if filename is None: filename = self.filename f = open(filename, 'rb+') try: self.metadata_blocks.append(Padding(b'\x00' * 1020)) MetadataBlock.group_padding(self.metadata_blocks) header = self.__chec...
def text(self, etype, value, tb, tb_offset=None, context=5):
    """Render an exception traceback as plain text.

    Builds the structured traceback for (etype, value, tb) and converts
    it to a single text string.
    """
    structured = self.structured_traceback(etype, value, tb, tb_offset, context)
    return self.stb2text(structured)
def child_object(self):
    """Fetch and return the concrete child object this task points at.

    The second dotted component of ``task_type`` selects the class to use.
    """
    from . import types
    child_cls = types.get(self.task_type.split('.')[1])
    return child_cls.retrieve(self.task_id, client=self._client)
def controller_factory(cls, passes, options, **partial_controller): if None in partial_controller.values(): raise TranspilerError('The controller needs a condition.') if partial_controller: for registered_controller in cls.registered_controllers.keys(): if registe...
def sprint(text, *colors):
    """Wrap *text* in ANSI color escape codes when the terminal supports them.

    Returns *text* unchanged when colors are not given or ANSI is unavailable.
    """
    if not (IS_ANSI_TERMINAL and colors):
        return text
    codes = ";".join(str(color) for color in colors)
    return "\33[{}m{content}\33[{}m".format(codes, RESET, content=text)
def __build_question(html_question, question, comments): question_object = {} question_container = AskbotParser.parse_question_container(html_question[0]) question_object.update(question_container) if comments[int(question['id'])]: question_object['comments'] = comments[int(q...
def gpio_interrupts_enable(self): try: bring_gpio_interrupt_into_userspace() set_gpio_interrupt_edge() except Timeout as e: raise InterruptEnableException( "There was an error bringing gpio%d into userspace. %s" % (GPIO_INTERRUPT_PIN, e...
def request(self, method, url, **kwargs): opts = { 'allow_redirects': True, 'auth': self._auth, 'data': {}, 'files': None, 'headers': dict(self._headers), 'params': {}, 'timeout': 80, 'verify': True } ...
def make_default_options_response(self): adapter = _request_ctx_stack.top.url_adapter if hasattr(adapter, 'allowed_methods'): methods = adapter.allowed_methods() else: methods = [] try: adapter.match(method='--') except MethodNotAll...
def load(self, path):
    """Load a serialized network from *path*, cache it on self, and return it."""
    loaded = graph.Network.load(path)
    self.network = loaded
    return loaded
def _uri2path(self, uri): if uri == self.package_name: return os.path.join(self.root_path, '__init__.py') path = uri.replace('.', os.path.sep) path = path.replace(self.package_name + os.path.sep, '') path = os.path.join(self.root_path, path) if os.path.exists(path + '...
def describe_object(self, obj):
    """Return the ``describe()`` metadata for the API object named *obj*.

    Parameters
    ----------
    obj : str
        Attribute name of the object on the connection.
    """
    conn = self.get_conn()
    # getattr() is the correct spelling of conn.__getattr__(obj): calling the
    # dunder directly bypasses the normal attribute protocol (instance dict,
    # descriptors) and fails when the class defines no __getattr__.
    return getattr(conn, obj).describe()
def get_ilvl(li, w_namespace):
    """Return the numeric indentation level of list item *li*, or -1 if absent."""
    matches = li.xpath('.//w:ilvl', namespaces=li.nsmap)
    if not matches:
        return -1
    return int(matches[0].get('%sval' % w_namespace))
def validateAttrib(self, method, cls = None): any = False for group in self.attribs: match = True for key, value in group: attr = get_method_attr(method, cls, key) if callable(value): if not value(key, method, cls): ...
def matchmaker_matches(institute_id, case_name): user_obj = store.user(current_user.email) if 'mme_submitter' not in user_obj['roles']: flash('unauthorized request', 'warning') return redirect(request.referrer) mme_base_url = current_app.config.get('MME_URL') mme_token = current_app.conf...
def get_container(self, name, collection_id, tag="latest", version=None): from sregistry.database.models import Container if version is None: container = Container.query.filter_by(collection_id = collection_id, name = name, ...
def _plant_trie(strings: _List[str]) -> dict: trie = {} for string in strings: d = trie for char in string: d[char] = char in d and d[char] or {} d = d[char] d[''] = None return trie
def _is_known_unsigned_by_dtype(dt):
    """Return True when *dt* is statically known to be unsigned (bool/uint8/uint16)."""
    known_unsigned = {
        tf.bool: True,
        tf.uint8: True,
        tf.uint16: True,
    }
    return known_unsigned.get(dt.base_dtype, False)
def create_record_and_pid(data): from invenio_records.api import Record from invenio_pidstore.models import PersistentIdentifier, PIDStatus, \ RecordIdentifier deposit = Record.create(data=data) created = arrow.get(data['_p']['created']).datetime deposit.model.created = created.replace(tzinf...
def is_embargoed(record):
    """Return truthy when *record* is embargoed with an embargo date still in the future.

    Mirrors the original short-circuit chain exactly: False when the access
    right does not match, the (falsy) embargo_date value when it is missing,
    otherwise the boolean date comparison.
    """
    if record.get('access_right') != 'embargoed':
        return False
    embargo_date = record.get('embargo_date')
    return embargo_date and embargo_date > datetime.utcnow().date()
def add_comment(self, comment_text):
    """Post *comment_text* as a comment on this object and return the JSON response."""
    endpoint = self.base_uri + '/actions/comments'
    return self.fetch_json(
        uri_path=endpoint,
        http_method='POST',
        query_params={'text': comment_text},
    )
def pkt_text(pkt):
    """Build a display label for a packet's source MAC address.

    Banned devices map to an empty string; known Amazon OUI prefixes get an
    "(Amazon Device)" suffix; everything else is the raw source address.
    """
    src_upper = pkt.src.upper()
    if src_upper in BANNED_DEVICES:
        return ''
    if src_upper[:8] in AMAZON_DEVICES:
        return '{} (Amazon Device)'.format(pkt.src)
    return pkt.src
def _update_barrier_status(self): with open(self.log_file) as fh: for line in fh: if "Session aborted" in line: return if "<<< barrier arrive" in line: process_m = re.match(".*process: (.*)\)", line) if proce...
def copy_notebook(self, notebook_id): last_mod, nb = self.get_notebook_object(notebook_id) name = nb.metadata.name + '-Copy' path, name = self.increment_filename(name) nb.metadata.name = name notebook_id = self.new_notebook_id(name) self.save_notebook_object(notebook_id, ...
def entity_name_decorator(top_cls):
    """Class decorator: attach an ``entity_name`` classmethod returning the snake_case class name."""
    snake_name = inflection.underscore(top_cls.__name__).lower()

    def entity_name(cls):
        return snake_name

    top_cls.entity_name = classmethod(entity_name)
    return top_cls
def init_modules(self):
    """Import the customer's service modules declared in the loaded config.

    Raises ValueError when no config has been read yet.
    """
    if not self.config:
        raise ValueError("please read your config file.")
    log.debug("begin to import customer's service modules.")
    service_modules = ServiceModules(self.config)
    service_modules.import_modules()
    log.debug("end to import customer's service modules.")
def _normalized(self, data): int_keys = ('frames', 'width', 'height', 'size') for key in int_keys: if key not in data: continue try: data[key] = int(data[key]) except ValueError: pass return data
def get_product_by_name(self, name):
    """Return the first item whose name matches *name* case-insensitively.

    Raises StopIteration when no item matches (same as the original next()).
    """
    wanted = name.lower()
    return next(item for item in self.items if item.name.lower() == wanted)
def add_point(self, point, value, check=True): if self.tier_type != 'TextTier': raise Exception('Tiertype must be TextTier.') if check and any(i for i in self.intervals if i[0] == point): raise Exception('No overlap is allowed') self.intervals.append((point, value))
def create_adapter(cmph, ffi, obj): if is_file_location(obj): fd = open(obj) adapter = cmph.cmph_io_nlfile_adapter(fd) def dtor(): cmph.cmph_io_nlfile_adapter_destroy(adapter) fd.close() return _AdapterCxt(adapter, dtor) elif is_file(obj): adapter ...
def _hash_file(self, algo): hash_data = getattr(hashlib, algo)() with open(self.path, "rb") as file: content = file.read() hash_data.update(content) return hash_data.hexdigest()
def to_string(self, indent):
    """Print this qreg node at *indent* spaces, then its first child indented further."""
    print(indent * ' ', 'qreg')
    self.children[0].to_string(indent + 3)
def _installation_trace(self, frame_unused, event_unused, arg_unused):
    """Bootstrap trace hook: uninstall itself, start the real tracer, and hand it this event."""
    sys.settrace(None)
    tracer = self._start_tracer()
    if tracer:
        # Let the freshly installed tracer process the event that triggered us.
        tracer = tracer(frame_unused, event_unused, arg_unused)
    return tracer
def parse_sv_frequencies(variant): frequency_keys = [ 'clingen_cgh_benignAF', 'clingen_cgh_benign', 'clingen_cgh_pathogenicAF', 'clingen_cgh_pathogenic', 'clingen_ngi', 'clingen_ngiAF', 'swegen', 'swegenAF', 'decipherAF', 'decipher' ...
def calc_fwhm(distribution, is_neg_log=True): if isinstance(distribution, interp1d): if is_neg_log: ymin = distribution.y.min() log_prob = distribution.y-ymin else: log_prob = -np.log(distribution.y) log_prob -= log_prob.min...
def submit_order(self, symbol, qty, side, type, time_in_force, limit_price=None, stop_price=None, client_order_id=None): params = { 'symbol': symbol, 'qty': qty, 'side': side, 'type': type, 'time_in_force': time_in_force, }...
def get_queryset(self): queryset = self.get_publishable_queryset() queryset = queryset \ .select_related('featured_image', 'featured_video', 'topic', 'section', 'subsection') \ .prefetch_related( 'tags', 'featured_image__image__authors', ...
def get_instance_group_manager(self, zone, resource_id, project_id=None):
    """Fetch an instanceGroupManager resource and return the API response dict."""
    request = self.get_conn().instanceGroupManagers().get(
        project=project_id,
        zone=zone,
        instanceGroupManager=resource_id,
    )
    return request.execute(num_retries=self.num_retries)
def get_contacts(address_books, query, method="all", reverse=False, group=False, sort="first_name"): contacts = [] for address_book in address_books: contacts.extend(address_book.search(query, method=method)) if group: if sort == "first_name": return sorted(conta...
def remove_property(self, property_):
    """Remove *property_* from this object's property map; no-op when absent."""
    self.properties.pop(property_.name, None)
def delete(self, filename=None):
    """Remove tags from *filename* (defaults to this file's own path) and clear in-memory tags."""
    target = self.filename if filename is None else filename
    # unqualified `delete` resolves to the module-level function, not this method
    delete(target)
    self.clear()
def DeleteAllItems(self):
    "Remove all the item from the list and unset the related data"
    # drop both the python-side and wx-side data maps before clearing the control
    self._py_data_map.clear()
    self._wx_data_map.clear()
    wx.ListCtrl.DeleteAllItems(self)
def phone_subcommand(search_terms, vcard_list, parsable): all_phone_numbers_list = [] matching_phone_number_list = [] for vcard in vcard_list: for type, number_list in sorted(vcard.get_phone_numbers().items(), key=lambda k: k[0].lower()): for numbe...
def add_and_rename_file(self, filename: str, new_filename: str) -> None:
    """Copy *filename* into the sandbox container as *new_filename*, then fix its ownership."""
    container_dir = self.name + ':' + SANDBOX_WORKING_DIR_NAME
    dest = os.path.join(container_dir, new_filename)
    subprocess.check_call(['docker', 'cp', filename, dest])
    self._chown_files([new_filename])
def parse_frequencies(variant, transcripts): frequencies = {} thousand_genomes_keys = ['1000GAF'] thousand_genomes_max_keys = ['1000G_MAX_AF'] exac_keys = ['EXACAF'] exac_max_keys = ['ExAC_MAX_AF', 'EXAC_MAX_AF'] gnomad_keys = ['GNOMADAF', 'GNOMAD_AF'] gnomad_max_keys = ['GNOMADAF_POPMAX', '...
def random(cls, num_qubits, seed=None):
    """Return a random instance on *num_qubits* qubits.

    Parameters
    ----------
    num_qubits : int
        Number of qubits (length of the z and x boolean vectors).
    seed : int, optional
        Seed for NumPy's global RNG, for reproducibility.

    Returns
    -------
    An instance of *cls* built from random boolean z and x vectors.
    """
    if seed is not None:
        np.random.seed(seed)
    # np.bool was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # `bool` is the supported spelling and produces the same dtype.
    z = np.random.randint(2, size=num_qubits).astype(bool)
    x = np.random.randint(2, size=num_qubits).astype(bool)
    return cls(z, x)
def lost_dimensions(point_fmt_in, point_fmt_out): unpacked_dims_in = PointFormat(point_fmt_in).dtype unpacked_dims_out = PointFormat(point_fmt_out).dtype out_dims = unpacked_dims_out.fields completely_lost = [] for dim_name in unpacked_dims_in.names: if dim_name not in out_dims: ...
def rename(script, label='blank', layer_num=None): filter_xml = ''.join([ ' <filter name="Rename Current Mesh">\n', ' <Param name="newName" ', 'value="{}" '.format(label), 'description="New Label" ', 'type="RichString" ', '/>\n', ' </filter>\n']) if i...
def verify_signature(amazon_cert: crypto.X509, signature: str, request_body: bytes) -> bool:
    """Return True when the base64 *signature* over *request_body* checks out against the certificate."""
    decoded = base64.b64decode(signature)
    try:
        crypto.verify(amazon_cert, decoded, request_body, 'sha1')
    except crypto.Error:
        return False
    return True
def parallel_execute(self, cell, block=None, groupby='type', save_name=None): block = self.view.block if block is None else block base = "Parallel" if block else "Async parallel" targets = self.view.targets if isinstance(targets, list) and len(targets) > 10: str_targets = str...
def class_is_abstract(node: astroid.ClassDef) -> bool:
    """Return True when *node* itself defines at least one abstract method."""
    return any(
        method.parent.frame() is node
        and method.is_abstract(pass_is_abstract=False)
        for method in node.methods()
    )
def _merge_statements(statements: List["HdlStatement"])\ -> Tuple[List["HdlStatement"], int]: order = {} for i, stm in enumerate(statements): order[stm] = i new_statements = [] rank_decrease = 0 for rank, stms in groupedby(statements, lambda s: s.rank): ...
def convert(self, value): if not isinstance(value, ConvertingDict) and isinstance(value, dict): value = ConvertingDict(value) value.configurator = self elif not isinstance(value, ConvertingList) and isinstance(value, list): value = ConvertingList(value) va...
def bind_parameter(binding_key, value):
    """Bind *value* to *binding_key* in the global Gin configuration.

    Raises RuntimeError when the configuration is locked.
    """
    if config_is_locked():
        raise RuntimeError('Attempted to modify locked Gin config.')
    parsed = ParsedBindingKey(binding_key)
    _CONFIG.setdefault(parsed.config_key, {})[parsed.arg_name] = value
def verify_signature(self, signing_key, message, signature, padding_method, signing_algorithm=None, hashing_algorithm=None, digital_signature_alg...
def predict(self, x, distributed=True): if is_distributed: if isinstance(x, np.ndarray): features = to_sample_rdd(x, np.zeros([x.shape[0]])) elif isinstance(x, RDD): features = x else: raise TypeError("Unsupported prediction dat...
def generate(self, outputfile=None, dotfile=None, mapfile=None): import subprocess name = self.graphname if not dotfile: if outputfile and outputfile.endswith(".dot"): dotfile = outputfile else: dotfile = "%s.dot" % name if outputfi...
def connect(com, peers, tree, pub_url, root_id):
    """Delegate connection setup to the communicator object *com*."""
    com.connect(peers, tree, pub_url, root_id)
def delete_instance(self, instance_id, project_id=None): instance = self.get_instance(instance_id=instance_id, project_id=project_id) if instance: instance.delete() else: self.log.info("The instance '%s' does not exist in project '%s'. Exiting", instance_id, ...
def encode(self, input, errors='strict'): if isinstance(input, memoryview): input = input.tobytes() if not isinstance(input, (binary_type, bytearray)): raise with_context( exc=TypeError( "Can't encode {type}; byte string expected.".format( ...
def get_hash( cls, version: str, frequency: int, timestamp: int, seed_value: str, prev_output: str, status_code: str, ) -> SHA512Hash: return SHA512.new( version.encode() + struct.pack( ...
def init( dist='dist', minver=None, maxver=None, use_markdown_readme=True, use_stdeb=False, use_distribute=False, ): if not minver == maxver == None: import sys if not minver <= sys.version < (maxver or 'Any'): sys.stderr.write( '%s: requires p...
def get_defined_srms(srm_file):
    """Return the unique SRM identifiers defined in *srm_file* as an array."""
    table = read_table(srm_file)
    return np.asanyarray(table.index.unique())
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_2_0): if kmip_version < enums.KMIPVersion.KMIP_2_0: raise exceptions.VersionNotSupported( "KMIP {} does not support the DefaultsInformation " "object.".format( kmip_version.value ...
def unused_variable_line_numbers(messages):
    """Yield the line number of every pyflakes UnusedVariable message in *messages*."""
    for msg in messages:
        if isinstance(msg, pyflakes.messages.UnusedVariable):
            yield msg.lineno
def set_data(data): "Write content to the clipboard, data can be either a string or a bitmap" try: if wx.TheClipboard.Open(): if isinstance(data, (str, unicode)): do = wx.TextDataObject() do.SetText(data) wx.TheClipboard.SetData(do) ...
def partial(f, *args):
    """Return *f* with *args* pre-applied on the left.

    Like functools.partial, but implemented as a plain wrapper function so
    the result keeps f's metadata via functools.wraps.
    """
    @functools.wraps(f)
    def bound(*rest):
        return f(*itertools.chain(args, rest))
    return bound
def match(self, request): errors = [] def match(matcher): try: return matcher.match(request) except Exception as err: err = '{}: {}'.format(type(matcher).__name__, err) errors.append(err) return False return ...
def batch_shape_tensor(self):
    """Return the broadcast batch shape of all parameter priors as an int32 tensor."""
    shape = tf.constant([], dtype=tf.int32)
    for parameter in self.parameters:
        shape = tf.broadcast_dynamic_shape(
            shape, parameter.prior.batch_shape_tensor())
    return shape
def generate(self, *arg, **kw): for p, meth in self.plugins: result = None try: result = meth(*arg, **kw) if result is not None: for r in result: yield r except (KeyboardInterrupt, SystemExit): ...
def fracpols(str, **kwargs):
    """Return the fractional linear (L/I) and circular (V/I) polarisations.

    NOTE(review): the parameter shadows the builtin ``str``; the name is kept
    because renaming it would break keyword callers.
    """
    I, Q, U, V, L = get_stokes(str, **kwargs)
    return L / I, V / I
def checkUser(self, user):
    """Return True when *user* does not correspond to an existing account.

    Queries the GetCredentialType endpoint and inverts its IfExistsResult flag.
    """
    url = "{0}/GetCredentialType.srf".format(SkypeConnection.API_MSACC)
    response = self.conn("POST", url, json={"username": user})
    return not response.json().get("IfExistsResult")
def user_institutes(store, login_user):
    """Return the institutes visible to *login_user*.

    Admins see every institute in the store; other users see only the
    institutes listed on their account.
    """
    if login_user.is_admin:
        return store.institutes()
    return [store.institute(inst_id) for inst_id in login_user.institutes]
def start(self, job): if self.hostname is None: self.hostname = subprocess.check_output(["hostname", "-f",])[:-1] _log.info("Started Spark master container.") self.sparkContainerID = dockerCheckOutput(job=job, defer=STOP, ...
def sequence_LH(self, pos=None, full_sequence=False): if not hasattr(self.tree, "total_sequence_LH"): self.logger("TreeAnc.sequence_LH: you need to run marginal ancestral inference first!", 1) self.infer_ancestral_sequences(marginal=True) if pos is not None: if full_s...
def adjust_saturation(img, saturation_factor):
    """Adjust the color saturation of a PIL image by *saturation_factor*.

    Raises TypeError when *img* is not a PIL Image.
    """
    if not _is_pil_image(img):
        raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
    return ImageEnhance.Color(img).enhance(saturation_factor)
def issue_funds(ctx, amount='uint256', rtgs_hash='bytes32', returns=STATUS): "In the IOU fungible the supply is set by Issuer, who issue funds." ctx.accounts[ctx.msg_sender] += amount ctx.issued_amounts[ctx.msg_sender] += amount ctx.Issuance(ctx.msg_sender, rtgs_hash, amount) ret...
def build_seasonal_transition_noise( drift_scale, num_seasons, is_last_day_of_season): drift_scale_diag = tf.stack( [tf.zeros_like(drift_scale)] * (num_seasons - 1) + [drift_scale], axis=-1) def seasonal_transition_noise(t): noise_scale_diag = dist_util.pick_scalar_condition( is_last_day...
def resolve_url(self, url, follow_redirect=True): url = update_scheme("http://", url) available_plugins = [] for name, plugin in self.plugins.items(): if plugin.can_handle_url(url): available_plugins.append(plugin) available_plugins.sort(key=lambda x: x.priori...
def embed_font_to_svg(filepath, outfile, font_files):
    """Embed *font_files* into the SVG at *filepath* and write the result to *outfile*."""
    svg_tree = _embed_font_to_svg(filepath, font_files)
    svg_tree.write(outfile, encoding='utf-8', pretty_print=True)
def verify_type_product(self, satellite): if satellite == 'L5': id_satellite = '3119' stations = ['GLC', 'ASA', 'KIR', 'MOR', 'KHC', 'PAC', 'KIS', 'CHM', 'LGS', 'MGR', 'COA', 'MPS'] elif satellite == 'L7': id_satellite = '3373' stations = ['EDC', 'SGS', 'A...
def brent(seqs, f=None, start=None, key=lambda x: x): power = period = 1 tortise, hare = seqs yield hare.next() tortise_value = tortise.next() hare_value = hare.next() while key(tortise_value) != key(hare_value): yield hare_value if power == period: power *= 2 ...
def add_s(self, s, obj, priority=0):
    """Register *obj* under string key *s* with *priority*, creating the dispatcher chain on demand."""
    dispatcher = self.strs.get(s, CommandChainDispatcher())
    dispatcher.add(obj, priority)
    self.strs[s] = dispatcher