signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def make_transaction ( self ) : """Create the transaction for this RecurredCost May only be used to create the RecurredCost ' s initial transaction . Returns : Transaction : The created transaction , also assigned to self . transaction . None if the amount is zero ."""
if self . pk : raise CannotRecreateTransactionOnRecurredCost ( 'The transaction for this recurred cost has already been created. You cannot create it again.' ) amount = self . recurring_cost . get_amount ( self . billing_cycle ) # It is quite possible that there will be nothing to bill , in which # case we cannot c...
def kuhn_munkres ( G , TOLERANCE = 1e-6 ) : """Maximum profit bipartite matching by Kuhn - Munkres : param G : weight matrix where G [ u ] [ v ] is the weight of edge ( u , v ) , : param TOLERANCE : a value with absolute value below tolerance is considered as being zero . If G consists of integer or fractio...
nU = len ( G ) U = range ( nU ) nV = len ( G [ 0 ] ) V = range ( nV ) assert nU <= nV mu = [ None ] * nU # empty matching mv = [ None ] * nV lu = [ max ( row ) for row in G ] # trivial labels lv = [ 0 ] * nV for root in U : # build an alternate tree au = [ False ] * nU # au , av mark nodes . . . au [ root ]...
def register_eventclass ( event_id ) : """Decorator for registering event classes for parsing"""
def register ( cls ) : if not issubclass ( cls , Event ) : raise MessageException ( ( 'Cannot register a class that' ' is not a subclass of Event' ) ) EVENT_REGISTRY [ event_id ] = cls logger . debug ( '######### Event registry is now: {0}' . format ( EVENT_REGISTRY ) ) return cls return registe...
def unquote_redirection_tokens(args: List[str]) -> None:
    """Strip surrounding quotes from redirection tokens in *args*, in place.

    Used when redirection tokens must be forwarded to another command.

    :param args: the command line args
    """
    for position, raw_arg in enumerate(args):
        candidate = strip_quotes(raw_arg)
        if candidate in constants.REDIRECTION_TOKENS:
            args[position] = candidate
def check_arguments ( cls , conf ) : """Sanity check plugin options values . As a side effect , it also converts the specified interval and port to an integer ."""
# Checking the interval if not conf [ 'tcp_check_interval' ] : raise ArgsError ( "A TCP health-test interval needs to be " "specified (--tcp_check_interval)." ) if not ( 1 <= conf [ 'tcp_check_interval' ] <= 3600 ) : raise ArgsError ( "Specified TCP health-test interval must be " "between 1 and 3600 seconds" ) ...
def getbalance(self, account=None, minconf=6):
    """Retrieve the wallet balance.

    If *account* is given, return the balance of that account only,
    counting transactions with at least *minconf* confirmations.
    """
    if not account:
        return self.req("getbalance")
    return self.req("getbalance", [account, minconf])
def load_metascenario ( self , scenario_list ) : """Load one or more scenarios from a list . Each entry in scenario _ list should be a dict containing at least a name key and an optional tile key and args key . If tile is present and its value is not None , the scenario specified will be loaded into the giv...
for scenario in scenario_list : name = scenario . get ( 'name' ) if name is None : raise DataError ( "Scenario in scenario list is missing a name parameter" , scenario = scenario ) tile_address = scenario . get ( 'tile' ) args = scenario . get ( 'args' , { } ) dest = self if tile_address...
def _collate_metadata ( self ) : """Turns a list of objects associated with a classification result into a DataFrame of metadata . Returns None , but stores a result in self . _ cached ."""
import pandas as pd DEFAULT_FIELDS = None metadata = [ ] for c in self . _classifications : m = c . sample . metadata if DEFAULT_FIELDS is None : DEFAULT_FIELDS = list ( m . _resource . _schema [ "properties" ] . keys ( ) ) DEFAULT_FIELDS . remove ( "$uri" ) DEFAULT_FIELDS . remove ( "sa...
def add_key(self, key, first=False):
    """Add the given key to this row.

    :param key: Key to be added to this row.
    :param first: Boolean flag; when True the key is prepended,
        otherwise it is appended.
    """
    if not first:
        self.keys.append(key)
    else:
        # Rebind to a fresh list (rather than insert in place) to keep the
        # original aliasing behavior.
        self.keys = [key] + self.keys
    if isinstance(key, VSpaceKey):
        # NOTE(review): VSpaceKey presumably marks the space key — verify.
        self.space = key
def validate_fixed(datum, schema, **kwargs):
    """Check that *datum* is fixed-width bytes matching ``schema['size']``.

    Parameters
    ----------
    datum : Any
        Data being validated
    schema : dict
        Schema
    kwargs : Any
        Unused kwargs
    """
    # Decimal values are accepted unconditionally; bytes must match the
    # declared size exactly.
    if isinstance(datum, decimal.Decimal):
        return True
    return isinstance(datum, bytes) and len(datum) == schema['size']
def read_population_file ( file_name ) : """Reads the population file . : param file _ name : the name of the population file . : type file _ name : str : returns : a : py : class : ` dict ` containing the population for each of the samples . The population file should contain three columns : 1 . The fa...
pops = { } required_pops = { "CEU" , "YRI" , "JPT-CHB" , "SOURCE" } with open ( file_name , 'rb' ) as input_file : for line in input_file : row = line . rstrip ( "\r\n" ) . split ( "\t" ) # The data sample_id = tuple ( row [ : 2 ] ) pop = row [ - 1 ] # Checking the pop ...
def render_search ( self , ctx , data ) : """Render some UI for performing searches , if we know about a search aggregator ."""
if self . username is None : return '' translator = self . _getViewerPrivateApplication ( ) searchAggregator = translator . getPageComponents ( ) . searchAggregator if searchAggregator is None or not searchAggregator . providers ( ) : return '' return ctx . tag . fillSlots ( 'form-action' , translator . linkTo ...
def _format_msg ( text , width , indent = 0 , prefix = "" ) : r"""Format exception message . Replace newline characters \ n with ` ` \ n ` ` , ` with \ ` and then wrap text as needed"""
text = repr ( text ) . replace ( "`" , "\\`" ) . replace ( "\\n" , " ``\\n`` " ) sindent = " " * indent if not prefix else prefix wrapped_text = textwrap . wrap ( text , width , subsequent_indent = sindent ) # [1 : - 1 ] eliminates quotes generated by repr in first line return ( "\n" . join ( wrapped_text ) ) [ 1 : - 1...
def download_data(url: str, fname: PathOrStr = None, data: bool = True, ext: str = '.tgz') -> Path:
    "Download `url` to destination `fname`."
    # Fall back to the default archive path when no destination is given.
    dest = Path(ifnone(fname, _url2tgz(url, data, ext=ext)))
    os.makedirs(dest.parent, exist_ok=True)
    if dest.exists():
        return dest
    print(f'Downloading {url}')
    download_url(f'{url}{ext}', dest)
    return dest
def get_hosts(self, path, start, length):
    """Return hostnames storing the file block at ``[start, start+length)``.

    Due to replication, a single block can be present on multiple hosts.

    :param path: the path of the file
    :param start: offset of the block
    :param length: length of the block
    :return: whatever the underlying filesystem reports
    """
    _complain_ifclosed(self.closed)
    filesystem = self.fs
    return filesystem.get_hosts(path, start, length)
def init_layout ( self ) : """Set the checked state after all children have been populated ."""
super ( AndroidRadioGroup , self ) . init_layout ( ) d = self . declaration w = self . widget if d . checked : self . set_checked ( d . checked ) else : # : Check if any of the children have " checked = True " for c in d . children : if c . checked : d . checked = c w . setOnCheckedChangeLis...
def format_git_describe ( git_str , pep440 = False ) : """format the result of calling ' git describe ' as a python version"""
if git_str is None : return None if "-" not in git_str : # currently at a tag return git_str else : # formatted as version - N - githash # want to convert to version . postN - githash git_str = git_str . replace ( "-" , ".post" , 1 ) if pep440 : # does not allow git hash afterwards return git_st...
def yn_choice ( msg , indent = 4 , fg_color = 'cyan' , separator = '' ) : """传入 msg , 返回 True / False : param separator : : type separator : : param fg _ color : : type fg _ color : : param indent : : type indent : : param msg : : type msg : : return : : rtype :"""
_header , _footer = gen_separator ( separator = separator ) if _header : textui . puts ( getattr ( textui . colored , fg_color ) ( _header ) ) with textui . indent ( indent , quote = ' {}' . format ( ' ' ) ) : textui . puts ( textui . colored . green ( msg ) ) if _footer : textui . puts ( getattr ( textui ....
def soft_break(self, el, text):
    """Apply a soft break: append a newline for paragraph elements in
    Word documents and PowerPoint slides."""
    if el.name == 'p':
        # Word documents break on paragraph elements in the 'w' namespace.
        if self.type == 'docx' and el.namespace == self.namespaces['w']:
            text.append('\n')
        # Slides break on paragraph elements in the 'a' namespace.
        elif self.type == 'pptx' and el.namespace == self.namespaces['a']:
            text.append('\n')
def mapping ( self ) : """Get a mapping class for this model This method will return a Mapping class for your model , generating it using settings from a ` Mapping ` class on your model ( if one exists ) . The generated class is cached on the manager ."""
if not hasattr ( self , "_mapping" ) : if hasattr ( self . model , "Mapping" ) : mapping_klass = type ( "Mapping" , ( DjangoMapping , self . model . Mapping ) , { } ) else : mapping_klass = get_first_mapping ( self . model ) if mapping_klass is None : mapping_klass = DjangoMa...
def init_from_str ( self , entries ) : """Initialize the structured and textual data based on a string representing the entries . For detailed information about the format of this string , refer to the : func : ` ~ taxi . timesheet . parser . parse _ text ` function ."""
self . lines = self . parser . parse_text ( entries ) for line in self . lines : if isinstance ( line , DateLine ) : current_date = line . date self [ current_date ] = self . default_factory ( self , line . date ) elif isinstance ( line , Entry ) : if len ( self [ current_date ] ) > 0 : ...
def _respond(self, resp):
    """Deliver *resp* to whichever caller is blocked waiting for a response."""
    # Fetch the waiter's queue (short blocking wait) and hand over the reply.
    waiting_queue = self._response_queues.get(timeout=0.1)
    waiting_queue.put(resp)
    # Reset per-response parser state ready for the next exchange.
    self._completed_response_lines = []
    self._is_multiline = None
def write_input(self, output_dir, make_dir_if_not_present=True, include_cif=False):
    """Write a set of VASP input files to a directory.

    Args:
        output_dir (str): Directory to output the VASP input files.
        make_dir_if_not_present (bool): Create the directory if missing.
        include_cif (bool): Also write a CIF of the POSCAR structure.
    """
    vasp_input = self.get_vasp_input()
    vasp_input.write_input(output_dir, make_dir_if_not_present=make_dir_if_not_present)
    if not include_cif:
        return
    structure = vasp_input["POSCAR"].structure
    # CIF filename is the formula with all whitespace stripped.
    cif_path = Path(output_dir) / ("%s.cif" % re.sub(r'\s', "", structure.formula))
    structure.to(filename=cif_path)
def get_pipeline_newsfeeds(self, pipeline_key, detail_level=None):
    """Get the newsfeed for a pipeline.

    Args:
        pipeline_key: pipeline key
        detail_level: optional detail argument ('ALL' or 'CONDENSED')
    Returns:
        list of feed dicts; parse at your convenience
    """
    segments = [self.api_uri, self.pipelines_suffix, pipeline_key, self.newsfeed_suffix]
    uri = '/'.join(segments)
    return self._get_newsfeeds(uri, detail_level)
def _rfc3339_to_datetime(dt_str):
    """Convert a microsecond-precision RFC 3339 timestamp to a datetime.

    :type dt_str: str
    :param dt_str: The string to convert.
    :rtype: :class:`datetime.datetime`
    :returns: UTC-aware datetime parsed from the string.
    """
    parsed = datetime.datetime.strptime(dt_str, _RFC3339_MICROS)
    return parsed.replace(tzinfo=UTC)
def args_from_config ( func ) : """Decorator that injects parameters from the configuration ."""
func_args = signature ( func ) . parameters @ wraps ( func ) def wrapper ( * args , ** kwargs ) : config = get_config ( ) for i , argname in enumerate ( func_args ) : if len ( args ) > i or argname in kwargs : continue elif argname in config : kwargs [ argname ] = config ...
def _refresh_oath_token ( self ) : """Refresh Monzo OAuth 2 token . Official docs : https : / / monzo . com / docs / # refreshing - access : raises UnableToRefreshTokenException : when token couldn ' t be refreshed"""
url = urljoin ( self . api_url , '/oauth2/token' ) data = { 'grant_type' : 'refresh_token' , 'client_id' : self . _client_id , 'client_secret' : self . _client_secret , 'refresh_token' : self . _token [ 'refresh_token' ] , } token_response = requests . post ( url , data = data ) token = token_response . json ( ) # Not ...
def recommended_overlap(name, nfft=None):
    """Return the recommended fractional overlap for the given window.

    If ``nfft`` is given, the return is in samples.

    Parameters
    ----------
    name : `str`
        the name of the window you are using
    nfft : `int`, optional
        the length of the window

    Returns
    -------
    rov : overlap as a fraction, or an `int` sample count when ``nfft``
        is given

    Raises
    ------
    ValueError
        if the window name is unknown, or has no recommended overlap
    """
    try:
        name = canonical_name(name)
    except KeyError as exc:
        # Chain explicitly so the original KeyError is kept as the cause.
        raise ValueError(str(exc)) from exc
    try:
        rov = ROV[name]
    except KeyError:
        # The bare KeyError adds no context here; suppress the chain.
        raise ValueError("no recommended overlap for %r window" % name) from None
    if nfft:
        return int(ceil(nfft * rov))
    return rov
def restore_descriptor ( self , converted_descriptor ) : """Restore descriptor rom BigQuery"""
# Convert fields = [ ] for field in converted_descriptor [ 'fields' ] : field_type = self . restore_type ( field [ 'type' ] ) resfield = { 'name' : field [ 'name' ] , 'type' : field_type , } if field . get ( 'mode' , 'NULLABLE' ) != 'NULLABLE' : resfield [ 'constraints' ] = { 'required' : True } ...
def get_additional_actions(self, reftrack):
    """Return additional menu actions to provide for the given reftrack.

    Delegates to :meth:`ReftypeInterface.get_additional_actions` of the
    interface registered for the reftrack's type.

    :param reftrack: the reftrack to build actions for
    :return: list of additional actions
    """
    type_interface = self.get_typ_interface(reftrack.get_typ())
    return type_interface.get_additional_actions(reftrack)
def remove_child_gradebook ( self , gradebook_id , child_id ) : """Removes a child from a gradebook . arg : gradebook _ id ( osid . id . Id ) : the ` ` Id ` ` of a gradebook arg : child _ id ( osid . id . Id ) : the ` ` Id ` ` of the new child raise : NotFound - ` ` gradebook _ id ` ` not a parent of ` ` chil...
# Implemented from template for # osid . resource . BinHierarchyDesignSession . remove _ child _ bin _ template if self . _catalog_session is not None : return self . _catalog_session . remove_child_catalog ( catalog_id = gradebook_id , child_id = child_id ) return self . _hierarchy_session . remove_child ( id_ = g...
def get_permission_requests(parser, token):
    """Template tag: collect unapproved permission requests for the given
    object and user into the ``permission_requests`` context variable.

    Syntax::

        {% get_permission_requests obj %}
    """
    node = PermissionsForObjectNode.handle_token(
        parser, token, approved=False, name='"permission_requests"')
    return node
def on(device):
    '''Turn on the quota system for the given device.

    CLI Example:

    .. code-block:: bash

        salt '*' quota.on
    '''
    # Run quotaon without a shell; the return value is unconditionally True.
    __salt__['cmd.run']('quotaon {0}'.format(device), python_shell=False)
    return True
def _nest_at_rules ( self , rule , scope , block ) : """Implements @ - blocks"""
# TODO handle @ charset , probably ? # Interpolate the current block # TODO this seems like it should be done in the block header . and more # generally ? calculator = self . _make_calculator ( rule . namespace ) if block . header . argument : # TODO is this correct ? do ALL at - rules ALWAYS allow both vars and # inte...
def apply_kwargs(func, **kwargs):
    """Call *func* with kwargs, forwarding only those kwargs it accepts."""
    accepted = signature(func).parameters
    return func(**{name: value for name, value in kwargs.items() if name in accepted})
def refresh_index ( meta , index ) -> None : """Recalculate the projection , hash _ key , and range _ key for the given index . : param meta : model . Meta to find columns by name : param index : The index to refresh"""
# All projections include model + index keys projection_keys = set . union ( meta . keys , index . keys ) proj = index . projection mode = proj [ "mode" ] if mode == "keys" : proj [ "included" ] = projection_keys elif mode == "all" : proj [ "included" ] = meta . columns elif mode == "include" : # pragma : no br...
def validate_version_pragma ( version_str : str , start : ParserPosition ) -> None : """Validates a version pragma directive against the current compiler version ."""
from vyper import ( __version__ , ) version_arr = version_str . split ( '@version' ) file_version = version_arr [ 1 ] . strip ( ) file_major , file_minor , file_patch = _parse_version_str ( file_version , start ) compiler_major , compiler_minor , compiler_patch = _parse_version_str ( __version__ , start ) if ( file_maj...
def merge_cameras(self):
    """Merge every sync module's camera dict into a single mapping."""
    merged = CaseInsensitiveDict({})
    for module_name in self.sync:
        merged = merge_dicts(merged, self.sync[module_name].cameras)
    return merged
def run_cmd ( cmd , show_output = True , raise_errs = True , ** kwargs ) : """Run a console command . When show _ output = True , prints output and returns exit code , otherwise returns output . When raise _ errs = True , raises a subprocess . CalledProcessError if the command fails ."""
internal_assert ( cmd and isinstance ( cmd , list ) , "console commands must be passed as non-empty lists" ) try : from shutil import which except ImportError : pass else : cmd [ 0 ] = which ( cmd [ 0 ] ) or cmd [ 0 ] logger . log_cmd ( cmd ) try : if show_output and raise_errs : return subproce...
def name_build(self, name, is_policy=False, prefix=True):
    """Build a role/policy name from the configured prefix and base name.

    :param name: Name of the role/policy
    :param is_policy: True if the "-policy" suffix should be appended
    :param prefix: True if the configured prefix should be prepended
    :return: Joined name
    """
    # Local renamed from `str`, which shadowed the builtin of that name.
    result = name
    # Add prefix
    if prefix:
        result = self.__role_name_prefix + result
    # Add policy suffix
    if is_policy:
        result = result + "-policy"
    return result
def printTemporalMemory ( tm , outFile ) : """Given an instance of TemporalMemory , print out the relevant parameters"""
table = PrettyTable ( [ "Parameter name" , "Value" , ] ) table . add_row ( [ "columnDimensions" , tm . getColumnDimensions ( ) ] ) table . add_row ( [ "cellsPerColumn" , tm . getCellsPerColumn ( ) ] ) table . add_row ( [ "activationThreshold" , tm . getActivationThreshold ( ) ] ) table . add_row ( [ "minThreshold" , tm...
def want_service_notification ( self , timeperiods , timestamp , state , n_type , business_impact , cmd = None ) : # pylint : disable = too - many - return - statements """Check if notification options match the state of the service Notification is NOT wanted in ONE of the following case : : * service notificat...
if not self . service_notifications_enabled : return False # Maybe the command we ask for are not for us , but for another notification ways # on the same contact . If so , bail out if cmd and cmd not in self . service_notification_commands : return False # If the business _ impact is not high enough , we bail ...
def stop_timer ( self , request_len , reply_len , server_time = None , exception = False ) : """This is a low - level method is called by pywbem at the end of an operation . It completes the measurement for that operation by capturing the needed data , and updates the statistics data , if statistics is enable...
if not self . container . enabled : return None # stop the timer if self . _start_time is None : raise RuntimeError ( 'stop_timer() called without preceding ' 'start_timer()' ) dt = time . time ( ) - self . _start_time self . _start_time = None self . _count += 1 self . _time_sum += dt self . _request_len_sum +...
def validate_vertex_field_directive_in_context ( parent_location , vertex_field_name , directives , context ) : """Ensure that the specified vertex field directives are allowed in the current context ."""
fold_directive = directives . get ( 'fold' , None ) optional_directive = directives . get ( 'optional' , None ) recurse_directive = directives . get ( 'recurse' , None ) output_source_directive = directives . get ( 'output_source' , None ) fold_context = 'fold' in context optional_context = 'optional' in context output...
def _get_server ( vm_ , volumes , nics ) : '''Construct server instance from cloud profile config'''
# Apply component overrides to the size from the cloud profile config vm_size = _override_size ( vm_ ) # Set the server availability zone from the cloud profile config availability_zone = config . get_cloud_config_value ( 'availability_zone' , vm_ , __opts__ , default = None , search_global = False ) # Assign CPU famil...
def permute_data(arrays, random_state=None):
    """Permute multiple numpy arrays with one shared random order."""
    if len({len(a) for a in arrays}) > 1:
        raise ValueError('All arrays must be the same length.')
    # Fall back to the global numpy RNG when none is supplied.
    rng = random_state if random_state else np.random
    order = rng.permutation(len(arrays[0]))
    return [a[order] for a in arrays]
def check_newline_after_last_paragraph(self, definition, docstring):
    """D209: Put multi-line docstring closing quotes on a separate line.

    Unless the entire docstring fits on a single line, the closing quotes
    should sit on a line by themselves.
    """
    if not docstring:
        return
    content_lines = [line for line in ast.literal_eval(docstring).split('\n')
                     if not is_blank(line)]
    if len(content_lines) <= 1:
        # Single-line docstring: closing quotes may share the line.
        return
    if docstring.split("\n")[-1].strip() not in ['"""', "'''"]:
        return violations.D209()
def ashrae_revised_clear_sky ( altitudes , tb , td , use_2017_model = False ) : """Calculate solar flux for an ASHRAE Revised Clear Sky ( " Tau Model " ) . By default , this function returns clear sky values following the methods originally published in the ASHRAE 2009 HOF . Args : altitudes : A list of sol...
dir_norm_rad = [ ] dif_horiz_rad = [ ] if use_2017_model is True : ab = 1.454 - ( 0.406 * tb ) - ( 0.268 * td ) - ( 0.021 * tb * td ) ad = 0.507 + ( 0.205 * tb ) - ( 0.080 * td ) - ( 0.190 * tb * td ) else : ab = 1.219 - ( 0.043 * tb ) - ( 0.151 * td ) - ( 0.204 * tb * td ) ad = 0.202 + ( 0.852 * tb ) -...
def _to_mongo_query ( query ) : """Convert the query received by the Sacred Web API to a MongoDB query . Takes a query in format { " type " : " and " , " filters " : [ { " field " : " host . hostname " , " operator " : " = = " , " value " : " ntbacer " } , { " type " : " or " , " filters " : [ { " field "...
mongo_query = [ ] for clause in query [ "filters" ] : if clause . get ( "type" ) is None : mongo_clause = MongoRunDAO . _simple_clause_to_query ( clause ) else : # It ' s a subclause mongo_clause = MongoRunDAO . _to_mongo_query ( clause ) mongo_query . append ( mongo_clause ) if len ( mongo_...
def insert(self, key, value, ttl=0, format=None, persist_to=0, replicate_to=0):
    """Store an object in Couchbase only if the key does not already exist.

    Follows the same conventions as :meth:`upsert`, but the value is stored
    only when it is not already present.
    """
    # Delegate to the base implementation, forwarding every durability and
    # formatting option unchanged.
    return _Base.insert(
        self, key, value,
        ttl=ttl,
        format=format,
        persist_to=persist_to,
        replicate_to=replicate_to,
    )
def _to_add_with_category ( self , catid ) : '''Used for info2. : param catid : the uid of category'''
catinfo = MCategory . get_by_uid ( catid ) kwd = { 'uid' : self . _gen_uid ( ) , 'userid' : self . userinfo . user_name if self . userinfo else '' , 'gcat0' : catid , 'parentname' : MCategory . get_by_uid ( catinfo . pid ) . name , 'catname' : MCategory . get_by_uid ( catid ) . name , } self . render ( 'autogen/add/add...
def fit ( self , X , y , sample_weight = None ) : """Fit the Genetic Program according to X , y . Parameters X : array - like , shape = [ n _ samples , n _ features ] Training vectors , where n _ samples is the number of samples and n _ features is the number of features . y : array - like , shape = [ n _...
random_state = check_random_state ( self . random_state ) # Check arrays if isinstance ( self , ClassifierMixin ) : X , y = check_X_y ( X , y , y_numeric = False ) check_classification_targets ( y ) self . classes_ , y = np . unique ( y , return_inverse = True ) n_trim_classes = np . count_nonzero ( np ...
def get_index(self, key: Hashable) -> pd.Index:
    """Get an index for a dimension, falling back to a default RangeIndex."""
    if key not in self.dims:
        raise KeyError(key)
    try:
        return self.indexes[key]
    except KeyError:
        # Force dtype=int64 in case the range is empty on Python 2.
        return pd.Index(range(self.sizes[key]), name=key, dtype=np.int64)
def __expand_meta_datas(meta_datas, meta_datas_expanded):
    """Flatten arbitrarily nested meta_datas into one level.

    Args:
        meta_datas (dict/list): possibly nested structure of dicts.
        meta_datas_expanded (list): accumulator that receives each dict.
    """
    if isinstance(meta_datas, list):
        for nested in meta_datas:
            __expand_meta_datas(nested, meta_datas_expanded)
    elif isinstance(meta_datas, dict):
        meta_datas_expanded.append(meta_datas)
def mask_by_ind(self, inds):
    """Create a new image with all data zeroed except at the given indices.

    Parameters
    ----------
    inds : array-like of (row, col) pairs
        Indices whose data is kept; everything else becomes zero.

    Returns
    -------
    A new instance of the same image type, same dtype and frame.
    """
    masked = np.zeros(self.shape)
    for index_pair in inds:
        masked[index_pair[0], index_pair[1]] = self.data[index_pair[0], index_pair[1]]
    return type(self)(masked.astype(self.data.dtype), self.frame)
def toggle(self, rows):
    'Toggle selection of given `rows`.'
    for row in Progress(rows, 'toggling', total=len(self.rows)):
        # unselectRow reports whether the row was selected; if it was not,
        # select it instead.
        if not self.unselectRow(row):
            self.selectRow(row)
def accounts_frontiers(self, accounts):
    """Return a mapping of account -> head block hash for **accounts**.

    :param accounts: Accounts to return frontier blocks for
    :type accounts: list of str
    :raises: :py:exc:`nano.rpc.RPCException`
    """
    validated = self._process_value(accounts, 'list')
    response = self.call('accounts_frontiers', {"accounts": validated})
    return response.get('frontiers') or {}
def logger ( self ) : """uses " global logger " for logging"""
if self . _logger : return self . _logger else : log_builder = p_logging . ProsperLogger ( self . PROGNAME , self . config . get_option ( 'LOGGING' , 'log_path' ) , config_obj = self . config ) if self . verbose : log_builder . configure_debug_logger ( ) else : id_string = '({platform}--...
def get_literals ( self , c , i , depth ) : """Get a string literal . Gather all the literal chars up to opening curly or closing brace . Also gather chars between braces and commas within a group ( is _ expanding ) ."""
result = [ '' ] is_dollar = False try : while c : ignore_brace = is_dollar is_dollar = False if c == '$' : is_dollar = True elif c == '\\' : c = [ self . get_escape ( c , i ) ] elif not ignore_brace and c == '{' : # Try and get the group in...
def remove_all_servers(self):
    """Remove every registered WBEM server from the subscription manager.

    Unregisters listeners from those servers and removes all owned
    subscriptions, filters, and listener destinations via
    :meth:`remove_server`.
    """
    # Snapshot the ids first: remove_server mutates the registry while
    # we iterate.
    for server_id in list(self._servers):
        self.remove_server(server_id)
def update ( self , gradient , step ) : """Update the search direction given the latest gradient and step"""
self . old_gradient = self . gradient self . gradient = gradient N = len ( self . gradient ) if self . inv_hessian is None : # update the direction self . direction = - self . gradient self . status = "SD" # new guess of the inverse hessian self . inv_hessian = np . identity ( N , float ) else : # updat...
def kde ( data , npoints = _npoints ) : """Identify peak using Gaussian kernel density estimator . Parameters : data : The 1d data sample npoints : The number of kde points to evaluate"""
# Clipping of severe outliers to concentrate more KDE samples in the parameter range of interest mad = np . median ( np . fabs ( np . median ( data ) - data ) ) cut = ( data > np . median ( data ) - 5. * mad ) & ( data < np . median ( data ) + 5. * mad ) x = data [ cut ] kde = scipy . stats . gaussian_kde ( x ) # No pe...
def best(self):
    """Return the element with the highest count (probability)."""
    # Seed with -inf so any real count wins; ties break on the key itself.
    top = (-1e999999, None)
    for element, count in iteritems(self.counts):
        top = max(top, (count, element))
    return top[1]
def query ( number , domains , resolver = None ) : """Look for NAPTR RRs for the specified number in the specified domains . e . g . lookup ( ' 16505551212 ' , [ ' e164 . dnspython . org . ' , ' e164 . arpa . ' ] )"""
if resolver is None : resolver = dns . resolver . get_default_resolver ( ) for domain in domains : if isinstance ( domain , ( str , unicode ) ) : domain = dns . name . from_text ( domain ) qname = dns . e164 . from_e164 ( number , domain ) try : return resolver . query ( qname , 'NAPTR' ...
def load_more_data ( self , value , rows = False , columns = False ) : """Load more rows and columns to display ."""
try : if rows and value == self . verticalScrollBar ( ) . maximum ( ) : self . model ( ) . fetch_more ( rows = rows ) self . sig_fetch_more_rows . emit ( ) if columns and value == self . horizontalScrollBar ( ) . maximum ( ) : self . model ( ) . fetch_more ( columns = columns ) s...
def ip_addrs ( interface = None , include_loopback = False , cidr = None , type = None ) : '''Returns a list of IPv4 addresses assigned to the host . interface Only IP addresses from that interface will be returned . include _ loopback : False Include loopback 127.0.0.1 IPv4 address . cidr Describes sub...
addrs = salt . utils . network . ip_addrs ( interface = interface , include_loopback = include_loopback ) if cidr : return [ i for i in addrs if salt . utils . network . in_subnet ( cidr , [ i ] ) ] else : if type == 'public' : return [ i for i in addrs if not is_private ( i ) ] elif type == 'privat...
def limit(self, limit):
    """Apply a LIMIT and return the newly resulting Query (a copy)."""
    clone = self._copy()
    clone._limit = limit
    return clone
def tick(self):
    """Emulate a timer: instead of a real timer, "tick" as many times as
    fit into the wall-clock time elapsed since the last tick."""
    now = time.time()
    elapsed = now - self.latest_tick
    if elapsed <= self.tick_interval:
        return
    self.tick_all(int(elapsed / self.tick_interval))
    self.latest_tick = now
def _make(c):
    """Build the pieces needed to render HTML for a cluster: the valid
    feature list and the comma-joined annotation strings per feature."""
    ann = defaultdict(list)
    for pos in c['ann']:
        for db in pos:
            ann[db] += list(pos[db])
    logger.debug(ann)
    valid = list(c['valid'])
    # Deduplicate each feature's annotations before joining.
    ann_list = [", ".join(set(ann[feature]))
                for feature in ann if feature in valid]
    return valid, ann_list
def create_request ( version , method , url , headers ) : """Create a HTTP request header ."""
# According to my measurements using b ' ' . join is faster that constructing a # bytearray . message = [ ] message . append ( '{} {} HTTP/{}\r\n' . format ( method , url , version ) ) for name , value in headers : message . append ( name ) message . append ( ': ' ) message . append ( value ) message . ...
def java_potential_term(mesh, instructions):
    '''Yield a Java object implementing the potential field described by the
    given list of instructions.

    Generally this should not be invoked directly; it is called by
    mesh_register.
    '''
    java_faces = to_java_ints(mesh.indexed_faces)
    java_edges = to_java_ints(mesh.indexed_edges)
    java_coords = to_java_doubles(mesh.coordinates)
    return _parse_field_arguments([instructions], java_faces, java_edges, java_coords)
def Conditions(cls, artifact=None, os_name=None, cpe=None, labels=None):
    """Yield every distinct condition tuple for this target.

    A Target can specify multiple artifact, os_name, cpe or label
    entries. Each argument is normalised to a list via ``cls._AsList``
    (an undefined/None entry acts as a single wildcard entry), and the
    cartesian product of the four lists is generated.

    Yields:
        4-tuples ``(artifact, os_name, cpe, label)``.
    """
    normalised = (
        cls._AsList(artifact),
        cls._AsList(os_name),
        cls._AsList(cpe),
        cls._AsList(labels),
    )
    for combo in itertools.product(*normalised):
        yield combo
def list_containers(**kwargs):
    '''Return a sorted list of container names.

    This differs from :py:func:`docker.ps <salt.modules.dockermod.ps_>`
    in that ``docker.ps`` keys its results by container ID, whereas this
    flattens every container's ``Names`` entries, strips the leading
    ``/`` that docker prepends, de-duplicates, and sorts.

    all : False
        If True, include stopped containers as well.
    '''
    found = set()
    containers = ps_(all=kwargs.get('all', False))
    for info in six.itervalues(containers):
        for raw_name in info.get('Names') or []:
            found.add(raw_name.lstrip('/'))
    return sorted(found)
def open ( self , host , port = 23 ) : """Opens a telnet connection to the desired AttenuatorDevice and queries basic information . Args : host : A valid hostname ( IP address or DNS - resolvable name ) to an MC - DAT attenuator instrument . port : An optional port number ( defaults to telnet default 23)"...
self . _telnet_client . open ( host , port ) config_str = self . _telnet_client . cmd ( "MN?" ) if config_str . startswith ( "MN=" ) : config_str = config_str [ len ( "MN=" ) : ] self . properties = dict ( zip ( [ 'model' , 'max_freq' , 'max_atten' ] , config_str . split ( "-" , 2 ) ) ) self . max_atten = float ( s...
def transformer_nat_big():
    """Set of hyperparameters.

    Starts from ``transformer_nat_small`` and scales up batch size,
    model width, depth, head count, and dropout.
    """
    hparams = transformer_nat_small()
    overrides = {
        'batch_size': 2048,
        'hidden_size': 1024,
        'filter_size': 4096,
        'num_hidden_layers': 6,
        'num_heads': 16,
        'layer_prepostprocess_dropout': 0.3,
    }
    for attr, value in overrides.items():
        setattr(hparams, attr, value)
    return hparams
def plotallanvar(data, dt, tmax=10, ax=None, **kwargs):
    """Plot the Allan variance of *data* on log-log axes.

    Args:
        data (np.ndarray): Input data.
        dt (float): Time between consecutive samples.
        tmax (float): Maximum time to evaluate.
        ax (matplotlib.axes): Axis to draw on; defaults to the current axis.
        **kwargs: Forwarded to ``ax.loglog`` (e.g. ``label``).
    """
    axis = plt.gca() if ax is None else ax
    tk, avar = allan_variance(data, dt, tmax)
    axis.loglog(tk, avar, **kwargs)
    axis.set_xlabel('Time [s]')
    axis.set_ylabel('Allan Variance')
    axis.legend()
def _bind_method ( self , name , unconditionally = False ) : """Generate a Matlab function and bind it to the instance This is where the magic happens . When an unknown attribute of the Matlab class is requested , it is assumed to be a call to a Matlab function , and is generated and bound to the instance . ...
# TODO : This does not work if the function is a mex function inside a folder of the same name exists = self . run_func ( 'exist' , name ) [ 'result' ] in [ 2 , 3 , 5 ] if not unconditionally and not exists : raise AttributeError ( "'Matlab' object has no attribute '%s'" % name ) # create a new method instance meth...
def html_to_fc ( html = None , clean_html = None , clean_visible = None , encoding = None , url = None , timestamp = None , other_features = None ) : '''` html ` is expected to be a raw string received over the wire from a remote webserver , and ` encoding ` , if provided , is used to decode it . Typically , en...
def add_feature ( name , xs ) : if name not in fc : fc [ name ] = StringCounter ( ) fc [ name ] += StringCounter ( xs ) timestamp = timestamp or int ( time . time ( ) * 1000 ) other_features = other_features or { } if clean_html is None : if html is not None : try : clean_html_ut...
def get_changed_files ( self , first_sha , second_sha , exclude_paths = None ) : """: param first _ sha : : param second _ sha : : param exclude _ paths : : return :"""
if not exclude_paths : exclude_paths = [ ] first_commit = self . repo . commit ( first_sha ) second_commit = self . repo . commit ( second_sha ) diffs = first_commit . diff ( second_commit ) changed_files = [ ] for diff in diffs : excluded = False for exclude in exclude_paths : if diff . a_path . st...
def whoami ( anchore_config ) : """Show user data for current user if available : param anchore _ config : : return :"""
ecode = 0 try : aa = contexts [ 'anchore_auth' ] if aa and 'username' in aa and 'password' in aa : info = { 'Current user' : aa [ 'user_info' ] if aa [ 'user_info' ] else 'anonymous' } anchore_print ( info , do_formatting = True ) else : anchore_print_err ( 'No anchore auth context f...
def find_recurring ( number , min_repeat = 5 ) : """Attempts to find repeating digits in the fractional component of a number . Args : number ( tuple ) : the number to process in the form : ( int , int , int , . . . " . " , . . . , int int int ) min _ repeat ( int ) : the minimum number of times a pattern m...
# Return number if it has no fractional part , or min _ repeat value invalid . if "." not in number or min_repeat < 1 : return number # Seperate the number into integer and fractional parts . integer_part , fractional_part = integer_fractional_parts ( number ) # Reverse fractional part to get a sequence . sequence ...
def draw_separators ( self ) : """Draw the lines separating the categories on the Canvas"""
total = 1 self . _timeline . create_line ( ( 0 , 1 , self . pixel_width , 1 ) ) for index , ( category , label ) in enumerate ( self . _category_labels . items ( ) ) : height = label . winfo_reqheight ( ) self . _rows [ category ] = ( total , total + height ) total += height self . _timeline . create_li...
def prepare_weighted_spans ( targets , # type : List [ TargetExplanation ] preserve_density = None , # type : Optional [ bool ] ) : # type : ( . . . ) - > List [ Optional [ List [ PreparedWeightedSpans ] ] ] """Return weighted spans prepared for rendering . Calculate a separate weight range for each different wei...
targets_char_weights = [ [ get_char_weights ( ws , preserve_density = preserve_density ) for ws in t . weighted_spans . docs_weighted_spans ] if t . weighted_spans else None for t in targets ] # type : List [ Optional [ List [ np . ndarray ] ] ] max_idx = max_or_0 ( len ( ch_w or [ ] ) for ch_w in targets_char_weights ...
def get ( self , column , default_value = None ) : """Get an item from the Row by column name . Args : column : Tuple of column names , or a ( str ) column name , or positional column number , 0 - indexed . default _ value : The value to use if the key is not found . Returns : A list or string with colu...
if isinstance ( column , ( list , tuple ) ) : ret = [ ] for col in column : ret . append ( self . get ( col , default_value ) ) return ret # Perhaps we have a range like ' 1 ' , ' : - 1 ' or ' 1 : ' . try : return self . _values [ column ] except ( IndexError , TypeError ) : pass try : r...
def select_window ( pymux , variables ) : """Select a window . E . g : select - window - t : 3"""
window_id = variables [ '<target-window>' ] def invalid_window ( ) : raise CommandException ( 'Invalid window: %s' % window_id ) if window_id . startswith ( ':' ) : try : number = int ( window_id [ 1 : ] ) except ValueError : invalid_window ( ) else : w = pymux . arrangement . ge...
def getReceivers(sender=Any, signal=Any):
    """Get the raw list of receivers for a (sender, signal) pair.

    Looks the pair up in the global ``connections`` table. Note: there
    is no guarantee that this is the actual live list stored in the
    table; a missing sender or signal entry simply yields ``[]``.
    """
    try:
        per_sender = connections[id(sender)]
        return per_sender[signal]
    except KeyError:
        return []
def _user_perm_cache ( self ) : """cached _ permissions will generate the cache in a lazy fashion ."""
# Check to see if the cache has been primed . if not self . user : return { } cache_filled = getattr ( self . user , '_authority_perm_cache_filled' , False , ) if cache_filled : # Don ' t really like the name for this , but this matches how Django # does it . return self . user . _authority_perm_cache # Prime t...
def _fetch_datatype(self, transport, bucket, key, r=None, pr=None,
                    basic_quorum=None, notfound_ok=None, timeout=None,
                    include_context=None):
    """Fetch a convergent datatype over the given transport.

    Validates *timeout* and then delegates directly to
    ``transport.fetch_datatype`` with all tuning options passed through
    unchanged.

    Args:
        transport: Transport to issue the fetch on.
        bucket: Bucket of the datatype.
        key: Key of the datatype.
        r: Read quorum value.
        pr: Primary-read quorum value.
        basic_quorum: Quorum shortcut flag forwarded to the transport.
        notfound_ok: Whether notfound responses are forwarded as-is.
        timeout: Per-request timeout (validated by ``_validate_timeout``).
        include_context: Whether to include the opaque update context.
    """
    _validate_timeout(timeout)
    options = dict(r=r, pr=pr, basic_quorum=basic_quorum,
                   notfound_ok=notfound_ok, timeout=timeout,
                   include_context=include_context)
    return transport.fetch_datatype(bucket, key, **options)
def json_obj(self, method, params=None, auth=True):
    """Build the JSON-RPC 2.0 request body expected by the Zabbix API.

    Args:
        method: Zabbix API method name (e.g. ``host.get``).
        params: Parameter mapping for the call; defaults to ``{}``.
        auth: When False (e.g. for ``apiinfo.version``), a null auth
            token is sent instead of the stored one.

    Returns:
        str: The request serialised as a JSON string.
    """
    payload = {
        'jsonrpc': '2.0',
        'method': method,
        'params': {} if params is None else params,
        'auth': self.__auth if auth else None,
        'id': self.id,
    }
    return json.dumps(payload)
def run_all(self, delay_seconds=0):
    """Run all jobs regardless of whether they are scheduled to run.

    A pause of *delay_seconds* follows each job, which helps distribute
    the system load generated by the jobs more evenly over time.

    :param delay_seconds: Seconds to sleep after each executed job.
    """
    logger.info('Running *all* %i jobs with %is delay inbetween',
                len(self.jobs), delay_seconds)
    # Iterate over a snapshot so jobs that reschedule or cancel
    # themselves cannot disturb the iteration.
    for job in list(self.jobs):
        self._run_job(job)
        time.sleep(delay_seconds)
def LessThan(self, value):
    """Set the type of the WHERE clause to "less than".

    Args:
        value: The value compared against in the WHERE condition.

    Returns:
        The query builder this WHERE builder is linked to, for chaining.
    """
    condition = self._CreateSingleValueCondition(value, '<')
    self._awql = condition
    return self._query_builder
def find_ident(self, name):
    """Search this module and **all** sub-modules for identifier *name*.

    The module's own exported identifiers (``refdoc``) are consulted
    first; otherwise every sub-module is searched recursively (note that
    unexported sub-modules are searched too). A bare identifier (without
    ``.`` separators) matches only exported names.

    Returns:
        The documentation object for *name*, or ``External(name)`` when
        nothing was found.
    """
    try:
        return self.refdoc[name]
    except KeyError:
        pass
    for sub in self.submodules():
        found = sub.find_ident(name)
        if not isinstance(found, External):
            return found
    return External(name)
def pop_viewport(self):
    """Pop the top viewport off the stack and re-activate the previous one.

    When the stack becomes empty, the full-canvas viewport (origin plus
    the canvas' physical size) is restored. The transform hierarchy is
    refreshed either way.

    Returns:
        The viewport that was removed from the stack.
    """
    popped = self._vp_stack.pop()
    if self._vp_stack:
        self.context.set_viewport(*self._vp_stack[-1])
    else:
        self.context.set_viewport(0, 0, *self.physical_size)
    self._update_transforms()
    return popped
def list_images ( self , identifier , offset = - 1 , limit = - 1 ) : """Parameters identifier : string Unique image group identifier limit : int Limit number of results in returned object listing offset : int Set offset in list ( order as defined by object store ) Returns ObjectListing Listing of ...
# Get image group to ensure that it exists . The object contains the full # list of group images img_grp = self . get_object ( identifier ) if img_grp is None : return None # Extract subset of group images based on offset and limit arguments total_count = len ( img_grp . images ) items = [ ] if offset < total_count...
def is_bsd(name=None):
    """Return True if this is a BSD-like operating system.

    Args:
        name: Platform string to test; defaults to ``sys.platform``.
    """
    platform_name = name if name else sys.platform
    if Platform.is_darwin(platform_name):
        return True
    return Platform.is_freebsd(platform_name)
def idfdiffs ( idf1 , idf2 ) : """return the diffs between the two idfs"""
# for any object type , it is sorted by name thediffs = { } keys = idf1 . model . dtls # undocumented variable for akey in keys : idfobjs1 = idf1 . idfobjects [ akey ] idfobjs2 = idf2 . idfobjects [ akey ] names = set ( [ getobjname ( i ) for i in idfobjs1 ] + [ getobjname ( i ) for i in idfobjs2 ] ) na...
def _get_roles_for_request(request, application):
    """Return the set of roles the requesting user has for *application*.

    Admin users additionally receive ``is_admin``; every caller reaching
    this point is granted ``is_authorised``.
    """
    user_roles = application.get_roles_for_person(request.user)
    if common.is_admin(request):
        user_roles.add("is_admin")
    user_roles.add('is_authorised')
    return user_roles
def get_brandings(self):
    """Get all account brandings.

    @return List of brandings fetched from the brandings endpoint.
    """
    conn = Connection(self.token)
    conn.set_url(self.production, self.BRANDINGS_URL)
    return conn.get_request()
def is_connected ( self ) : r"""Check if the graph is connected ( cached ) . A graph is connected if and only if there exists a ( directed ) path between any two vertices . Returns connected : bool True if the graph is connected , False otherwise . Notes For undirected graphs , starting at a vertex an...
if self . _connected is not None : return self . _connected adjacencies = [ self . W ] if self . is_directed ( ) : adjacencies . append ( self . W . T ) for adjacency in adjacencies : visited = np . zeros ( self . n_vertices , dtype = np . bool ) stack = set ( [ 0 ] ) while stack : vertex = ...
def get_dialect ( self ) : """Return the SQLAlchemy database dialect class corresponding to this URL ' s driver name ."""
if "+" not in self . drivername : name = self . drivername else : name = self . drivername . replace ( "+" , "." ) cls = registry . load ( name ) # check for legacy dialects that # would return a module with ' dialect ' as the # actual class if ( hasattr ( cls , "dialect" ) and isinstance ( cls . dialect , type...