signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def sum(self, only_valid=True) -> ErrorValue:
    """Sum the pixel intensities of this image.

    :param only_valid: if True, masked pixels are excluded from the sum
        by weighting with ``self.mask``; if False, every pixel counts.
    :returns: an ErrorValue holding the summed intensity and the
        propagated (quadrature-summed) error.
    """
    # A weight of 1 keeps every pixel; self.mask zeroes the invalid ones.
    mask = self.mask if only_valid else 1
    total_intensity = (self.intensity * mask).sum()
    propagated_error = ((self.error * mask) ** 2).sum() ** 0.5
    return ErrorValue(total_intensity, propagated_error)
def _format_fields ( cls , declared_fields : typing . List [ tuple ] ) : """Process declared fields and construct a list of tuples that can be fed into dataclass constructor factory ."""
formatted_fields = [ ] for declared_field in declared_fields : field_name = field_type = field_defn = None # Case when only ( name ) , or " name " , is specified if isinstance ( declared_field , str ) or len ( declared_field ) == 1 : field_name = declared_field field_type = typing . Any ...
def sleep(self, time):
    """Sleep (no action) for *time* (in milliseconds).

    Implemented by waiting for a device element labelled
    ``wait for <time>`` to appear, with *time* as the timeout.
    """
    target = 'wait for %s' % str(time)
    element = self.device(text=target)
    element.wait.exists(timeout=time)
def s3_download ( source , destination , exists_strategy = ExistsStrategy . RAISE , profile_name = None ) : """Copy a file from an S3 source to a local destination . Parameters source : str Path starting with s3 : / / , e . g . ' s3 : / / bucket - name / key / foo . bar ' destination : str exists _ strate...
if not isinstance ( exists_strategy , ExistsStrategy ) : raise ValueError ( 'exists_strategy \'{}\' is not in {}' . format ( exists_strategy , ExistsStrategy ) ) session = boto3 . Session ( profile_name = profile_name ) s3 = session . resource ( 's3' ) bucket_name , key = _s3_path_split ( source ) if os . path . is...
def record_sets_list_by_dns_zone ( zone_name , resource_group , top = None , recordsetnamesuffix = None , ** kwargs ) : '''. . versionadded : : Fluorine Lists all record sets in a DNS zone . : param zone _ name : The name of the DNS zone ( without a terminating dot ) . : param resource _ group : The name of t...
result = { } dnsconn = __utils__ [ 'azurearm.get_client' ] ( 'dns' , ** kwargs ) try : record_sets = __utils__ [ 'azurearm.paged_object_to_list' ] ( dnsconn . record_sets . list_by_dns_zone ( zone_name = zone_name , resource_group_name = resource_group , top = top , recordsetnamesuffix = recordsetnamesuffix ) ) ...
def _match_by_norm_func ( l1 , l2 , norm_fn , dist_fn , thresh ) : """Matches elements in l1 and l2 using normalization functions . Splits the elements in each list into buckets given by the normalization function . If the same normalization value points to a bucket from the first list and a bucket from the s...
common = [ ] l1_only_idx = set ( range ( len ( l1 ) ) ) l2_only_idx = set ( range ( len ( l2 ) ) ) buckets_l1 = _group_by_fn ( enumerate ( l1 ) , lambda x : norm_fn ( x [ 1 ] ) ) buckets_l2 = _group_by_fn ( enumerate ( l2 ) , lambda x : norm_fn ( x [ 1 ] ) ) for normed , l1_elements in buckets_l1 . items ( ) : l2_e...
def get_first_line(filepath, dialect):
    """Return the fields of the first line of the CSV file *filepath* as a list.

    :param filepath: path of the CSV file to read
    :param dialect: csv dialect name or Dialect instance passed to csv.reader
    :returns: list of fields from the first row, or [] for an empty file

    Fixes two defects in the original:
    * the file was opened in binary mode ("rb"), which makes csv.reader
      raise under Python 3 — open in text mode with newline='' as the
      csv module documentation requires;
    * an empty file left the loop variable unbound, raising NameError —
      next(reader, []) returns an empty list instead.
    """
    with open(filepath, "r", newline="") as csvfile:
        csvreader = csv.reader(csvfile, dialect=dialect)
        return next(csvreader, [])
def complete_info(self, text, line, begidx, endidx):
    """Tab-completion hook for the ``info`` command.

    Returns every option in ``self.INFO_OPTS`` when *text* is empty,
    otherwise only the options starting with *text*.
    """
    opts = self.INFO_OPTS
    if text:
        return [opt for opt in opts if opt.startswith(text)]
    return opts
def map_dual_axis(low, high, centre, dead_zone, hot_zone, value):
    """Map an axis with a central dead zone and hot zones at each end to -1.0..1.0.

    Delegates to map_single_axis twice: values at or below *centre* are
    mapped against the (centre, low) half, values above against the
    (centre, high) half.
    """
    # Pick which half of the axis the raw value falls in.
    half_limit = low if value <= centre else high
    return map_single_axis(centre, half_limit, dead_zone, hot_zone, value)
def get(account):
    """Return the class object identified by `account`.

    Args:
        account (`int`, `str`): Unique ID of the account to load from database

    Returns:
        `Account` object if found, else None
    """
    record = Account.get(account)
    if not record:
        return None
    # Resolve the concrete account class through the plugin registry.
    acct_type = AccountType.get(record.account_type_id).account_type
    account_class = get_plugin_by_name(PLUGIN_NAMESPACES['accounts'], acct_type)
    return account_class(record)
def parse ( cls , element ) : """Create a new Agent by parsing root . : param element : Element to be parsed into an Agent . : raises exceptions . ParseError : If element is not a valid agent ."""
if element . tag != cls . AGENT_TAG : raise exceptions . ParseError ( u"Agent got unexpected tag {}; expected {}" . format ( element . tag , cls . AGENT_TAG ) ) role = element . get ( u"ROLE" ) if not role : raise exceptions . ParseError ( u"Agent must have a ROLE attribute." ) if role == u"OTHER" : role = ...
def deserialize ( self , value , ** kwargs ) : """Return a deserialized copy of the tuple"""
kwargs . update ( { 'trusted' : kwargs . get ( 'trusted' , False ) } ) if self . deserializer is not None : return self . deserializer ( value , ** kwargs ) if value is None : return None output_list = [ self . prop . deserialize ( val , ** kwargs ) for val in value ] return self . _class_container ( output_lis...
def _git_enable_branch(desired_branch):
    """Check out *desired_branch*, yield, then restore the original branch.

    Generator used as a context manager body: the checkout is skipped when
    the desired branch is already current, and the previous branch is
    restored in a finally block even if the wrapped code raises.
    """
    original_branch = _git_get_current_branch()
    needs_switch = original_branch != desired_branch
    try:
        if needs_switch:
            _tool_run('git checkout ' + desired_branch)
        yield
    finally:
        # Only switch back when we actually moved and know where we came from.
        if original_branch and needs_switch:
            _tool_run('git checkout ' + original_branch)
def requires_application_json ( f ) : """Decorator for enforcing application / json Content - Type"""
@ functools . wraps ( f ) def wrapped ( * args , ** kwargs ) : from flask import request if request . get_json ( silent = True ) is None : er = ErrorResponse ( description = 'Improper Content-Type header. Expecting "application/json"' ) return to_json_response ( er ) , HTTPStatus . BAD_REQUEST ...
def _JRStaeckelIntegrandSquared(u, E, Lz, I3U, delta, u0, sinh2u0, v0, sin2v0,
                                potu0v0, pot):
    # potu0v0 = potentialStaeckel(u0, v0, pot, delta)
    """The J_R integrand: p^2_u(u) / 2 / delta^2."""
    su2 = nu.sinh(u) ** 2.
    # Potential difference relative to the reference point (u0, v0).
    dU = ((su2 + sin2v0) * potentialStaeckel(u, v0, pot, delta)
          - (sinh2u0 + sin2v0) * potu0v0)
    return E * su2 - I3U - dU - Lz ** 2. / 2. / delta ** 2. / su2
def toggle_service_status(self, service_id):
    """Toggle the status of a load-balancer service.

    :param int service_id: The id of the service to toggle
    """
    service = self.client['Network_Application_Delivery_Controller_'
                          'LoadBalancer_Service']
    return service.toggleStatus(id=service_id)
def cli_run_viz ( source = None , outputpath = "" , theme = "" , verbose = False ) : """This application is a wrapper on the main ontospy - viz script . It generates docs for all models in the local library . Using the Complex - html template . . @ todo allow to pass a custom folder . . > python - m ontospy . v...
if outputpath : if not ( os . path . exists ( outputpath ) ) or not ( os . path . isdir ( outputpath ) ) : click . secho ( "WARNING: the -o option must include a valid directory path." , fg = "red" ) sys . exit ( 0 ) else : from os . path import expanduser home = expanduser ( "~" ) outpu...
def build_sdist ( source_dir , sdist_dir , config_settings = None ) : """Build an sdist from a source directory using PEP 517 hooks . : param str source _ dir : Source directory containing pyproject . toml : param str sdist _ dir : Target directory to place sdist in : param dict config _ settings : Options to...
if config_settings is None : config_settings = { } requires , backend = _load_pyproject ( source_dir ) hooks = Pep517HookCaller ( source_dir , backend ) with BuildEnvironment ( ) as env : env . pip_install ( requires ) reqs = hooks . get_requires_for_build_sdist ( config_settings ) env . pip_install ( r...
def subsystems(self):
    """Return all subsystem types used by tasks in this goal, in no particular order."""
    found = set()
    for task_type in self.task_types():
        found.update(dep.subsystem_cls
                     for dep in task_type.subsystem_dependencies_iter())
    return found
def filter ( resources , query ) : """Filter a list of resources according to a query expression . The search criteria specified in the query parameter has two parts : 1 . a VISA regular expression over a resource string . 2 . optional logical expression over attribute values ( not implemented in this funct...
if '{' in query : query , _ = query . split ( '{' ) logger . warning ( 'optional part of the query expression not supported. ' 'See filter2' ) try : query = query . replace ( '?' , '.' ) matcher = re . compile ( query , re . IGNORECASE ) except re . error : raise errors . VisaIOError ( constants . V...
def get_cpuinfo_field ( self , field ) : """Search / proc / cpuinfo for a field and return its value , if found , otherwise None ."""
# Match a line like ' Hardware : BCM2709 ' : pattern = r'^' + field + r'\s+:\s+(.*)$' with open ( '/proc/cpuinfo' , 'r' ) as infile : cpuinfo = infile . read ( ) . split ( '\n' ) for line in cpuinfo : match = re . search ( pattern , line , flags = re . IGNORECASE ) if match : return ...
def _upload_in_splits ( self , destination_folder_id , source_path , preflight_check , verbose = True , chunked_upload_threads = 5 ) : '''Since Box has a maximum file size limit ( 15 GB at time of writing ) , we need to split files larger than this into smaller parts , and chunk upload each part'''
file_size = os . stat ( source_path ) . st_size split_size = BOX_MAX_FILE_SIZE # Make sure that the last split piece is still big enough for a chunked upload while file_size % split_size < BOX_MIN_CHUNK_UPLOAD_SIZE : split_size -= 1000 if split_size < BOX_MIN_CHUNK_UPLOAD_SIZE : raise Exception ( 'Lazy ...
def get_app_guid(self, app_name):
    """Return the GUID of the app with the given name, or None if not found."""
    summary = self.space.get_space_summary()
    matches = (app['guid'] for app in summary['apps'] if app['name'] == app_name)
    return next(matches, None)
def plot ( self , filename , title = None , reciprocal = None , limits = None , dtype = 'rho' , return_fig = False , ** kwargs ) : """Standard plot of spectrum Parameters filename : string Output filename . Include the ending to specify the filetype ( usually . pdf or . png ) title : string , optional T...
fig , axes = self . _plot ( reciprocal = reciprocal , limits = limits , title = title , dtype = dtype , ** kwargs ) fig . savefig ( filename , dpi = 300 ) if return_fig : return fig else : plt . close ( fig )
def attach_session(self, target_session=None):
    """``$ tmux attach-session`` aka alias: ``$ tmux attach``.

    Parameters
    ----------
    target_session : str
        name of the session. fnmatch(3) works.

    Raises
    ------
    :exc:`exc.BadSessionName`
    """
    session_check_name(target_session)
    # Only pass -t when a target session was given.
    tmux_args = ('-t%s' % target_session,) if target_session else tuple()
    proc = self.cmd('attach-session', *tmux_args)
    if proc.stderr:
        raise exc.LibTmuxException(proc.stderr)
async def post ( self , url_path : str , params : dict = None , rtype : str = RESPONSE_JSON , schema : dict = None ) -> Any : """POST request on self . endpoint + url _ path : param url _ path : Url encoded path following the endpoint : param params : Url query string parameters dictionary : param rtype : Res...
if params is None : params = dict ( ) client = API ( self . endpoint . conn_handler ( self . session , self . proxy ) ) # get aiohttp response response = await client . requests_post ( url_path , ** params ) # if schema supplied . . . if schema is not None : # validate response await parse_response ( response ,...
def wraps(__fn, **kw):
    """Like ``functools.wraps``, with support for annotations.

    Defaults the ``assigned`` keyword to the module's WRAPPER_ASSIGNMENTS
    (which includes annotation attributes) before delegating.
    """
    kw.setdefault('assigned', WRAPPER_ASSIGNMENTS)
    return functools.wraps(__fn, **kw)
def pack ( self , value = None ) : """Pack the value as a binary representation . Returns : bytes : The binary representation ."""
if isinstance ( value , type ( self ) ) : return value . pack ( ) if value is None : value = self else : container = type ( self ) ( items = None ) container . extend ( value ) value = container bin_message = b'' try : for item in value : bin_message += item . pack ( ) return bin_mes...
def update_stored_win32tz_map ( ) : """Downloads the cldr win32 timezone map and stores it in win32tz _ map . py ."""
windows_zones_xml = download_cldr_win32tz_map_xml ( ) source_hash = hashlib . md5 ( windows_zones_xml ) . hexdigest ( ) if hasattr ( windows_zones_xml , "decode" ) : windows_zones_xml = windows_zones_xml . decode ( "utf-8" ) map_zones = create_win32tz_map ( windows_zones_xml ) map_dir = os . path . dirname ( os . p...
def ring2nest(nside, ipix):
    """Drop-in replacement for healpy `~healpy.pixelfunc.ring2nest`."""
    # Normalize to a 1-d int64 array without copying when already int64.
    pixels = np.atleast_1d(ipix).astype(np.int64, copy=False)
    return ring_to_nested(pixels, nside)
def dump(bqm, fp, vartype_header=False):
    """Dump a binary quadratic model to a string in COOrdinate format.

    Writes one line per (i, j, bias) triplet to the file-like object *fp*.
    """
    for entry in _iter_triplets(bqm, vartype_header):
        fp.write('%s\n' % entry)
def _get_callable(obj, of_class=None):
    """Get callable for an object and its full name.

    Supports functions, classes (jumps to ``__init__()``), methods,
    ``@classmethod`` and ``@property``.

    :param obj: function | class
    :param of_class: Class that this method is a member of, if known
    :returns: (qualified name, callable, owning class or None)
    """
    target = obj
    # For classes, document/inspect the constructor instead of the class itself.
    if inspect.isclass(obj):
        try:
            target = obj.__init__
            of_class = obj
        except AttributeError:
            pass
    return qualname(obj), target, of_class
def __grabHotkey ( self , key , modifiers , window ) : """Grab a specific hotkey in the given window"""
logger . debug ( "Grabbing hotkey: %r %r" , modifiers , key ) try : keycode = self . __lookupKeyCode ( key ) mask = 0 for mod in modifiers : mask |= self . modMasks [ mod ] window . grab_key ( keycode , mask , True , X . GrabModeAsync , X . GrabModeAsync ) if Key . NUMLOCK in self . modMasks...
def write_f90 ( self ) : """Writes the F90 module file to the specified directory ."""
from os import path self . _check_dir ( ) # Find the list of executables that we actually need to write wrappers for . self . _find_executables ( ) lines = [ ] lines . append ( "!!<summary>Auto-generated Fortran module for interaction with ctypes\n" "!!through python. Generated for module {}.</summary>" . format ( self...
def format_item ( item , py = True ) : """: param py : python format or not"""
# for non python format , just output itself . # so the result is ` something ` instead of ` " something " ` if not py : return unicode ( item ) if isinstance ( item , ( str , unicode ) ) : # long int is prefixed by a # if item . startswith ( '#' ) : return unicode ( long ( item [ 1 : ] ) ) return u...
def send ( self , request , ** kwargs ) : """Send a given PreparedRequest ."""
# Set defaults that the hooks can utilize to ensure they always have # the correct parameters to reproduce the previous request . kwargs . setdefault ( 'stream' , self . stream ) kwargs . setdefault ( 'verify' , self . verify ) kwargs . setdefault ( 'cert' , self . cert ) kwargs . setdefault ( 'proxies' , self . proxie...
def depth(self):
    """Compute the depth of the tree (depth of a leaf = 0)."""
    def deeper(node, left_depth, right_depth):
        # Internal node: one level above its deepest child.
        return max(left_depth, right_depth) + 1
    return self.fold_up(deeper, lambda leaf: 0)
def open ( self , name , mode = 'r' , compression = None ) : """Open a file pointer . Note that a file is * always * opened in text mode . The method inherits its input parameters from the constructor of : class : ` FileObject ` ."""
if compression == 'use_ext' : self . get_compression_type ( name ) else : self . ctype = compression if not self . ctype : self . fp = open ( name , mode ) elif self . ctype == 'gzip' : self . fp = gzip . open ( name , mode + 't' ) elif self . ctype == 'bzip2' : try : # Python 3 supports opening bzi...
def _check_deprecated ( self , dest , kwargs ) : """Checks option for deprecation and issues a warning / error if necessary ."""
removal_version = kwargs . get ( 'removal_version' , None ) if removal_version is not None : warn_or_error ( removal_version = removal_version , deprecated_entity_description = "option '{}' in {}" . format ( dest , self . _scope_str ( ) ) , deprecation_start_version = kwargs . get ( 'deprecation_start_version' , No...
async def digital_write ( self , pin , value ) : """Set the specified pin to the specified value . : param pin : pin number : param value : pin value : returns : No return value"""
# The command value is not a fixed value , but needs to be calculated # using the pin ' s port number port = pin // 8 calculated_command = PrivateConstants . DIGITAL_MESSAGE + port mask = 1 << ( pin % 8 ) # Calculate the value for the pin ' s position in the port mask if value == 1 : PrivateConstants . DIGITAL_OUTP...
def byte(self):
    """Return a byte representation of ControlFlags.

    Bit layout: 7 = in_use, 6 = controller, 5 = bit5, 4 = bit4,
    1 = used_before; bits 3, 2 and 0 are always clear.
    """
    flags = (
        (int(self._in_use) << 7)
        | (int(self._controller) << 6)
        | (int(self._bit5) << 5)
        | (int(self._bit4) << 4)
        | (int(self._used_before) << 1)
    )
    return flags
def parse_pr_numbers(git_log_lines):
    """Parse PR numbers from commit messages.

    At GitHub those have the format ``here is the message (#1234)``,
    ``1234`` being the PR number. Lines with no PR number are skipped.
    """
    candidates = (parse_pr_number(line) for line in git_log_lines)
    return [number for number in candidates if number]
def listDataTiers ( self , data_tier_name = "" ) : """API to list data tiers known to DBS . : param data _ tier _ name : List details on that data tier ( Optional ) : type data _ tier _ name : str : returns : List of dictionaries containing the following keys ( data _ tier _ id , data _ tier _ name , create _...
data_tier_name = data_tier_name . replace ( "*" , "%" ) try : conn = self . dbi . connection ( ) return self . dbsDataTierListDAO . execute ( conn , data_tier_name . upper ( ) ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . message ) except Va...
def list_certs(self, filters=None):
    """Retrieve loaded certificates.

    :param filters: retrieve only matching certificates (optional)
    :type filters: dict
    :return: list of installed trap, drop and bypass policies
    :rtype: list
    """
    _status, certificates = self.handler.streamed_request(
        "list-certs", "list-cert", filters)
    return certificates
def map2matrix ( data_map , layout ) : r"""Map to Matrix This method transforms a 2D map to a 2D matrix Parameters data _ map : np . ndarray Input data map , 2D array layout : tuple 2D layout of 2D images Returns np . ndarray 2D matrix Raises ValueError For invalid layout Examples > > > fr...
layout = np . array ( layout ) # Select n objects n_obj = np . prod ( layout ) # Get the shape of the images image_shape = ( np . array ( data_map . shape ) // layout ) [ 0 ] # Stack objects from map data_matrix = [ ] for i in range ( n_obj ) : lower = ( image_shape * ( i // layout [ 1 ] ) , image_shape * ( i % lay...
def _py_func_with_gradient ( func , inp , Tout , stateful = True , name = None , grad_func = None ) : """PyFunc defined as given by Tensorflow : param func : Custom Function : param inp : Function Inputs : param Tout : Ouput Type of out Custom Function : param stateful : Calculate Gradients when stateful is...
# Generate random name in order to avoid conflicts with inbuilt names rnd_name = 'PyFuncGrad-' + '%0x' % getrandbits ( 30 * 4 ) # Register Tensorflow Gradient tf . RegisterGradient ( rnd_name ) ( grad_func ) # Get current graph g = tf . get_default_graph ( ) # Add gradient override map with g . gradient_override_map ( ...
def list_parse ( name_list ) : """Parse a comma - separated list of values , or a filename ( starting with @ ) containing a list value on each line ."""
if name_list and name_list [ 0 ] == '@' : value = name_list [ 1 : ] if not os . path . exists ( value ) : log . warning ( 'The file %s does not exist' % value ) return try : return [ v . strip ( ) for v in open ( value , 'r' ) . readlines ( ) ] except IOError as e : log ....
def get_pairs(self, format_string):
    """Tokenize a logging format string and extract field names from tokens.

    :param format_string: The logging format string.
    :returns: A generator of :class:`FormatStringToken` objects.
    """
    for text in self.get_tokens(format_string):
        found = self.name_pattern.search(text)
        if found:
            yield FormatStringToken(name=found.group(1), text=text)
        else:
            yield FormatStringToken(name=None, text=text)
def total_memory ( self , image = 'ubuntu' ) : '''Get the available ram fo the docker machine in Kb'''
try : ret = subprocess . check_output ( f'''docker run -t {image} cat /proc/meminfo | grep MemTotal''' , shell = True , stdin = subprocess . DEVNULL ) # ret : MemTotal : 30208916 kB self . tot_mem = int ( ret . split ( ) [ 1 ] ) except Exception : # some system does not have cat or grep self . tot_mem ...
def import_components_from_dataframe ( network , dataframe , cls_name ) : """Import components from a pandas DataFrame . If columns are missing then defaults are used . If extra columns are added , these are left in the resulting component dataframe . Parameters dataframe : pandas . DataFrame cls _ name :...
if cls_name == "Generator" and "source" in dataframe . columns : logger . warning ( "'source' for generators is deprecated, use 'carrier' instead." ) if cls_name == "Generator" and "dispatch" in dataframe . columns : logger . warning ( "'dispatch' for generators is deprecated, use time-varing 'p_max_pu' for 'va...
def get_neighborhood_in_mask ( image , mask , radius , physical_coordinates = False , boundary_condition = None , spatial_info = False , get_gradient = False ) : """Get neighborhoods for voxels within mask . This converts a scalar image to a matrix with rows that contain neighbors around a center voxel ANTsR ...
if not isinstance ( image , iio . ANTsImage ) : raise ValueError ( 'image must be ANTsImage type' ) if not isinstance ( mask , iio . ANTsImage ) : raise ValueError ( 'mask must be ANTsImage type' ) if isinstance ( radius , ( int , float ) ) : radius = [ radius ] * image . dimension if ( not isinstance ( rad...
def selected_subcategory(self):
    """Obtain the subcategory selected by user.

    :returns: Metadata of the selected subcategory, or None when
        nothing is selected.
    :rtype: dict, None
    """
    current = self.lstSubcategories.currentItem()
    try:
        return definition(current.data(QtCore.Qt.UserRole))
    except (AttributeError, NameError):
        # No current item (current is None) or definition unavailable.
        return None
def to_gremlin ( self ) : """Return a unicode object with the Gremlin representation of this expression ."""
self . validate ( ) edge_direction , edge_name = self . fold_scope_location . get_first_folded_edge ( ) validate_safe_string ( edge_name ) inverse_direction_table = { 'out' : 'in' , 'in' : 'out' , } inverse_direction = inverse_direction_table [ edge_direction ] base_location_name , _ = self . fold_scope_location . base...
def zset(self, name, key, score=1):
    """Set the score of ``key`` in the zset ``name`` to ``score``.

    Like **Redis.ZADD**.

    :param string name: the zset name
    :param string key: the key name
    :param int score: the score for ranking
    :return: ``True`` if the zset was created
    """
    validated_score = get_integer('score', score)
    return self.execute_command('zset', name, key, validated_score)
def populate_observable ( self , time , kind , dataset , ** kwargs ) : """TODO : add documentation"""
if kind in [ 'mesh' , 'orb' ] : return if time == self . time and dataset in self . populated_at_time and 'pblum' not in kind : # then we ' ve already computed the needed columns # TODO : handle the case of intensities already computed by # / different / dataset ( ie RVs computed first and filling intensities # and...
def var(self):
    """Compute the variance across images (axis 0), keeping dimensions."""
    variance = self.values.var(axis=0, keepdims=True)
    return self._constructor(variance)
def get_billing_report_active_devices ( self , month , ** kwargs ) : # noqa : E501 """Get raw billing data of the active devices for the month . # noqa : E501 Fetch the raw billing data of the active devices for the currently authenticated commercial non - subtenant account . This is supplementary data for the bi...
kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'asynchronous' ) : return self . get_billing_report_active_devices_with_http_info ( month , ** kwargs ) # noqa : E501 else : ( data ) = self . get_billing_report_active_devices_with_http_info ( month , ** kwargs ) # noqa : E501 return data
def filter_reads ( self , input_bam , output_bam , metrics_file , paired = False , cpus = 16 , Q = 30 ) : """Remove duplicates , filter for > Q , remove multiple mapping reads . For paired - end reads , keep only proper pairs ."""
nodups = re . sub ( "\.bam$" , "" , output_bam ) + ".nodups.nofilter.bam" cmd1 = self . tools . sambamba + " markdup -t {0} -r --compression-level=0 {1} {2} 2> {3}" . format ( cpus , input_bam , nodups , metrics_file ) cmd2 = self . tools . sambamba + ' view -t {0} -f bam --valid' . format ( cpus ) if paired : cmd2...
def run ( self ) : """Begins simultaneous generation / acquisition : returns : numpy . ndarray - - read samples"""
try : if self . aotask is None : print u"You must arm the calibration first" return # acquire data and stop task , lock must have been release by # previous reset self . daq_lock . acquire ( ) self . aotask . StartTask ( ) self . aitask . StartTask ( ) # blocking read dat...
def geoadd(self, key, longitude, latitude, member, *args, **kwargs):
    """Add one or more geospatial items to the geospatial index
    represented using a sorted set.

    :rtype: int
    """
    positional = (key, longitude, latitude, member) + args
    return self.execute(b'GEOADD', *positional, **kwargs)
def source ( inp , features = None , top = None , chunksize = None , ** kw ) : r"""Defines trajectory data source This function defines input trajectories without loading them . You can pass the resulting object into transformers such as : func : ` pyemma . coordinates . tica ` or clustering algorithms such a...
from pyemma . coordinates . data . _base . iterable import Iterable from pyemma . coordinates . data . util . reader_utils import create_file_reader from pyemma . util . reflection import get_default_args cs = _check_old_chunksize_arg ( chunksize , get_default_args ( source ) [ 'chunksize' ] , ** kw ) # CASE 1 : input ...