idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
100
def set_font_size ( self , pt = None , px = None ) : self . font . set_size ( pt , px ) self . _render ( )
Set the font size to the desired size in pt or px .
101
def cursor ( self ) : if self . _cursor < 0 : self . cursor = 0 if self . _cursor > len ( self ) : self . cursor = len ( self ) return self . _cursor
The position of the cursor in the text .
102
def delete_one_letter ( self , letter = RIGHT ) : assert letter in ( self . RIGHT , self . LEFT ) if letter == self . LEFT : papy = self . cursor self . text = self . text [ : self . cursor - 1 ] + self . text [ self . cursor : ] self . cursor = papy - 1 else : self . text = self . text [ : self . cursor ] + self . tex...
Delete one letter the right or the the left of the cursor .
103
def delete_one_word ( self , word = RIGHT ) : assert word in ( self . RIGHT , self . LEFT ) if word == self . RIGHT : papy = self . text . find ( ' ' , self . cursor ) + 1 if not papy : papy = len ( self . text ) self . text = self . text [ : self . cursor ] + self . text [ papy : ] else : papy = self . text . rfind ( ...
Delete one word the right or the the left of the cursor .
104
def add_letter ( self , letter ) : assert isinstance ( letter , str ) assert len ( letter ) == 1 self . text = self . text [ : self . cursor ] + letter + self . text [ self . cursor : ] self . cursor += 1
Add a letter at the cursor pos .
105
def update ( self , event_or_list ) : event_or_list = super ( ) . update ( event_or_list ) for e in event_or_list : if e . type == KEYDOWN : if e . key == K_RIGHT : if e . mod * KMOD_CTRL : self . move_cursor_one_word ( self . RIGHT ) else : self . move_cursor_one_letter ( self . RIGHT ) elif e . key == K_LEFT : if e ....
Update the text and position of cursor according to the event passed .
106
def shawn_text ( self ) : if len ( self . _shawn_text ) == len ( self ) : return self . _shawn_text if self . style == self . DOTS : return chr ( 0x2022 ) * len ( self ) ranges = [ ( 902 , 1366 ) , ( 192 , 683 ) , ( 33 , 122 ) ] s = '' while len ( s ) < len ( self . text ) : apolo = randint ( 33 , 1366 ) for a , b in r...
The text displayed instead of the real one .
107
def cursor_pos ( self ) : if len ( self ) == 0 : return self . left + self . default_text . get_width ( ) papy = self . _surface . get_width ( ) if papy > self . w : shift = papy - self . width else : shift = 0 return self . left + self . font . size ( self . shawn_text [ : self . cursor ] ) [ 0 ] - shift
The cursor position in pixels .
108
def latex_to_img ( tex ) : with tempfile . TemporaryDirectory ( ) as tmpdirname : with open ( tmpdirname + r'\tex.tex' , 'w' ) as f : f . write ( tex ) os . system ( r"latex {0}\tex.tex -halt-on-error -interaction=batchmode -disable-installer -aux-directory={0} " r"-output-directory={0}" . format ( tmpdirname ) ) os . ...
Return a pygame image from a latex template .
109
def name2rgb ( name ) : try : import colour except ImportError : raise ImportError ( 'You need colour to be installed: pip install colour' ) c = colour . Color ( name ) color = int ( c . red * 255 ) , int ( c . green * 255 ) , int ( c . blue * 255 ) return color
Convert the name of a color into its RGB value
110
def parse_page ( page ) : colors = get_config ( ) [ 'colors' ] with io . open ( page , encoding = 'utf-8' ) as f : lines = f . readlines ( ) output_lines = [ ] for line in lines [ 1 : ] : if is_headline ( line ) : continue elif is_description ( line ) : output_lines . append ( click . style ( line . replace ( '>' , ' '...
Parse the command man page .
111
def configure_logging ( level = logging . DEBUG ) : if level == logging . DEBUG : logging . basicConfig ( level = logging . DEBUG , format = '%(asctime)s - %(levelname)s - %(message)s' ) return logging logger = logging . getLogger ( __name__ ) logger . setLevel ( level ) formatter = logging . Formatter ( '%(asctime)s -...
Configure the module logging engine .
112
def parse_username_password_hostname ( remote_url ) : assert remote_url assert ':' in remote_url if '@' in remote_url : username , hostname = remote_url . rsplit ( '@' , 1 ) else : username , hostname = None , remote_url hostname , remote_path = hostname . split ( ':' , 1 ) password = None if username and ':' in userna...
Parse a command line string and return username password remote hostname and remote path .
113
def get_ssh_agent_keys ( logger ) : agent , agent_keys = None , None try : agent = paramiko . agent . Agent ( ) _agent_keys = agent . get_keys ( ) if not _agent_keys : agent . close ( ) logger . error ( "SSH agent didn't provide any valid key. Trying to continue..." ) else : agent_keys = tuple ( k for k in _agent_keys ...
Ask the SSH agent for a list of keys and return it .
114
def create_parser ( ) : parser = argparse . ArgumentParser ( description = 'Sync a local and a remote folder through SFTP.' ) parser . add_argument ( "path" , type = str , metavar = "local-path" , help = "the path of the local folder" , ) parser . add_argument ( "remote" , type = str , metavar = "user[:password]@hostna...
Create the CLI argument parser .
115
def main ( args = None ) : parser = create_parser ( ) args = vars ( parser . parse_args ( args ) ) log_mapping = { 'CRITICAL' : logging . CRITICAL , 'ERROR' : logging . ERROR , 'WARNING' : logging . WARNING , 'INFO' : logging . INFO , 'DEBUG' : logging . DEBUG , 'NOTSET' : logging . NOTSET , } log_level = log_mapping [...
The main .
116
def _must_be_deleted ( local_path , r_st ) : if not os . path . lexists ( local_path ) : return True l_st = os . lstat ( local_path ) if S_IFMT ( r_st . st_mode ) != S_IFMT ( l_st . st_mode ) : return True return False
Return True if the remote correspondent of local_path has to be deleted .
117
def file_upload ( self , local_path , remote_path , l_st ) : self . sftp . put ( local_path , remote_path ) self . _match_modes ( remote_path , l_st )
Upload local_path to remote_path and set permission and mtime .
118
def remote_delete ( self , remote_path , r_st ) : if S_ISDIR ( r_st . st_mode ) : for item in self . sftp . listdir_attr ( remote_path ) : full_path = path_join ( remote_path , item . filename ) self . remote_delete ( full_path , item ) self . sftp . rmdir ( remote_path ) else : try : self . sftp . remove ( remote_path...
Remove the remote directory node .
119
def check_for_deletion ( self , relative_path = None ) : if not relative_path : relative_path = str ( ) remote_path = path_join ( self . remote_path , relative_path ) local_path = path_join ( self . local_path , relative_path ) for remote_st in self . sftp . listdir_attr ( remote_path ) : r_lstat = self . sftp . lstat ...
Traverse the entire remote_path tree .
120
def create_update_symlink ( self , link_destination , remote_path ) : try : self . sftp . remove ( remote_path ) except IOError : pass finally : try : self . sftp . symlink ( link_destination , remote_path ) except OSError as e : self . logger . error ( "error while symlinking {} to {}: {}" . format ( remote_path , lin...
Create a new link pointing to link_destination in remote_path position .
121
def run ( self ) : try : self . sftp . stat ( self . remote_path ) except FileNotFoundError as e : if self . create_remote_directory : self . sftp . mkdir ( self . remote_path ) self . logger . info ( "Created missing remote dir: '" + self . remote_path + "'" ) else : self . logger . error ( "Remote folder does not exi...
Run the sync .
122
def list_files ( start_path ) : s = u'\n' for root , dirs , files in os . walk ( start_path ) : level = root . replace ( start_path , '' ) . count ( os . sep ) indent = ' ' * 4 * level s += u'{}{}/\n' . format ( indent , os . path . basename ( root ) ) sub_indent = ' ' * 4 * ( level + 1 ) for f in files : s += u'{}{}\n...
tree unix command replacement .
123
def file_tree ( start_path ) : nested_dirs = { } root_dir = start_path . rstrip ( os . sep ) start = root_dir . rfind ( os . sep ) + 1 for path , dirs , files in os . walk ( root_dir ) : folders = path [ start : ] . split ( os . sep ) subdir = dict . fromkeys ( files ) parent = reduce ( dict . get , folders [ : - 1 ] ,...
Create a nested dictionary that represents the folder structure of start_path .
124
def capture_sys_output ( ) : capture_out , capture_err = StringIO ( ) , StringIO ( ) current_out , current_err = sys . stdout , sys . stderr try : sys . stdout , sys . stderr = capture_out , capture_err yield capture_out , capture_err finally : sys . stdout , sys . stderr = current_out , current_err
Capture standard output and error .
125
def suppress_logging ( log_level = logging . CRITICAL ) : logging . disable ( log_level ) yield logging . disable ( logging . NOTSET )
Suppress logging .
126
def override_env_variables ( ) : env_vars = ( "LOGNAME" , "USER" , "LNAME" , "USERNAME" ) old = [ os . environ [ v ] if v in os . environ else None for v in env_vars ] for v in env_vars : os . environ [ v ] = "test" yield for i , v in enumerate ( env_vars ) : if old [ i ] : os . environ [ v ] = old [ i ]
Override user environmental variables with custom one .
127
def get_config ( ) : config_path = path . join ( ( os . environ . get ( 'TLDR_CONFIG_DIR' ) or path . expanduser ( '~' ) ) , '.tldrrc' ) if not path . exists ( config_path ) : sys . exit ( "Can't find config file at: {0}. You may use `tldr init` " "to init the config file." . format ( config_path ) ) with io . open ( c...
Get the configurations from . tldrrc and return it as a dict .
128
def parse_man_page ( command , platform ) : page_path = find_page_location ( command , platform ) output_lines = parse_page ( page_path ) return output_lines
Parse the man page and return the parsed lines .
129
def find_page_location ( command , specified_platform ) : repo_directory = get_config ( ) [ 'repo_directory' ] default_platform = get_config ( ) [ 'platform' ] command_platform = ( specified_platform if specified_platform else default_platform ) with io . open ( path . join ( repo_directory , 'pages/index.json' ) , enc...
Find the command man page in the pages directory .
130
def find ( command , on ) : output_lines = parse_man_page ( command , on ) click . echo ( '' . join ( output_lines ) )
Find the command usage .
131
def update ( ) : repo_directory = get_config ( ) [ 'repo_directory' ] os . chdir ( repo_directory ) click . echo ( "Check for updates..." ) local = subprocess . check_output ( 'git rev-parse master' . split ( ) ) . strip ( ) remote = subprocess . check_output ( 'git ls-remote https://github.com/tldr-pages/tldr/ HEAD' ....
Update to the latest pages .
132
def init ( ) : default_config_path = path . join ( ( os . environ . get ( 'TLDR_CONFIG_DIR' ) or path . expanduser ( '~' ) ) , '.tldrrc' ) if path . exists ( default_config_path ) : click . echo ( "There is already a config file exists, " "skip initializing it." ) else : repo_path = click . prompt ( "Input the tldr rep...
Init config file .
133
def locate ( command , on ) : location = find_page_location ( command , on ) click . echo ( location )
Locate the command s man page .
134
def map_to ( self , attrname , tablename = None , selectable = None , schema = None , base = None , mapper_args = util . immutabledict ( ) ) : if attrname in self . _cache : raise SQLSoupError ( "Attribute '%s' is already mapped to '%s'" % ( attrname , class_mapper ( self . _cache [ attrname ] ) . mapped_table ) ) if t...
Configure a mapping to the given attrname .
135
def map ( self , selectable , base = None , ** mapper_args ) : return _class_for_table ( self . session , self . engine , selectable , base or self . base , mapper_args )
Map a selectable directly .
136
def with_labels ( self , selectable , base = None , ** mapper_args ) : return self . map ( expression . _clause_element_as_expr ( selectable ) . select ( use_labels = True ) . alias ( 'foo' ) , base = base , ** mapper_args )
Map a selectable directly wrapping the selectable in a subquery with labels .
137
def left ( self , f , n = 1 ) : intervals = self . intervals [ f . chrom ] if intervals == [ ] : return [ ] iright = binsearch_left_start ( intervals , f . start , 0 , len ( intervals ) ) + 1 ileft = binsearch_left_start ( intervals , f . start - self . max_len [ f . chrom ] - 1 , 0 , 0 ) results = sorted ( ( distance ...
return the nearest n features strictly to the left of a Feature f . Overlapping features are not considered as to the left .
138
def right ( self , f , n = 1 ) : intervals = self . intervals [ f . chrom ] ilen = len ( intervals ) iright = binsearch_right_end ( intervals , f . end , 0 , ilen ) results = [ ] while iright < ilen : i = len ( results ) if i > n : if distance ( f , results [ i - 1 ] ) != distance ( f , results [ i - 2 ] ) : return res...
return the nearest n features strictly to the right of a Feature f . Overlapping features are not considered as to the right .
139
def upstream ( self , f , n = 1 ) : if f . strand == - 1 : return self . right ( f , n ) return self . left ( f , n )
find n upstream features where upstream is determined by the strand of the query Feature f Overlapping features are not considered .
140
def downstream ( self , f , n = 1 ) : if f . strand == - 1 : return self . left ( f , n ) return self . right ( f , n )
find n downstream features where downstream is determined by the strand of the query Feature f Overlapping features are not considered .
141
def sequence ( db , chrom , start , end ) : url = "http://genome.ucsc.edu/cgi-bin/das/%s" % db url += "/dna?segment=%s:%i,%i" xml = U . urlopen ( url % ( chrom , start , end ) ) . read ( ) return _seq_from_xml ( xml )
return the sequence for a region using the UCSC DAS server . note the start is 1 - based each feature will have it s own . sequence method which sends the correct start and end to this function .
142
def set_table ( genome , table , table_name , connection_string , metadata ) : table = Table ( table_name , genome . _metadata , autoload = True , autoload_with = genome . bind , extend_existing = True ) for i , idx in enumerate ( table . indexes ) : idx . name = table_name + "." + idx . name + "_ix" + str ( i ) cols =...
alter the table to work between different dialects
143
def mirror ( self , tables , dest_url ) : from mirror import mirror return mirror ( self , tables , dest_url )
miror a set of tables from dest_url
144
def dataframe ( self , table ) : from pandas import DataFrame if isinstance ( table , six . string_types ) : table = getattr ( self , table ) try : rec = table . first ( ) except AttributeError : rec = table [ 0 ] if hasattr ( table , "all" ) : records = table . all ( ) else : records = [ tuple ( t ) for t in table ] c...
create a pandas dataframe from a table or query
145
def david_go ( refseq_list , annot = ( 'SP_PIR_KEYWORDS' , 'GOTERM_BP_FAT' , 'GOTERM_CC_FAT' , 'GOTERM_MF_FAT' ) ) : URL = "http://david.abcc.ncifcrf.gov/api.jsp?type=REFSEQ_MRNA&ids=%s&tool=term2term&annot=" import webbrowser webbrowser . open ( URL % "," . join ( set ( refseq_list ) ) + "," . join ( annot ) )
open a web - browser to the DAVID online enrichment tool
146
def bin_query ( self , table , chrom , start , end ) : if isinstance ( table , six . string_types ) : table = getattr ( self , table ) try : tbl = table . _table except AttributeError : tbl = table . column_descriptions [ 0 ] [ 'type' ] . _table q = table . filter ( tbl . c . chrom == chrom ) if hasattr ( tbl . c , "bi...
perform an efficient spatial query using the bin column if available . The possible bins are calculated from the start and end sent to this function .
147
def upstream ( self , table , chrom_or_feat , start = None , end = None , k = 1 ) : res = self . knearest ( table , chrom_or_feat , start , end , k , "up" ) end = getattr ( chrom_or_feat , "end" , end ) start = getattr ( chrom_or_feat , "start" , start ) rev = getattr ( chrom_or_feat , "strand" , "+" ) == "-" if rev : ...
Return k - nearest upstream features
148
def knearest ( self , table , chrom_or_feat , start = None , end = None , k = 1 , _direction = None ) : assert _direction in ( None , "up" , "down" ) if start is None : assert end is None chrom , start , end = chrom_or_feat . chrom , chrom_or_feat . start , chrom_or_feat . end if _direction in ( "up" , "down" ) and get...
Return k - nearest features
149
def annotate ( self , fname , tables , feature_strand = False , in_memory = False , header = None , out = sys . stdout , parallel = False ) : from . annotate import annotate return annotate ( self , fname , tables , feature_strand , in_memory , header = header , out = out , parallel = parallel )
annotate a file with a number of tables
150
def bins ( start , end ) : if end - start < 536870912 : offsets = [ 585 , 73 , 9 , 1 ] else : raise BigException offsets = [ 4681 , 585 , 73 , 9 , 1 ] binFirstShift = 17 binNextShift = 3 start = start >> binFirstShift end = ( end - 1 ) >> binFirstShift bins = [ 1 ] for offset in offsets : bins . extend ( range ( offset...
Get all the bin numbers for a particular interval defined by ( start end ]
151
def _find_filepath_in_roots ( filename ) : for root in settings . DJANGO_STATIC_MEDIA_ROOTS : filepath = _filename2filepath ( filename , root ) if os . path . isfile ( filepath ) : return filepath , root if settings . DEBUG : try : from django . contrib . staticfiles import finders absolute_path = finders . find ( file...
Look for filename in all MEDIA_ROOTS and return the first one found .
152
def default_combine_filenames_generator ( filenames , max_length = 40 ) : path = None names = [ ] extension = None timestamps = [ ] for filename in filenames : name = os . path . basename ( filename ) if not extension : extension = os . path . splitext ( name ) [ 1 ] elif os . path . splitext ( name ) [ 1 ] != extensio...
Return a new filename to use as the combined file name for a bunch of files . A precondition is that they all have the same file extension
153
def overlaps ( self , other ) : if self . chrom != other . chrom : return False if self . start >= other . end : return False if other . start >= self . end : return False return True
check for overlap with the other interval
154
def is_upstream_of ( self , other ) : if self . chrom != other . chrom : return None if getattr ( other , "strand" , None ) == "+" : return self . end <= other . start return self . start >= other . end
check if this is upstream of the other interval taking the strand of the other interval into account
155
def gene_features ( self ) : nm , strand = self . gene_name , self . strand feats = [ ( self . chrom , self . start , self . end , nm , strand , 'gene' ) ] for feat in ( 'introns' , 'exons' , 'utr5' , 'utr3' , 'cdss' ) : fname = feat [ : - 1 ] if feat [ - 1 ] == 's' else feat res = getattr ( self , feat ) if res is Non...
return a list of features for the gene features of this object . This would include exons introns utrs etc .
156
def tss ( self , up = 0 , down = 0 ) : if not self . is_gene_pred : return None tss = self . txEnd if self . strand == '-' else self . txStart start , end = tss , tss if self . strand == '+' : start -= up end += down else : start += up end -= down start , end = end , start return max ( 0 , start ) , max ( end , start ,...
Return a start end tuple of positions around the transcription - start site
157
def promoter ( self , up = 2000 , down = 0 ) : if not self . is_gene_pred : return None return self . tss ( up = up , down = down )
Return a start end tuple of positions for the promoter region of this gene
158
def cds ( self ) : ces = self . coding_exons if len ( ces ) < 1 : return ces ces [ 0 ] = ( self . cdsStart , ces [ 0 ] [ 1 ] ) ces [ - 1 ] = ( ces [ - 1 ] [ 0 ] , self . cdsEnd ) assert all ( ( s < e for s , e in ces ) ) return ces
just the parts of the exons that are translated
159
def is_downstream_of ( self , other ) : if self . chrom != other . chrom : return None if getattr ( other , "strand" , None ) == "-" : return self . end <= other . start return self . start >= other . end
return a boolean indicating whether this feature is downstream of other taking the strand of other into account
160
def utr5 ( self ) : if not self . is_coding or len ( self . exons ) < 2 : return ( None , None ) if self . strand == "+" : s , e = ( self . txStart , self . cdsStart ) else : s , e = ( self . cdsEnd , self . txEnd ) if s == e : return ( None , None ) return s , e
return the 5 UTR if appropriate
161
def sequence ( self , per_exon = False ) : db = self . db if not per_exon : start = self . txStart + 1 return _sequence ( db , self . chrom , start , self . txEnd ) else : seqs = [ ] for start , end in self . exons : seqs . append ( _sequence ( db , self . chrom , start + 1 , end ) ) return seqs
Return the sequence for this feature . if per - exon is True return an array of exon sequences This sequence is never reverse complemented
162
def ncbi_blast ( self , db = "nr" , megablast = True , sequence = None ) : import requests requests . defaults . max_retries = 4 assert sequence in ( None , "cds" , "mrna" ) seq = self . sequence ( ) if sequence is None else ( "" . join ( self . cds_sequence if sequence == "cds" else self . mrna_sequence ) ) r = reques...
perform an NCBI blast against the sequence of this feature
163
def blat ( self , db = None , sequence = None , seq_type = "DNA" ) : from . blat_blast import blat , blat_all assert sequence in ( None , "cds" , "mrna" ) seq = self . sequence ( ) if sequence is None else ( "" . join ( self . cds_sequence if sequence == "cds" else self . mrna_sequence ) ) if isinstance ( db , ( tuple ...
make a request to the genome - browsers BLAT interface sequence is one of None mrna cds returns a list of features that are hits to this sequence .
164
def bed ( self , * attrs , ** kwargs ) : exclude = ( "chrom" , "start" , "end" , "txStart" , "txEnd" , "chromStart" , "chromEnd" ) if self . is_gene_pred : return self . bed12 ( ** kwargs ) return "\t" . join ( map ( str , ( [ self . chrom , self . start , self . end ] + [ getattr ( self , attr ) for attr in attrs if n...
return a bed formatted string of this feature
165
def dereference_url ( url ) : res = open_url ( url , method = 'HEAD' ) res . close ( ) return res . url
Makes a HEAD request to find the final destination of a URL after following any redirects
166
def read ( url , ** kwargs ) : response = open_url ( url , ** kwargs ) try : return response . read ( ) finally : response . close ( )
Read the contents of a URL into memory return
167
def check_extracted_paths ( namelist , subdir = None ) : def relpath ( p ) : q = os . path . relpath ( p ) if p . endswith ( os . path . sep ) or p . endswith ( '/' ) : q += os . path . sep return q parent = os . path . abspath ( '.' ) if subdir : if os . path . isabs ( subdir ) : raise FileException ( 'subdir must be ...
Check whether zip file paths are all relative and optionally in a specified subdirectory raises an exception if not
168
def get_as_local_path ( path , overwrite , progress = 0 , httpuser = None , httppassword = None ) : m = re . match ( '([A-Za-z]+)://' , path ) if m : log . debug ( 'Detected URL protocol: %s' , m . group ( 1 ) ) localpath = path . split ( '/' ) [ - 1 ] if not localpath : raise FileException ( 'Remote path appears to be...
Automatically handle local and remote URLs files and directories
169
def create ( fs , channels , application ) : result_code = ctypes . c_int ( ) result = _create ( fs , channels , application , ctypes . byref ( result_code ) ) if result_code . value is not constants . OK : raise OpusError ( result_code . value ) return result
Allocates and initializes an encoder state .
170
def encode ( encoder , pcm , frame_size , max_data_bytes ) : pcm = ctypes . cast ( pcm , c_int16_pointer ) data = ( ctypes . c_char * max_data_bytes ) ( ) result = _encode ( encoder , pcm , frame_size , data , max_data_bytes ) if result < 0 : raise OpusError ( result ) return array . array ( 'c' , data [ : result ] ) ....
Encodes an Opus frame
171
def encode_float ( encoder , pcm , frame_size , max_data_bytes ) : pcm = ctypes . cast ( pcm , c_float_pointer ) data = ( ctypes . c_char * max_data_bytes ) ( ) result = _encode_float ( encoder , pcm , frame_size , data , max_data_bytes ) if result < 0 : raise OpusError ( result ) return array . array ( 'c' , data [ : ...
Encodes an Opus frame from floating point input
172
def __parse_tostr ( self , text , ** kwargs ) : n = self . options . get ( 'nbest' , 1 ) if self . _KW_BOUNDARY in kwargs : patt = kwargs . get ( self . _KW_BOUNDARY , '.' ) tokens = list ( self . __split_pattern ( text , patt ) ) text = '' . join ( [ t [ 0 ] for t in tokens ] ) btext = self . __str2bytes ( text ) self...
Builds and returns the MeCab function for parsing Unicode text .
173
def parse ( self , text , ** kwargs ) : if text is None : logger . error ( self . _ERROR_EMPTY_STR ) raise MeCabError ( self . _ERROR_EMPTY_STR ) elif not isinstance ( text , str ) : logger . error ( self . _ERROR_NOTSTR ) raise MeCabError ( self . _ERROR_NOTSTR ) elif 'partial' in self . options and not text . endswit...
Parse the given text and return result from MeCab .
174
def generate ( tagGroups , terms ) : rv = [ ] for pid in tagGroups : if pid not in terms . keys ( ) : continue groupData = terms [ pid ] groupName = "[%s] %s" % ( pid , groupData [ 'name' ] ) groupDesc = groupData [ 'desc' ] children = [ ] group = dict ( name = groupName , desc = groupDesc , set = children ) rv . appen...
create Tag Groups and Child Tags using data from terms dict
175
def _handle_args ( self , cmd , args ) : if cmd == 'install' : if args . upgrade : if args . initdb or args . upgradedb : raise Stop ( 10 , ( 'Deprecated --initdb --upgradedb flags ' 'are incompatible with --upgrade' ) ) newinstall = None else : newinstall = True if args . managedb : if args . initdb or args . upgraded...
We need to support deprecated behaviour for now which makes this quite complicated
176
def handle_database ( self ) : if self . args . initdb or self . args . upgradedb : db = DbAdmin ( self . dir , None , self . args , self . external ) status = db . check ( ) log . debug ( 'OMERO database upgrade status: %s' , status ) else : log . warn ( 'OMERO database check disabled' ) return DB_INIT_NEEDED if statu...
Handle database initialisation and upgrade taking into account command line arguments
177
def run ( self , command ) : if isinstance ( command , basestring ) : command = command . split ( ) else : command = list ( command ) self . external . omero_cli ( command )
Runs a command as if from the command - line without the need for using popen or subprocess
178
def sort_schemas ( schemas ) : def keyfun ( v ) : x = SQL_SCHEMA_REGEXP . match ( v ) . groups ( ) return ( int ( x [ 0 ] ) , x [ 1 ] , int ( x [ 2 ] ) if x [ 2 ] else None , x [ 3 ] if x [ 3 ] else 'zzz' , int ( x [ 4 ] ) ) return sorted ( schemas , key = keyfun )
Sort a list of SQL schemas in order
179
def parse_schema_files ( files ) : f_dict = { } for f in files : root , ext = os . path . splitext ( f ) if ext != ".sql" : continue vto , vfrom = os . path . split ( root ) vto = os . path . split ( vto ) [ 1 ] if is_schema ( vto ) and is_schema ( vfrom ) : f_dict [ f ] = ( vfrom , vto ) return f_dict
Parse a list of SQL files and return a dictionary of valid schema files where each key is a valid schema file and the corresponding value is a tuple containing the source and the target schema .
180
def dump ( self ) : dumpfile = self . args . dumpfile if not dumpfile : db , env = self . get_db_args_env ( ) dumpfile = fileutils . timestamp_filename ( 'omero-database-%s' % db [ 'name' ] , 'pgdump' ) log . info ( 'Dumping database to %s' , dumpfile ) if not self . args . dry_run : self . pgdump ( '-Fc' , '-f' , dump...
Dump the database using the postgres custom format
181
def get_db_args_env ( self ) : db = { 'name' : self . args . dbname , 'host' : self . args . dbhost , 'user' : self . args . dbuser , 'pass' : self . args . dbpass } if not self . args . no_db_config : try : c = self . external . get_config ( force = True ) except Exception as e : log . warn ( 'config.xml not found: %s...
Get a dictionary of database connection parameters and create an environment for running postgres commands . Falls back to omego defaults .
182
def psql ( self , * psqlargs ) : db , env = self . get_db_args_env ( ) args = [ '-v' , 'ON_ERROR_STOP=on' , '-d' , db [ 'name' ] , '-h' , db [ 'host' ] , '-U' , db [ 'user' ] , '-w' , '-A' , '-t' ] + list ( psqlargs ) stdout , stderr = External . run ( 'psql' , args , capturestd = True , env = env ) if stderr : log . w...
Run a psql command
183
def pgdump ( self , * pgdumpargs ) : db , env = self . get_db_args_env ( ) args = [ '-d' , db [ 'name' ] , '-h' , db [ 'host' ] , '-U' , db [ 'user' ] , '-w' ] + list ( pgdumpargs ) stdout , stderr = External . run ( 'pg_dump' , args , capturestd = True , env = env ) if stderr : log . warn ( 'stderr: %s' , stderr ) log...
Run a pg_dump command
184
def set_server_dir ( self , dir ) : self . dir = os . path . abspath ( dir ) config = os . path . join ( self . dir , 'etc' , 'grid' , 'config.xml' ) self . configured = os . path . exists ( config )
Set the directory of the server to be controlled
185
def get_config ( self , force = False ) : if not force and not self . has_config ( ) : raise Exception ( 'No config file' ) configxml = os . path . join ( self . dir , 'etc' , 'grid' , 'config.xml' ) if not os . path . exists ( configxml ) : raise Exception ( 'No config file' ) try : c = self . _omero . config . Config...
Returns a dictionary of all config . xml properties
186
def setup_omero_cli ( self ) : if not self . dir : raise Exception ( 'No server directory set' ) if 'omero.cli' in sys . modules : raise Exception ( 'omero.cli can only be imported once' ) log . debug ( "Setting up omero CLI" ) lib = os . path . join ( self . dir , "lib" , "python" ) if not os . path . exists ( lib ) :...
Imports the omero CLI module so that commands can be run directly . Note Python does not allow a module to be imported multiple times so this will only work with a single omero instance .
187
def setup_previous_omero_env ( self , olddir , savevarsfile ) : env = self . get_environment ( savevarsfile ) def addpath ( varname , p ) : if not os . path . exists ( p ) : raise Exception ( "%s does not exist!" % p ) current = env . get ( varname ) if current : env [ varname ] = p + os . pathsep + current else : env ...
Create a copy of the current environment for interacting with the current OMERO server installation
188
def omero_cli ( self , command ) : assert isinstance ( command , list ) if not self . cli : raise Exception ( 'omero.cli not initialised' ) log . info ( "Invoking CLI [current environment]: %s" , " " . join ( command ) ) self . cli . invoke ( command , strict = True )
Runs a command as if from the OMERO command - line without the need for using popen or subprocess .
189
def run ( exe , args , capturestd = False , env = None ) : command = [ exe ] + args if env : log . info ( "Executing [custom environment]: %s" , " " . join ( command ) ) else : log . info ( "Executing : %s" , " " . join ( command ) ) start = time . time ( ) outfile = None errfile = None if capturestd : outfile = tempfi...
Runs an executable with an array of arguments optionally in the specified environment . Returns stdout and stderr
190
def string_support ( py3enc ) : if sys . version < '3' : def bytes2str ( b ) : return b def str2bytes ( s ) : return s else : def bytes2str ( b ) : return b . decode ( py3enc ) def str2bytes ( u ) : return u . encode ( py3enc ) return ( bytes2str , str2bytes )
Create byte - to - string and string - to - byte conversion functions for internal use .
191
def splitter_support ( py2enc ) : if sys . version < '3' : def _fn_sentence ( pattern , sentence ) : if REGEXTYPE == type ( pattern ) : if pattern . flags & re . UNICODE : return sentence . decode ( py2enc ) else : return sentence else : return sentence def _fn_token2str ( pattern ) : if REGEXTYPE == type ( pattern ) :...
Create tokenizer for use in boundary constraint parsing .
192
def upsert ( self , doc , namespace , timestamp , update_spec = None ) : index , doc_type = self . _index_and_mapping ( namespace ) doc_id = u ( doc . pop ( "_id" ) ) metadata = { 'ns' : namespace , '_ts' : timestamp } action = { '_op_type' : 'index' , '_index' : index , '_type' : doc_type , '_id' : doc_id , '_source' ...
Insert a document into Elasticsearch .
193
def bulk_upsert ( self , docs , namespace , timestamp ) : def docs_to_upsert ( ) : doc = None for doc in docs : index , doc_type = self . _index_and_mapping ( namespace ) doc_id = u ( doc . pop ( "_id" ) ) document_action = { '_index' : index , '_type' : doc_type , '_id' : doc_id , '_source' : self . _formatter . forma...
Insert multiple documents into Elasticsearch .
194
def remove ( self , document_id , namespace , timestamp ) : index , doc_type = self . _index_and_mapping ( namespace ) action = { '_op_type' : 'delete' , '_index' : index , '_type' : doc_type , '_id' : u ( document_id ) } meta_action = { '_op_type' : 'delete' , '_index' : self . meta_index_name , '_type' : self . meta_...
Remove a document from Elasticsearch .
195
def send_buffered_operations ( self ) : with self . lock : try : action_buffer = self . BulkBuffer . get_buffer ( ) if action_buffer : successes , errors = bulk ( self . elastic , action_buffer ) LOG . debug ( "Bulk request finished, successfully sent %d " "operations" , successes ) if errors : LOG . error ( "Bulk requ...
Send buffered operations to Elasticsearch .
196
def get_last_doc ( self ) : try : result = self . elastic . search ( index = self . meta_index_name , body = { "query" : { "match_all" : { } } , "sort" : [ { "_ts" : "desc" } ] , } , size = 1 ) [ "hits" ] [ "hits" ] for r in result : r [ '_source' ] [ '_id' ] = r [ '_id' ] return r [ '_source' ] except es_exceptions . ...
Get the most recently modified document from Elasticsearch .
197
def parse_type_signature ( sig ) : match = TYPE_SIG_RE . match ( sig . strip ( ) ) if not match : raise RuntimeError ( 'Type signature invalid, got ' + sig ) groups = match . groups ( ) typ = groups [ 0 ] generic_types = groups [ 1 ] if not generic_types : generic_types = [ ] else : generic_types = split_sig ( generic_...
Parse a type signature
198
def parse_attr_signature ( sig ) : match = ATTR_SIG_RE . match ( sig . strip ( ) ) if not match : raise RuntimeError ( 'Attribute signature invalid, got ' + sig ) name , _ , params = match . groups ( ) if params is not None and params . strip ( ) != '' : params = split_sig ( params ) params = [ parse_param_signature ( ...
Parse an attribute signature
199
def get_msdn_ref ( name ) : in_msdn = False if name in MSDN_VALUE_TYPES : name = MSDN_VALUE_TYPES [ name ] in_msdn = True if name . startswith ( 'System.' ) : in_msdn = True if in_msdn : link = name . split ( '<' ) [ 0 ] if link in MSDN_LINK_MAP : link = MSDN_LINK_MAP [ link ] else : link = link . lower ( ) url = 'http...
Try and create a reference to a type on MSDN