sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def prepare(self, connection_id, sql, max_rows_total=None): """Prepares a statement. :param connection_id: ID of the current connection. :param sql: SQL query. :param max_rows_total: The maximum number of rows that will be allowed for this query. ...
Prepares a statement. :param connection_id: ID of the current connection. :param sql: SQL query. :param max_rows_total: The maximum number of rows that will be allowed for this query. :returns: Signature of the prepared statement.
entailment
def execute(self, connection_id, statement_id, signature, parameter_values=None, first_frame_max_size=None): """Returns a frame of rows. The frame describes whether there may be another frame. If there is not another frame, the current iteration is done when we have finished the rows in...
Returns a frame of rows. The frame describes whether there may be another frame. If there is not another frame, the current iteration is done when we have finished the rows in the this frame. :param connection_id: ID of the current connection. :param statement_id: ...
entailment
def fetch(self, connection_id, statement_id, offset=0, frame_max_size=None): """Returns a frame of rows. The frame describes whether there may be another frame. If there is not another frame, the current iteration is done when we have finished the rows in the this frame. :param...
Returns a frame of rows. The frame describes whether there may be another frame. If there is not another frame, the current iteration is done when we have finished the rows in the this frame. :param connection_id: ID of the current connection. :param statement_id: ...
entailment
def close(self): """Closes the cursor. No further operations are allowed once the cursor is closed. If the cursor is used in a ``with`` statement, this method will be automatically called at the end of the ``with`` block. """ if self._closed: raise Programmin...
Closes the cursor. No further operations are allowed once the cursor is closed. If the cursor is used in a ``with`` statement, this method will be automatically called at the end of the ``with`` block.
entailment
def _transform_row(self, row): """Transforms a Row into Python values. :param row: A ``common_pb2.Row`` object. :returns: A list of values casted into the correct Python types. :raises: NotImplementedError """ tmp_row = [] f...
Transforms a Row into Python values. :param row: A ``common_pb2.Row`` object. :returns: A list of values casted into the correct Python types. :raises: NotImplementedError
entailment
def rownumber(self): """Read-only attribute providing the current 0-based index of the cursor in the result set or ``None`` if the index cannot be determined. The index can be seen as index of the cursor in a sequence (the result set). The next fetch operation will fetch the ...
Read-only attribute providing the current 0-based index of the cursor in the result set or ``None`` if the index cannot be determined. The index can be seen as index of the cursor in a sequence (the result set). The next fetch operation will fetch the row indexed by :attr:`rownu...
entailment
def Timestamp(year, month, day, hour, minute, second): """Constructs an object holding a datetime/timestamp value.""" return datetime.datetime(year, month, day, hour, minute, second)
Constructs an object holding a datetime/timestamp value.
entailment
def open(self): """Opens the connection.""" self._id = str(uuid.uuid4()) self._client.open_connection(self._id, info=self._connection_args)
Opens the connection.
entailment
def close(self): """Closes the connection. No further operations are allowed, either on the connection or any of its cursors, once the connection is closed. If the connection is used in a ``with`` statement, this method will be automatically called at the end of the ``with`` blo...
Closes the connection. No further operations are allowed, either on the connection or any of its cursors, once the connection is closed. If the connection is used in a ``with`` statement, this method will be automatically called at the end of the ``with`` block.
entailment
def cursor(self, cursor_factory=None): """Creates a new cursor. :param cursor_factory: This argument can be used to create non-standard cursors. The class returned must be a subclass of :class:`~phoenixdb.cursor.Cursor` (for example :class:`~phoenixdb.cursor.DictCurs...
Creates a new cursor. :param cursor_factory: This argument can be used to create non-standard cursors. The class returned must be a subclass of :class:`~phoenixdb.cursor.Cursor` (for example :class:`~phoenixdb.cursor.DictCursor`). A default factory for the connec...
entailment
def set_session(self, autocommit=None, readonly=None): """Sets one or more parameters in the current connection. :param autocommit: Switch the connection to autocommit mode. With the current version, you need to always enable this, because :meth:`commit` is not imple...
Sets one or more parameters in the current connection. :param autocommit: Switch the connection to autocommit mode. With the current version, you need to always enable this, because :meth:`commit` is not implemented. :param readonly: Switch the connectio...
entailment
def fit(self, X, y): """Fit the model Parameters ---------- X : array-like of shape = [n_samples, n_features] The training input samples. y : array-like, shape = [n_samples] The target values Returns ------- self : object ...
Fit the model Parameters ---------- X : array-like of shape = [n_samples, n_features] The training input samples. y : array-like, shape = [n_samples] The target values Returns ------- self : object Returns self.
entailment
def predict(self, X): """Predict target values for X. Parameters ---------- X : array-like of shape = [n_samples, n_features] The input samples. Returns ------- y : array of shape = [n_samples] The predicted target value. """ ...
Predict target values for X. Parameters ---------- X : array-like of shape = [n_samples, n_features] The input samples. Returns ------- y : array of shape = [n_samples] The predicted target value.
entailment
def _compute_hidden_activations(self, X): """Compute hidden activations given X""" self._compute_input_activations(X) acts = self.input_activations_ if (callable(self.activation_func)): args_dict = self.activation_args if (self.activation_args) else {} X_new = ...
Compute hidden activations given X
entailment
def transform(self, X, y=None): """Generate the random hidden layer's activations given X as input. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] Data to transform y : is not used: placeholder to allow for usage in a Pipeline. ...
Generate the random hidden layer's activations given X as input. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] Data to transform y : is not used: placeholder to allow for usage in a Pipeline. Returns ------- X_...
entailment
def _compute_radii(self): """Generate RBF radii""" # use supplied radii if present radii = self._get_user_components('radii') # compute radii if (radii is None): centers = self.components_['centers'] n_centers = centers.shape[0] max_dist = n...
Generate RBF radii
entailment
def _compute_centers(self, X, sparse, rs): """Generate RBF centers""" # use supplied centers if present centers = self._get_user_components('centers') # use points taken uniformly from the bounding # hyperrectangle if (centers is None): n_features = X.shape[...
Generate RBF centers
entailment
def _compute_biases(self, rs): """Generate MLP biases""" # use supplied biases if present biases = self._get_user_components('biases') if (biases is None): b_size = self.n_hidden biases = rs.normal(size=b_size) self.components_['biases'] = biases
Generate MLP biases
entailment
def _compute_weights(self, X, rs): """Generate MLP weights""" # use supplied weights if present weights = self._get_user_components('weights') if (weights is None): n_features = X.shape[1] hw_size = (n_features, self.n_hidden) weights = rs.normal(size...
Generate MLP weights
entailment
def _generate_components(self, X): """Generate components of hidden layer given X""" rs = check_random_state(self.random_state) if (self._use_mlp_input): self._compute_biases(rs) self._compute_weights(X, rs) if (self._use_rbf_input): self._compute_ce...
Generate components of hidden layer given X
entailment
def _compute_input_activations(self, X): """Compute input activations given X""" n_samples = X.shape[0] mlp_acts = np.zeros((n_samples, self.n_hidden)) if (self._use_mlp_input): b = self.components_['biases'] w = self.components_['weights'] mlp_acts ...
Compute input activations given X
entailment
def _compute_centers(self, X, sparse, rs): """Generate centers, then compute tau, dF and dN vals""" super(GRBFRandomLayer, self)._compute_centers(X, sparse, rs) centers = self.components_['centers'] sorted_distances = np.sort(squareform(pdist(centers))) self.dF_vals = sorted_di...
Generate centers, then compute tau, dF and dN vals
entailment
def _compute_radii(self): """Generate radii""" denom = pow(-np.log(self.grbf_lambda), 1.0/self.tau_vals) self.components_['radii'] = self.dF_vals/denom
Generate radii
entailment
def _fit_regression(self, y): """ fit regression using pseudo-inverse or supplied regressor """ if self.regressor is None: self.coefs_ = safe_sparse_dot(pinv2(self.hidden_activations_), y) else: self.regressor.fit(self.hidden_activations_, y) ...
fit regression using pseudo-inverse or supplied regressor
entailment
def fit(self, X, y): """ Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features....
Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape [n_samp...
entailment
def _get_predictions(self): """ get predictions using internal least squares/supplied regressor """ if self.regressor is None: preds = safe_sparse_dot(self.hidden_activations_, self.coefs_) else: preds = self.regressor.predict(self.hidden_activations_) ...
get predictions using internal least squares/supplied regressor
entailment
def predict(self, X): """ Predict values using the model Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Returns ------- C : numpy array of shape [n_samples, n_outputs] Predicted values. """ ...
Predict values using the model Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Returns ------- C : numpy array of shape [n_samples, n_outputs] Predicted values.
entailment
def fit(self, X, y): """ Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features....
Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape [n_samp...
entailment
def predict(self, X): """Predict values using the model Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Returns ------- C : numpy array of shape [n_samples, n_outputs] Predicted values. """ raw_p...
Predict values using the model Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Returns ------- C : numpy array of shape [n_samples, n_outputs] Predicted values.
entailment
def _create_random_layer(self): """Pass init params to RandomLayer""" return RandomLayer(n_hidden=self.n_hidden, alpha=self.alpha, random_state=self.random_state, activation_func=self.activation_func, ...
Pass init params to RandomLayer
entailment
def fit(self, X, y): """ Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features....
Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape [n_samp...
entailment
def predict(self, X): """ Predict values using the model Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Returns ------- C : numpy array of shape [n_samples, n_outputs] Predicted values. """ ...
Predict values using the model Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Returns ------- C : numpy array of shape [n_samples, n_outputs] Predicted values.
entailment
def fit(self, X, y): """ Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features....
Fit the model using X, y as training data. Parameters ---------- X : {array-like, sparse matrix} of shape [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape [n_samp...
entailment
def score(self, X, y): """Force use of accuracy score since we don't inherit from ClassifierMixin""" from sklearn.metrics import accuracy_score return accuracy_score(y, self.predict(X))
Force use of accuracy score since we don't inherit from ClassifierMixin
entailment
def compat_serializer_check_is_valid(serializer): """ http://www.django-rest-framework.org/topics/3.0-announcement/#using-is_validraise_exceptiontrue """ if DRFVLIST[0] >= 3: serializer.is_valid(raise_exception=True) else: if not serializer.is_valid(): serializers.ValidationError...
http://www.django-rest-framework.org/topics/3.0-announcement/#using-is_validraise_exceptiontrue
entailment
def compat_serializer_attr(serializer, obj): """ Required only for DRF 3.1, which does not make dynamically added attribute available in obj in serializer. This is a quick solution but works without breajing anything. """ if DRFVLIST[0] == 3 and DRFVLIST[1] == 1: for i in serializer.instance...
Required only for DRF 3.1, which does not make dynamically added attribute available in obj in serializer. This is a quick solution but works without breajing anything.
entailment
def compat_get_paginated_response(view, page): """ get_paginated_response is unknown to DRF 3.0 """ if DRFVLIST[0] == 3 and DRFVLIST[1] >= 1: from rest_messaging.serializers import ComplexMessageSerializer # circular import serializer = ComplexMessageSerializer(page, many=True) return v...
get_paginated_response is unknown to DRF 3.0
entailment
def compat_pagination_messages(cls): """ For DRF 3.1 and higher, pagination is defined at the paginator level (see http://www.django-rest-framework.org/topics/3.2-announcement/). For DRF 3.0 and lower, it can be handled at the view level. """ if DRFVLIST[0] == 3 and DRFVLIST[1] >= 1: setattr...
For DRF 3.1 and higher, pagination is defined at the paginator level (see http://www.django-rest-framework.org/topics/3.2-announcement/). For DRF 3.0 and lower, it can be handled at the view level.
entailment
def get_participants(self, obj): """ Allows to define a callback for serializing information about the user. """ # we set the many to many serialization to False, because we only want it with retrieve requests if self.callback is None: return [participant.id for participant in obj.pa...
Allows to define a callback for serializing information about the user.
entailment
def get_is_notification(self, obj): """ We say if the message should trigger a notification """ try: o = compat_serializer_attr(self, obj) return o.is_notification except Exception: return False
We say if the message should trigger a notification
entailment
def get_readers(self, obj): """ Return the ids of the people who read the message instance. """ try: o = compat_serializer_attr(self, obj) return o.readers except Exception: return []
Return the ids of the people who read the message instance.
entailment
def get_threads_where_participant_is_active(self, participant_id): """ Gets all the threads in which the current participant is involved. The method excludes threads where the participant has left. """ participations = Participation.objects.\ filter(participant__id=participant_id).\ ...
Gets all the threads in which the current participant is involved. The method excludes threads where the participant has left.
entailment
def get_active_threads_involving_all_participants(self, *participant_ids): """ Gets the threads where the specified participants are active and no one has left. """ query = Thread.objects.\ exclude(participation__date_left__lte=now()).\ annotate(count_participants=Count('partici...
Gets the threads where the specified participants are active and no one has left.
entailment
def get_or_create_thread(self, request, name=None, *participant_ids): """ When a Participant posts a message to other participants without specifying an existing Thread, we must 1. Create a new Thread if they have not yet opened the discussion. 2. If they have already opened the ...
When a Participant posts a message to other participants without specifying an existing Thread, we must 1. Create a new Thread if they have not yet opened the discussion. 2. If they have already opened the discussion and multiple Threads are not allowed for the same users, we must re...
entailment
def return_daily_messages_count(self, sender): """ Returns the number of messages sent in the last 24 hours so we can ensure the user does not exceed his messaging limits """ h24 = now() - timedelta(days=1) return Message.objects.filter(sender=sender, sent_at__gte=h24).count()
Returns the number of messages sent in the last 24 hours so we can ensure the user does not exceed his messaging limits
entailment
def check_who_read(self, messages): """ Check who read each message. """ # we get the corresponding Participation objects for m in messages: readers = [] for p in m.thread.participation_set.all(): if p.date_last_check is None: pass ...
Check who read each message.
entailment
def check_is_notification(self, participant_id, messages): """ Check if each message requires a notification for the specified participant. """ try: # we get the last check last_check = NotificationCheck.objects.filter(participant__id=participant_id).latest('id').date_check ...
Check if each message requires a notification for the specified participant.
entailment
def get_lasts_messages_of_threads(self, participant_id, check_who_read=True, check_is_notification=True): """ Returns the last message in each thread """ # we get the last message for each thread # we must query the messages using two queries because only Postgres supports .order_by('thread', '-...
Returns the last message in each thread
entailment
def get_all_messages_in_thread(self, participant_id, thread_id, check_who_read=True): """ Returns all the messages in a thread. """ try: messages = Message.objects.filter(thread__id=thread_id).\ order_by('-id').\ select_related('thread').\ pref...
Returns all the messages in a thread.
entailment
def create(self, request, *args, **kwargs): """ We ensure the Thread only involves eligible participants. """ serializer = self.get_serializer(data=compat_get_request_data(request)) compat_serializer_check_is_valid(serializer) self.perform_create(request, serializer) headers = se...
We ensure the Thread only involves eligible participants.
entailment
def mark_thread_as_read(self, request, pk=None): """ Pk is the pk of the Thread to which the messages belong. """ # we get the thread and check for permission thread = Thread.objects.get(id=pk) self.check_object_permissions(request, thread) # we save the date try: ...
Pk is the pk of the Thread to which the messages belong.
entailment
def process(self, quoted=False): ''' Parse an URL ''' self.p = urlparse(self.raw) self.scheme = self.p.scheme self.netloc = self.p.netloc self.opath = self.p.path if not quoted else quote(self.p.path) self.path = [x for x in self.opath.split('/') if x] self.params...
Parse an URL
entailment
def fetch(self, url, encoding=None, force_refetch=False, nocache=False, quiet=True): ''' Fetch a HTML file as binary''' try: if not force_refetch and self.cache is not None and url in self.cache: # try to look for content in cache logging.debug('Retrieving con...
Fetch a HTML file as binary
entailment
def download(self, url, path, force_refetch=False, nocache=False): ''' Download a file at $url and save it to $path ''' # Enable cache if os.path.isfile(path): getLogger().info("File exists, download task skipped -> {path}".format(path=path)) return True t...
Download a file at $url and save it to $path
entailment
def _platform(self) -> Optional[str]: """Extract platform.""" try: return str(self.journey.MainStop.BasicStop.Dep.Platform.text) except AttributeError: return None
Extract platform.
entailment
def _delay(self) -> int: """Extract departure delay.""" try: return int(self.journey.MainStop.BasicStop.Dep.Delay.text) except AttributeError: return 0
Extract departure delay.
entailment
def _departure(self) -> datetime: """Extract departure time.""" departure_time = datetime.strptime( self.journey.MainStop.BasicStop.Dep.Time.text, "%H:%M" ).time() if departure_time > (self.now - timedelta(hours=1)).time(): return datetime.combine(self.now.date(),...
Extract departure time.
entailment
def _extract(self, attribute) -> str: """Extract train information.""" attr_data = self.journey.JourneyAttributeList.JourneyAttribute[ self.attr_types.index(attribute) ].Attribute attr_variants = attr_data.xpath("AttributeVariant/@type") data = attr_data.AttributeVari...
Extract train information.
entailment
def _info(self) -> Optional[str]: """Extract journey information.""" try: return str(html.unescape(self.journey.InfoTextList.InfoText.get("text"))) except AttributeError: return None
Extract journey information.
entailment
def _info_long(self) -> Optional[str]: """Extract journey information.""" try: return str( html.unescape(self.journey.InfoTextList.InfoText.get("textL")).replace( "<br />", "\n" ) ) except AttributeError: ret...
Extract journey information.
entailment
def _pass_list(self) -> List[Dict[str, Any]]: """Extract next stops along the journey.""" stops: List[Dict[str, Any]] = [] for stop in self.journey.PassList.BasicStop: index = stop.get("index") station = stop.Location.Station.HafasName.Text.text station_id = s...
Extract next stops along the journey.
entailment
def validate(style): """Check `style` against pyout.styling.schema. Parameters ---------- style : dict Style object to validate. Raises ------ StyleValidationError if `style` is not valid. """ try: import jsonschema except ImportError: return try: ...
Check `style` against pyout.styling.schema. Parameters ---------- style : dict Style object to validate. Raises ------ StyleValidationError if `style` is not valid.
entailment
def value_type(value): """Classify `value` of bold, color, and underline keys. Parameters ---------- value : style value Returns ------- str, {"simple", "lookup", "re_lookup", "interval"} """ try: keys = list(value.keys()) except AttributeError: return "simple" ...
Classify `value` of bold, color, and underline keys. Parameters ---------- value : style value Returns ------- str, {"simple", "lookup", "re_lookup", "interval"}
entailment
def _register_mecab_loc(location): ''' Set MeCab binary location ''' global MECAB_LOC if not os.path.isfile(location): logging.getLogger(__name__).warning("Provided mecab binary location does not exist {}".format(location)) logging.getLogger(__name__).info("Mecab binary is switched to: {}"....
Set MeCab binary location
entailment
def run_mecab_process(content, *args, **kwargs): ''' Use subprocess to run mecab ''' encoding = 'utf-8' if 'encoding' not in kwargs else kwargs['encoding'] mecab_loc = kwargs['mecab_loc'] if 'mecab_loc' in kwargs else None if mecab_loc is None: mecab_loc = MECAB_LOC proc_args = [mecab_...
Use subprocess to run mecab
entailment
def parse(content, *args, **kwargs): ''' Use mecab-python3 by default to parse JP text. Fall back to mecab binary app if needed ''' global MECAB_PYTHON3 if 'mecab_loc' not in kwargs and MECAB_PYTHON3 and 'MeCab' in globals(): return MeCab.Tagger(*args).parse(content) else: return r...
Use mecab-python3 by default to parse JP text. Fall back to mecab binary app if needed
entailment
def create_track(self, path_in_ipod=None, checksum=None): """ :param path_in_ipod: the path of audio file in the iPod base :param checksum: CHECKSUM of the audio file in member audiodb :return: a new Track, you may want append it to the playlist.tracks """ if bool(path_in...
:param path_in_ipod: the path of audio file in the iPod base :param checksum: CHECKSUM of the audio file in member audiodb :return: a new Track, you may want append it to the playlist.tracks
entailment
def voice(self): """tuple. contain text and lang code """ dbid = self.lldb.dbid text, lang = self._voiceoverdb.get_text_lang(dbid) return text, lang
tuple. contain text and lang code
entailment
def add(self, src): """ store an audio file to storage dir :param src: audio file path :return: checksum value """ if not audio.get_type(src): raise TypeError('The type of this file is not supported.') return super().add(src)
store an audio file to storage dir :param src: audio file path :return: checksum value
entailment
def _get_cmd(command, arguments): """Merge command with arguments.""" if arguments is None: arguments = [] if command.endswith(".py") or command.endswith(".pyw"): return [sys.executable, command] + list(arguments) else: return [command] + list(arguments)
Merge command with arguments.
entailment
def argparse(argv, parser, arguments): """ A command line argument parser. Parses arguments coming from the argv Observable and outputs them as Argument items in the output observable. Parameters ----------- argv : Observable An Observable of strings. parser : Observable An ...
A command line argument parser. Parses arguments coming from the argv Observable and outputs them as Argument items in the output observable. Parameters ----------- argv : Observable An Observable of strings. parser : Observable An Observable containing one Parser item. argu...
entailment
def gassists_pv(self,dg,dt,dt2,na=None,memlimit=-1): """Calculates p-values of gene i regulating gene j with genotype data assisted method with multiple tests. dg: numpy.ndarray(nt,ns,dtype=gtype(='u1' by default)) Genotype data. Entry dg[i,j] is genotype i's value for sample j. Each value must be among 0,1,...,n...
Calculates p-values of gene i regulating gene j with genotype data assisted method with multiple tests. dg: numpy.ndarray(nt,ns,dtype=gtype(='u1' by default)) Genotype data. Entry dg[i,j] is genotype i's value for sample j. Each value must be among 0,1,...,na. Genotype i must be best (and significant) eQTL of ge...
entailment
def _gassist_any(self,dg,dt,dt2,name,na=None,nodiag=False,memlimit=-1): """Calculates probability of gene i regulating gene j with genotype data assisted method, with the recommended combination of multiple tests. dg: numpy.ndarray(nt,ns,dtype=gtype(='u1' by default)) Genotype data. Entry dg[i,j] is genotype i's v...
Calculates probability of gene i regulating gene j with genotype data assisted method, with the recommended combination of multiple tests. dg: numpy.ndarray(nt,ns,dtype=gtype(='u1' by default)) Genotype data. Entry dg[i,j] is genotype i's value for sample j. Each value must be among 0,1,...,na. Genotype i must ...
entailment
def gassist(self,dg,dt,dt2,na=None,nodiag=False,memlimit=-1): """Calculates probability of gene i regulating gene j with genotype data assisted method, with the recommended combination of multiple tests. Probabilities are converted from likelihood ratios separately for each A. This gives better predictions when the...
Calculates probability of gene i regulating gene j with genotype data assisted method, with the recommended combination of multiple tests. Probabilities are converted from likelihood ratios separately for each A. This gives better predictions when the number of secondary targets (dt2) is large. (Check program warnin...
entailment
def _cassists_any(self,dc,dt,dt2,name,nodiag=False,memlimit=-1): """Calculates probability of gene i regulating gene j with continuous anchor data assisted method, with multiple tests, by converting log likelihoods into probabilities per A for all B. dc: numpy.ndarray(nt,ns,dtype=ftype(='f4' by default)) Continuous ...
Calculates probability of gene i regulating gene j with continuous anchor data assisted method, with multiple tests, by converting log likelihoods into probabilities per A for all B. dc: numpy.ndarray(nt,ns,dtype=ftype(='f4' by default)) Continuous anchor data. Entry dc[i,j] is anchor i's value for sample j. Anch...
entailment
def cassists(self,dc,dt,dt2,nodiag=False,memlimit=-1): """Calculates probability of gene i regulating gene j with continuous data assisted method, with multiple tests, by converting log likelihoods into probabilities per A for all B. Probabilities are converted from likelihood ratios separately for each A. This give...
Calculates probability of gene i regulating gene j with continuous data assisted method, with multiple tests, by converting log likelihoods into probabilities per A for all B. Probabilities are converted from likelihood ratios separately for each A. This gives better predictions when the number of secondary targets ...
entailment
def _cassist_any(self,dc,dt,dt2,name,nodiag=False,memlimit=-1): """Calculates probability of gene i regulating gene j with continuous data assisted method, with the recommended combination of multiple tests. dc: numpy.ndarray(nt,ns,dtype=ftype(='f4' by default)) Continuous anchor data. Entry dc[i,j] is anchor i's ...
Calculates probability of gene i regulating gene j with continuous data assisted method, with the recommended combination of multiple tests. dc: numpy.ndarray(nt,ns,dtype=ftype(='f4' by default)) Continuous anchor data. Entry dc[i,j] is anchor i's value for sample j. Anchor i is used to infer the probability of g...
entailment
def cassist(self,dc,dt,dt2,nodiag=False,memlimit=-1): """Calculates probability of gene i regulating gene j with continuous data assisted method, with the recommended combination of multiple tests. Probabilities are converted from likelihood ratios separately for each A. This gives better predictions when the numbe...
Calculates probability of gene i regulating gene j with continuous data assisted method, with the recommended combination of multiple tests. Probabilities are converted from likelihood ratios separately for each A. This gives better predictions when the number of secondary targets (dt2) is large. (Check program warn...
entailment
def rank_pv(self,dt,dt2,memlimit=-1): """Calculates p-values of gene i correlating with gene j by converting log likelihoods into probabilities per A for all B. dt: numpy.ndarray(nt,ns,dtype=ftype(='=f4' by default)) Gene expression data for A Entry dt[i,j] is gene i's expression level for sample j. dt2:numpy.ndar...
Calculates p-values of gene i correlating with gene j by converting log likelihoods into probabilities per A for all B. dt: numpy.ndarray(nt,ns,dtype=ftype(='=f4' by default)) Gene expression data for A Entry dt[i,j] is gene i's expression level for sample j. dt2:numpy.ndarray(nt2,ns,dtype=ftype(='=f4' by default))...
entailment
def qn(phi, *n): """ Calculate the complex flow vector `Q_n`. :param array-like phi: Azimuthal angles. :param int n: One or more harmonics to calculate. :returns: A single complex number if only one ``n`` was given or a complex array for multiple ``n``. """ phi = np.ravel...
Calculate the complex flow vector `Q_n`. :param array-like phi: Azimuthal angles. :param int n: One or more harmonics to calculate. :returns: A single complex number if only one ``n`` was given or a complex array for multiple ``n``.
entailment
def correlation(self, n, k, error=False): r""" Calculate `\langle k \rangle_n`, the `k`-particle correlation function for `n`\ th-order anisotropy. :param int n: Anisotropy order. :param int k: Correlation order. :param bool error: Whether to calculate stati...
r""" Calculate `\langle k \rangle_n`, the `k`-particle correlation function for `n`\ th-order anisotropy. :param int n: Anisotropy order. :param int k: Correlation order. :param bool error: Whether to calculate statistical error (for `\langle 2 \rangle` ...
entailment
def cumulant(self, n, k, error=False): r""" Calculate `c_n\{k\}`, the `k`-particle cumulant for `n`\ th-order anisotropy. :param int n: Anisotropy order. :param int k: Correlation order. :param bool error: Whether to calculate statistical error (for `c_n\{2\...
r""" Calculate `c_n\{k\}`, the `k`-particle cumulant for `n`\ th-order anisotropy. :param int n: Anisotropy order. :param int k: Correlation order. :param bool error: Whether to calculate statistical error (for `c_n\{2\}` only). If true, return a tuple `...
entailment
def flow(self, n, k, error=False, imaginary='nan'): r""" Calculate `v_n\{k\}`, the estimate of flow coefficient `v_n` from the `k`-particle cumulant. :param int n: Anisotropy order. :param int k: Correlation order. :param bool error: Whether to calculate sta...
r""" Calculate `v_n\{k\}`, the estimate of flow coefficient `v_n` from the `k`-particle cumulant. :param int n: Anisotropy order. :param int k: Correlation order. :param bool error: Whether to calculate statistical error (for `v_n\{2\}` only). If true, r...
entailment
def _pdf(self, phi):
    """
    Evaluate the _unnormalized_ flow PDF.

    Computes 1 + 2*sum_n v_n*cos(n*phi) via an outer product over the
    stored harmonic orders ``self._n`` and coefficients ``self._vn``.
    """
    harmonics = np.outer(phi, self._n)
    return 1. + 2. * np.inner(self._vn, np.cos(harmonics))
Evaluate the _unnormalized_ flow PDF.
entailment
def _uniform_phi(M):
    """
    Generate M random numbers in [-pi, pi).
    """
    return np.random.uniform(low=-np.pi, high=np.pi, size=M)
Generate M random numbers in [-pi, pi).
entailment
def pdf(self, phi): r""" Evaluate the flow PDF `dN/d\phi`. :param array-like phi: Azimuthal angles. :returns: The flow PDF evaluated at ``phi``. """ if self._n is None: pdf = np.empty_like(phi) pdf.fill(.5/np.pi) return pdf ...
r""" Evaluate the flow PDF `dN/d\phi`. :param array-like phi: Azimuthal angles. :returns: The flow PDF evaluated at ``phi``.
entailment
def sample(self, multiplicity): r""" Randomly sample azimuthal angles `\phi`. :param int multiplicity: Number to sample. :returns: Array of sampled angles. """ if self._n is None: return self._uniform_phi(multiplicity) # Since the flow PDF does not...
r""" Randomly sample azimuthal angles `\phi`. :param int multiplicity: Number to sample. :returns: Array of sampled angles.
entailment
def dumps(obj, *args, **kwargs):
    '''
    Typeless dump an object to a JSON string.

    Uses TypelessSONEncoder and keeps non-ASCII characters intact
    (ensure_ascii=False); extra positional/keyword arguments are
    forwarded to json.dumps.
    '''
    encoder = TypelessSONEncoder
    return json.dumps(obj, *args, cls=encoder, ensure_ascii=False, **kwargs)
Typeless dump an object to json string
entailment
def flex_update_obj(source, target, __silent, *fields, **field_map): ''' Pull data from source to target. Target's __dict__ (object data) will be used by default. Otherwise, it'll be treated as a dictionary ''' source_dict = source.__dict__ if hasattr(source, '__dict__') else source if not fields: ...
Pull data from source to target. Target's __dict__ (object data) will be used by default. Otherwise, it'll be treated as a dictionary
entailment
def to_obj(cls, obj_data=None, *fields, **field_map): ''' Use obj_data (dict-like) to construct an object of type cls prioritize obj_dict when there are conficts ''' if not fields: fields = obj_data.keys() try: kwargs = {field(f, field_map): obj_data[f] for f in fields if f in obj_data} ...
Use obj_data (dict-like) to construct an object of type cls; prioritize obj_data when there are conflicts
entailment
def is_kana(text):
    '''
    Check if a text is written in kana only (hiragana & katakana).

    An empty string returns True; None raises ValueError.
    '''
    if text is None:
        raise ValueError("text cannot be None")
    return all(ch in HIRAGANA or ch in KATAKANA for ch in text)
Check if a text is written in kana only (hiragana & katakana); if text is empty then return True
entailment
def txt2mecab(text, **kwargs):
    ''' Use mecab to parse one sentence '''
    raw_lines = _internal_mecab_parse(text, **kwargs).splitlines()
    parsed_tokens = [MeCabToken.parse(line) for line in raw_lines]
    return MeCabSent(text, parsed_tokens)
Use mecab to parse one sentence
entailment
def lines2mecab(lines, **kwargs):
    ''' Use mecab to parse many lines '''
    # Parse each line independently and collect the resulting sentences.
    return [txt2mecab(line, **kwargs) for line in lines]
Use mecab to parse many lines
entailment
def tokenize_sent(mtokens, raw='', auto_strip=True): ''' Tokenize a text to multiple sentences ''' sents = [] bucket = [] cfrom = 0 cto = 0 token_cfrom = 0 logger = getLogger() logger.debug("raw text: {}".format(raw)) logger.debug("tokens: {}".format(mtokens)) for t in mtokens: ...
Tokenize a text to multiple sentences
entailment
def analyse(content, splitlines=True, format=None, **kwargs): ''' Japanese text > tokenize/txt/html ''' sents = DekoText.parse(content, splitlines=splitlines, **kwargs) doc = [] final = sents # Generate output if format == 'html': for sent in sents: doc.append(sent.to_ruby())...
Japanese text > tokenize/txt/html
entailment
def pos3(self):
    ''' Use pos-sc1-sc2 as POS (sub-categories equal to '*' or empty are skipped) '''
    tag = self.pos
    for sub_cat in (self.sc1, self.sc2):
        if sub_cat and sub_cat != '*':
            tag = tag + '-' + sub_cat
    return tag
Use pos-sc1-sc2 as POS
entailment
def to_ruby(self): ''' Convert one MeCabToken into HTML ''' if self.need_ruby(): surface = self.surface reading = self.reading_hira() return '<ruby><rb>{sur}</rb><rt>{read}</rt></ruby>'.format(sur=surface, read=reading) elif self.is_eos: return '' ...
Convert one MeCabToken into HTML
entailment
def add(self, sentence_text, **kwargs):
    ''' Parse a text string and add it to this doc '''
    parsed_sent = MeCabSent.parse(sentence_text, **kwargs)
    self.sents.append(parsed_sent)
    return parsed_sent
Parse a text string and add it to this doc
entailment
def smart_text(s, encoding="utf-8", strings_only=False, errors="strict"): """Return a unicode object representing 's'. Treats bytes using the 'encoding' codec. If strings_only is True, don't convert (some) non-string-like objects. """ if isinstance(s, six.text_type): return s ...
Return a unicode object representing 's'. Treats bytes using the 'encoding' codec. If strings_only is True, don't convert (some) non-string-like objects.
entailment
def smart_bytes(s, encoding="utf-8", strings_only=False, errors="strict"): """Return a bytes version of 's' encoded as specified in 'encoding'. If strings_only is True, don't convert (some) non-string-like objects. """ if isinstance(s, six.binary_type): if encoding == "utf-8": ...
Return a bytes version of 's' encoded as specified in 'encoding'. If strings_only is True, don't convert (some) non-string-like objects.
entailment