code (string, 18–4.5k chars) | signature (string, 7–879 chars) | docstring (string, 3–4.31k chars) | loss_without_docstring (float64, 1.08–2.36k) | loss_with_docstring (float64, 1.07–1.49k) | factor (float64, 1–23.3) | rendered (string, 91–5.22k chars) | quality_prob (float64, 0.5–0.97) | learning_prob (float64, 0.5–1) |
|---|---|---|---|---|---|---|---|---|
if Class is True: Class = self.__class__
if scope is True: scope = STRUCTURESCOPE
structural = Class is not None and issubclass(Class,AbstractStructureElement)
if reverse:
order = reversed
descendindex = -1
else:
order = lambda x: x ... | def next(self, Class=True, scope=True, reverse=False) | Returns the next element, if it is of the specified type and if it does not cross the boundary of the defined scope. Returns None if no next element is found. Non-authoritative elements are never returned.
Arguments:
* ``Class``: The class to select; any python class subclassed off `'AbstractElemen... | 5.550529 | 5.39329 | 1.029155 | def next(self, Class=True, scope=True, reverse=False):
"""
Returns the next element, if it is of the specified type and if it does not cross the boundary of the defined scope. Returns None if no next element is found. Non-authoritative elements are never returned.
Arguments:
* ``Class``: Th... | 0.785648 | 0.509093 |
depth = 0
e = self
while True:
if e.parent:
e = e.parent #pylint: disable=redefined-variable-type
else:
#no parent, breaking
return False
if isinstance(e,AbstractStructureElement) or isinstance(e,Abstr... | def finddefaultreference(self) | Find the default reference for text offsets:
The parent of the current textcontent's parent (counting only Structure Elements and Subtoken Annotation Elements)
Note: This returns not a TextContent element, but its parent. Whether the textcontent actually exists is checked later/elsewhere | 6.768433 | 4.726374 | 1.432056 | def finddefaultreference(self):
"""
Find the default reference for text offsets:
The parent of the current textcontent's parent (counting only Structure Elements and Subtoken Annotation Elements)
Note: This returns not a TextContent element, but its parent. Whether the textcontent actually ... | 0.659378 | 0.54468 |
l = []
for e in self.data:
l += e.items()
return l | def items(self) | Returns a depth-first flat list of all items in the document | 6.009556 | 5.584836 | 1.076049 | def items(self):
"""
Returns a depth-first flat list of all items in the document
"""
l = []
for e in self.data:
l += e.items()
return l | 0.716281 | 0.523786 |
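Below is a minimal stand-alone sketch of the same depth-first flattening; the `Element` class here is hypothetical, standing in for the document tree whose element `items()` is assumed to return the element itself followed by its descendants.

```python
class Element:
    """Hypothetical element whose items() flattens the subtree depth-first."""
    def __init__(self, *children):
        self.data = list(children)

    def items(self):
        l = [self]
        for e in self.data:
            l += e.items()
        return l

root = Element(Element(Element()), Element())
print(len(root.items()))  # 4: the root plus its three descendants
```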
finalsolution = None
bestscore = None
for solution in self:
if bestscore is None:
bestscore = solution.score()
finalsolution = solution
elif self.minimize:
score = solution.score()
if score < bestsco... | def searchbest(self) | Returns the single best result (if multiple have the same score, the first match is returned) | 2.211629 | 2.082187 | 1.062166 | def searchbest(self):
"""
Returns the single best result (if multiple have the same score, the first match is returned)
"""
finalsolution = None
bestscore = None
for solution in self:
if bestscore is None:
bestscore = solution.score()
finalsolution = solution... | 0.729026 | 0.514217 |
solutions = deque([], n)
for solution in self:
solutions.append(solution)
return solutions | def searchlast(self, n=10) | Return the last n results (or possibly fewer if not found). Note that the last results are not necessarily the best ones, depending on the search type. | 9.863415 | 8.865089 | 1.112613 | def searchlast(self, n=10):
"""
Return the last n results (or possibly fewer if not found). Note that the last results are not necessarily the best ones, depending on the search type.
"""
solutions = deque([], n)
for solution in self:
solutions.append(solution)
return solutions | 0.639708 | 0.501038 |
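The deque above relies on the standard library's bounded-deque behaviour: `deque([], n)` sets `maxlen=n`, so older entries are silently discarded as new ones are appended. A quick sketch:

```python
from collections import deque

last = deque([], 3)           # same as deque(maxlen=3)
for solution in range(10):    # stand-in for iterating over search results
    last.append(solution)
print(list(last))             # [7, 8, 9] -- only the last three are kept
```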
l = []
for word_id, senses,distance in self:
for sense, confidence in senses:
if sense not in l: l.append(sense)
if bestonly:
break
return l | def senses(self, bestonly=False) | Returns a list of all predicted senses | 5.897015 | 5.76519 | 1.022866 | def senses(self, bestonly=False):
"""
Returns a list of all predicted senses
"""
l = []
for word_id, senses,distance in self:
for sense, confidence in senses:
if sense not in l: l.append(sense)
if bestonly:
break
return l | 0.62681 | 0.507385 |
if self.children:
return sum( ( c.size() for c in self.children.values() ) ) + 1
else:
return 1 | def size(self) | Size is number of nodes under the trie, including the current node | 3.820153 | 3.063506 | 1.246987 | def size(self):
"""
Size is number of nodes under the trie, including the current node
"""
if self.children:
return sum( ( c.size() for c in self.children.values() ) ) + 1
else:
return 1 | 0.534066 | 0.596051 |
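A self-contained sketch with a hypothetical trie node; since `sum()` over an empty dict is 0, the single expression `1 + sum(...)` covers both branches of the method above.

```python
class TrieNode:
    def __init__(self):
        self.children = {}

    def size(self):
        # This node plus all nodes beneath it.
        return 1 + sum(c.size() for c in self.children.values())

root = TrieNode()
root.children['a'] = TrieNode()
root.children['a'].children['b'] = TrieNode()
print(root.size())  # 3
```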
global namespaces
return self.tree.xpath(expression, namespaces=namespaces) | def xpath(self, expression) | Executes an xpath expression using the correct namespaces | 8.032832 | 6.176901 | 1.300463 | def xpath(self, expression):
"""
Executes an xpath expression using the correct namespaces
"""
global namespaces
return self.tree.xpath(expression, namespaces=namespaces) | 0.585931 | 0.84241 |
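Assuming `self.tree` is an lxml tree, the `namespaces` mapping binds the prefixes used in the expression. A stand-alone lxml sketch with a made-up namespace:

```python
from lxml import etree

namespaces = {'ex': 'http://example.com/ns'}  # hypothetical prefix mapping
tree = etree.fromstring('<root xmlns="http://example.com/ns"><item/></root>')
print(tree.xpath('//ex:item', namespaces=namespaces))  # one matching element
```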
'''
Given a function to map from an ID to an underlying object, and a function
to map from an underlying object to the concrete GraphQLObjectType it
corresponds to, constructs a `Node` interface that objects can implement,
and a field config for a `node` root field.
If the type_resolver is omit... | def node_definitions(id_fetcher, type_resolver=None, id_resolver=None) | Given a function to map from an ID to an underlying object, and a function
to map from an underlying object to the concrete GraphQLObjectType it
corresponds to, constructs a `Node` interface that objects can implement,
and a field config for a `node` root field.
If the type_resolver is omitted, object ... | 3.925619 | 1.697994 | 2.311916 | def node_definitions(id_fetcher, type_resolver=None, id_resolver=None):
"""
Given a function to map from an ID to an underlying object, and a function
to map from an underlying object to the concrete GraphQLObjectType it
corresponds to, constructs a `Node` interface that objects can implement,
and a... | 0.723065 | 0.739281 |
'''
Given an optional cursor and a default offset, returns the offset
to use; if the cursor contains a valid offset, that will be used,
otherwise it will be the default.
'''
if not is_str(cursor):
return default_offset
offset = cursor_to_offset(cursor)
try:
return int(of... | def get_offset_with_default(cursor=None, default_offset=0) | Given an optional cursor and a default offset, returns the offset
to use; if the cursor contains a valid offset, that will be used,
otherwise it will be the default. | 3.98127 | 2.254083 | 1.766248 | def get_offset_with_default(cursor=None, default_offset=0):
"""
Given an optional cursor and a default offset, returns the offset
to use; if the cursor contains a valid offset, that will be used,
otherwise it will be the default.
"""
... | 0.613179 | 0.515132 |
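Relay-style cursors are opaque base64 strings; below is a sketch of what `cursor_to_offset` plausibly does, assuming the common `arrayconnection:<offset>` encoding (the helper itself is not shown in this row):

```python
import base64

def cursor_to_offset(cursor):
    # Assumes the Relay 'arrayconnection:<offset>' convention.
    decoded = base64.b64decode(cursor).decode()
    return decoded.split(':', 1)[1]

cursor = base64.b64encode(b'arrayconnection:5').decode()
print(int(cursor_to_offset(cursor)))  # 5
```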
# Get a list of node ids from the edge data
nodes = set(e['source'] for e in edges) | set(e['target'] for e in edges)
# Convert to a data-storing object and initialize some values
d = 3 if is_3d else 2
nodes = {n: {'velocity': [0.0] * d, 'force': [0.0] * d} for n in nodes}
# Repeat n tim... | def run(edges, iterations=1000, force_strength=5.0, dampening=0.01,
max_velocity=2.0, max_distance=50, is_3d=True) | Runs a force-directed-layout algorithm on the input graph.
iterations - Number of FDL iterations to run in coordinate generation
force_strength - Strength of Coulomb and Hooke forces
(edit this to scale the distance between nodes)
dampening - Multiplier to reduce force applied to nodes... | 3.911408 | 3.815891 | 1.025032 | def run(edges, iterations=1000, force_strength=5.0, dampening=0.01,
max_velocity=2.0, max_distance=50, is_3d=True):
"""
Runs a force-directed-layout algorithm on the input graph.
iterations - Number of FDL iterations to run in coordinate generation
force_strength - Strength of Coulomb and Hooke... | 0.620219 | 0.634932 |
logger.debug("starting")
assert pipeline
assert steps_group
logger.debug(f"retrieving {steps_group} steps from pipeline")
if steps_group in pipeline:
steps = pipeline[steps_group]
if steps is None:
logger.warning(
f"{steps_group}: sequence has no eleme... | def get_pipeline_steps(pipeline, steps_group) | Get the steps attribute of module pipeline.
If there is no steps sequence on the pipeline, return None. Guess you
could theoretically want to run a pipeline with nothing in it. | 4.105674 | 3.856518 | 1.064607 | def get_pipeline_steps(pipeline, steps_group):
"""
Get the steps attribute of module pipeline.
If there is no steps sequence on the pipeline, return None. Guess you
could theoretically want to run a pipeline with nothing in it.
"""
logger.debug("starting")
assert pipeline
assert st... | 0.68988 | 0.5984 |
tag_representers = [PyString, SicString]
yaml_loader = get_yaml_parser_safe()
for representer in tag_representers:
yaml_loader.register_class(representer)
pipeline_definition = yaml_loader.load(file)
return pipeline_definition | def get_pipeline_yaml(file) | Return pipeline yaml from open file object.
Use specific custom representers to model the custom pypyr pipeline yaml
format, to load in special literal types like py and sic strings.
If looking to extend the pypyr pipeline syntax with special types, add
these to the tag_representers list.
Args:
... | 7.524 | 5.018706 | 1.499191 | def get_pipeline_yaml(file):
"""
Return pipeline yaml from open file object.
Use specific custom representers to model the custom pypyr pipeline yaml
format, to load in special literal types like py and sic strings.
If looking to extend the pypyr pipeline syntax with special types, add
these t... | 0.718829 | 0.606761 |
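The tag representers follow ruamel.yaml's `register_class` protocol. A minimal sketch of that mechanism with a made-up `!sic` scalar type (pypyr's real `PyString`/`SicString` classes are more involved):

```python
from ruamel.yaml import YAML

class SicString(str):
    yaml_tag = '!sic'

    @classmethod
    def from_yaml(cls, constructor, node):
        # Take the scalar verbatim, with no further interpretation.
        return cls(node.value)

yaml_loader = YAML(typ='safe', pure=True)
yaml_loader.register_class(SicString)
doc = yaml_loader.load("key: !sic '{not} a format string'")
print(type(doc['key']).__name__, doc['key'])
```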
def partial(func, col, *args, **kwargs):
def new_func(gdf):
return func(gdf[col], *args, **kwargs)
return new_func
def make_statement(func, col):
if isinstance(func, str):
expr = '{}({})'.format(func, col)
elif callable(func):
... | def build_expressions(verb) | Build expressions for helper verbs
Parameters
----------
verb : verb
A verb with a *functions* attribute.
Returns
-------
out : tuple
(List of Expressions, New columns). The expressions and the
new columns in which the results of those expressions will
be stored... | 3.490683 | 3.391462 | 1.029256 | def build_expressions(verb):
"""
Build expressions for helper verbs
Parameters
----------
verb : verb
A verb with a *functions* attribute.
Returns
-------
out : tuple
(List of Expressions, New columns). The expressions and the
new columns in which the results of... | 0.788359 | 0.708515 |
# Note: There's an experimental JSON encoder floating around in
# pandas land that hasn't made it into the main branch. This
# function should be revisited if it ever does.
if not pd:
raise LoadError('pandas could not be imported')
if not hasattr(data, 'index... | def from_pandas(cls, data, columns=None, key_on='idx', name=None,
series_key='data', grouped=False, records=False, **kwargs) | Load values from a pandas ``Series`` or ``DataFrame`` object
Parameters
----------
data : pandas ``Series`` or ``DataFrame``
Pandas object to import data from.
columns: list, default None
DataFrame columns to convert to Data. Keys default to col names.
... | 3.233727 | 3.127881 | 1.03384 | def from_pandas(cls, data, columns=None, key_on='idx', name=None,
series_key='data', grouped=False, records=False, **kwargs):
"""
Load values from a pandas ``Series`` or ``DataFrame`` object
Parameters
----------
data : pandas ``Series`` or ``DataFrame``
... | 0.655253 | 0.510496 |
if not np:
raise LoadError('numpy could not be imported')
_assert_is_type('numpy object', np_obj, np.ndarray)
# Integer index if none is provided
index = index or range(np_obj.shape[0])
# Explicitly map dict-keys to strings for JSON serializer.
colu... | def from_numpy(cls, np_obj, name, columns, index=None, index_key=None,
**kwargs) | Load values from a numpy array
Parameters
----------
np_obj : numpy.ndarray
numpy array to load data from
name : string
``name`` field for the data
columns : iterable
Sequence of column names, from left to right. Must have same
len... | 3.328766 | 3.271502 | 1.017504 | def from_numpy(cls, np_obj, name, columns, index=None, index_key=None,
**kwargs):
"""
Load values from a numpy array
Parameters
----------
np_obj : numpy.ndarray
numpy array to load data from
name : string
``name`` field for the data
... | 0.837188 | 0.722845 |
if not name:
name = 'table'
cls.raw_data = data
# Tuples
if isinstance(data, tuple):
values = [{"x": x[0], "y": x[1]} for x in data]
# Lists
elif isinstance(data, list):
values = [{"x": x, "y": y}
for x,... | def keypairs(cls, data, columns=None, use_index=False, name=None) | This will format the data as Key: Value pairs, rather than the
idx/col/val style. This is useful for some transforms, and to
key choropleth map data
Standard Data Types:
List: [0, 10, 20, 30, 40]
Paired Tuples: ((0, 1), (0, 2), (0, 3))
Dict: {'A': 10, 'B': 20... | 2.374697 | 2.351368 | 1.009921 | def keypairs(cls, data, columns=None, use_index=False, name=None):
"""
This will format the data as Key: Value pairs, rather than the
idx/col/val style. This is useful for some transforms, and to
key choropleth map data
Standard Data Types:
List: [0, 10, 20, 30, 40]
... | 0.786889 | 0.643427 |
'''Convert a NumPy array to values attribute'''
def to_list_no_index(xvals, yvals):
return [{"x": x, "y": np.asscalar(y)}
for x, y in zip(xvals, yvals)]
if len(data.shape) == 1 or data.shape[1] == 1:
xvals = range(data.shape[0] + 1)
values... | def _numpy_to_values(data) | Convert a NumPy array to values attribute | 2.689877 | 2.631855 | 1.022046 | def _numpy_to_values(data):
"""
Convert a NumPy array to values attribute
"""
def to_list_no_index(xvals, yvals):
return [{"x": x, "y": np.asscalar(y)}
for x, y in zip(xvals, yvals)]
if len(data.shape) == 1 or data.shap... | 0.601008 | 0.73848 |
retval = tuple()
for val in self.VALUES:
retval += (getattr(self, val),)
return retval | def get_value_tuple(self) | Returns a tuple of the color's values (in order). For example,
an LabColor object will return (lab_l, lab_a, lab_b), where each
member of the tuple is the float value for said variable. | 6.593547 | 5.879219 | 1.1215 | def get_value_tuple(self):
"""
Returns a tuple of the color's values (in order). For example,
an LabColor object will return (lab_l, lab_a, lab_b), where each
member of the tuple is the float value for said variable.
"""
retval = tuple()
for val in self.VALUES:
retval +=... | 0.664568 | 0.57824 |
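Usage with colormath's `LabColor`, whose `VALUES` list is `lab_l`, `lab_a`, `lab_b`:

```python
from colormath.color_objects import LabColor

color = LabColor(lab_l=50.0, lab_a=10.0, lab_b=-20.0)
print(color.get_value_tuple())  # (50.0, 10.0, -20.0)
```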
# This holds the object's spectral data, and will be passed to
# numpy.array() to create a numpy array (matrix) for the matrix math
# that will be done during the conversion to XYZ.
values = []
# Use the required value list to build this dynamically. Default to
#... | def get_numpy_array(self) | Dump this color into NumPy array. | 11.971473 | 11.074844 | 1.080961 | def get_numpy_array(self):
"""
Dump this color into NumPy array.
"""
# This holds the object's spectral data, and will be passed to
# numpy.array() to create a numpy array (matrix) for the matrix math
# that will be done during the conversion to XYZ.
values = []
# Use the required v... | 0.562417 | 0.6705 |
blue_density = ansi_density(color, ANSI_STATUS_T_BLUE)
green_density = ansi_density(color, ANSI_STATUS_T_GREEN)
red_density = ansi_density(color, ANSI_STATUS_T_RED)
densities = [blue_density, green_density, red_density]
min_density = min(densities)
max_density = max(densities)
density_... | def auto_density(color) | Given a SpectralColor, automatically choose the correct ANSI T filter.
Returns a tuple with a string representation of the filter and the
calculated density.
:param SpectralColor color: The SpectralColor object to calculate
density for.
:rtype: float
:returns: The density value, with the filter... | 2.772206 | 2.771969 | 1.000086 | def auto_density(color):
"""
Given a SpectralColor, automatically choose the correct ANSI T filter.
Returns a tuple with a string representation of the filter and the
calculated density.
:param SpectralColor color: The SpectralColor object to calculate
density for.
:rtype: float
:return... | 0.848219 | 0.810291 |
color1_vector = _get_lab_color1_vector(color1)
color2_matrix = _get_lab_color2_matrix(color2)
delta_e = color_diff_matrix.delta_e_cie1976(color1_vector, color2_matrix)[0]
return numpy.asscalar(delta_e) | def delta_e_cie1976(color1, color2) | Calculates the Delta E (CIE1976) of two colors. | 3.257775 | 3.218294 | 1.012268 | def delta_e_cie1976(color1, color2):
"""
Calculates the Delta E (CIE1976) of two colors.
"""
color1_vector = _get_lab_color1_vector(color1)
color2_matrix = _get_lab_color2_matrix(color2)
delta_e = color_diff_matrix.delta_e_cie1976(color1_vector, color2_matrix)[0]
return numpy.asscalar(d... | 0.693668 | 0.655694 |
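End-to-end usage through colormath's public API; CIE1976 delta E is just the Euclidean distance in Lab space:

```python
from colormath.color_objects import LabColor
from colormath.color_diff import delta_e_cie1976

c1 = LabColor(lab_l=50.0, lab_a=10.0, lab_b=-20.0)
c2 = LabColor(lab_l=55.0, lab_a=12.0, lab_b=-18.0)
print(delta_e_cie1976(c1, c2))  # sqrt(5**2 + 2**2 + 2**2) ~= 5.745
```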
def decorator(f):
f.start_type = start_type
f.target_type = target_type
_conversion_manager.add_type_conversion(start_type, target_type, f)
return f
return decorator | def color_conversion_function(start_type, target_type) | Decorator to indicate a function that performs a conversion from one color
space to another.
This decorator will return the original function unmodified, however it will
be registered in the _conversion_manager so it can be used to perform color
space transformations between color spaces that do not ha... | 2.876948 | 2.635455 | 1.091632 | def color_conversion_function(start_type, target_type):
"""
Decorator to indicate a function that performs a conversion from one color
space to another.
This decorator will return the original function unmodified, however it will
be registered in the _conversion_manager so it can be used to perform... | 0.788268 | 0.714298 |
rgb = self.xyz_to_rgb(xyz)
logger.debug('RGB: {}'.format(rgb))
rgb_w = self.xyz_to_rgb(xyz_w)
logger.debug('RGB_W: {}'.format(rgb_w))
y_w = xyz_w[1]
y_b = xyz_b[1]
h_rgb = 3 * rgb_w / (rgb_w.sum())
logger.debug('H_RGB: {}'.format(h_rgb))
... | def _adaptation(self, f_l, l_a, xyz, xyz_w, xyz_b, xyz_p=None, p=None, helson_judd=False, discount_illuminant=True) | :param f_l: Luminance adaptation factor
:param l_a: Adapting luminance
:param xyz: Stimulus color in XYZ
:param xyz_w: Reference white color in XYZ
:param xyz_b: Background color in XYZ
:param xyz_p: Proxima field color in XYZ
:param p: Simultaneous contrast/assimilation ... | 2.953449 | 2.886516 | 1.023188 | def _adaptation(self, f_l, l_a, xyz, xyz_w, xyz_b, xyz_p=None, p=None, helson_judd=False, discount_illuminant=True):
"""
:param f_l: Luminance adaptation factor
:param l_a: Adapting luminance
:param xyz: Stimulus color in XYZ
:param xyz_w: Reference white color in XYZ
:param xyz_... | 0.701496 | 0.511412 |
x_e = 0.3320
y_e = 0.1858
n = ((x / (x + y + z)) - x_e) / ((y / (x + y + z)) - y_e)  # chromaticity coordinates from XYZ
a_0 = -949.86315
a_1 = 6253.80338
a_2 = 28.70599
a_3 = 0.00004
t_1 = 0.92159
t_2 = 0.20039
t_3 = 0.07125
cct = a_0 + a_1 * numpy.exp(-n... | def _get_cct(x, y, z) | Reference
Hernandez-Andres, J., Lee, R. L., & Romero, J. (1999).
Calculating correlated color temperatures across the entire gamut of daylight and skylight chromaticities.
Applied Optics, 38(27), 5703-5709. | 3.796291 | 3.73003 | 1.017764 | def _get_cct(x, y, z):
"""
Reference
Hernandez-Andres, J., Lee, R. L., & Romero, J. (1999).
Calculating correlated color temperatures across the entire gamut of daylight and skylight chromaticities.
Applied Optics, 38(27), 5703-5709.
"""
x_e = 0.3320
y_e = 0.1858
n ... | 0.588653 | 0.530419 |
# Transform input colors to cone responses
rgb = self._xyz_to_rgb(xyz)
logger.debug("RGB: {}".format(rgb))
rgb_b = self._xyz_to_rgb(self._xyz_b)
rgb_w = self._xyz_to_rgb(xyz_w)
rgb_w = Hunt.adjust_white_for_scc(rgb, rgb_b, rgb_w, self._p)
logger.debug("R... | def _compute_adaptation(self, xyz, xyz_w, f_l, d) | Modified adaptation procedure incorporating simultaneous chromatic contrast from Hunt model.
:param xyz: Stimulus XYZ.
:param xyz_w: Reference white XYZ.
:param f_l: Luminance adaptation factor
:param d: Degree of adaptation.
:return: Tuple of adapted rgb and rgb_w arrays. | 3.020088 | 2.857865 | 1.056764 | def _compute_adaptation(self, xyz, xyz_w, f_l, d):
"""
Modified adaptation procedure incorporating simultaneous chromatic contrast from Hunt model.
:param xyz: Stimulus XYZ.
:param xyz_w: Reference white XYZ.
:param f_l: Luminance adaptation factor
:param d: Degree of adaptation... | 0.846768 | 0.510192 |
year = super(BuildableDayArchiveView, self).get_year()
month = super(BuildableDayArchiveView, self).get_month()
day = super(BuildableDayArchiveView, self).get_day()
fmt = self.get_day_format()
dt = date(int(year), int(month), int(day))
return dt.strftime(fmt) | def get_day(self) | Return the day from the database in the format expected by the URL. | 2.811006 | 2.513987 | 1.118147 | def get_day(self):
"""
Return the day from the database in the format expected by the URL.
"""
year = super(BuildableDayArchiveView, self).get_year()
month = super(BuildableDayArchiveView, self).get_month()
day = super(BuildableDayArchiveView, self).get_day()
fmt = self.get_day_format()... | 0.576244 | 0.501038 |
if isinstance(p, str):
p = string(p)
return regex(r'\s*') >> p << regex(r'\s*') | def lexeme(p) | From a parser (or string), make a parser that consumes
whitespace on either side. | 5.255884 | 4.095832 | 1.283227 | def lexeme(p):
"""
From a parser (or string), make a parser that consumes
whitespace on either side.
"""
if isinstance(p, str):
p = string(p)
return regex(r'\s*') >> p << regex(r'\s*') | 0.662524 | 0.660501 |
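The combinators are parsy's: `>>` discards the left result, `<<` discards the right. A quick usage sketch:

```python
from parsy import regex, string

def lexeme(p):
    if isinstance(p, str):
        p = string(p)
    return regex(r'\s*') >> p << regex(r'\s*')

number = lexeme(regex(r'[0-9]+').map(int))
print(number.parse('   42 '))  # 42 -- surrounding whitespace is consumed
```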
with open(schemafile) as f:
return cls(json.load(f)) | def from_schemafile(cls, schemafile) | Create a Flatson instance from a schemafile | 3.612632 | 3.453533 | 1.046069 | def from_schemafile(cls, schemafile):
"""
Create a Flatson instance from a schemafile
"""
with open(schemafile) as f:
return cls(json.load(f)) | 0.562537 | 0.519643 |
if self._conn.status == psycopg2.extensions.STATUS_BEGIN:
return self.READY
return self._conn.status | def _status(self) | Return the current connection status as an integer value.
The status should match one of the following constants:
- queries.Session.INTRANS: Connection established, in transaction
- queries.Session.PREPARED: Prepared for second phase of transaction
- queries.Session.READY: Connected, n... | 7.788153 | 6.305507 | 1.235135 | def _status(self):
"""
Return the current connection status as an integer value.
The status should match one of the following constants:
- queries.Session.INTRANS: Connection established, in transaction
- queries.Session.PREPARED: Prepared for second phase of transaction
- quer... | 0.658253 | 0.533397 |
if not self.cursor.rowcount:
return []
self.cursor.scroll(0, 'absolute')
return self.cursor.fetchall() | def items(self) | Return all of the rows that are in the result set.
:rtype: list | 4.90019 | 4.43229 | 1.105566 | def items(self):
"""
Return all of the rows that are in the result set.
:rtype: list
"""
if not self.cursor.rowcount:
return []
self.cursor.scroll(0, 'absolute')
return self.cursor.fetchall() | 0.715735 | 0.512998 |
if self.term.is_a_tty:
return self.term.width // self.hint_width
return 1 | def num_columns(self) | Number of columns displayed. | 13.668602 | 8.479787 | 1.611904 | def num_columns(self):
"""
Number of columns displayed.
"""
if self.term.is_a_tty:
return self.term.width // self.hint_width
return 1 | 0.567817 | 0.664921 |
path_processor = ((lambda x : x) if path_prefix is None
else get_rp_stripper(path_prefix))
reports = []
for result in bads:
if len(result) == 3:
depended_lib, depending_lib, missing_archs = result
reports.append("{0} needs {1} {2} missing from {3}".... | def bads_report(bads, path_prefix=None) | Return a nice report of bad architectures in `bads`
Parameters
----------
bads : set
set of length 2 or 3 tuples. A length 2 tuple is of form
``(depending_lib, missing_archs)`` meaning that an arch in
`require_archs` was missing from ``depending_lib``. A length 3 tuple
is o... | 2.796139 | 2.235158 | 1.25098 | def bads_report(bads, path_prefix=None):
"""
Return a nice report of bad architectures in `bads`
Parameters
----------
bads : set
set of length 2 or 3 tuples. A length 2 tuple is of form
``(depending_lib, missing_archs)`` meaning that an arch in
`require_archs` was missing f... | 0.813683 | 0.522994 |
if not lib_path.startswith('@rpath/'):
return lib_path
lib_rpath = lib_path.split('/', 1)[1]
for rpath in rpaths:
rpath_lib = realpath(pjoin(rpath, lib_rpath))
if os.path.exists(rpath_lib):
return rpath_lib
warnings.warn(
"Couldn't find {0} on paths:\n\... | def resolve_rpath(lib_path, rpaths) | Return `lib_path` with its `@rpath` resolved
If the `lib_path` doesn't have `@rpath` then it's returned as is.
If `lib_path` has `@rpath` then returns the first `rpaths`/`lib_path`
combination found. If the library can't be found in `rpaths` then a
detailed warning is printed and `lib_path` is return... | 2.451395 | 2.355479 | 1.04072 | def resolve_rpath(lib_path, rpaths):
"""
Return `lib_path` with its `@rpath` resolved
If the `lib_path` doesn't have `@rpath` then it's returned as is.
If `lib_path` has `@rpath` then returns the first `rpaths`/`lib_path`
combination found. If the library can't be found in `rpaths` then a
det... | 0.80354 | 0.54056 |
N = community.shape[0]
C = community.shape[1]
T = np.zeros([N, N])
P = np.zeros([N, N])  # allocate separately; chained assignment would alias the arrays
for t in range(len(community[0, :])):
for i in range(len(community[:, 0])):
for j in range(len(community[:, 0])):
if i == j:
continue
# T_ij indicates the ... | def allegiance(community) | Computes the allegiance matrix with values representing the probability that
nodes i and j were assigned to the same community by time-varying clustering methods.
parameters
----------
community : array
array of community assignment of size node,time
returns
-------
P : array
... | 4.341524 | 3.232135 | 1.343237 | def allegiance(community):
"""
Computes the allegiance matrix with values representing the probability that
nodes i and j were assigned to the same community by time-varying clustering methods.
parameters
----------
community : array
array of community assignment of size node,time
... | 0.789518 | 0.815857 |
if isinstance(ncontacts, list):
if len(ncontacts) != nnodes:
raise ValueError(
'Number of contacts, if a list, should be one per node')
if isinstance(lam, list):
if len(lam) != nnodes:
raise ValueError(
'Lambda value of Poisson distri... | def rand_poisson(nnodes, ncontacts, lam=1, nettype='bu', netinfo=None, netrep='graphlet') | Generate a random network where intervals between contacts are distributed by a poisson distribution
Parameters
----------
nnodes : int
Number of nodes in networks
ncontacts : int or list
Number of expected contacts (i.e. edges). If list, number of contacts for each node.
Any ... | 2.343398 | 2.287699 | 1.024347 | def rand_poisson(nnodes, ncontacts, lam=1, nettype='bu', netinfo=None, netrep='graphlet'):
"""
Generate a random network where intervals between contacts are distributed by a poisson distribution
Parameters
----------
nnodes : int
Number of nodes in networks
ncontacts : int or list
... | 0.770422 | 0.568296 |
if not report:
report = {}
# Note the min value of all time series will now be at least 1.
mindata = 1 - np.nanmin(data)
data = data + mindata
ind = np.triu_indices(data.shape[0], k=1)
boxcox_list = np.array([sp.stats.boxcox(np.squeeze(
data[ind[0][n], ind[1][n], :])) for n... | def postpro_boxcox(data, report=None) | Performs box cox transform on everything in data.
If report variable is passed, this is added to the report. | 3.859484 | 3.796534 | 1.016581 | def postpro_boxcox(data, report=None):
"""
Performs box cox transform on everything in data.
If report variable is passed, this is added to the report.
"""
if not report:
report = {}
# Note the min value of all time series will now be at least 1.
mindata = 1 - np.nanmin(data)
... | 0.578389 | 0.546073 |
# Data should be timexnode
report = {}
# Derivative
tdat = data[1:, :] - data[:-1, :]
# Normalize
tdat = tdat / np.std(tdat, axis=0)
# Coupling
coupling = np.array([tdat[:, i] * tdat[:, j] for i in np.arange(0,
tda... | def _temporal_derivative(data, params, report) | Performs the MTD method. See func: teneto.derive.derive. | 3.043904 | 3.043396 | 1.000167 | def _temporal_derivative(data, params, report):
"""
Performs the MTD method. See func: teneto.derive.derive.
"""
# Data should be timexnode
report = {}
# Derivative
tdat = data[1:, :] - data[:-1, :]
# Normalize
tdat = tdat / np.std(tdat, axis=0)
# Coupling
coupling = np.arr... | 0.729496 | 0.605391 |
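The core of the multiply-temporal-derivatives (MTD) computation can be reproduced on synthetic data with plain NumPy; this is a sketch, not teneto's full `derive` pipeline:

```python
import numpy as np

rng = np.random.default_rng(0)
data = rng.standard_normal((100, 5))            # time x node
tdat = data[1:, :] - data[:-1, :]               # first temporal derivative
tdat = tdat / np.std(tdat, axis=0)              # normalise per node
coupling = tdat[:, :, None] * tdat[:, None, :]  # time x node x node coupling
print(coupling.shape)                           # (99, 5, 5)
```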
if threshold_type == 'percent':
netout = binarize_percent(netin, threshold_level, sign, axis)
elif threshold_type == 'magnitude':
netout = binarize_magnitude(netin, threshold_level, sign)
elif threshold_type == 'rdp':
netout = binarize_rdp(netin, threshold_level, sign, axis)
... | def binarize(netin, threshold_type, threshold_level, sign='pos', axis='time') | Binarizes a network, returning the network. General wrapper function for different binarization functions.
Parameters
----------
netin : array or dict
Network (graphlet or contact representation),
threshold_type : str
What type of thresholds to make binarization. Options: 'rdp', 'perce... | 2.25078 | 2.055175 | 1.095177 | def binarize(netin, threshold_type, threshold_level, sign='pos', axis='time'):
"""
Binarizes a network, returning the network. General wrapper function for different binarization functions.
Parameters
----------
netin : array or dict
Network (graphlet or contact representation),
thresh... | 0.881385 | 0.869604 |
inputtype = checkInput(netIn)
# Convert TN to G representation
if inputtype == 'TN' and 'TN' in allowedformats and outputformat != 'TN':
G = netIn.df_to_array()
netInfo = {'nettype': netIn.nettype, 'netshape': netIn.netshape}
elif inputtype == 'TN' and 'TN' in allowedformats and out... | def process_input(netIn, allowedformats, outputformat='G') | Takes an input network and checks what format it is in.
Parameters
----------
netIn : array, dict, or TemporalNetwork
Network (graphlet, contact or object)
allowedformats : str
Which formats of network objects are allowed. Options: 'C', 'TN', 'G'.
outputformat: str, default=G
... | 2.646709 | 2.436064 | 1.086469 | def process_input(netIn, allowedformats, outputformat='G'):
"""
Takes an input network and checks what format it is in.
Parameters
----------
netIn : array, dict, or TemporalNetwork
Network (graphlet, contact or object)
allowedformats : str
Which formats of network objects are al...
d = collections.OrderedDict()
for c in C['contacts']:
ct = tuple(c)
if ct in d:
d[ct] += 1
else:
d[ct] = 1
new_contacts = []
new_values = []
for (key, value) in d.items():
new_values.append(value)
new_contacts.append(key)
C_ou... | def multiple_contacts_get_values(C) | Given a contact representation with repeated contacts, this function removes duplicates and creates a value
Parameters
----------
C : dict
contact representation with multiple repeated contacts.
Returns
-------
:C_out: dict
Contact representation with duplicate contacts re... | 2.388001 | 2.190151 | 1.090336 | def multiple_contacts_get_values(C):
"""
Given a contact representation with repeated contacts, this function removes duplicates and creates a value
Parameters
----------
C : dict
contact representation with multiple repeated contacts.
Returns
-------
:C_out: dict
... | 0.820343 | 0.676834 |
if len(df) > 0:
idx = np.array(list(map(list, df.values)))
G = np.zeros([netshape[0], netshape[0], netshape[1]])
if idx.shape[1] == 3:
if nettype[-1] == 'u':
idx = np.vstack([idx, idx[:, [1, 0, 2]]])
idx = idx.astype(int)
G[idx[:, 0], ... | def df_to_array(df, netshape, nettype) | Returns a numpy array (snapshot representation) from the dataframe contact list
Parameters:
df : pandas df
pandas df with columns, i,j,t.
netshape : tuple
network shape, format: (node, time)
nettype : str
'wu', 'wd', 'bu', 'bd'
Returns:
--------
... | 1.946345 | 1.890199 | 1.029704 | def df_to_array(df, netshape, nettype):
"""
Returns a numpy array (snapshot representation) from the dataframe contact list
Parameters:
df : pandas df
pandas df with columns, i,j,t.
netshape : tuple
network shape, format: (node, time)
nettype : str
... | 0.704592 | 0.628835 |
if distance_func_name == 'default' and netinfo['nettype'][0] == 'b':
print('Default distance function specified. As network is binary, using Hamming')
distance_func_name = 'hamming'
elif distance_func_name == 'default' and netinfo['nettype'][0] == 'w':
distance_func_name = 'euclide... | def check_distance_funciton_input(distance_func_name, netinfo) | Function checks distance_func_name; if it is specified as 'default', then given the type of the network it selects a default distance function.
Parameters
----------
distance_func_name : str
distance function name.
netinfo : dict
the output of utils.process_input
Returns
-------... | 3.215411 | 3.008744 | 1.068689 | def check_distance_funciton_input(distance_func_name, netinfo):
"""
Function checks distance_func_name; if it is specified as 'default', then given the type of the network it selects a default distance function.
Parameters
----------
distance_func_name : str
distance function name.
netin... | 0.871229 | 0.568895 |
if isinstance(parcellation, str):
parcin = ''
if '+' in parcellation:
parcin = parcellation
parcellation = parcellation.split('+')[0]
if '+OH' in parcin:
subcortical = True
else:
subcortical = None
if '+SUIT' in parcin:
... | def make_parcellation(data_path, parcellation, parc_type=None, parc_params=None) | Performs a parcellation which reduces voxel space to regions of interest (brain data).
Parameters
----------
data_path : str
Path to .nii image.
parcellation : str
Specify which parcellation that you would like to use. For MNI: 'gordon2014_333', 'power2012_264', For TAL: 'shen2013_278'... | 2.531412 | 2.223121 | 1.138675 | def make_parcellation(data_path, parcellation, parc_type=None, parc_params=None):
"""
Performs a parcellation which reduces voxel space to regions of interest (brain data).
Parameters
----------
data_path : str
Path to .nii image.
parcellation : str
Specify which parcellation t... | 0.750238 | 0.540318 |
steps = (1.0/(N-1)) * (stop - start)
if np.isscalar(steps):
return steps*np.arange(N) + start
else:
return steps[:, None]*np.arange(N) + start[:, None] | def create_traj_ranges(start, stop, N) | Fills in the trajectory range.
# Adapted from https://stackoverflow.com/a/40624614 | 3.039685 | 2.983643 | 1.018783 | def create_traj_ranges(start, stop, N):
"""
Fills in the trajectory range.
# Adapted from https://stackoverflow.com/a/40624614
"""
steps = (1.0/(N-1)) * (stop - start)
if np.isscalar(steps):
return steps*np.arange(N) + start
else:
return steps[:, None]*np.arange(N) + st... | 0.559892 | 0.509642 |
newnetwork = tnet.network.copy()
newnetwork['i'] = (tnet.network['i']) + \
((tnet.netshape[0]) * (tnet.network['t']))
newnetwork['j'] = (tnet.network['j']) + \
((tnet.netshape[0]) * (tnet.network['t']))
if 'weight' not in newnetwork.columns:
newnetwork['weight'] = 1
newn... | def create_supraadjacency_matrix(tnet, intersliceweight=1) | Returns a supraadjacency matrix from a temporal network structure
Parameters
--------
tnet : TemporalNetwork
Temporal network (any network type)
intersliceweight : int
Weight that links the same node from adjacent time-points
Returns
--------
supranet : dataframe
Su... | 2.527046 | 2.362498 | 1.06965 | def create_supraadjacency_matrix(tnet, intersliceweight=1):
"""
Returns a supraadjacency matrix from a temporal network structure
Parameters
--------
tnet : TemporalNetwork
Temporal network (any network type)
intersliceweight : int
Weight that links the same node from adjacent t... | 0.846308 | 0.652158 |
com_membership = np.array(com_membership)
D = []
for i in range(com_membership.shape[0]):
for j in range(i+1, com_membership.shape[0]):
con = np.sum((com_membership[i, :] - com_membership[j, :])
== 0, axis=-1) / com_membership.shape[-1]
twhere ... | def make_consensus_matrix(com_membership, th=0.5) | r"""
Makes the consensus matrix
.
Parameters
----------
com_membership : array
Shape should be node, time, iteration.
th : float
threshold to cancel noisy edges
Returns
-------
D : array
consensus matrix | 3.630805 | 3.608872 | 1.006077 | def make_consensus_matrix(com_membership, th=0.5):
"""
r"""
Makes the consensus matrix
.
Parameters
----------
com_membership : array
Shape should be node, time, iteration.
th : float
threshold to cancel noisy edges
Returns
-------
D : array
consensus... | 0.793316 | 0.604428 |
com_membership = np.array(com_membership)
# make first indices be between 0 and 1.
com_membership[:, 0] = clean_community_indexes(com_membership[:, 0])
# loop over all timepoints, get Jaccard distance in a greedy manner for largest community to time period before
for t in range(1, com_members... | def make_temporal_consensus(com_membership) | r"""
Matches community labels across time-points
Jaccard matching is done in a greedy fashion, matching the largest community at t with the community at t-1.
Parameters
----------
com_membership : array
Shape should be node, time.
Returns
-------
D : array
temporal cons... | 3.279249 | 3.015183 | 1.087579 | def make_temporal_consensus(com_membership):
"""
r"""
Matches community labels across time-points
Jaccard matching is done in a greedy fashion, matching the largest community at t with the community at t-1.
Parameters
----------
com_membership : array
Shape should be node, time.
... | 0.821778 | 0.665954 |
# Preallocate
flex = np.zeros(communities.shape[0])
# Go from the second time point to last, compare with time-point before
for t in range(1, communities.shape[1]):
flex[communities[:, t] != communities[:, t-1]] += 1
# Normalize
flex = flex / (communities.shape[1] - 1)
return f... | def flexibility(communities) | Amount a node changes community
Parameters
----------
communities : array
Community array of shape (node,time)
Returns
--------
flex : array
Size with the flexibility of each node.
Notes
-----
Flexibility calculates the number of times a node switches its community ... | 4.733977 | 3.730659 | 1.268938 | def flexibility(communities):
"""
Amount a node changes community
Parameters
----------
communities : array
Community array of shape (node,time)
Returns
--------
flex : array
Size with the flexibility of each node.
Notes
-----
Flexibility calculates the numb... | 0.837437 | 0.808729 |
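The same measure, computed vectorised on a toy community array:

```python
import numpy as np

communities = np.array([[1, 1, 2, 2],    # node 0 switches once
                        [1, 1, 1, 1],    # node 1 never switches
                        [2, 1, 2, 1]])   # node 2 switches at every step
switches = np.count_nonzero(np.diff(communities, axis=1), axis=1)
flex = switches / (communities.shape[1] - 1)
print(flex)  # [0.333... 0.    1.   ]
```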
relfun = []
threshold = []
for ec in exclusion_criteria:
if ec[0:2] == '>=':
relfun.append(np.greater_equal)
threshold.append(float(ec[2:]))
elif ec[0:2] == '<=':
relfun.append(np.less_equal)
threshold.append(float(ec[2:]))
elif ec... | def process_exclusion_criteria(exclusion_criteria) | Parses an exclusion criteria string to get the function and threshold.
Parameters
----------
exclusion_criteria : list
list of strings where each string is of the format [relation][threshold]. E.g. \'<0.5\' or \'>=1\'
Returns
-------
relfun : list
list of numpy f... | 2.13075 | 1.827252 | 1.166095 | def process_exclusion_criteria(exclusion_criteria):
"""
Parses an exclusion criteria string to get the function and threshold.
Parameters
----------
exclusion_criteria : list
list of strings where each string is of the format [relation][threshold]. E.g. \'<0.5\' or \'>=1\'
Retur... | 0.776999 | 0.627866 |
# make sure the static and temporal communities have the same number of nodes
if staticcommunities.shape[0] != temporalcommunities.shape[0]:
raise ValueError(
'Temporal and static communities have different dimensions')
alleg = allegiance(temporalcommunities)
Rcoeff = np.z... | def recruitment(temporalcommunities, staticcommunities) | Calculates recruitment coefficient for each node. Recruitment coefficient is the average probability of nodes from the
same static communities being in the same temporal communities at other time-points or during different tasks.
Parameters:
------------
temporalcommunities : array
temporal ... | 3.982296 | 3.488743 | 1.14147 | def recruitment(temporalcommunities, staticcommunities):
"""
Calculates recruitment coefficient for each node. Recruitment coefficient is the average probability of nodes from the
same static communities being in the same temporal communities at other time-points or during different tasks.
Parameters... | 0.872102 | 0.748168 |
# make sure the static and temporal communities have the same number of nodes
if staticcommunities.shape[0] != temporalcommunities.shape[0]:
raise ValueError(
'Temporal and static communities have different dimensions')
alleg = allegiance(temporalcommunities)
Icoeff = np.zero... | def integration(temporalcommunities, staticcommunities) | Calculates the integration coefficient for each node. Measures the average probability
that a node is in the same community as nodes from other systems.
Parameters:
------------
temporalcommunities : array
temporal communities vector (node,time)
staticcommunities : array
... | 4.464949 | 3.614507 | 1.235286 | def integration(temporalcommunities, staticcommunities):
"""
Calculates the integration coefficient for each node. Measures the average probability
that a node is in the same community as nodes from other systems.
Parameters:
------------
temporalcommunities : array
temporal co... | 0.86916 | 0.630372 |
lowest, highest = self.tracks[0].get_active_pitch_range()
if len(self.tracks) > 1:
for track in self.tracks[1:]:
low, high = track.get_active_pitch_range()
if low < lowest:
lowest = low
if high > highest:
... | def get_active_pitch_range(self) | Return the active pitch range of the pianorolls of all tracks as a tuple
(lowest, highest).
Returns
-------
lowest : int
The lowest active pitch among the pianorolls of all tracks.
highest : int
The highest active pitch among the pianorolls of all tracks.
"""
Return the active pitch range of the pianorolls of all tracks as a tuple
(lowest, highest).
Returns
-------
lowest : int
The lowest active pitch among the pianorolls of all tracks.
highest : int
The lowest hig... | 0.741364 | 0.631438 |
empty_track_indices = [idx for idx, track in enumerate(self.tracks)
if not np.any(track.pianoroll)]
return empty_track_indices | def get_empty_tracks(self) | Return the indices of tracks with empty pianorolls.
Returns
-------
empty_track_indices : list
The indices of tracks with empty pianorolls. | 4.312263 | 3.439944 | 1.253585 | def get_empty_tracks(self):
"""
Return the indices of tracks with empty pianorolls.
Returns
-------
empty_track_indices : list
The indices of tracks with empty pianorolls.
"""
empty_track_indices = [idx for idx, track in enumerate(self.tracks)
... | 0.691406 | 0.596198 |
if not isinstance(obj, Multitrack):
raise TypeError("Support only `pypianoroll.Multitrack` class objects")
copied = deepcopy(obj)
copied.pad_to_same()
return copied | def pad_to_same(obj) | Return a copy of the object with shorter piano-rolls padded with zeros
at the end along the time axis to the length of the piano-roll with the
maximal length. | 7.100032 | 5.867205 | 1.210122 | def pad_to_same(obj):
"""
Return a copy of the object with shorter piano-rolls padded with zeros
at the end along the time axis to the length of the piano-roll with the
maximal length.
"""
if not isinstance(obj, Multitrack):
raise TypeError("Support only `pypianoroll.Multitrack` cla... | 0.786028 | 0.732053 |
_validate_pianoroll(pianoroll)
reshaped = pianoroll[:, :120].reshape(-1, 10, 12)  # (time, octave, pitch class)
reshaped[..., :8] += pianoroll[:, 120:].reshape(-1, 1, 8)
return np.sum(reshaped, 1) | def _to_chroma(pianoroll) | Return the unnormalized chroma features of a pianoroll. | 2.94459 | 2.857388 | 1.030518 | def _to_chroma(pianoroll):
"""
Return the unnormalized chroma features of a pianoroll.
"""
_validate_pianoroll(pianoroll)
reshaped = pianoroll[:, :120].reshape(-1, 10, 12)  # (time, octave, pitch class)
reshaped[..., :8] += pianoroll[:, 120:].reshape(-1, 1, 8)
return np.sum(reshaped, 1) | 0.744208 | 0.62088 |
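With the octave-major reshape (assuming the standard 10-octaves-by-12-pitch-classes layout for the first 120 pitches), two notes an octave apart fall into the same chroma bin:

```python
import numpy as np

pianoroll = np.zeros((4, 128), dtype=np.uint8)
pianoroll[:, [60, 72]] = 1                         # C4 and C5
reshaped = pianoroll[:, :120].reshape(-1, 10, 12)  # (time, octave, class)
reshaped[..., :8] += pianoroll[:, 120:].reshape(-1, 1, 8)
chroma = reshaped.sum(axis=1)
print(chroma[0, 0])  # 2 -- both notes share pitch class C
```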
_validate_pianoroll(pianoroll)
reshaped = pianoroll.reshape(-1, beat_resolution * pianoroll.shape[1])
n_empty_beats = np.count_nonzero(~reshaped.any(1))  # beats with no active note
return n_empty_beats / len(reshaped) | def empty_beat_rate(pianoroll, beat_resolution) | Return the ratio of empty beats to the total number of beats in a
pianoroll. | 2.961781 | 2.79248 | 1.060628 | def empty_beat_rate(pianoroll, beat_resolution):
"""
Return the ratio of empty beats to the total number of beats in a
pianoroll.
"""
_validate_pianoroll(pianoroll)
reshaped = pianoroll.reshape(-1, beat_resolution * pianoroll.shape[1])
n_empty_beats = np.count_nonzero(~reshaped.any(1))  # beats with no active note
... | 0.725077 | 0.781247 |
_validate_pianoroll(pianoroll)
chroma = _to_chroma(pianoroll)
return np.count_nonzero(np.any(chroma, 0)) | def n_pitche_classes_used(pianoroll) | Return the number of unique pitch classes used in a pianoroll. | 3.500182 | 3.410502 | 1.026295 | def n_pitche_classes_used(pianoroll):
"""
Return the number of unique pitch classes used in a pianoroll.
"""
_validate_pianoroll(pianoroll)
chroma = _to_chroma(pianoroll)
return np.count_nonzero(np.any(chroma, 0)) | 0.688704 | 0.565299 |
_validate_pianoroll(pianoroll)
if np.issubdtype(pianoroll.dtype, np.bool_):
pianoroll = pianoroll.astype(np.uint8)
padded = np.pad(pianoroll, ((1, 1), (0, 0)), 'constant')
diff = np.diff(padded, axis=0).reshape(-1)
onsets = (diff > 0).nonzero()[0]
offsets = (diff < 0).nonzero()[0]
... | def qualified_note_rate(pianoroll, threshold=2) | Return the ratio of the number of the qualified notes (notes longer than
`threshold` (in time step)) to the total number of notes in a pianoroll. | 2.257025 | 2.245966 | 1.004924 | def qualified_note_rate(pianoroll, threshold=2):
"""
Return the ratio of the number of the qualified notes (notes longer than
`threshold` (in time step)) to the total number of notes in a pianoroll.
"""
_validate_pianoroll(pianoroll)
if np.issubdtype(pianoroll.dtype, np.bool_):
pian... | 0.791932 | 0.76487 |
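The pad-and-diff trick recovers note onsets and offsets from a pianoroll column; a single-pitch sketch:

```python
import numpy as np

column = np.array([0, 1, 1, 1, 0, 1, 0], dtype=np.int8)  # one pitch over time
diff = np.diff(np.pad(column, 1))   # pad so edge notes get onset/offset too
onsets = (diff > 0).nonzero()[0]
offsets = (diff < 0).nonzero()[0]
durations = offsets - onsets        # [3, 1]
print(np.mean(durations >= 2))      # 0.5 -> qualified-note rate at threshold 2
```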
if beat_resolution not in (4, 6, 8, 9, 12, 16, 18, 24):
raise ValueError("Unsupported beat resolution. Only 4, 6, 8 ,9, 12, "
"16, 18, 42 are supported.")
_validate_pianoroll(pianoroll)
def _drum_pattern_mask(res, tol):
if res == 24:
drum_p... | def drum_in_pattern_rate(pianoroll, beat_resolution, tolerance=0.1) | Return the ratio of the number of drum notes that lie on the drum
pattern (i.e., at certain time steps) to the total number of drum notes. | 1.908838 | 1.893038 | 1.008347 | def drum_in_pattern_rate(pianoroll, beat_resolution, tolerance=0.1):
"""
Return the ratio of the number of drum notes that lie on the drum
pattern (i.e., at certain time steps) to the total number of drum notes.
"""
if beat_resolution not in (4, 6, 8, 9, 12, 16, 18, 24):
raise ValueErro... | 0.57517 | 0.609553 |
if not isinstance(key, int):
raise TypeError("`key` must be an integer.")
if key > 11 or key < 0:
raise ValueError("`key` must be an integer between 0 and 11.")
if kind not in ('major', 'minor'):
raise ValueError("`kind` must be one of 'major' or 'minor'.")
_validate_pian... | def in_scale_rate(pianoroll, key=3, kind='major') | Return the ratio of the number of nonzero entries that lie in a specific
scale to the total number of nonzero entries in a pianoroll. Default to C
major scale. | 2.141032 | 2.103438 | 1.017873 | def in_scale_rate(pianoroll, key=3, kind='major'):
"""
Return the ratio of the number of nonzero entries that lie in a specific
scale to the total number of nonzero entries in a pianoroll. Default to C
major scale.
"""
if not isinstance(key, int):
raise TypeError("`key` must be an inte... | 0.674345 | 0.549761 |
nonzero_steps = np.any(self.pianoroll, axis=1)
inv_last_nonzero_step = np.argmax(np.flip(nonzero_steps, axis=0))
active_length = self.pianoroll.shape[0] - inv_last_nonzero_step
return active_length | def get_active_length(self) | Return the active length (i.e., without trailing silence) of the
pianoroll. The unit is time step.
Returns
-------
active_length : int
The active length (i.e., without trailing silence) of the pianoroll. | 3.980884 | 3.492857 | 1.139721 | def get_active_length(self):
"""
Return the active length (i.e., without trailing silence) of the
pianoroll. The unit is time step.
Returns
-------
active_length : int
The active length (i.e., without trailing silence) of the pianoroll.
"""
nonzero_steps... | 0.748076 | 0.526769 |
if self.pianoroll.shape[1] < 1:
raise ValueError("Cannot compute the active pitch range for an "
"empty pianoroll")
lowest = 0
highest = 127
while lowest < highest:
if np.any(self.pianoroll[:, lowest]):
break
... | def get_active_pitch_range(self) | Return the active pitch range as a tuple (lowest, highest).
Returns
-------
lowest : int
The lowest active pitch in the pianoroll.
highest : int
The highest active pitch in the pianoroll. | 2.558522 | 2.321702 | 1.102003 | def get_active_pitch_range(self):
"""
Return the active pitch range as a tuple (lowest, highest).
Returns
-------
lowest : int
The lowest active pitch in the pianoroll.
highest : int
The highest active pitch in the pianoroll.
"""
if self.pian... | 0.766242 | 0.609611 |
if x.shape[0] != 1:
raise ValueError("Only one sample can be plotted at a time.")
# compile theano function
xs = T.tensor4('xs').astype(theano.config.floatX)
get_activity = theano.function([xs], get_output(layer, xs))
activity = get_activity(x)
shape = activity.shape
nrows = n... | def plot_conv_activity(layer, x, figsize=(6, 8)) | Plot the activities of a specific layer.
Only really makes sense with layers that work on 2D data (2D
convolutional layers, 2D pooling layers ...).
Parameters
----------
layer : lasagne.layers.Layer
x : numpy.ndarray
Only takes one sample at a time, i.e. x.shape[0] == 1. | 2.865567 | 2.825922 | 1.014029 | def plot_conv_activity(layer, x, figsize=(6, 8)):
"""
Plot the activities of a specific layer.
Only really makes sense with layers that work on 2D data (2D
convolutional layers, 2D pooling layers ...).
Parameters
----------
layer : lasagne.layers.Layer
x : numpy.ndarray
Only takes... | 0.831742 | 0.851583 |
import pydotplus as pydot
pydot_graph = pydot.Dot('Network', graph_type='digraph')
pydot_nodes = {}
pydot_edges = []
for i, layer in enumerate(layers):
layer_name = getattr(layer, 'name', None)
if layer_name is None:
layer_name = layer.__class__.__name__
laye... | def make_pydot_graph(layers, output_shape=True, verbose=False) | :parameters:
- layers : list
List of the layers, as obtained from lasagne.layers.get_all_layers
- output_shape: (default `True`)
If `True`, the output shape of each layer will be displayed.
- verbose: (default `False`)
If `True`, layer attributes like filter s... | 1.925423 | 1.858983 | 1.03574 | def make_pydot_graph(layers, output_shape=True, verbose=False):
"""
:parameters:
- layers : list
List of the layers, as obtained from lasagne.layers.get_all_layers
- output_shape: (default `True`)
If `True`, the output shape of each layer will be displayed.
- verb... | 0.703639 | 0.542015 |
from IPython.display import Image
layers = (layers.get_all_layers() if hasattr(layers, 'get_all_layers')
else layers)
dot = make_pydot_graph(layers, **kwargs)
return Image(dot.create_png()) | def draw_to_notebook(layers, **kwargs) | Draws a network diagram in an IPython notebook
:parameters:
- layers : list or NeuralNet instance
List of layers or the neural net to draw.
- **kwargs : see the docstring of make_pydot_graph for other options | 3.869769 | 3.186751 | 1.21433 | def draw_to_notebook(layers, **kwargs):
"""
Draws a network diagram in an IPython notebook
:parameters:
- layers : list or NeuralNet instance
List of layers or the neural net to draw.
- **kwargs : see the docstring of make_pydot_graph for other options
"""
from IPyth... | 0.77102 | 0.540621 |
from decaf.util import transform # soft dep
_JEFFNET_FLIP = True
# first, extract the 256x256 center.
image = transform.scale_and_extract(transform.as_rgb(image), 256)
# convert to [0,255] float32
image = image.astype(np.float32) * 255.
if _JEFFNET_FLIP... | def prepare_image(self, image) | Returns image of shape `(256, 256, 3)`, as expected by
`transform` when `classify_direct = True`. | 8.974966 | 8.861794 | 1.012771 | def prepare_image(self, image):
"""
Returns image of shape `(256, 256, 3)`, as expected by
`transform` when `classify_direct = True`.
"""
from decaf.util import transform # soft dep
_JEFFNET_FLIP = True
# first, extract the 256x256 center.
image = transform.scale_and_extract(t... | 0.828037 | 0.671918 |
mapping = kwargs
if args:
if len(args) != 1 or not isinstance(args[0], dict):
raise RedisError('MSET requires **kwargs or a single dict arg')
mapping.update(args[0])
if len(mapping) == 0:
raise ResponseError("wrong number of arguments... | def mset(self, *args, **kwargs) | Sets key/values based on a mapping. Mapping can be supplied as a single
dictionary argument or as kwargs. | 3.485207 | 3.135663 | 1.111474 | def mset(self, *args, **kwargs):
"""
Sets key/values based on a mapping. Mapping can be supplied as a single
dictionary argument or as kwargs.
"""
mapping = kwargs
if args:
if len(args) != 1 or not isinstance(args[0], dict):
raise RedisError('MSET requires **kwargs o... | 0.680242 | 0.526586 |
redis_hash = self._get_hash(hashkey, 'HEXISTS')
return self._encode(attribute) in redis_hash | def hexists(self, hashkey, attribute) | Emulate hexists. | 8.409638 | 7.622225 | 1.103305 | def hexists(self, hashkey, attribute):
"""
Emulate hexists.
"""
redis_hash = self._get_hash(hashkey, 'HEXISTS')
return self._encode(attribute) in redis_hash | 0.626524 | 0.569613 |
disco = self.dependencies[aioxmpp.disco.DiscoClient]
response = yield from disco.query_info(
peer_jid,
)
return namespaces.xep0050_commands in response.features | def supports_commands(self, peer_jid) | Detect whether a peer supports :xep:`50` Ad-Hoc commands.
:param peer_jid: JID of the peer to query
:type peer_jid: :class:`aioxmpp.JID`
:rtype: :class:`bool`
:return: True if the peer supports the Ad-Hoc commands protocol, false
otherwise.
Note that the fact t... | 12.254194 | 9.169634 | 1.336389 | def supports_commands(self, peer_jid):
"""
Detect whether a peer supports :xep:`50` Ad-Hoc commands.
:param peer_jid: JID of the peer to query
:type peer_jid: :class:`aioxmpp.JID`
:rtype: :class:`bool`
:return: True if the peer supports the Ad-Hoc commands protocol, false
... | 0.847968 | 0.582313 |
if self._response is not None:
raise RuntimeError("command execution already started")
request = aioxmpp.IQ(
type_=aioxmpp.IQType.SET,
to=self._peer_jid,
payload=adhoc_xso.Command(self._command_name),
)
self._response = yield fr... | def start(self) | Initiate the session by starting to execute the command with the peer.
:return: The :attr:`~.xso.Command.first_payload` of the response
This sends an empty command IQ request with the
:attr:`~.ActionType.EXECUTE` action.
The :attr:`status`, :attr:`response` and related attributes get ... | 6.721424 | 4.330876 | 1.551978 | def start(self):
"""
Initiate the session by starting to execute the command with the peer.
:return: The :attr:`~.xso.Command.first_payload` of the response
This sends an empty command IQ request with the
:attr:`~.ActionType.EXECUTE` action.
The :attr:`status`, :attr:`response... | 0.74459 | 0.581957 |
if self._this_occupant is not None:
items = [self._this_occupant]
else:
items = []
items += list(self._occupant_info.values())
return items | def members(self) | A copy of the list of occupants. The local user is always the first
item in the list, unless the :meth:`on_enter` has not fired yet. | 6.388441 | 3.987495 | 1.602119 | def members(self):
"""
A copy of the list of occupants. The local user is always the first
item in the list, unless the :meth:`on_enter` has not fired yet.
"""
if self._this_occupant is not None:
items = [self._this_occupant]
else:
items = []
items += list(self._occ... | 0.633297 | 0.560974 |
keys = list(self.keys())
try:
keys.remove(None)
except ValueError:
pass
keys.sort()
key = lookup_language(keys, language_ranges)
return self[key] | def lookup(self, language_ranges) | Perform an RFC4647 language range lookup on the keys in the
dictionary. `language_ranges` must be a sequence of
:class:`LanguageRange` instances.
Return the entry in the dictionary with a key as produced by
`lookup_language`. If `lookup_language` does not find a match and the
ma... | 3.510848 | 3.125103 | 1.123434 | def lookup(self, language_ranges):
"""
Perform an RFC4647 language range lookup on the keys in the
dictionary. `language_ranges` must be a sequence of
:class:`LanguageRange` instances.
Return the entry in the dictionary with a key as produced by
`lookup_language`. If `lookup_lan... | 0.861858 | 0.734715 |
record = b".".join([
b"_" + service.encode("ascii"),
b"_" + transport.encode("ascii"),
domain])
answer = yield from repeated_query(
record,
dns.rdatatype.SRV,
**kwargs)
if answer is None:
return None
items = [
(rec.priority, rec.we... | def lookup_srv(
domain: bytes,
service: str,
transport: str = "tcp",
**kwargs) | Query the DNS for SRV records describing how the given `service` over the
given `transport` is implemented for the given `domain`. `domain` must be
an IDNA-encoded :class:`bytes` object; `service` must be a normal
:class:`str`.
Keyword arguments are passed to :func:`repeated_query`.
Return a list ... | 3.632017 | 2.949542 | 1.231384 | def lookup_srv(
domain: bytes,
service: str,
transport: str = "tcp",
**kwargs):
"""
Query the DNS for SRV records describing how the given `service` over the
given `transport` is implemented for the given `domain`. `domain` must be
an IDNA-encoded :class:`bytes` object; `... | 0.88796 | 0.698278 |
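The record name in `lookup_srv` is built from underscore-prefixed service and transport labels, and the truncated tail of the row is evidently collecting `(priority, weight, ...)` tuples from the answer. A self-contained sketch of the name construction plus a simple deterministic ordering (note that RFC 2782 actually prescribes weighted random selection within a priority group):

```python
def srv_record_name(domain: bytes, service: str, transport: str = "tcp") -> bytes:
    # e.g. b"example.com", "xmpp-client" -> b"_xmpp-client._tcp.example.com"
    return b".".join([
        b"_" + service.encode("ascii"),
        b"_" + transport.encode("ascii"),
        domain,
    ])

# (priority, weight, port, host): lower priority wins; higher weight is
# preferred within the same priority in this simplified ordering.
records = [(10, 60, 5222, b"xmpp1.example.com"),
           (5, 0, 5222, b"xmpp0.example.com")]
records.sort(key=lambda rec: (rec[0], -rec[1]))

assert srv_record_name(b"example.com", "xmpp-client") == \
    b"_xmpp-client._tcp.example.com"
```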
record = b".".join([
b"_" + str(port).encode("ascii"),
b"_" + transport.encode("ascii"),
hostname
])
answer = yield from repeated_query(
record,
dns.rdatatype.TLSA,
require_ad=require_ad,
**kwargs)
if answer is None:
return None
... | def lookup_tlsa(hostname, port, transport="tcp", require_ad=True, **kwargs) | Query the DNS for TLSA records describing the certificates and/or keys to
expect when contacting `hostname` at the given `port` over the given
`transport`. `hostname` must be an IDNA-encoded :class:`bytes` object.
The keyword arguments are passed to :func:`repeated_query`; `require_ad`
defaults to :dat... | 4.397869 | 3.582686 | 1.227534 | def lookup_tlsa(hostname, port, transport="tcp", require_ad=True, **kwargs):
"""
Query the DNS for TLSA records describing the certificates and/or keys to
expect when contacting `hostname` at the given `port` over the given
`transport`. `hostname` must be an IDNA-encoded :class:`bytes` object.
The ... | 0.863017 | 0.678487 |
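`lookup_tlsa` builds its record name the same way, with the port number as the first label; the `require_ad` flag additionally demands DNSSEC-authenticated answers, which matters because unauthenticated TLSA data is useless for certificate validation. The name construction in isolation:

```python
def tlsa_record_name(hostname: bytes, port: int, transport: str = "tcp") -> bytes:
    # e.g. b"xmpp.example.com", 5222 -> b"_5222._tcp.xmpp.example.com"
    return b".".join([
        b"_" + str(port).encode("ascii"),
        b"_" + transport.encode("ascii"),
        hostname,
    ])

assert tlsa_record_name(b"xmpp.example.com", 5222) == b"_5222._tcp.xmpp.example.com"
```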
parts = [
_process_identity(identity)
for identity in identities
]
parts.sort()
return b"".join(parts)+b"\x1c" | def _process_identities(identities) | Generate the `Identities String` from an iterable of identities.
:param identities: The identities to generate the features string from.
:type identities: :class:`~collections.abc.Iterable` of
:class:`~.disco.xso.Identity`
:return: The `Identities String`
:rtype: :class:`bytes`
Generate th... | 6.086969 | 5.620315 | 1.08303 | def _process_identities(identities):
"""
Generate the `Identities String` from an iterable of identities.
:param identities: The identities to generate the features string from.
:type identities: :class:`~collections.abc.Iterable` of
:class:`~.disco.xso.Identity`
:return: The `Identities St... | 0.850002 | 0.538073 |
parts = [
_process_form(form)
for form in exts
]
parts.sort()
return b"".join(parts)+b"\x1c" | def _process_extensions(exts) | Generate the `Extensions String` from an iterable of data forms.
:param exts: The data forms to generate the extensions string from.
:type exts: :class:`~collections.abc.Iterable` of
:class:`~.forms.xso.Data`
:return: The `Extensions String`
:rtype: :class:`bytes`
Generate the `Extensions ... | 7.861825 | 6.660774 | 1.180317 | def _process_extensions(exts):
"""
Generate the `Extensions String` from an iterable of data forms.
:param exts: The data forms to generate the extensions string from.
:type exts: :class:`~collections.abc.Iterable` of
:class:`~.forms.xso.Data`
:return: The `Extensions String`
:rtype: :c... | 0.830388 | 0.508117 |
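`_process_identities` and `_process_extensions` share a sort-join-terminate pattern with 0x1c as the terminator, which is characteristic of the :xep:`390` entity-capabilities hashing scheme. A hedged sketch of how such component strings typically feed into a capability hash; the separator bytes and component order below are illustrative, not a normative XEP-0390 implementation:

```python
import base64
import hashlib

def caps_hash(features_string: bytes, identities_string: bytes,
              extensions_string: bytes) -> str:
    # Concatenate the component strings and hash the result; publishing
    # the base64 digest lets peers cache disco#info by capability.
    blob = features_string + identities_string + extensions_string
    return base64.b64encode(hashlib.sha256(blob).digest()).decode("ascii")

# Each component is itself sorted parts joined and terminated with an
# information-separator byte, as in the two rows above.
identity_parts = sorted([b"client\x1fpc\x1f\x1faioxmpp\x1f\x1e"])
identities_string = b"".join(identity_parts) + b"\x1c"
print(caps_hash(b"\x1c", identities_string, b"\x1c"))
```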
stanza = aioxmpp.Presence()
self._state.apply_to_stanza(stanza)
stanza.status.update(self._status)
return stanza | def make_stanza(self) | Create and return a presence stanza with the current settings.
:return: Presence stanza
:rtype: :class:`aioxmpp.Presence` | 7.006313 | 5.696356 | 1.229964 | def make_stanza(self):
"""
Create and return a presence stanza with the current settings.
:return: Presence stanza
:rtype: :class:`aioxmpp.Presence`
"""
stanza = aioxmpp.Presence()
self._state.apply_to_stanza(stanza)
stanza.status.update(self._status)
return stanza | 0.618752 | 0.541833 |
if not isinstance(priority, numbers.Integral):
raise TypeError(
"invalid priority: got {}, expected integer".format(
type(priority)
)
)
if not isinstance(state, aioxmpp.PresenceState):
raise TypeError(
... | def set_presence(self, state, status={}, priority=0) | Change the presence broadcast by the client.
:param state: New presence state to broadcast
:type state: :class:`aioxmpp.PresenceState`
:param status: New status information to broadcast
:type status: :class:`dict` or :class:`str`
:param priority: New priority for the resource
... | 2.556511 | 2.386725 | 1.071138 | def set_presence(self, state, status={}, priority=0):
"""
Change the presence broadcast by the client.
:param state: New presence state to broadcast
:type state: :class:`aioxmpp.PresenceState`
:param status: New status information to broadcast
:type status: :class:`dict` or :cla... | 0.859987 | 0.53607 |
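Two small conventions in `set_presence` deserve a note: `priority` is checked against :class:`numbers.Integral` (so any integer-like type passes, not just `int`), and the truncated signature suggests `status` accepts either a plain string or a language-keyed mapping. A sketch of both conventions; the `{None: ...}` wrapping is an assumption about how a bare string is normalized:

```python
import numbers

def check_priority(priority):
    if not isinstance(priority, numbers.Integral):
        raise TypeError(
            "invalid priority: got {}, expected integer".format(type(priority))
        )

def normalize_status(status):
    # A bare string presumably becomes the entry for the None language key;
    # a mapping of language -> text is taken as-is.
    if isinstance(status, str):
        return {None: status}
    return dict(status)

check_priority(0)
print(normalize_status("away for lunch"))  # {None: 'away for lunch'}
```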
pk = pyasn1_struct.getComponentByName(
"tbsCertificate"
).getComponentByName(
"subjectPublicKeyInfo"
)
return pyasn1.codec.der.encoder.encode(pk) | def extract_pk_blob_from_pyasn1(pyasn1_struct) | Extract an ASN.1 encoded public key blob from the given :mod:`pyasn1`
structure (which must represent a certificate). | 3.346926 | 3.08438 | 1.085121 | def extract_pk_blob_from_pyasn1(pyasn1_struct):
"""
Extract an ASN.1 encoded public key blob from the given :mod:`pyasn1`
structure (which must represent a certificate).
"""
pk = pyasn1_struct.getComponentByName(
"tbsCertificate"
).getComponentByName(
"subjectPublicKeyInfo"... | 0.808786 | 0.504639 |
cert_structure = extract_python_dict_from_x509(x509)
try:
ssl.match_hostname(cert_structure, hostname)
except ssl.CertificateError:
return False
return True | def check_x509_hostname(x509, hostname) | Check whether the given :class:`OpenSSL.crypto.X509` certificate `x509`
matches the given `hostname`.
Return :data:`True` if the name matches and :data:`False` otherwise. This
uses :func:`ssl.match_hostname` and :func:`extract_python_dict_from_x509`. | 4.243701 | 2.652637 | 1.599805 | def check_x509_hostname(x509, hostname):
"""
Check whether the given :class:`OpenSSL.crypto.X509` certificate `x509`
matches the given `hostname`.
Return :data:`True` if the name matches and :data:`False` otherwise. This
uses :func:`ssl.match_hostname` and :func:`extract_python_dict_from_x509`.
... | 0.803617 | 0.54256 |
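`ssl.match_hostname` works on the dict shape that `SSLSocket.getpeercert()` returns, and the row converts its exception into a boolean. Note the function was deprecated in Python 3.7 and removed in 3.12, so this pattern only runs on older interpreters. A sketch of the expected dict shape:

```python
import ssl

cert = {
    "subject": ((("commonName", "example.com"),),),
    "subjectAltName": (("DNS", "example.com"), ("DNS", "www.example.com")),
}

def check_hostname(cert, hostname):
    # Mirrors check_x509_hostname above: map the raised
    # CertificateError onto a plain boolean.
    try:
        ssl.match_hostname(cert, hostname)  # removed in Python 3.12
    except ssl.CertificateError:
        return False
    return True

print(check_hostname(cert, "www.example.com"))  # True on Python < 3.12
```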
key = self._x509_key(x509)
try:
pins = self._storage[hostname]
except KeyError:
return None
if key in pins:
return True
return None | def query(self, hostname, x509) | Return true if the given :class:`OpenSSL.crypto.X509` object `x509` has
previously been pinned for use with the given `hostname` and
:data:`None` otherwise.
Returning :data:`None` allows this method to be used with
:class:`PinningPKIXCertificateVerifier`. | 4.754226 | 3.829011 | 1.241633 | def query(self, hostname, x509):
"""
Return true if the given :class:`OpenSSL.crypto.X509` object `x509` has
previously been pinned for use with the given `hostname` and
:data:`None` otherwise.
Returning :data:`None` allows this method to be used with
:class:`PinningPKIXCertific... | 0.858333 | 0.502441 |
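The tri-state return in `query` is deliberate: `True` means "pinned", while `None` (rather than `False`) means "no opinion", so a PKIX verifier can fall back to normal chain validation. A minimal in-memory version of such a pin store; `_x509_key` from the row is replaced here by any stable certificate digest the caller supplies:

```python
class PinStore:
    # Simplified stand-in for the pin storage in the row above.
    def __init__(self):
        self._storage = {}  # hostname -> set of certificate digests

    def pin(self, hostname, key):
        self._storage.setdefault(hostname, set()).add(key)

    def query(self, hostname, key):
        pins = self._storage.get(hostname)
        if pins is not None and key in pins:
            return True
        return None  # unknown: let chain validation decide

store = PinStore()
store.pin("example.com", "sha256:abc")
print(store.query("example.com", "sha256:abc"))  # True
print(store.query("example.com", "sha256:def"))  # None
```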
This is a filtered version of the CodeSearchNet Python subset. Filtering combines per-example perplexity measured with and without the docstring (the `factor` column), learning-value and quality classifiers (the `learning_prob` and `quality_prob` columns), and a final manual pass.
The original perplexity-filtered data is from here, with credit to bjoernp.