positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
... | Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption |
def _build_trigram_indices(trigram_index):
"""Build a dictionary of trigrams and their indices from a csv"""
result = {}
trigram_count = 0
for key, val in csv.reader(open(trigram_index)):
result[key] = int(val)
trigram_count += 1
return result, trigram_count | Build a dictionary of trigrams and their indices from a csv |
def _tv2(data,weight,Niter=50):
"""
chambolles tv regularized denoising
weight should be around 2+1.5*noise_sigma
"""
if dev is None:
dev = imgtools.__DEFAULT_OPENCL_DEVICE__
if dev is None:
raise ValueError("no OpenCLDevice found...")
proc = OCLProcessor(dev,utils.absP... | chambolles tv regularized denoising
weight should be around 2+1.5*noise_sigma |
def copy(self):
"""Return a copy of this `Fact`."""
content = [(k, v) for k, v in self.items()]
intidx = [(k, v) for k, v in content if isinstance(k, int)]
args = [v for k, v in sorted(intidx)]
kwargs = {k: v
for k, v in content
if not isinst... | Return a copy of this `Fact`. |
def nonFinalisedReqs(self, reqKeys: List[Tuple[str, int]]):
"""
Check if there are any requests which are not finalised, i.e for
which there are not enough PROPAGATEs
"""
return {key for key in reqKeys if not self.requests.is_finalised(key)} | Check if there are any requests which are not finalised, i.e for
which there are not enough PROPAGATEs |
def clear_port_stats(self):
""" Clear only port stats (leave stream and packet group stats).
Do not use - still working with Ixia to resolve.
"""
stat = IxeStat(self)
stat.ix_set_default()
stat.enableValidStats = True
stat.ix_set()
stat.write() | Clear only port stats (leave stream and packet group stats).
Do not use - still working with Ixia to resolve. |
def split(self, index):
"""
Spilt the list at position specified by index. Returns a tuple containing the
list up until index and the list after the index. Runs in O(index).
>>> plist([1, 2, 3, 4]).split(2)
(plist([1, 2]), plist([3, 4]))
"""
lb = _PListBuilder()
... | Spilt the list at position specified by index. Returns a tuple containing the
list up until index and the list after the index. Runs in O(index).
>>> plist([1, 2, 3, 4]).split(2)
(plist([1, 2]), plist([3, 4])) |
def natural_sorted(iterable):
"""Return human sorted list of strings.
E.g. for sorting file names.
>>> natural_sorted(['f1', 'f2', 'f10'])
['f1', 'f2', 'f10']
"""
def sortkey(x):
return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)]
numbers = re.compile(r'(\d+)')
... | Return human sorted list of strings.
E.g. for sorting file names.
>>> natural_sorted(['f1', 'f2', 'f10'])
['f1', 'f2', 'f10'] |
def attributes(self):
if 'id' in self.node.attrib:
yield PlaceholderAttribute('id', self.node.attrib['id'])
if 'tei-tag' in self.node.attrib:
yield PlaceholderAttribute('tei-tag', self.node.attrib['tei-tag'])
"""Contain attributes applicable to this element"""
f... | Contain attributes applicable to this element |
def read_dna(path):
'''Read DNA from file. Uses BioPython and coerces to coral format.
:param path: Full path to input file.
:type path: str
:returns: DNA sequence.
:rtype: coral.DNA
'''
filename, ext = os.path.splitext(os.path.split(path)[-1])
genbank_exts = ['.gb', '.ape']
fasta... | Read DNA from file. Uses BioPython and coerces to coral format.
:param path: Full path to input file.
:type path: str
:returns: DNA sequence.
:rtype: coral.DNA |
def inner(self, x1, x2):
"""Calculate the array-weighted inner product of two elements.
Parameters
----------
x1, x2 : `ProductSpaceElement`
Elements whose inner product is calculated.
Returns
-------
inner : float or complex
The inner pr... | Calculate the array-weighted inner product of two elements.
Parameters
----------
x1, x2 : `ProductSpaceElement`
Elements whose inner product is calculated.
Returns
-------
inner : float or complex
The inner product of the two provided elements. |
def str_syslog(self, *args):
'''
get/set the str_syslog, i.e. the current value of the
syslog prepend string.
str_syslog(): returns the current syslog string
str_syslog(<astr>): sets the syslog string to <astr>
'''
if len(args):
self._s... | get/set the str_syslog, i.e. the current value of the
syslog prepend string.
str_syslog(): returns the current syslog string
str_syslog(<astr>): sets the syslog string to <astr> |
def parse_data_df(data_dset, ridx, cidx, row_meta, col_meta):
"""
Parses in data_df from hdf5, subsetting if specified.
Input:
-data_dset (h5py dset): HDF5 dataset from which to read data_df
-ridx (list): list of indexes to subset from data_df
(may be all of them if no subsettin... | Parses in data_df from hdf5, subsetting if specified.
Input:
-data_dset (h5py dset): HDF5 dataset from which to read data_df
-ridx (list): list of indexes to subset from data_df
(may be all of them if no subsetting)
-cidx (list): list of indexes to subset from data_df
... |
def guess_encoding(self):
"""Guess encoding using the language, falling back on chardet.
:return: the guessed encoding.
:rtype: str
"""
logger.info('Guessing encoding for language %s', self.language)
# always try utf-8 first
encodings = ['utf-8']
# add... | Guess encoding using the language, falling back on chardet.
:return: the guessed encoding.
:rtype: str |
def complete_irradiance(self, times=None, weather=None):
"""
Determine the missing irradiation columns. Only two of the
following data columns (dni, ghi, dhi) are needed to calculate
the missing data.
This function is not safe at the moment. Results can be too high
or ne... | Determine the missing irradiation columns. Only two of the
following data columns (dni, ghi, dhi) are needed to calculate
the missing data.
This function is not safe at the moment. Results can be too high
or negative. Please contribute and help to improve this function
on https:... |
def tophat_alignment_plot (self):
""" Make the HighCharts HTML to plot the alignment rates """
# Specify the order of the different possible categories
keys = OrderedDict()
keys['aligned_not_multimapped_discordant'] = { 'color': '#437bb1', 'name': 'Aligned' }
keys['aligned_multi... | Make the HighCharts HTML to plot the alignment rates |
def getConId(self, contract_identifier):
""" Get contracts conId """
details = self.contractDetails(contract_identifier)
if len(details["contracts"]) > 1:
return details["m_underConId"]
return details["m_summary"]["m_conId"] | Get contracts conId |
def send_messages(self, topic, key=None, msgs=()):
"""
Given a topic, and optional key (for partitioning) and a list of
messages, send them to Kafka, either immediately, or when a batch is
ready, depending on the Producer's batch settings.
:param str topic: Kafka topic to send t... | Given a topic, and optional key (for partitioning) and a list of
messages, send them to Kafka, either immediately, or when a batch is
ready, depending on the Producer's batch settings.
:param str topic: Kafka topic to send the messages to
:param str key:
Message key used to... |
def genms(self, scans=[]):
""" Generate an MS that contains all calibrator scans with 1 s integration time.
"""
if len(scans):
scanstr = string.join([str(ss) for ss in sorted(scans)], ',')
else:
scanstr = self.allstr
print 'Splitting out all cal scans (%... | Generate an MS that contains all calibrator scans with 1 s integration time. |
def rename(self, path, destination, **kwargs):
"""Renames Path src to Path dst.
:returns: true if rename is successful
:rtype: bool
"""
return _json(self._put(path, 'RENAME', destination=destination, **kwargs))['boolean'] | Renames Path src to Path dst.
:returns: true if rename is successful
:rtype: bool |
def get_submission(self, submissionid, user_check=True):
""" Get a submission from the database """
sub = self._database.submissions.find_one({'_id': ObjectId(submissionid)})
if user_check and not self.user_is_submission_owner(sub):
return None
return sub | Get a submission from the database |
def get_grade_system_id(self):
"""Gets the ``GradeSystem Id`` in which this grade belongs.
return: (osid.id.Id) - the grade system ``Id``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.learning.Activity.get_objective_id
... | Gets the ``GradeSystem Id`` in which this grade belongs.
return: (osid.id.Id) - the grade system ``Id``
*compliance: mandatory -- This method must be implemented.* |
def _extract_jump_targets(stmt):
"""
Extract goto targets from a Jump or a ConditionalJump statement.
:param stmt: The statement to analyze.
:return: A list of known concrete jump targets.
:rtype: list
"""
targets = [ ]
# FIXME: We are... | Extract goto targets from a Jump or a ConditionalJump statement.
:param stmt: The statement to analyze.
:return: A list of known concrete jump targets.
:rtype: list |
def convert_ram_sp_rf(ADDR_WIDTH=8, DATA_WIDTH=8):
''' Convert RAM: Single-Port, Read-First '''
clk = Signal(bool(0))
we = Signal(bool(0))
addr = Signal(intbv(0)[ADDR_WIDTH:])
di = Signal(intbv(0)[DATA_WIDTH:])
do = Signal(intbv(0)[DATA_WIDTH:])
toVerilog(ram_sp_rf, clk, we, addr, di, do) | Convert RAM: Single-Port, Read-First |
def get_metric_data(self, applications, metrics, field, begin, end, summary=False):
"""
Requires: account ID,
list of application IDs,
list of metrics,
metric fields,
begin,
end
Method: Get
Endpoint... | Requires: account ID,
list of application IDs,
list of metrics,
metric fields,
begin,
end
Method: Get
Endpoint: api.newrelic.com
Restrictions: Rate limit to 1x per minute
Errors: 403 Invalid API key... |
def get_archive(self, archive_name, default_version=None):
'''
Retrieve a data archive
Parameters
----------
archive_name: str
Name of the archive to retrieve
default_version: version
str or :py:class:`~distutils.StrictVersion` giving the defaul... | Retrieve a data archive
Parameters
----------
archive_name: str
Name of the archive to retrieve
default_version: version
str or :py:class:`~distutils.StrictVersion` giving the default
version number to be used on read operations
Returns
... |
def thin_string_list(list_of_strings, max_nonempty_strings=50, blank=''):
"""Designed for composing lists of strings suitable for pyplot axis labels
Often the xtick spacing doesn't allow room for 100's of text labels, so this
eliminates every other one, then every other one of those, until they fit.
>... | Designed for composing lists of strings suitable for pyplot axis labels
Often the xtick spacing doesn't allow room for 100's of text labels, so this
eliminates every other one, then every other one of those, until they fit.
>>> thin_string_list(['x']*20, 5) # doctring: +NORMALIZE_WHITESPACE
['x', '',... |
def search(cls, *, limit=100, page=1, properties=None, return_query=False):
"""Search for issues based on the provided filters
Args:
limit (`int`): Number of results to return. Default: 100
page (`int`): Pagination offset for results. Default: 1
properties (`dict`): ... | Search for issues based on the provided filters
Args:
limit (`int`): Number of results to return. Default: 100
page (`int`): Pagination offset for results. Default: 1
properties (`dict`): A `dict` containing property name and value pairs. Values can be either a str or a list... |
def __get_boxes(self):
"""
Get all the word boxes of this page.
"""
boxfile = self.__box_path
try:
box_builder = pyocr.builders.LineBoxBuilder()
with self.fs.open(boxfile, 'r') as file_desc:
boxes = box_builder.read_file(file_desc)
... | Get all the word boxes of this page. |
def get_repo_keys():
'''
.. versionadded:: 2017.7.0
List known repo key details.
:return: A dictionary containing the repo keys.
:rtype: dict
CLI Examples:
.. code-block:: bash
salt '*' pkg.get_repo_keys
'''
ret = dict()
repo_keys = list()
# The double usage of ... | .. versionadded:: 2017.7.0
List known repo key details.
:return: A dictionary containing the repo keys.
:rtype: dict
CLI Examples:
.. code-block:: bash
salt '*' pkg.get_repo_keys |
def _validate(self): # type: () -> None
"""Run validation, save errors to object in self._errors"""
# class can specify it's empty obj -- list would have empty of []
self._errors = []
self._validate_type()
if self.is_valid():
self._validate_value() | Run validation, save errors to object in self._errors |
def get_top_edge_depth(self):
"""
Return minimum depth of surface's top edge.
:returns:
Float value, the vertical distance between the earth surface
and the shallowest point in surface's top edge in km.
"""
top_edge = self.mesh[0:1]
if top_edge.de... | Return minimum depth of surface's top edge.
:returns:
Float value, the vertical distance between the earth surface
and the shallowest point in surface's top edge in km. |
def replace_u(matchobj):
"""Break the interval into parts, and replace 'u's.
pieces - [pos/neg, start_year, start_month, start_day,
pos/neg, end_year, end_month, end_day]
"""
pieces = list(matchobj.groups(''))
# Replace "u"s in start and end years.
if 'u' in pieces[1]:
pie... | Break the interval into parts, and replace 'u's.
pieces - [pos/neg, start_year, start_month, start_day,
pos/neg, end_year, end_month, end_day] |
def destination(self, value):
"""
Setter for **self.__destination** attribute.
:param value: Attribute value.
:type value: unicode
"""
if value is not None:
assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format(
... | Setter for **self.__destination** attribute.
:param value: Attribute value.
:type value: unicode |
def updateEmailConfig(self, Corpnum, EmailType, SendYN, UserID=None):
""" ์๋ฆผ๋ฉ์ผ ์ ์ก์ค์ ์์
args
CorpNum : ํ๋นํ์ ์ฌ์
์๋ฒํธ
EmailType: ๋ฉ์ผ์ ์ก์ ํ
SendYN: ์ ์ก์ฌ๋ถ (True-์ ์ก, False-๋ฏธ์ ์ก)
UserID : ํ๋นํ์ ์์ด๋
return
์ฒ๋ฆฌ๊ฒฐ๊ณผ. consi... | ์๋ฆผ๋ฉ์ผ ์ ์ก์ค์ ์์
args
CorpNum : ํ๋นํ์ ์ฌ์
์๋ฒํธ
EmailType: ๋ฉ์ผ์ ์ก์ ํ
SendYN: ์ ์ก์ฌ๋ถ (True-์ ์ก, False-๋ฏธ์ ์ก)
UserID : ํ๋นํ์ ์์ด๋
return
์ฒ๋ฆฌ๊ฒฐ๊ณผ. consist of code and message
raise
PopbillException |
def write_hyper_response(self, links=[], meta={}, entity_name=None, entity=None, notifications=[], actions=[]):
"""Writes a hyper media response object
:param list links: A list of links to the resources
:param dict meta: The meta data for this response
:param str entity_name: The entit... | Writes a hyper media response object
:param list links: A list of links to the resources
:param dict meta: The meta data for this response
:param str entity_name: The entity name
:param object entity: The Entity itself
:param list notifications: List of notifications
:pa... |
def determine_orig_wcsname(header, wnames, wkeys):
"""
Determine the name of the original, unmodified WCS solution
"""
orig_wcsname = None
orig_key = None
if orig_wcsname is None:
for k,w in wnames.items():
if w[:4] == 'IDC_':
orig_wcsname = w
... | Determine the name of the original, unmodified WCS solution |
def get_cmor_fp_meta(fp):
"""Processes a CMOR style file path.
Section 3.1 of the `Data Reference Syntax`_ details:
The standard CMIP5 output tool CMOR optionally writes output files
to a directory structure mapping DRS components to directory names as:
<activity>/<product>/<insti... | Processes a CMOR style file path.
Section 3.1 of the `Data Reference Syntax`_ details:
The standard CMIP5 output tool CMOR optionally writes output files
to a directory structure mapping DRS components to directory names as:
<activity>/<product>/<institute>/<model>/<experiment>/<frequ... |
def protocols(self):
"""
:rtype: dict[int, list of ProtocolAnalyzer]
"""
if self.__protocols is None:
self.__protocols = self.proto_tree_model.protocols
return self.__protocols | :rtype: dict[int, list of ProtocolAnalyzer] |
def cli(env):
"""Virtual server order options."""
vsi = SoftLayer.VSManager(env.client)
result = vsi.get_create_options()
table = formatting.KeyValueTable(['name', 'value'])
table.align['name'] = 'r'
table.align['value'] = 'l'
# Datacenters
datacenters = [dc['template']['datacenter'][... | Virtual server order options. |
def find_point_in_section_list(point, section_list):
"""Returns the start of the section the given point belongs to.
The given list is assumed to contain start points of consecutive
sections, except for the final point, assumed to be the end point of the
last section. For example, the list [5, 8, 30, 3... | Returns the start of the section the given point belongs to.
The given list is assumed to contain start points of consecutive
sections, except for the final point, assumed to be the end point of the
last section. For example, the list [5, 8, 30, 31] is interpreted as the
following list of sections: [5-... |
def evaluate():
"""Evaluate the model on validation dataset.
"""
log.info('Loader dev data...')
if version_2:
dev_data = SQuAD('dev', version='2.0')
else:
dev_data = SQuAD('dev', version='1.1')
log.info('Number of records in Train data:{}'.format(len(dev_data)))
dev_dataset ... | Evaluate the model on validation dataset. |
def get_usernames_like(username,**kwargs):
"""
Return a list of usernames like the given string.
"""
checkname = "%%%s%%"%username
rs = db.DBSession.query(User.username).filter(User.username.like(checkname)).all()
return [r.username for r in rs] | Return a list of usernames like the given string. |
def set_n(self, value):
''' setter '''
if isinstance(value, int):
self.__n = value
else:
raise TypeError("The type of n must be int.") | setter |
def hostname(self):
"""Get the hostname that this connection is associated with"""
from six.moves.urllib.parse import urlparse
return urlparse(self._base_url).netloc.split(':', 1)[0] | Get the hostname that this connection is associated with |
def remove_note(self, note, octave=-1):
"""Remove note from container.
The note can either be a Note object or a string representing the
note's name. If no specific octave is given, the note gets removed
in every octave.
"""
res = []
for x in self.notes:
... | Remove note from container.
The note can either be a Note object or a string representing the
note's name. If no specific octave is given, the note gets removed
in every octave. |
def login_required(function=None, required=False, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Decorator for views that, if required, checks that the user is logged in and redirect
to the log-in page if necessary.
"""
if required:
if django.VERSION < (1, 11):
actual_decorator = ... | Decorator for views that, if required, checks that the user is logged in and redirect
to the log-in page if necessary. |
def secure(view):
"""
Authentication decorator for views.
If DEBUG is on, we serve the view without authenticating.
Default is 'django.contrib.auth.decorators.login_required'.
Can also be 'django.contrib.admin.views.decorators.staff_member_required'
or a custom decorator.
"""
auth_decor... | Authentication decorator for views.
If DEBUG is on, we serve the view without authenticating.
Default is 'django.contrib.auth.decorators.login_required'.
Can also be 'django.contrib.admin.views.decorators.staff_member_required'
or a custom decorator. |
def get_url(self, url):
"""
Get an absolute URL from a given one.
"""
if url.startswith('/'):
url = '%s%s' % (self.base_url, url)
return url | Get an absolute URL from a given one. |
def _asciify_list(data):
""" Ascii-fies list values """
ret = []
for item in data:
if isinstance(item, unicode):
item = _remove_accents(item)
item = item.encode('utf-8')
elif isinstance(item, list):
item = _asciify_list(item)
elif isinstance(item, ... | Ascii-fies list values |
def iter_filth(self, text):
"""Iterate over the different types of filth that can exist.
"""
# currently doing this by aggregating all_filths and then sorting
# inline instead of with a Filth.__cmp__ method, which is apparently
# much slower http://stackoverflow.com/a/988728/5647... | Iterate over the different types of filth that can exist. |
def read_json (self, mode='rt', **kwargs):
"""Use the :mod:`json` module to read in this file as a JSON-formatted data
structure. Keyword arguments are passed to :func:`json.load`. Returns the
read-in data structure.
"""
import json
with self.open (mode=mode) as f:
... | Use the :mod:`json` module to read in this file as a JSON-formatted data
structure. Keyword arguments are passed to :func:`json.load`. Returns the
read-in data structure. |
def init_widget(self):
""" The KeyEvent uses the parent_widget as it's widget """
super(QtKeyEvent, self).init_widget()
d = self.declaration
widget = self.widget
self._keyPressEvent = widget.keyPressEvent
self._keyReleaseEvent = widget.keyReleaseEvent
self.set_ena... | The KeyEvent uses the parent_widget as it's widget |
def get_dfdx(self):
""" Calculates 2nd derivatives of ``self.exprs`` """
if self._dfdx is True:
if self.indep is None:
zero = 0*self.be.Dummy()**0
self._dfdx = self.be.Matrix(1, self.ny, [zero]*self.ny)
else:
self._dfdx = self.be.Ma... | Calculates 2nd derivatives of ``self.exprs`` |
def format(self, options=None):
"""
Format this diagnostic for display. The options argument takes
Diagnostic.Display* flags, which can be combined using bitwise OR. If
the options argument is not provided, the default display options will
be used.
"""
if options ... | Format this diagnostic for display. The options argument takes
Diagnostic.Display* flags, which can be combined using bitwise OR. If
the options argument is not provided, the default display options will
be used. |
def ustep(self):
"""Dual variable update."""
self.U += self.rsdl_r(self.AX, self.Y) | Dual variable update. |
def flatten_urls(self, urls):
"""
Function flatten urls for route grouping feature of glim.
Args
----
urls (dict): a dict of url definitions.
current_key (unknown type): a dict or a string marking the
current key that is used for recursive calls.
... | Function flatten urls for route grouping feature of glim.
Args
----
urls (dict): a dict of url definitions.
current_key (unknown type): a dict or a string marking the
current key that is used for recursive calls.
ruleset (dict): the ruleset that is eventually r... |
def _taskkill(self, force: bool = False) -> int:
"""
Executes a Windows ``TASKKILL /pid PROCESS_ID /t`` command
(``/t`` for "tree kill" = "kill all children").
Args:
force: also add ``/f`` (forcefully)
Returns:
return code from ``TASKKILL``
**Te... | Executes a Windows ``TASKKILL /pid PROCESS_ID /t`` command
(``/t`` for "tree kill" = "kill all children").
Args:
force: also add ``/f`` (forcefully)
Returns:
return code from ``TASKKILL``
**Test code:**
Firstly we need a program that won't let itself b... |
def to_protobuf(self) -> LinkItemProto:
"""
Create protobuf item.
:return: protobuf structure
:rtype: ~unidown.plugin.protobuf.link_item_pb2.LinkItemProto
"""
result = LinkItemProto()
result.name = self._name
result.time.CopyFrom(datetime_to_timestamp(sel... | Create protobuf item.
:return: protobuf structure
:rtype: ~unidown.plugin.protobuf.link_item_pb2.LinkItemProto |
def _fetch(
queryset, model_objs, unique_fields, update_fields, returning, sync,
ignore_duplicate_updates=True, return_untouched=False
):
"""
Perfom the upsert and do an optional sync operation
"""
model = queryset.model
if (return_untouched or sync) and returning is not True:
return... | Perfom the upsert and do an optional sync operation |
def shell_comment(c):
'Do not shell-escape raw strings in comments, but do handle line breaks.'
return ShellQuoted('# {c}').format(c=ShellQuoted(
(raw_shell(c) if isinstance(c, ShellQuoted) else c)
.replace('\n', '\n# ')
)) | Do not shell-escape raw strings in comments, but do handle line breaks. |
def flip_ctrlpts2d_file(file_in='', file_out='ctrlpts_flip.txt'):
""" Flips u and v directions of a 2D control points file and saves flipped coordinates to a file.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type fil... | Flips u and v directions of a 2D control points file and saves flipped coordinates to a file.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type file_out: str
:raises IOError: an error occurred reading or writing the f... |
def pid_max(self):
"""
Get the maximum PID value.
On Linux, the value is read from the `/proc/sys/kernel/pid_max` file.
From `man 5 proc`:
The default value for this file, 32768, results in the same range of
PIDs as on earlier kernels. On 32-bit platfroms, 32768 is the ... | Get the maximum PID value.
On Linux, the value is read from the `/proc/sys/kernel/pid_max` file.
From `man 5 proc`:
The default value for this file, 32768, results in the same range of
PIDs as on earlier kernels. On 32-bit platfroms, 32768 is the maximum
value for pid_max. On 6... |
def add_log_error(self, x, flag_also_show=False, E=None):
"""Delegates to parent form"""
self.parent_form.add_log_error(x, flag_also_show, E) | Delegates to parent form |
def RemoveAndAddFeatures(self, url, pathToFeatureClass, id_field, chunksize=1000):
"""Deletes all features in a feature service and uploads features from a feature class on disk.
Args:
url (str): The URL of the feature service.
pathToFeatureClass (str): The path of the feature c... | Deletes all features in a feature service and uploads features from a feature class on disk.
Args:
url (str): The URL of the feature service.
pathToFeatureClass (str): The path of the feature class on disk.
id_field (str): The name of the field in the feature class to use fo... |
def get_queryset(self, request):
"""
Annotate the queryset with the entries count for use in the
admin list view.
"""
qs = super(FormAdmin, self).get_queryset(request)
return qs.annotate(total_entries=Count("entries")) | Annotate the queryset with the entries count for use in the
admin list view. |
def query(url, **kwargs):
'''
Query a resource, and decode the return data
Passes through all the parameters described in the
:py:func:`utils.http.query function <salt.utils.http.query>`:
.. autofunction:: salt.utils.http.query
CLI Example:
.. code-block:: bash
salt '*' http.que... | Query a resource, and decode the return data
Passes through all the parameters described in the
:py:func:`utils.http.query function <salt.utils.http.query>`:
.. autofunction:: salt.utils.http.query
CLI Example:
.. code-block:: bash
salt '*' http.query http://somelink.com/
salt '... |
def __update_display_items_model(self, display_items_model: ListModel.FilteredListModel, data_group: typing.Optional[DataGroup.DataGroup], filter_id: typing.Optional[str]) -> None:
"""Update the data item model with a new container, filter, and sorting.
This is called when the data item model is create... | Update the data item model with a new container, filter, and sorting.
This is called when the data item model is created or when the user changes
the data group or sorting settings. |
def apply_cl_function(cl_function, kernel_data, nmr_instances, use_local_reduction=False, cl_runtime_info=None):
"""Run the given function/procedure on the given set of data.
This class will wrap the given CL function in a kernel call and execute that that for every data instance using
the provided kernel ... | Run the given function/procedure on the given set of data.
This class will wrap the given CL function in a kernel call and execute that that for every data instance using
the provided kernel data. This class will respect the read write setting of the kernel data elements such that
output can be written bac... |
def set_basic(self, realm='authentication required'):
"""Clear the auth info and enable basic auth."""
dict.clear(self)
dict.update(self, {'__auth_type__': 'basic', 'realm': realm})
if self.on_update:
self.on_update(self) | Clear the auth info and enable basic auth. |
def flushall(self, asynchronous=False):
"""
Delete all keys in all databases on the current host.
``asynchronous`` indicates whether the operation is
executed asynchronously by the server.
"""
args = []
if asynchronous:
args.append(Token.get_token('AS... | Delete all keys in all databases on the current host.
``asynchronous`` indicates whether the operation is
executed asynchronously by the server. |
def inspect_periodic_tasks(self) -> List[Tuple[int, str]]:
"""Get the next periodic task schedule.
Used only for debugging and during tests.
"""
rv = self._r.zrangebyscore(
self._to_namespaced(PERIODIC_TASKS_QUEUE_KEY),
'-inf', '+inf', withscores=True
)
... | Get the next periodic task schedule.
Used only for debugging and during tests. |
def raster_weights(self, **kwargs):
"""
Compute neighbor weights for GeoRaster.
See help(gr.raster_weights) for options
Usage:
geo.raster_weights(rook=True)
"""
if self.weights is None:
self.weights = raster_weights(self.raster, **kwargs)
pass | Compute neighbor weights for GeoRaster.
See help(gr.raster_weights) for options
Usage:
geo.raster_weights(rook=True) |
def cp_string(self, source, dest, **kwargs):
"""
Copies source string into the destination location.
Parameters
----------
source: string
the string with the content to copy
dest: string
the s3 location
"""
assert isinstance(sourc... | Copies source string into the destination location.
Parameters
----------
source: string
the string with the content to copy
dest: string
the s3 location |
def latency(self):
""":class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.
This operates similarly to :meth:`.Client.latency` except it uses the average
latency of every shard's latency. To get a list of shard latency, check the
:attr:`latencies` property... | :class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.
This operates similarly to :meth:`.Client.latency` except it uses the average
latency of every shard's latency. To get a list of shard latency, check the
:attr:`latencies` property. Returns ``nan`` if there are... |
def makeCredBearerTokenLoginMethod(username,
password,
stsUrl,
stsCert=None):
'''Return a function that will call the vim.SessionManager.LoginByToken()
after obtaining a Bearer token from the ST... | Return a function that will call the vim.SessionManager.LoginByToken()
after obtaining a Bearer token from the STS. The result of this function
can be passed as the "loginMethod" to a SessionOrientedStub constructor.
@param username: username of the user/service registered with STS.
@param pass... |
def get_version():
"""Get single-source __version__."""
pkg_dir = get_package_dir()
with open(os.path.join(pkg_dir, 'nestcheck/_version.py')) as ver_file:
string = ver_file.read()
return string.strip().replace('__version__ = ', '').replace('\'', '') | Get single-source __version__. |
def get_diff(file1,
file2,
saltenv='base',
show_filenames=True,
show_changes=True,
template=False,
source_hash_file1=None,
source_hash_file2=None):
'''
Return unified diff of two files
file1
The first file to... | Return unified diff of two files
file1
The first file to feed into the diff utility
.. versionchanged:: 2018.3.0
Can now be either a local or remote file. In earlier releases,
thuis had to be a file local to the minion.
file2
The second file to feed into the di... |
def create(self, path, visibility):
"""
Create a new FunctionVersionInstance
:param unicode path: The path
:param FunctionVersionInstance.Visibility visibility: The visibility
:returns: Newly created FunctionVersionInstance
:rtype: twilio.rest.serverless.v1.service.func... | Create a new FunctionVersionInstance
:param unicode path: The path
:param FunctionVersionInstance.Visibility visibility: The visibility
:returns: Newly created FunctionVersionInstance
:rtype: twilio.rest.serverless.v1.service.function.function_version.FunctionVersionInstance |
def _initialize(self, chain, length):
"""Prepare for tallying. Create a new chain."""
# If this db was loaded from the disk, it may not have its
# tallied step methods' getfuncs yet.
if self._getfunc is None:
self._getfunc = self.db.model._funs_to_tally[self.name] | Prepare for tallying. Create a new chain. |
def make_reading_comprehension_instance_quac(question_list_tokens: List[List[Token]],
passage_tokens: List[Token],
token_indexers: Dict[str, TokenIndexer],
passage_text: str,
... | Converts a question, a passage, and an optional answer (or answers) to an ``Instance`` for use
in a reading comprehension model.
Creates an ``Instance`` with at least these fields: ``question`` and ``passage``, both
``TextFields``; and ``metadata``, a ``MetadataField``. Additionally, if both ``answer_text... |
def printable_name(column, path=None):
"""Provided for debug output when rendering conditions.
User.name[3]["foo"][0]["bar"] -> name[3].foo[0].bar
"""
pieces = [column.name]
path = path or path_of(column)
for segment in path:
if isinstance(segment, str):
pieces.append(segmen... | Provided for debug output when rendering conditions.
User.name[3]["foo"][0]["bar"] -> name[3].foo[0].bar |
def wait(self, timeout_ms=None):
"""Block until this command has completed.
Args:
timeout_ms: Timeout, in milliseconds, to wait.
Returns:
Output of the command if it complete and self.stdout is a StringIO
object or was passed in as None. Returns True if the command completed but
stdou... | Block until this command has completed.
Args:
timeout_ms: Timeout, in milliseconds, to wait.
Returns:
Output of the command if it complete and self.stdout is a StringIO
object or was passed in as None. Returns True if the command completed but
stdout was provided (and was not a StringIO o... |
def createVM(rh):
"""
Create a virtual machine in z/VM.
Input:
Request Handle with the following properties:
function - 'CMDVM'
subfunction - 'CMD'
userid - userid of the virtual machine
Output:
Request Handle updated with the results.
Return ... | Create a virtual machine in z/VM.
Input:
Request Handle with the following properties:
function - 'CMDVM'
subfunction - 'CMD'
userid - userid of the virtual machine
Output:
Request Handle updated with the results.
Return code - 0: ok, non-zero: error |
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a PLSRecall.dat file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
... | Parses a PLSRecall.dat file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed. |
def check_trajectory_id(self, dataset):
'''
Checks that if a variable exists for the trajectory id it has the appropriate attributes
:param netCDF4.Dataset dataset: An open netCDF dataset
'''
results = []
exists_ctx = TestCtx(BaseCheck.MEDIUM, 'Variable defining "traject... | Checks that if a variable exists for the trajectory id it has the appropriate attributes
:param netCDF4.Dataset dataset: An open netCDF dataset |
def _log_query(query):
"""
Logs the query on the console.
:param str query: The query.
"""
query = query.strip()
if os.linesep in query:
# Query is a multi line query
MetadataDataLayer.io.log_very_verbose('Executing query:')
MetadataD... | Logs the query on the console.
:param str query: The query. |
def log_likelihood_top1(data, params):
    """Compute the log-likelihood of model parameters.

    ``data`` is an iterable of ``(winner, losers)`` pairs of item
    indices; ``params`` is an array-like of per-item scores.
    """
    params = np.asarray(params)
    loglik = 0
    for winner, losers in data:
        # Indices of every contender in this comparison, winner first.
        contenders = np.append(winner, losers)
        # Each observation contributes -log sum_j exp(s_j - s_winner).
        loglik -= logsumexp(params.take(contenders) - params[winner])
    return loglik
def json_decode(data_type, serialized_obj, caller_permissions=None,
alias_validators=None, strict=True, old_style=False):
"""Performs the reverse operation of json_encode.
Args:
data_type (Validator): Validator for serialized_obj.
serialized_obj (str): The JSON string to deseria... | Performs the reverse operation of json_encode.
Args:
data_type (Validator): Validator for serialized_obj.
serialized_obj (str): The JSON string to deserialize.
caller_permissions (list): The list of raw-string caller permissions
with which to serialize.
alias_validators ... |
def lookup_from_headers(cls, headers):
"""
Given a dictionary of headers (WSGI request.META for instance), look up
the most likely user's IP
"""
# A single address, set by this server, returned as an Array
remote_addr = cls.ips_from(headers.get("REMOTE_ADDR"))
#... | Given a dictionary of headers (WSGI request.META for instance), look up
the most likely user's IP |
def body_storage_pair(self):
"""
Return reader/writer pair for storing receiving body data.
These are event-loop specific objects.
The reader should be an awaitable object that returns the
body data once created.
"""
future = Future()
def send_body():
... | Return reader/writer pair for storing receiving body data.
These are event-loop specific objects.
The reader should be an awaitable object that returns the
body data once created. |
def Shah(m, x, D, rhol, mul, kl, Cpl, P, Pc):
r'''Calculates heat transfer coefficient for condensation
of a fluid inside a tube, as presented in [1]_ and again by the same
author in [2]_; also given in [3]_. Requires no properties of the gas.
Uses the Dittus-Boelter correlation for single phase heat t... | r'''Calculates heat transfer coefficient for condensation
of a fluid inside a tube, as presented in [1]_ and again by the same
author in [2]_; also given in [3]_. Requires no properties of the gas.
Uses the Dittus-Boelter correlation for single phase heat transfer
coefficient, with a Reynolds number a... |
def expand_block(self, feat):
"""Expand any blocks which are near the start or end of a contig.
"""
chrom_end = self._ref_sizes.get(feat.chrom)
if chrom_end:
if feat.start < self._end_buffer:
feat.start = 0
if feat.stop >= chrom_end - self._end_buf... | Expand any blocks which are near the start or end of a contig. |
def split_data(X, y, ratio=(0.8, 0.1, 0.1)):
"""Splits data into a training, validation, and test set.
Args:
X: text data
y: data labels
ratio: the ratio for splitting. Default: (0.8, 0.1, 0.1)
Returns:
split data: X_train, X_val, X_test, y_train, y_... | Splits data into a training, validation, and test set.
Args:
X: text data
y: data labels
ratio: the ratio for splitting. Default: (0.8, 0.1, 0.1)
Returns:
split data: X_train, X_val, X_test, y_train, y_val, y_test |
def get_dict_from_buffer(buf, keys=['DISTNAME', 'MAJOR',
'MINOR', 'PATCHLEVEL',
'PYTHON',
'MIN_PYTHON_MAJOR',
'MIN_PYTHON_MINOR',
'MIN_NUMPY... | Parses a string buffer for key-val pairs for the supplied keys.
Returns: Python dictionary with all the keys (all keys in buffer
if None is passed for keys) with the values being a list
corresponding to each key.
Note: Return dict will contain all keys supplied (if not None).
... |
def _igamc(a, x):
"""Complemented incomplete Gamma integral.
SYNOPSIS:
double a, x, y, igamc();
y = igamc( a, x );
DESCRIPTION:
The function is defined by::
igamc(a,x) = 1 - igam(a,x)
inf.
-
... | Complemented incomplete Gamma integral.
SYNOPSIS:
double a, x, y, igamc();
y = igamc( a, x );
DESCRIPTION:
The function is defined by::
igamc(a,x) = 1 - igam(a,x)
inf.
-
1 | | -t a... |
def Boyko_Kruzhilin(m, rhog, rhol, kl, mul, Cpl, D, x):
r'''Calculates heat transfer coefficient for condensation
of a pure chemical inside a vertical tube or tube bundle, as presented in
[2]_ according to [1]_.
.. math::
h_f = h_{LO}\left[1 + x\left(\frac{\rho_L}{\rho_G} - 1\right)\right]^{0.5... | r'''Calculates heat transfer coefficient for condensation
of a pure chemical inside a vertical tube or tube bundle, as presented in
[2]_ according to [1]_.
.. math::
h_f = h_{LO}\left[1 + x\left(\frac{\rho_L}{\rho_G} - 1\right)\right]^{0.5}
h_{LO} = 0.021 \frac{k_L}{L} Re_{LO}^{0.8} Pr^{0.... |
def draw_light_2d_linear(self, kwargs_list, n=1, new_compute=False, r_eff=1.):
"""
constructs the CDF and draws from it random realizations of projected radii R
:param kwargs_list:
:return:
"""
if not hasattr(self, '_light_cdf') or new_compute is True:
r_array... | constructs the CDF and draws from it random realizations of projected radii R
:param kwargs_list:
:return: |
def connect(self, datas=None):
"""
Connects ``Pipers`` in the order input -> output. See ``Piper.connect``.
According to the pipes (topology). If "datas" is given will connect the
input ``Pipers`` to the input data see: ``Dagger.connect_inputs``.
        Arguments:
... | Connects ``Pipers`` in the order input -> output. See ``Piper.connect``.
According to the pipes (topology). If "datas" is given will connect the
input ``Pipers`` to the input data see: ``Dagger.connect_inputs``.
Argumensts:
- datas(sequence) [default: ``None``] valid ... |
def _get_data_schema(self):
"""
Returns a dictionary of (column : type) for the data used in the
model.
"""
if not hasattr(self, "_data_schema"):
response = self.__proxy__.get_data_schema()
self._data_schema = {k : _turicreate._cython.cy_flexible_type.py... | Returns a dictionary of (column : type) for the data used in the
model. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.