positive stringlengths 100 30.3k | anchor stringlengths 1 15k |
|---|---|
def commit_async(self, offsets=None, callback=None):
"""Commit offsets to kafka asynchronously, optionally firing callback.
This commits offsets only to Kafka. The offsets committed using this API
will be used on the first fetch after every rebalance and also on
startup. As such, if you... | Commit offsets to kafka asynchronously, optionally firing callback.
This commits offsets only to Kafka. The offsets committed using this API
will be used on the first fetch after every rebalance and also on
startup. As such, if you need to store offsets in anything other than
Kafka, thi... |
def copy_location(new_node, old_node):
"""
Copy the source location hint (`lineno` and `col_offset`) from the
old to the new node if possible and return the new one.
"""
for attr in 'lineno', 'col_offset':
if attr in old_node._attributes and attr in new_node._attributes \
and hasa... | Copy the source location hint (`lineno` and `col_offset`) from the
old to the new node if possible and return the new one. |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self,
'text_normalized') and self.text_normalized is not None:
_dict['text_normalized'] = self.text_normalized
return _dict | Return a json dictionary representing this model. |
def object_to_dict(cls, obj):
"""
This function converts Objects into Dictionary
"""
dict_obj = dict()
if obj is not None:
if type(obj) == list:
dict_list = []
for inst in obj:
dict_list.append(cls.object_to_dict... | This function converts Objects into Dictionary |
def AddService(self, new_service):
"""Add a new service to the list of ones we know about.
Args:
new_service (WindowsService): the service to add.
"""
for service in self._services:
if new_service == service:
# If this service is the same as one we already know about, we
# j... | Add a new service to the list of ones we know about.
Args:
new_service (WindowsService): the service to add. |
def process_deny_action(processors, action, argument):
"""Process deny action."""
for processor in processors:
processor(action, argument)
db.session.commit() | Process deny action. |
def is_on_filesystem(value, **kwargs):
"""Indicate whether ``value`` is a file or directory that exists on the local
filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if `... | Indicate whether ``value`` is a file or directory that exists on the local
filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameter... |
def version(self, v):
"""
Sets the CoAP version
:param v: the version
:raise AttributeError: if value is not 1
"""
if not isinstance(v, int) or v != 1:
raise AttributeError
self._version = v | Sets the CoAP version
:param v: the version
:raise AttributeError: if value is not 1 |
def system_monitor_sfp_alert_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
system_monitor = ET.SubElement(config, "system-monitor", xmlns="urn:brocade.com:mgmt:brocade-system-monitor")
sfp = ET.SubElement(system_monitor, "sfp")
alert = ET... | Auto Generated Code |
def setActiveModule(Module):
r"""Helps with collecting the members of the imported modules.
"""
module_name = Module.__name__
if module_name not in ModuleMembers:
ModuleMembers[module_name] = []
ModulesQ.append(module_name)
Group(Module, {}) # brand the module with __ec_member__
state.... | r"""Helps with collecting the members of the imported modules. |
def load_gettext_translations(directory: str, domain: str) -> None:
"""Loads translations from `gettext`'s locale tree
Locale tree is similar to system's ``/usr/share/locale``, like::
{directory}/{lang}/LC_MESSAGES/{domain}.mo
Three steps are required to have your app translated:
1. Generate... | Loads translations from `gettext`'s locale tree
Locale tree is similar to system's ``/usr/share/locale``, like::
{directory}/{lang}/LC_MESSAGES/{domain}.mo
Three steps are required to have your app translated:
1. Generate POT translation file::
xgettext --language=Python --keyword=_:1,2... |
def get_chat(chat_id, **kwargs):
"""
Use this method to get up to date information about the chat (current name of the user for one-on-one conversations, current username
of a user, group or channel, etc.).
:param chat_id: Unique identifier for the target chat or username of the target channel (in the... | Use this method to get up to date information about the chat (current name of the user for one-on-one conversations, current username
of a user, group or channel, etc.).
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param kwargs: ... |
def get_next_of_type(self, processor_type):
"""Get the next available processor of a particular type and increment
its occupancy counter.
Args:
processor_type (ProcessorType): The processor type associated with
a zmq identity.
Returns:
(Processor... | Get the next available processor of a particular type and increment
its occupancy counter.
Args:
processor_type (ProcessorType): The processor type associated with
a zmq identity.
Returns:
(Processor): Information about the transaction processor |
def _build_validation_payload(self, request):
"""
Extract relevant information from request to build a ClientValidationJWT
:param PreparedRequest request: request we will extract information from.
:return: ValidationPayload
"""
parsed = urlparse(request.url)
path ... | Extract relevant information from request to build a ClientValidationJWT
:param PreparedRequest request: request we will extract information from.
:return: ValidationPayload |
def _mems_updated_cb(self):
"""Called when the memories have been identified"""
logger.info('Memories finished updating')
self.param.refresh_toc(self._param_toc_updated_cb, self._toc_cache) | Called when the memories have been identified |
def run(bam_file, data, out_dir):
"""Run viral QC analysis:
1. Extract the unmapped reads
2. BWA-MEM to the viral sequences from GDC database https://gdc.cancer.gov/about-data/data-harmonization-and-generation/gdc-reference-files
3. Report viruses that are in more than 50% covered by at least 5... | Run viral QC analysis:
1. Extract the unmapped reads
2. BWA-MEM to the viral sequences from GDC database https://gdc.cancer.gov/about-data/data-harmonization-and-generation/gdc-reference-files
3. Report viruses that are in more than 50% covered by at least 5x |
def clean_asciidoc(text):
r""" Transform asciidoc text into ASCII text that NL parsers can handle
TODO:
Tag lines and words with meta data like italics, underlined, bold, title, heading 1, etc
>>> clean_asciidoc('**Hello** _world_!')
'"Hello" "world"!'
"""
text = re.sub(r'(\b|^)[\[_*]{1,... | r""" Transform asciidoc text into ASCII text that NL parsers can handle
TODO:
Tag lines and words with meta data like italics, underlined, bold, title, heading 1, etc
>>> clean_asciidoc('**Hello** _world_!')
'"Hello" "world"!' |
def output_hist(self, output_hist: Hist, input_observable: Any, **kwargs: Dict[str, Any]) -> Union[Hist, Any]:
""" Return an output object. It should store the ``output_hist``.
Note:
The output object could just be the raw histogram.
Note:
This function is just a basic ... | Return an output object. It should store the ``output_hist``.
Note:
The output object could just be the raw histogram.
Note:
This function is just a basic placeholder which returns the given output object (a histogram)
and likely should be overridden.
Args:... |
def validate_address(value):
"""
Helper function for validating an address
"""
if is_bytes(value):
if not is_binary_address(value):
raise InvalidAddress("Address must be 20 bytes when input type is bytes", value)
return
if not isinstance(value, str):
raise TypeEr... | Helper function for validating an address |
def get_dot(stop=True):
"""Returns a string containing a DOT file. Setting stop to True will cause
the trace to stop.
"""
defaults = []
nodes = []
edges = []
# define default attributes
for comp, comp_attr in graph_attributes.items():
attr = ', '.join( '%s = "%s"' % (attr... | Returns a string containing a DOT file. Setting stop to True will cause
the trace to stop. |
def send_special_keys(self, value):
"""
Send special keys such as <enter> or <delete>
@rtype: WebElementWrapper
@return: Self
"""
def send_keys_element():
"""
Wrapper to send keys
"""
return self.element.send_keys(va... | Send special keys such as <enter> or <delete>
@rtype: WebElementWrapper
@return: Self |
def erosion(mapfile, dilated):
"""
We will continue to work with the modified Mapfile
If we wanted to start from scratch we could simply reread it
"""
ll = mappyfile.find(mapfile["layers"], "name", "line")
ll["status"] = "OFF"
pl = mappyfile.find(mapfile["layers"], "name", "polygon")
#... | We will continue to work with the modified Mapfile
If we wanted to start from scratch we could simply reread it |
def _get_si():
'''
Authenticate with vCenter server and return service instance object.
'''
url = config.get_cloud_config_value(
'url', get_configured_provider(), __opts__, search_global=False
)
username = config.get_cloud_config_value(
'user', get_configured_provider(), __opts_... | Authenticate with vCenter server and return service instance object. |
def root_task_parser():
"""
Returns a new *ArgumentParser* instance that only contains paremeter actions of the root task.
The returned instance is cached.
"""
global _root_task_parser
if _root_task_parser:
return _root_task_parser
luigi_parser = luigi.cmdline_parser.CmdlineParser.... | Returns a new *ArgumentParser* instance that only contains paremeter actions of the root task.
The returned instance is cached. |
def binned_bitsets_from_list( list=[] ):
"""Read a list into a dictionary of bitsets"""
last_chrom = None
last_bitset = None
bitsets = dict()
for l in list:
chrom = l[0]
if chrom != last_chrom:
if chrom not in bitsets:
bitsets[chrom] = BinnedBitSet(MAX)
... | Read a list into a dictionary of bitsets |
def reset(self):
"""
Reset the state of the sandbox.
http://docs.fiesta.cc/sandbox.html#post--reset
"""
path = 'reset'
request_data = {} # Need to put data into the request to force urllib2 to make it a POST request
response_data = self.request(path, request_dat... | Reset the state of the sandbox.
http://docs.fiesta.cc/sandbox.html#post--reset |
def bbin(obj: Union[str, Element]) -> str:
""" Boldify built in types
@param obj: object name or id
@return:
"""
return obj.name if isinstance(obj, Element ) else f'**{obj}**' if obj in builtin_names else obj | Boldify built in types
@param obj: object name or id
@return: |
def make_vbox_dirs(max_vbox_id, output_dir, topology_name):
"""
Create VirtualBox working directories if required
:param int max_vbox_id: Number of directories to create
:param str output_dir: Output directory
:param str topology_name: Topology name
"""
if max_vbox_id is not None:
f... | Create VirtualBox working directories if required
:param int max_vbox_id: Number of directories to create
:param str output_dir: Output directory
:param str topology_name: Topology name |
def background(cl, proto=EchoProcess, **kw):
"""
Use the reactor to run a process in the background.
Keep the pid around.
``proto'' may be any callable which returns an instance of ProcessProtocol
"""
if isinstance(cl, basestring):
cl = shlex.split(cl)
if not cl[0].startswith('/')... | Use the reactor to run a process in the background.
Keep the pid around.
``proto'' may be any callable which returns an instance of ProcessProtocol |
def convex_conj(self):
"""Convex conjugate of the nuclear norm.
The convex conjugate is the indicator function on the unit ball of
the dual norm where the dual norm is obtained by taking the conjugate
exponent of both the outer and singular vector exponents.
"""
return I... | Convex conjugate of the nuclear norm.
The convex conjugate is the indicator function on the unit ball of
the dual norm where the dual norm is obtained by taking the conjugate
exponent of both the outer and singular vector exponents. |
def DeletePermission(self, permission_link, options=None):
"""Deletes a permission.
:param str permission_link:
The link to the permission.
:param dict options:
The request options for the request.
:return:
The deleted Permission.
:rtype:
... | Deletes a permission.
:param str permission_link:
The link to the permission.
:param dict options:
The request options for the request.
:return:
The deleted Permission.
:rtype:
dict |
def repr_part(self):
"""String usable in a space's ``__repr__`` method."""
optargs = [('weighting', self.const, 1.0),
('exponent', self.exponent, 2.0)]
return signature_string([], optargs, mod=':.4') | String usable in a space's ``__repr__`` method. |
def undo(self, hard=False):
"""Makes last commit not exist"""
if not self.fake:
return self.repo.git.reset('HEAD^', working_tree=hard)
else:
click.echo(crayons.red('Faked! >>> git reset {}{}'
.format('--hard ' if hard else '', 'HEAD^'))... | Makes last commit not exist |
def run():
"""CLI main entry point."""
# Use print() instead of logging when running in CLI mode:
set_pyftpsync_logger(None)
parser = argparse.ArgumentParser(
description="Synchronize folders over FTP.",
epilog="See also https://github.com/mar10/pyftpsync",
parents=[ve... | CLI main entry point. |
def add_request_log_fields(
self, log_fields: LogFields,
call_details: Union[grpc.HandlerCallDetails,
grpc.ClientCallDetails]
):
"""Add log fields related to a request to the provided log fields
:param log_fields: log fields instance to which ... | Add log fields related to a request to the provided log fields
:param log_fields: log fields instance to which to add the fields
:param call_details: some information regarding the call |
def output_callback(self, line, kill_switch):
"""Set status of openvpn according to what we process"""
self.notifications += line + "\n"
if "Initialization Sequence Completed" in line:
self.started = True
if "ERROR:" in line or "Cannot resolve host address:" in line:
... | Set status of openvpn according to what we process |
def drawAxis(self, painter, rect, axis):
"""
Draws the axis for the given painter.
:param painter | <QPainter>
rect | <QRect>
"""
if not axis:
return
# draw the axis lines
painter.save()
... | Draws the axis for the given painter.
:param painter | <QPainter>
rect | <QRect> |
def focusout(self, event):
"""Change style on focus out events."""
bc = self.style.lookup("TEntry", "bordercolor", ("!focus",))
dc = self.style.lookup("TEntry", "darkcolor", ("!focus",))
lc = self.style.lookup("TEntry", "lightcolor", ("!focus",))
self.style.configure("%s.spinbox.... | Change style on focus out events. |
def console_new(w: int, h: int) -> tcod.console.Console:
"""Return an offscreen console of size: w,h.
.. deprecated:: 8.5
Create new consoles using :any:`tcod.console.Console` instead of this
function.
"""
return tcod.console.Console(w, h) | Return an offscreen console of size: w,h.
.. deprecated:: 8.5
Create new consoles using :any:`tcod.console.Console` instead of this
function. |
def repl_proc(self, inputstring, log=True, **kwargs):
"""Process using replprocs."""
return self.apply_procs(self.replprocs, kwargs, inputstring, log=log) | Process using replprocs. |
def discrete(self, vertices, scale=1.0):
"""
Discretize into a world- space path.
Parameters
------------
vertices: (n, dimension) float
Points in space
scale : float
Size of overall scene for numerical comparisons
Returns
-----------... | Discretize into a world- space path.
Parameters
------------
vertices: (n, dimension) float
Points in space
scale : float
Size of overall scene for numerical comparisons
Returns
-------------
discrete: (m, dimension) float
Path in s... |
def analyze_cluster_size_per_scan_parameter(input_file_hits, output_file_cluster_size, parameter='GDAC', max_chunk_size=10000000, overwrite_output_files=False, output_pdf=None):
''' This method takes multiple hit files and determines the cluster size for different scan parameter values of
Parameters
-----... | This method takes multiple hit files and determines the cluster size for different scan parameter values of
Parameters
----------
input_files_hits: string
output_file_cluster_size: string
The data file with the results
parameter: string
The name of the parameter to separate the dat... |
def get_excluded_categories():
"""Get excluded category IDs."""
from indico_livesync.plugin import LiveSyncPlugin
return {int(x['id']) for x in LiveSyncPlugin.settings.get('excluded_categories')} | Get excluded category IDs. |
def del_value(self, keys, complete=False, on_projects=False,
on_globals=False, projectname=None, base='', dtype=None,
**kwargs):
"""
Delete a value in the configuration
Parameters
----------
keys: list of str
A list of keys to be d... | Delete a value in the configuration
Parameters
----------
keys: list of str
A list of keys to be deleted. %(get_value_note)s
%(ModelOrganizer.info.common_params)s
base: str
A base string that shall be put in front of each key in `values` to
av... |
def get_instance(uri):
"""Return an instance of MediaFile."""
global _instances
try:
instance = _instances[uri]
except KeyError:
instance = MediaFile(
uri,
client.get_instance()
)
_instances[uri] = instance
return instance | Return an instance of MediaFile. |
def get_pixel(self, x: int, y: int) -> Tuple[int, int, int]:
"""Get the color of a pixel in this Image.
Args:
x (int): X pixel of the Image. Starting from the left at 0.
y (int): Y pixel of the Image. Starting from the top at 0.
Returns:
Tuple[int, int, in... | Get the color of a pixel in this Image.
Args:
x (int): X pixel of the Image. Starting from the left at 0.
y (int): Y pixel of the Image. Starting from the top at 0.
Returns:
Tuple[int, int, int]:
An (r, g, b) tuple containing the pixels color value... |
def replace_label(self, oldLabel, newLabel):
""" Replaces old label with a new one
"""
if oldLabel == newLabel:
return
tmp = re.compile(r'\b' + oldLabel + r'\b')
last = 0
l = len(newLabel)
while True:
match = tmp.search(self.asm[last:])
... | Replaces old label with a new one |
def _get_maxcov_downsample(data):
"""Calculate maximum coverage downsampling for whole genome samples.
Returns None if we're not doing downsampling.
"""
from bcbio.bam import ref
from bcbio.ngsalign import alignprep, bwa
from bcbio.variation import coverage
fastq_file = data["files"][0]
... | Calculate maximum coverage downsampling for whole genome samples.
Returns None if we're not doing downsampling. |
def get(self, **params):
'''
Returns details for a specific offer.
.. code-block:: python
amadeus.shopping.hotel_offer('XXX').get
:rtype: amadeus.Response
:raises amadeus.ResponseError: if the request could not be completed
'''
return self.client.ge... | Returns details for a specific offer.
.. code-block:: python
amadeus.shopping.hotel_offer('XXX').get
:rtype: amadeus.Response
:raises amadeus.ResponseError: if the request could not be completed |
def datetime_to_ns(then):
"""Transform a :any:`datetime.datetime` into a NationStates-style
string.
For example "6 days ago", "105 minutes ago", etc.
"""
if then == datetime(1970, 1, 1, 0, 0):
return 'Antiquity'
now = datetime.utcnow()
delta = now - then
seconds = delta.total_s... | Transform a :any:`datetime.datetime` into a NationStates-style
string.
For example "6 days ago", "105 minutes ago", etc. |
def to_df(self) -> pd.DataFrame:
"""Convert to pandas dataframe."""
df = pd.DataFrame(index=RangeIndex(0, self.shape[0], name=None))
for key in self.keys():
value = self[key]
for icolumn, column in enumerate(value.T):
df['{}{}'.format(key, icolumn+1)] = co... | Convert to pandas dataframe. |
def _get_object_as_soft(self):
"""Get object as SOFT formatted string."""
soft = []
if self.database is not None:
soft.append(self.database._get_object_as_soft())
soft += ["^%s = %s" % (self.geotype, self.name),
self._get_metadata_as_string()]
for gsm... | Get object as SOFT formatted string. |
def assert_no_js_errors(self):
""" Asserts that there are no JavaScript "SEVERE"-level page errors.
Works ONLY for Chrome (non-headless) and Chrome-based browsers.
Does NOT work on Firefox, Edge, IE, and some other browsers:
* See https://github.com/SeleniumHQ/selenium/is... | Asserts that there are no JavaScript "SEVERE"-level page errors.
Works ONLY for Chrome (non-headless) and Chrome-based browsers.
Does NOT work on Firefox, Edge, IE, and some other browsers:
* See https://github.com/SeleniumHQ/selenium/issues/1161
Based on the followin... |
def register_languages():
"""Register all supported languages to ensure compatibility."""
for language in set(SUPPORTED_LANGUAGES) - {"en"}:
language_stemmer = partial(nltk_stemmer, get_language_stemmer(language))
Pipeline.register_function(language_stemmer, "stemmer-{}".format(language)) | Register all supported languages to ensure compatibility. |
def update(cls, whitelist_sdd_id, monetary_account_paying_id=None,
maximum_amount_per_month=None, custom_headers=None):
"""
:type user_id: int
:type whitelist_sdd_id: int
:param monetary_account_paying_id: ID of the monetary account of which
you want to pay from.
... | :type user_id: int
:type whitelist_sdd_id: int
:param monetary_account_paying_id: ID of the monetary account of which
you want to pay from.
:type monetary_account_paying_id: int
:param maximum_amount_per_month: The maximum amount of money that is
allowed to be deducted ba... |
def stop(self):
"""Stop this gateway agent."""
if self._disconnector:
self._disconnector.stop()
self.client.disconnect() | Stop this gateway agent. |
def upload_rpm(rpm_path, repoid, connector, callback=None):
"""upload an rpm into pulp
rpm_path: path to an rpm
connector: the connector to use for interacting with pulp
callback: Optional callback to call after an RPM is
uploaded. Callback should accept one argument, the name of the RPM
which... | upload an rpm into pulp
rpm_path: path to an rpm
connector: the connector to use for interacting with pulp
callback: Optional callback to call after an RPM is
uploaded. Callback should accept one argument, the name of the RPM
which was uploaded |
def conv_precip_frac(precip_largescale, precip_convective):
"""Fraction of total precip that is from convection parameterization.
Parameters
----------
precip_largescale, precip_convective : xarray.DataArrays
Precipitation from grid-scale condensation and from convective
parameterizatio... | Fraction of total precip that is from convection parameterization.
Parameters
----------
precip_largescale, precip_convective : xarray.DataArrays
Precipitation from grid-scale condensation and from convective
parameterization, respectively.
Returns
-------
xarray.DataArray |
def render_template(content, variables):
"""
Return a bytestring representing a templated file based on the
input (content) and the variable names defined (vars).
"""
fsenc = sys.getfilesystemencoding()
def to_native(s, encoding='latin-1', errors='strict'):
if six.PY3:
if is... | Return a bytestring representing a templated file based on the
input (content) and the variable names defined (vars). |
def start_service(conn, service='ceph'):
"""
Stop a service on a remote host depending on the type of init system.
Obviously, this should be done for RHEL/Fedora/CentOS systems.
This function does not do any kind of detection.
"""
if is_systemd(conn):
remoto.process.run(
con... | Stop a service on a remote host depending on the type of init system.
Obviously, this should be done for RHEL/Fedora/CentOS systems.
This function does not do any kind of detection. |
def ext_pillar(minion_id, # pylint: disable=W0613
pillar, # pylint: disable=W0613
config_file):
'''
Execute LDAP searches and return the aggregated data
'''
config_template = None
try:
config_template = _render_template(config_file)
except jinja2.exception... | Execute LDAP searches and return the aggregated data |
def adjust_hours_view(request, semester):
"""
Adjust members' workshift hours requirements.
"""
page_name = "Adjust Hours"
pools = WorkshiftPool.objects.filter(semester=semester).order_by(
"-is_primary", "title",
)
workshifters = WorkshiftProfile.objects.filter(semester=semester)
... | Adjust members' workshift hours requirements. |
def find_trigger_value(psd_var, idx, start, sample_rate):
""" Find the PSD variation value at a particular time
Parameters
----------
psd_var : TimeSeries
Time series of the varaibility in the PSD estimation
idx : numpy.ndarray
Time indices of the triggers
start : float
... | Find the PSD variation value at a particular time
Parameters
----------
psd_var : TimeSeries
Time series of the varaibility in the PSD estimation
idx : numpy.ndarray
Time indices of the triggers
start : float
GPS start time
sample_rate : float
Sample rate defined... |
def next_basis_label_or_index(self, label_or_index, n=1):
"""Given the label or index of a basis state, return the label/index of
the next basis state.
More generally, if `n` is given, return the `n`'th next basis state
label/index; `n` may also be negative to obtain previous basis stat... | Given the label or index of a basis state, return the label/index of
the next basis state.
More generally, if `n` is given, return the `n`'th next basis state
label/index; `n` may also be negative to obtain previous basis state
labels/indices.
The return type is the same as the... |
def __execute_rot(self, surface):
"""Executes the rotating operation"""
self.image = pygame.transform.rotate(surface, self.__rotation)
self.__resize_surface_extents() | Executes the rotating operation |
def set_elapsed_time(self, client):
"""Set elapsed time for slave clients."""
related_clients = self.get_related_clients(client)
for cl in related_clients:
if cl.timer is not None:
client.create_time_label()
client.t0 = cl.t0
cli... | Set elapsed time for slave clients. |
def flatten (d, *keys):
"""Flattens the dictionary d by merging keys in order such that later
keys take precedence over earlier keys.
"""
flat = { }
for k in keys:
flat = merge(flat, d.pop(k, { }))
return flat | Flattens the dictionary d by merging keys in order such that later
keys take precedence over earlier keys. |
def create_widget(self):
""" Create the underlying widget.
A dialog is not a subclass of view, hence we don't set name as widget
or children will try to use it as their parent.
"""
d = self.declaration
self.dialog = Dialog(self.get_context(), d.style) | Create the underlying widget.
A dialog is not a subclass of view, hence we don't set name as widget
or children will try to use it as their parent. |
def balance(self):
"""Returns a tuple of (total amount deposited, total amount
withdrawn)."""
sin = Decimal("0.00")
sout = Decimal("0.00")
for t in self.trans:
if t.amount < Decimal("0.00"):
sout += t.amount
else:
sin += t.... | Returns a tuple of (total amount deposited, total amount
withdrawn). |
def get_plain_image_as_widget(self):
"""Used for generating thumbnails. Does not include overlaid
graphics.
"""
pixbuf = self.get_plain_image_as_pixbuf()
image = Gtk.Image()
image.set_from_pixbuf(pixbuf)
image.show()
return image | Used for generating thumbnails. Does not include overlaid
graphics. |
def deploy_clone_from_vm(self, context, deploy_action, cancellation_context):
"""
Deploy Cloned VM From VM Command, will deploy vm from template
:param CancellationContext cancellation_context:
:param ResourceCommandContext context: the context of the command
:param DeployApp de... | Deploy Cloned VM From VM Command, will deploy vm from template
:param CancellationContext cancellation_context:
:param ResourceCommandContext context: the context of the command
:param DeployApp deploy_action:
:return DeployAppResult deploy results |
def polygon_from_points(points):
"""
Constructs a numpy-compatible polygon from a page representation.
"""
polygon = []
for pair in points.split(" "):
x_y = pair.split(",")
polygon.append([float(x_y[0]), float(x_y[1])])
return polygon | Constructs a numpy-compatible polygon from a page representation. |
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: ... | Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection |
def make_folium_polyline(edge, edge_color, edge_width, edge_opacity, popup_attribute=None):
"""
Turn a row from the gdf_edges GeoDataFrame into a folium PolyLine with
attributes.
Parameters
----------
edge : GeoSeries
a row from the gdf_edges GeoDataFrame
edge_color : string
... | Turn a row from the gdf_edges GeoDataFrame into a folium PolyLine with
attributes.
Parameters
----------
edge : GeoSeries
a row from the gdf_edges GeoDataFrame
edge_color : string
color of the edge lines
edge_width : numeric
width of the edge lines
edge_opacity : num... |
def format(self, record) -> str:
"""
:type record: aiologger.loggers.json.LogRecord
"""
msg = dict(self.formatter_fields_for_record(record))
if record.flatten and isinstance(record.msg, dict):
msg.update(record.msg)
else:
msg[MSG_FIELDNAME] = recor... | :type record: aiologger.loggers.json.LogRecord |
def _get_covariance(self, X):
"""Compute covariance matrix with transformed data.
Args:
X: `numpy.ndarray` or `pandas.DataFrame`.
Returns:
np.ndarray
"""
result = pd.DataFrame(index=range(len(X)))
column_names = self.get_column_names(X)
... | Compute covariance matrix with transformed data.
Args:
X: `numpy.ndarray` or `pandas.DataFrame`.
Returns:
np.ndarray |
def _initial_broks(self, broker_name):
"""Get initial_broks from the scheduler
This is used by the brokers to prepare the initial status broks
This do not send broks, it only makes scheduler internal processing. Then the broker
must use the *_broks* API to get all the stuff
:p... | Get initial_broks from the scheduler
This is used by the brokers to prepare the initial status broks
This do not send broks, it only makes scheduler internal processing. Then the broker
must use the *_broks* API to get all the stuff
:param broker_name: broker name, used to filter brok... |
def prior_groups(self):
"""get the prior info groups
Returns
-------
prior_groups : list
a list of prior information groups
"""
og = list(self.prior_information.groupby("obgnme").groups.keys())
#og = list(map(pst_utils.SFMT, og))
return og | get the prior info groups
Returns
-------
prior_groups : list
a list of prior information groups |
def _scale(self, mode):
"""
Returns value scaling coefficient for the given mode.
"""
if mode in self._mode_scale:
scale = self._mode_scale[mode]
else:
scale = 10**(-self.decimals)
self._mode_scale[mode] = scale
return scale | Returns value scaling coefficient for the given mode. |
def __get_percpu(self):
"""Update and/or return the per CPU list using the psutil library."""
# Never update more than 1 time per cached_time
if self.timer_percpu.finished():
self.percpu_percent = []
for cpu_number, cputimes in enumerate(psutil.cpu_times_percent(interval=... | Update and/or return the per CPU list using the psutil library. |
def write_bytes(out_data, encoding="ascii"):
"""Write Python2 and Python3 compatible byte stream."""
if sys.version_info[0] >= 3:
if isinstance(out_data, type("")):
if encoding == "utf-8":
return out_data.encode("utf-8")
else:
return out_data.encod... | Write Python2 and Python3 compatible byte stream. |
def get_create_base_agent(self, agent):
"""Return base agent with given name, creating it if needed."""
try:
base_agent = self.agents[_n(agent.name)]
except KeyError:
base_agent = BaseAgent(_n(agent.name))
self.agents[_n(agent.name)] = base_agent
# If... | Return base agent with given name, creating it if needed. |
def handle_call_response(self, result, node):
"""
If we get a response, add the node to the routing table. If
we get no response, make sure it's removed from the routing table.
"""
if not result[0]:
log.warning("no response from %s, removing from router", node)
... | If we get a response, add the node to the routing table. If
we get no response, make sure it's removed from the routing table. |
def get_user_modules(self):
"""
Search configured include directories for user provided modules.
user_modules: {
'weather_yahoo': ('~/i3/py3status/', 'weather_yahoo.py')
}
"""
user_modules = {}
for include_path in self.config["include_paths"]:
... | Search configured include directories for user provided modules.
user_modules: {
'weather_yahoo': ('~/i3/py3status/', 'weather_yahoo.py')
} |
def get_system_root_directory():
    """
    Get system root directory (application installed root directory).

    The root is taken to be the parent of the directory containing this
    module, resolved to an absolute path.

    Returns
    -------
    string
        A full path
    """
    package_dir = os.path.dirname(__file__)
    return os.path.abspath(os.path.dirname(package_dir))
Returns
-------
string
A full path |
def load_script(zap_helper, **options):
"""Load a script from a file."""
with zap_error_handler():
if not os.path.isfile(options['file_path']):
raise ZAPError('No file found at "{0}", cannot load script.'.format(options['file_path']))
if not _is_valid_script_engine(zap_helper.zap, o... | Load a script from a file. |
def max(self):
    """Maximum over the whole dataset, ignoring NaNs.

    The value is computed once with a chunkwise ``np.nanmax`` reduction
    and cached in ``self.attrs["max"]``; subsequent calls return the
    cached value.
    """
    if "max" not in self.attrs:
        def chunk_max(dataset, s):
            # per-chunk reduction handed to self.chunkwise
            return np.nanmax(dataset[s])
        self.attrs["max"] = np.nanmax(list(self.chunkwise(chunk_max).values()))
    return self.attrs["max"]
def clusterQueues(self):
""" Return a dict of queues in cluster and servers running them
"""
servers = yield self.getClusterServers()
queues = {}
for sname in servers:
qs = yield self.get('rhumba.server.%s.queues' % sname)
uuid = yield self.get('rhumba.s... | Return a dict of queues in cluster and servers running them |
def get(self, name, default=None):
"""Get the value at ``name`` for this :class:`Config` container
The returned value is obtained from:
* the value at ``name`` in the :attr:`settings` dictionary
if available.
* the value at ``name`` in the :attr:`params` dictionary if availab... | Get the value at ``name`` for this :class:`Config` container
The returned value is obtained from:
* the value at ``name`` in the :attr:`settings` dictionary
if available.
* the value at ``name`` in the :attr:`params` dictionary if available.
* the ``default`` value. |
def search_variant_annotations(
self, variant_annotation_set_id, reference_name="",
reference_id="", start=0, end=0, effects=[]):
"""
Returns an iterator over the Variant Annotations fulfilling
the specified conditions from the specified VariantSet.
:param str va... | Returns an iterator over the Variant Annotations fulfilling
the specified conditions from the specified VariantSet.
:param str variant_annotation_set_id: The ID of the
:class:`ga4gh.protocol.VariantAnnotationSet` of interest.
:param int start: Required. The beginning of the window (... |
def bootstrap(directory='.',
config='buildout.cfg',
python=sys.executable,
onlyif=None,
unless=None,
runas=None,
env=(),
distribute=None,
buildout_ver=None,
test_release=False,
off... | Run the buildout bootstrap dance (python bootstrap.py).
directory
directory to execute in
config
alternative buildout configuration file to use
runas
User used to run buildout as
env
environment variables to set when running
buildout_ver
force a specific ... |
def net_query(name: str) -> Constants:
'''Find the NetworkParams for a network by its long or short name. Raises
UnsupportedNetwork if no NetworkParams is found.
'''
for net_params in networks:
if name in (net_params.name, net_params.shortname,):
return net_params
raise Unsuppo... | Find the NetworkParams for a network by its long or short name. Raises
UnsupportedNetwork if no NetworkParams is found. |
def setHoverIcon( self, column, icon ):
"""
Returns the icon to use when coloring when the user hovers over
the item for the given column.
:param column | <int>
icon | <QtGui.QIcon)
"""
self._hoverIcon[column] = QtGui.QIcon(icon... | Returns the icon to use when coloring when the user hovers over
the item for the given column.
:param column | <int>
icon | <QtGui.QIcon) |
def complex_to_real(complex_fid):
"""
Standard optimization routines as used in lmfit require real data. This
function takes a complex FID and constructs a real version by concatenating
the imaginary part to the complex part. The imaginary part is also reversed
to keep the maxima at each end of the ... | Standard optimization routines as used in lmfit require real data. This
function takes a complex FID and constructs a real version by concatenating
the imaginary part to the complex part. The imaginary part is also reversed
to keep the maxima at each end of the FID and avoid discontinuities in the
cente... |
def extract_gcc_binaries():
"""Try to find GCC on OSX for OpenMP support."""
patterns = ['/opt/local/bin/g++-mp-[0-9].[0-9]',
'/opt/local/bin/g++-mp-[0-9]',
'/usr/local/bin/g++-[0-9].[0-9]',
'/usr/local/bin/g++-[0-9]']
if 'darwin' in platform.platform().lower(... | Try to find GCC on OSX for OpenMP support. |
def _build_index(maf_strm, ref_spec):
"""Build an index for a MAF genome alig file and return StringIO of it."""
idx_strm = StringIO.StringIO()
bound_iter = functools.partial(genome_alignment_iterator,
reference_species=ref_spec)
hash_func = JustInTimeGenomeAlignmentBlock.build_... | Build an index for a MAF genome alig file and return StringIO of it. |
def _rectify_countdown_or_bool(count_or_bool):
"""
used by recrusive functions to specify which level to turn a bool on in
counting down yeilds True, True, ..., False
conting up yeilds False, False, False, ... True
Args:
count_or_bool (bool or int): if positive will count down, if negative
... | used by recrusive functions to specify which level to turn a bool on in
counting down yields True, True, ..., False
counting up yields False, False, False, ... True
Args:
count_or_bool (bool or int): if positive will count down, if negative
will count up, if bool will remain same
Re... |
def handle_key_cache(self):
'''
Evaluate accepted keys and create a msgpack file
which contains a list
'''
if self.opts['key_cache'] == 'sched':
keys = []
#TODO DRY from CKMinions
if self.opts['transport'] in ('zeromq', 'tcp'):
... | Evaluate accepted keys and create a msgpack file
which contains a list |
def report_change(self, name, value, maxdiff=1, deltat=10):
'''report a sensor change'''
r = self.reports[name]
if time.time() < r.last_report + deltat:
return
r.last_report = time.time()
if math.fabs(r.value - value) < maxdiff:
return
r.value = va... | report a sensor change |
def apply(self, coro_function, args=None, kwargs=None, callback=None):
"""Submit a coro_function(*args, **kwargs) as NewTask to self.loop with loop.frequncy control.
::
from torequests.dummy import Loop
import asyncio
loop = Loop()
async def test(i):
... | Submit a coro_function(*args, **kwargs) as NewTask to self.loop with loop.frequncy control.
::
from torequests.dummy import Loop
import asyncio
loop = Loop()
async def test(i):
result = await asyncio.sleep(1)
return (loop.frequen... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.