text stringlengths 81 112k |
|---|
Decode a JSON symbol to Python string.
def _parse_canonical_symbol(doc):
    """Decode a JSON symbol to Python string.

    Expects ``{"$symbol": <value>}`` with no other keys; raises KeyError
    if ``$symbol`` is absent and TypeError on extra fields.
    """
    # Read first so a missing key surfaces as KeyError, as before.
    symbol = doc['$symbol']
    extra_fields = len(doc) - 1
    if extra_fields:
        raise TypeError('Bad $symbol, extra field(s): %s' % (doc,))
    return text_type(symbol)
Decode a JSON code to bson.code.Code.
def _parse_canonical_code(doc):
    """Decode a JSON code to bson.code.Code.

    Accepts ``$code`` plus an optional ``$scope``; any other key is a
    TypeError.
    """
    allowed = ('$code', '$scope')
    if any(field not in allowed for field in doc):
        raise TypeError('Bad $code, extra field(s): %s' % (doc,))
    return Code(doc['$code'], scope=doc.get('$scope'))
Decode a JSON regex to bson.regex.Regex.
def _parse_canonical_regex(doc):
"""Decode a JSON regex to bson.regex.Regex."""
regex = doc['$regularExpression']
if len(doc) != 1:
raise TypeError('Bad $regularExpression, extra field(s): %s' % (doc,))
if len(regex) != 2:
raise TypeError('Bad $r... |
Decode a JSON DBRef to bson.dbref.DBRef.
def _parse_canonical_dbref(doc):
"""Decode a JSON DBRef to bson.dbref.DBRef."""
for key in doc:
if key.startswith('$') and key not in _DBREF_KEYS:
# Other keys start with $, so dct cannot be parsed as a DBRef.
return doc
return DBRef(... |
Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef.
def _parse_canonical_dbpointer(doc):
"""Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef."""
dbref = doc['$dbPointer']
if len(doc) != 1:
raise TypeError('Bad $dbPointer, extra field(s): %s' % (doc,))
if isinstance(dbref, DBRef):
... |
Decode a JSON int32 to python int.
def _parse_canonical_int32(doc):
"""Decode a JSON int32 to python int."""
i_str = doc['$numberInt']
if len(doc) != 1:
raise TypeError('Bad $numberInt, extra field(s): %s' % (doc,))
if not isinstance(i_str, string_type):
raise TypeError('$numberInt must... |
Decode a JSON int64 to bson.int64.Int64.
def _parse_canonical_int64(doc):
    """Decode a JSON int64 to bson.int64.Int64.

    Expects the canonical extended JSON shape ``{"$numberLong": "<digits>"}``.

    Raises TypeError if extra fields are present or if the value is not a
    string.
    """
    l_str = doc['$numberLong']
    if len(doc) != 1:
        raise TypeError('Bad $numberLong, extra field(s): %s' % (doc,))
    # Consistency fix: the sibling parsers (_parse_canonical_int32 and
    # _parse_canonical_double) reject non-string payloads, and the canonical
    # extended JSON spec requires $numberLong's value to be a string.
    if not isinstance(l_str, string_type):
        raise TypeError('$numberLong must be string: %s' % (doc,))
    return Int64(l_str)
Decode a JSON double to python float.
def _parse_canonical_double(doc):
"""Decode a JSON double to python float."""
d_str = doc['$numberDouble']
if len(doc) != 1:
raise TypeError('Bad $numberDouble, extra field(s): %s' % (doc,))
if not isinstance(d_str, string_type):
raise TypeError('$n... |
Decode a JSON decimal128 to bson.decimal128.Decimal128.
def _parse_canonical_decimal128(doc):
"""Decode a JSON decimal128 to bson.decimal128.Decimal128."""
d_str = doc['$numberDecimal']
if len(doc) != 1:
raise TypeError('Bad $numberDecimal, extra field(s): %s' % (doc,))
if not isinstance(d_str,... |
Decode a JSON MinKey to bson.min_key.MinKey.
def _parse_canonical_minkey(doc):
"""Decode a JSON MinKey to bson.min_key.MinKey."""
if doc['$minKey'] is not 1:
raise TypeError('$minKey value must be 1: %s' % (doc,))
if len(doc) != 1:
raise TypeError('Bad $minKey, extra field(s): %s' % (doc,))... |
Decode a JSON MaxKey to bson.max_key.MaxKey.
def _parse_canonical_maxkey(doc):
"""Decode a JSON MaxKey to bson.max_key.MaxKey."""
if doc['$maxKey'] is not 1:
raise TypeError('$maxKey value must be 1: %s', (doc,))
if len(doc) != 1:
raise TypeError('Bad $minKey, extra field(s): %s' % (doc,))
... |
Parse authentication credentials.
def _parse_credentials(username, password, database, options):
"""Parse authentication credentials."""
mechanism = options.get('authmechanism', 'DEFAULT')
if username is None and mechanism != 'MONGODB-X509':
return None
source = options.get('authsource', databa... |
Parse read preference options.
def _parse_read_preference(options):
"""Parse read preference options."""
if 'read_preference' in options:
return options['read_preference']
name = options.get('readpreference', 'primary')
mode = read_pref_mode_from_name(name)
tags = options.get('readpreferen... |
Parse connection pool options.
def _parse_pool_options(options):
"""Parse connection pool options."""
max_pool_size = options.get('maxpoolsize', common.MAX_POOL_SIZE)
min_pool_size = options.get('minpoolsize', common.MIN_POOL_SIZE)
max_idle_time_ms = options.get('maxidletimems', common.MAX_IDLE_TIME_MS... |
Validate all `service_checks.json` files.
def service_checks():
"""Validate all `service_checks.json` files."""
root = get_root()
echo_info("Validating all service_checks.json files...")
failed_checks = 0
ok_checks = 0
for check_name in sorted(os.listdir(root)):
service_checks_file = os... |
Convenience wrapper to take a series of date/time elements and return a WMI time
of the form `yyyymmddHHMMSS.mmmmmm+UUU`. All elements may be int, string or
omitted altogether. If omitted, they will be replaced in the output string
by a series of stars of the appropriate length.
:param year: The year el... |
Convenience wrapper to take a WMI datetime string of the form
yyyymmddHHMMSS.mmmmmm+UUU and return a 9-tuple containing the
individual elements, or None where string contains placeholder
stars.
:param wmi_time: The WMI datetime string in `yyyymmddHHMMSS.mmmmmm+UUU` format
:returns: A 9-tuple of (y... |
Format `tag_query` or raise on incorrect parameters.
def _format_tag_query(self, sampler, wmi_obj, tag_query):
"""
Format `tag_query` or raise on incorrect parameters.
"""
try:
link_source_property = int(wmi_obj[tag_query[0]])
target_class = tag_query[1]
... |
Design a query based on the given WMIObject to extract a tag.
Returns: tag or TagQueryUniquenessFailure exception.
def _get_tag_query_tag(self, sampler, wmi_obj, tag_query):
"""
Design a query based on the given WMIObject to extract a tag.
Returns: tag or TagQueryUniquenessFailure exc... |
Extract and tag metrics from the WMISampler.
Raise when multiple WMIObject were returned by the sampler with no `tag_by` specified.
Returns: List of WMIMetric
```
[
WMIMetric("freemegabytes", 19742, ["name:_total"]),
WMIMetric("avgdiskbytesperwrite", 1536, ["nam... |
Resolve metric names and types and submit it.
def _submit_metrics(self, metrics, metric_name_and_type_by_property):
"""
Resolve metric names and types and submit it.
"""
for metric in metrics:
if (
metric.name not in metric_name_and_type_by_property
... |
Return an index key for a given instance. Useful for caching.
def _get_instance_key(self, host, namespace, wmi_class, other=None):
"""
Return an index key for a given instance. Useful for caching.
"""
if other:
return "{host}:{namespace}:{wmi_class}-{other}".format(
... |
Create and cache a WMISampler for the given (class, properties)
def _get_wmi_sampler(self, instance_key, wmi_class, properties, tag_by="", **kwargs):
"""
Create and cache a WMISampler for the given (class, properties)
"""
properties = list(properties) + [tag_by] if tag_by else list(prop... |
Create and cache a (metric name, metric type) by WMI property map and a property list.
def _get_wmi_properties(self, instance_key, metrics, tag_queries):
"""
Create and cache a (metric name, metric type) by WMI property map and a property list.
"""
if instance_key not in self.wmi_props:... |
Submit a metric as a rate, additional tags provided will be added to
the ones from the label provided via the metrics object.
`custom_tags` is an array of 'tag:value' that will be added to the
metric when sending the rate to Datadog.
def _submit_rate(self, metric_name, val, metric, custom_tags... |
Submit a metric as a monotonic count, additional tags provided will be added to
the ones from the label provided via the metrics object.
`custom_tags` is an array of 'tag:value' that will be added to the
metric when sending the monotonic count to Datadog.
def _submit_monotonic_count(self, metr... |
Report all the metrics based on the configuration in instance
If a metric is not well configured or is not present in the payload,
continue processing metrics but log the information to the info page
def parse_expvar_data(self, data, tags, metrics, max_metrics, namespace):
'''
Report al... |
Allow to retrieve content nested inside a several layers deep dict/list
Examples: -content: {
"key1": {
"key2" : [
{
"name" : "object1",
... |
Split a host:port string into (host, int(port)) pair.
def partition_node(node):
"""Split a host:port string into (host, int(port)) pair."""
host = node
port = 27017
idx = node.rfind(':')
if idx != -1:
host, port = node[:idx], int(node[idx + 1:])
if host.startswith('['):
host = h... |
Validates that value is True, False, 'true', or 'false'.
def validate_boolean_or_string(option, value):
"""Validates that value is True, False, 'true', or 'false'."""
if isinstance(value, string_type):
if value not in ('true', 'false'):
raise ValueError("The value of %s must be "
... |
Validate that 'value' is a positive integer, which does not include 0.
def validate_positive_integer(option, value):
"""Validate that 'value' is a positive integer, which does not include 0.
"""
val = validate_integer(option, value)
if val <= 0:
raise ValueError("The value of %s must be "
... |
Validate that 'value' is a positive integer or 0.
def validate_non_negative_integer(option, value):
"""Validate that 'value' is a positive integer or 0.
"""
val = validate_integer(option, value)
if val < 0:
raise ValueError("The value of %s must be "
"a non negative int... |
Validates that 'value' is file-like and readable.
def validate_readable(option, value):
"""Validates that 'value' is file-like and readable.
"""
if value is None:
return value
# First make sure its a string py3.3 open(True, 'r') succeeds
# Used in ssl cert checking due to poor ssl module er... |
Validates that 'value' is an instance of `basestring` for Python 2
or `str` for Python 3.
def validate_string(option, value):
"""Validates that 'value' is an instance of `basestring` for Python 2
or `str` for Python 3.
"""
if isinstance(value, string_type):
return value
raise TypeError(... |
Validates that 'value' is a float, or can be converted to one, and is
positive.
def validate_positive_float(option, value):
"""Validates that 'value' is a float, or can be converted to one, and is
positive.
"""
errmsg = "%s must be an integer or float" % (option,)
try:
value = flo... |
Validates a timeout specified in milliseconds returning
a value in floating point seconds for the case where None is an error
and 0 is valid. Setting the timeout to nothing in the URI string is a
config error.
def validate_timeout_or_zero(option, value):
"""Validates a timeout specified in milliseconds... |
Validate the authMechanism URI option.
def validate_auth_mechanism(option, value):
"""Validate the authMechanism URI option.
"""
# CRAM-MD5 is for server testing only. Undocumented,
# unsupported, may be removed at any time. You have
# been warned.
if value not in MECHANISMS and value != 'CRAM-... |
Validate the uuid representation option selected in the URI.
def validate_uuid_representation(dummy, value):
"""Validate the uuid representation option selected in the URI.
"""
try:
return _UUID_REPRESENTATIONS[value]
except KeyError:
raise ValueError("%s is an invalid UUID representati... |
Parse readPreferenceTags if passed as a client kwarg.
def validate_read_preference_tags(name, value):
"""Parse readPreferenceTags if passed as a client kwarg.
"""
if not isinstance(value, list):
value = [value]
tag_sets = []
for tag_set in value:
if tag_set == '':
tag_s... |
Validate authMechanismProperties.
def validate_auth_mechanism_properties(option, value):
"""Validate authMechanismProperties."""
value = validate_string(option, value)
props = {}
for opt in value.split(','):
try:
key, val = opt.split(':')
except ValueError:
raise... |
Validate the document_class option.
def validate_document_class(option, value):
"""Validate the document_class option."""
if not issubclass(value, (collections.MutableMapping, RawBSONDocument)):
raise TypeError("%s must be dict, bson.son.SON, "
"bson.raw_bson.RawBSONDocument, or... |
Validate the type of method arguments that expect a MongoDB document.
def validate_is_document_type(option, value):
"""Validate the type of method arguments that expect a MongoDB document."""
if not isinstance(value, (collections.MutableMapping, RawBSONDocument)):
raise TypeError("%s must be an instanc... |
Validate the appname option.
def validate_appname_or_none(option, value):
"""Validate the appname option."""
if value is None:
return value
validate_string(option, value)
# We need length in bytes, so encode utf8 first.
if len(value.encode('utf-8')) > 128:
raise ValueError("%s must ... |
Validate a replacement document.
def validate_ok_for_replace(replacement):
"""Validate a replacement document."""
validate_is_mapping("replacement", replacement)
# Replacement can be {}
if replacement and not isinstance(replacement, RawBSONDocument):
first = next(iter(replacement))
if f... |
Validate an update document.
def validate_ok_for_update(update):
"""Validate an update document."""
validate_is_mapping("update", update)
# Update can not be {}
if not update:
raise ValueError('update only works with $ operators')
first = next(iter(update))
if not first.startswith('$'):... |
Validate the Unicode decode error handler option of CodecOptions.
def validate_unicode_decode_error_handler(dummy, value):
"""Validate the Unicode decode error handler option of CodecOptions.
"""
if value not in _UNICODE_DECODE_ERROR_HANDLERS:
raise ValueError("%s is an invalid Unicode decode error... |
Validate the tzinfo option
def validate_tzinfo(dummy, value):
    """Validate the tzinfo option.

    Accepts None or any ``datetime.tzinfo`` instance and returns it
    unchanged; anything else raises TypeError.
    """
    if value is None or isinstance(value, datetime.tzinfo):
        return value
    raise TypeError("%s must be an instance of datetime.tzinfo" % value)
Validate optional authentication parameters.
def validate_auth_option(option, value):
"""Validate optional authentication parameters.
"""
lower, value = validate(option, value)
if lower not in _AUTH_OPTIONS:
raise ConfigurationError('Unknown '
'authentication op... |
Generic validation function.
def validate(option, value):
    """Generic validation function.

    Looks up the validator for ``option`` (case-insensitively) in the
    VALIDATORS map; unknown options fall through to raise_config_error.
    Returns a ``(lowercased option name, validated value)`` tuple.
    """
    normalized = option.lower()
    run_validator = VALIDATORS.get(normalized, raise_config_error)
    return normalized, run_validator(option, value)
Validate each entry in options and raise a warning if it is not valid.
Returns a copy of options with invalid entries removed
def get_validated_options(options, warn=True):
"""Validate each entry in options and raise a warning if it is not valid.
Returns a copy of options with invalid entries removed
"... |
Take from a more recent redis.py, parse_info
def _parse_dict_string(self, string, key, default):
"""Take from a more recent redis.py, parse_info"""
try:
for item in string.split(','):
k, v = item.rsplit('=', 1)
if k == key:
try:
... |
Compute the length of the configured keys across all the databases
def _check_key_lengths(self, conn, instance, tags):
"""
Compute the length of the configured keys across all the databases
"""
key_list = instance.get('keys')
if key_list is None:
return
if ... |
Retrieve length and entries from Redis' SLOWLOG
This will parse through all entries of the SLOWLOG and select ones
within the time range between the last seen entries and now
def _check_slowlog(self, instance, custom_tags):
"""Retrieve length and entries from Redis' SLOWLOG
This will ... |
Get command-specific statistics from redis' INFO COMMANDSTATS command
def _check_command_stats(self, conn, tags):
"""Get command-specific statistics from redis' INFO COMMANDSTATS command
"""
try:
command_stats = conn.info("commandstats")
except Exception:
self.wa... |
Transform each CoreDNS instance into a OpenMetricsBaseCheck instance
def create_generic_instances(self, instances):
"""
Transform each CoreDNS instance into a OpenMetricsBaseCheck instance
"""
generic_instances = []
for instance in instances:
transformed_instance = s... |
Set up coredns instance so it can be used in OpenMetricsBaseCheck
def _create_core_dns_instance(self, instance):
"""
Set up coredns instance so it can be used in OpenMetricsBaseCheck
"""
endpoint = instance.get('prometheus_url')
if endpoint is None:
raise Configurati... |
Removes a project's build artifacts.
If `check` is not specified, the current working directory will be used.
All `*.pyc`/`*.pyd`/`*.pyo`/`*.whl` files and `__pycache__` directories will be
removed. Additionally, the following patterns will be removed from the root of
the path: `.cache`, `.coverage`, ... |
Hit a given URL and return the parsed json
def get(self, url, instance, service_check_tags, run_check=False):
"Hit a given URL and return the parsed json"
self.log.debug('Fetching CouchDB stats at url: %s' % url)
auth = None
if 'user' in instance and 'password' in instance:
... |
Convert a SON document to a normal Python dictionary instance.
This is trickier than just *dict(...)* because it needs to be
recursive.
def to_dict(self):
"""Convert a SON document to a normal Python dictionary instance.
This is trickier than just *dict(...)* because it needs to be
... |
Save a set of authentication credentials.
The credentials are used to login a socket whenever one is created.
If `connect` is True, verify the credentials on the server first.
def _cache_credentials(self, source, credentials, connect=False):
"""Save a set of authentication credentials.
... |
Test if `index` is cached.
def _cached(self, dbname, coll, index):
"""Test if `index` is cached."""
cache = self.__index_cache
now = datetime.datetime.utcnow()
with self.__index_cache_lock:
return (dbname in cache and
coll in cache[dbname] and
... |
Purge an index from the index cache.
If `index_name` is None purge an entire collection.
If `collection_name` is None purge an entire database.
def _purge_index(self, database_name,
collection_name=None, index_name=None):
"""Purge an index from the index cache.
I... |
An attribute of the current server's description.
If the client is not connected, this will block until a connection is
established or raise ServerSelectionTimeoutError if no server is
available.
Not threadsafe if used multiple times in a single method, since
the server may cha... |
(host, port) of the current standalone, primary, or mongos, or None.
Accessing :attr:`address` raises :exc:`~.errors.InvalidOperation` if
the client is load-balancing among mongoses, since there is no single
address. Use :attr:`nodes` instead.
If the client is not connected, this will ... |
Set of all currently connected servers.
.. warning:: When connected to a replica set the value of :attr:`nodes`
can change over time as :class:`MongoClient`'s view of the replica
set changes. :attr:`nodes` can also be an empty set when
:class:`MongoClient` is first instantiated an... |
Attempt to connect to a writable server, or return False.
def _is_writable(self):
"""Attempt to connect to a writable server, or return False.
"""
topology = self._get_topology() # Starts monitors if necessary.
try:
svr = topology.select_server(writable_server_selector)
... |
DEPRECATED - Set this client's cursor manager.
Raises :class:`TypeError` if `manager_class` is not a subclass of
:class:`~pymongo.cursor_manager.CursorManager`. A cursor manager
handles closing cursors. Different managers can implement different
policies in terms of when to actually kil... |
Send a message to MongoDB and return a Response.
:Parameters:
- `operation`: a _Query or _GetMore object.
- `read_preference` (optional): A ReadPreference.
- `exhaust` (optional): If True, the socket used stays checked out.
It is returned along with its Pool in the Res... |
Execute an operation. Reset the server on network error.
Returns fn()'s return value on success. On error, clears the server's
pool and marks the server Unknown.
Re-raises any exception thrown by fn().
def _reset_on_error(self, server, func, *args, **kwargs):
"""Execute an operation. ... |
Send a kill cursors message soon with the given id.
Raises :class:`TypeError` if `cursor_id` is not an instance of
``(int, long)``. What closing the cursor actually means
depends on this client's cursor manager.
This method may be called from a :class:`~pymongo.cursor.Cursor`
d... |
Send a kill cursors message with the given id.
What closing the cursor actually means depends on this client's
cursor manager. If there is none, the cursor is closed synchronously
on the current thread.
def _close_cursor_now(self, cursor_id, address=None):
"""Send a kill cursors messag... |
DEPRECATED - Send a kill cursors message soon with the given ids.
Raises :class:`TypeError` if `cursor_ids` is not an instance of
``list``.
:Parameters:
- `cursor_ids`: list of cursor ids to kill
- `address` (optional): (host, port) pair of the cursor's server.
... |
Drop a database.
Raises :class:`TypeError` if `name_or_database` is not an instance of
:class:`basestring` (:class:`str` in python 3) or
:class:`~pymongo.database.Database`.
:Parameters:
- `name_or_database`: the name of a database to drop, or a
:class:`~pymongo.d... |
DEPRECATED - Get the database named in the MongoDB connection URI.
>>> uri = 'mongodb://host/my_database'
>>> client = MongoClient(uri)
>>> db = client.get_default_database()
>>> assert db.name == 'my_database'
>>> db = client.get_database()
>>> assert db.name == 'my_dat... |
Get a :class:`~pymongo.database.Database` with the given name and
options.
Useful for creating a :class:`~pymongo.database.Database` with
different codec options, read preference, and/or write concern from
this :class:`MongoClient`.
>>> client.read_preference
Primar... |
Flush all pending writes to datafiles.
:Parameters:
Optional parameters can be passed as keyword arguments:
- `lock`: If True lock the server to disallow writes.
- `async`: If True don't block while synchronizing.
.. warning:: `async` and `lock` can not be use... |
Unlock a previously locked server.
def unlock(self):
"""Unlock a previously locked server.
"""
cmd = {"fsyncUnlock": 1}
with self._socket_for_writes() as sock_info:
if sock_info.max_wire_version >= 4:
try:
sock_info.command("admin", cmd)
... |
Collect metrics for the given gunicorn instance.
def check(self, instance):
""" Collect metrics for the given gunicorn instance. """
self.log.debug("Running instance: %s", instance)
custom_tags = instance.get('tags', [])
# Validate the config.
if not instance or self.PROC_NAME ... |
Return a psutil process for the master gunicorn process with the given name.
def _get_master_proc_by_name(self, name, tags):
""" Return a psutil process for the master gunicorn process with the given name. """
master_name = GUnicornCheck._get_master_proc_name(name)
master_procs = [p for p in ps... |
Limits the number of documents returned in one batch. Each batch
requires a round trip to the server. It can be adjusted to optimize
performance and limit data transfer.
.. note:: batch_size can not override MongoDB's internal limits on the
amount of data it will return to the client... |
Send a getmore message and handle the response.
def __send_message(self, operation):
"""Send a getmore message and handle the response.
"""
client = self.__collection.database.client
listeners = client._event_listeners
publish = listeners.enabled_for_commands
try:
... |
Refreshes the cursor with more data from the server.
Returns the length of self.__data after refresh. Will exit early if
self.__data is already non-empty. Raises OperationFailure when the
cursor cannot be refreshed due to an error on the query.
def _refresh(self):
"""Refreshes the curs... |
Advance the cursor.
def next(self):
    """Advance the cursor.

    Pops the next document from the local buffer, refreshing from the
    server first when the buffer is empty; raises StopIteration once no
    more data is available.
    """
    if not (self.__data or self._refresh()):
        raise StopIteration
    coll = self.__collection
    doc = self.__data.popleft()
    return coll.database._fix_outgoing(doc, coll)
Return True for disks we don't want or that match regex in the config file
def _exclude_disk(self, device, file_system, mount_point):
"""
Return True for disks we don't want or that match regex in the config file
"""
self.log.debug('_exclude_disk: {}, {}, {}'.format(device, file_system,... |
Given raw output for the df command, transform it into a normalized
list devices. A 'device' is a list with fields corresponding to the
output of df output on each platform.
def _list_devices(self, df_output):
"""
Given raw output for the df command, transform it into a normalized
... |
Compile regex strings from device_tag_re option and return list of compiled regex/tag pairs
def _compile_tag_re(self):
"""
Compile regex strings from device_tag_re option and return list of compiled regex/tag pairs
"""
device_tag_list = []
for regex_str, tags in iteritems(self._... |
Set a dict mapping (resouce_type --> objects[]) for a given key
def fill(self, key, mor_dict):
    """Store a (resource_type -> objects[]) mapping under ``key``.

    The objects queue is updated while holding the queue lock, so
    concurrent fills for different keys do not race.
    """
    queue_lock = self._objects_queue_lock
    with queue_lock:
        self._objects_queue[key] = mor_dict
Return the size of the queue for a given key and resource type.
If the key is not in the cache, this will raise a KeyError.
def size(self, key, resource_type):
"""
Return the size of the queue for a given key and resource type.
If the key is not in the cache, this will raise a KeyError.... |
Extract an object from the list.
If the key is not in the cache, this will raise a KeyError.
If the list is empty, method will return None
def pop(self, key, resource_type):
"""
Extract an object from the list.
If the key is not in the cache, this will raise a KeyError.
... |
Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
def _ipaddress_match(ipname, host_ip):
"""Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
"""
... |
Return a TLS object to establish a secure connection to a server
def _get_tls_object(self, ssl_params):
"""
Return a TLS object to establish a secure connection to a server
"""
if ssl_params is None:
return None
if not ssl_params["verify"] and ssl_params["ca_certs"]... |
Parse instance configuration and perform minimal verification
def _get_instance_params(cls, instance):
"""
Parse instance configuration and perform minimal verification
"""
url = instance.get("url")
if url is None:
raise ConfigurationError("You must specify a url for... |
Collect metrics from the monitor backend
def _collect_monitor_metrics(self, conn, tags):
"""
Collect metrics from the monitor backend
"""
for entry in conn.entries:
# Get metrics from monitor backend
dn = entry.entry_dn.lower()
if dn.endswith(self.CON... |
Perform custom queries to collect additional metrics like number of result and duration of the query
def _perform_custom_queries(self, conn, custom_queries, tags, instance):
"""
Perform custom queries to collect additional metrics like number of result and duration of the query
"""
for ... |
extract first common name (cn) from DN that looks like "cn=max file descriptors,cn=connections,cn=monitor"
def _extract_common_name(cls, dn):
"""
extract first common name (cn) from DN that looks like "cn=max file descriptors,cn=connections,cn=monitor"
"""
dn = dn.lower().replace(" ", "... |
Process and send cluster health data (i.e. cluster membership status and node health
def _process_cluster_health_data(self, node_name, node_stats, tags):
"""
Process and send cluster health data (i.e. cluster membership status and node health
"""
# Tags for service check
cluste... |
Create an event object
def _create_event(self, alert_type, msg_title, msg, server, tags=None):
"""
Create an event object
"""
msg_title = 'Couchbase {}: {}'.format(server, msg_title)
msg = 'Couchbase instance {} {}'.format(server, msg)
return {
'timestamp': ... |
Hit a given URL and return the parsed json.
def _get_stats(self, url, instance):
"""
Hit a given URL and return the parsed json.
"""
self.log.debug('Fetching Couchbase stats at url: {}'.format(url))
ssl_verify = instance.get('ssl_verify', True)
timeout = float(instance... |
NOTE: We assume that all the public keys needed to verify any in-toto
root layout, or sublayout, metadata file has been directly signed by
the top-level TUF targets role using *OFFLINE* keys. This is a
reasonable assumption, as TUF does not offer meaningful security
guarantees if _ALL_ t... |
Returns:
If download over TUF and in-toto is successful, this function will
return the complete filepath to the desired target.
def download(self, target_relpath, download_in_toto_metadata=True):
'''
Returns:
If download over TUF and in-toto is successful, this funct... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.