text stringlengths 81 112k |
|---|
Apply incoming manipulators to `son`.
def _apply_incoming_manipulators(self, son, collection):
    """Run every registered incoming manipulator over `son`.

    Each manipulator's ``transform_incoming`` receives the result of the
    previous one, so transformations are applied in registration order.

    :Parameters:
      - `son`: the document being transformed
      - `collection`: the collection the document is destined for
    """
    transformed = son
    for manip in self.__incoming_manipulators:
        transformed = manip.transform_incoming(transformed, collection)
    return transformed
Apply incoming copying manipulators to `son`.
def _apply_incoming_copying_manipulators(self, son, collection):
"""Apply incoming copying manipulators to `son`."""
for manipulator in self.__incoming_copying_manipulators:
son = manipulator.transform_incoming(son, collection)
return so... |
Apply manipulators to an incoming SON object before it gets stored.
:Parameters:
- `son`: the son object going into the database
- `collection`: the collection the son object is being saved in
def _fix_incoming(self, son, collection):
"""Apply manipulators to an incoming SON object... |
Apply manipulators to a SON object as it comes out of the database.
:Parameters:
- `son`: the son object coming out of the database
- `collection`: the collection the son object was saved in
def _fix_outgoing(self, son, collection):
"""Apply manipulators to a SON object as it comes... |
Internal command helper.
def _command(self, sock_info, command, slave_ok=False, value=1, check=True,
allowable_errors=None, read_preference=ReadPreference.PRIMARY,
codec_options=DEFAULT_CODEC_OPTIONS,
write_concern=None,
parse_write_concern_error=Fals... |
Issue a MongoDB command.
Send command `command` to the database and return the
response. If `command` is an instance of :class:`basestring`
(:class:`str` in python 3) then the command {`command`: `value`}
will be sent. Otherwise, `command` must be an instance of
:class:`dict` an... |
Internal listCollections helper.
def _list_collections(self, sock_info, slave_okay, criteria=None):
"""Internal listCollections helper."""
criteria = criteria or {}
cmd = SON([("listCollections", 1), ("cursor", {})])
if criteria:
cmd["filter"] = criteria
if sock_inf... |
Get a list of all the collection names in this database.
:Parameters:
- `include_system_collections` (optional): if ``False`` list
will not include system collections (e.g ``system.indexes``)
def collection_names(self, include_system_collections=True):
"""Get a list of all the co... |
Drop a collection.
:Parameters:
- `name_or_collection`: the name of a collection to drop or the
collection object itself
.. note:: The :attr:`~pymongo.database.Database.write_concern` of
this database is automatically applied to this operation when using
Mon... |
Get information on operations currently running.
:Parameters:
- `include_all` (optional): if ``True`` also list currently
idle operations in the result
def current_op(self, include_all=False):
"""Get information on operations currently running.
:Parameters:
- `... |
**DEPRECATED**: Get the error if one occurred on the last operation.
This method is obsolete: all MongoDB write operations (insert, update,
remove, and so on) use the write concern ``w=1`` and report their
errors by default.
.. versionchanged:: 2.8
Deprecated.
def error(sel... |
**DEPRECATED**: Get the most recent error on this database.
This method is obsolete: all MongoDB write operations (insert, update,
remove, and so on) use the write concern ``w=1`` and report their
errors by default.
Only returns errors that have occurred since the last call to
... |
Use a command to create (if create=True) or modify a user.
def _create_or_update_user(
self, create, name, password, read_only, **kwargs):
"""Use a command to create (if create=True) or modify a user.
"""
opts = {}
if read_only or (create and "roles" not in kwargs):
... |
Uses v1 system to add users, i.e. saving to system.users.
def _legacy_add_user(self, name, password, read_only, **kwargs):
"""Uses v1 system to add users, i.e. saving to system.users.
"""
# Use a Collection with the default codec_options.
system_users = self._collection_default_options(... |
Create user `name` with password `password`.
Add a new user with permissions for this :class:`Database`.
.. note:: Will change the password if user `name` already exists.
:Parameters:
- `name`: the name of the user to create
- `password` (optional): the password of the use... |
Remove user `name` from this :class:`Database`.
User `name` will no longer have permissions to access this
:class:`Database`.
:Parameters:
- `name`: the name of the user to remove
def remove_user(self, name):
"""Remove user `name` from this :class:`Database`.
User `... |
**DEPRECATED**: Authenticate to use this database.
Authentication lasts for the life of the underlying client
instance, or until :meth:`logout` is called.
Raises :class:`TypeError` if (required) `name`, (optional) `password`,
or (optional) `source` is not an instance of :class:`basestr... |
**DEPRECATED**: Deauthorize use of this database.
def logout(self):
"""**DEPRECATED**: Deauthorize use of this database."""
warnings.warn("Database.logout() is deprecated",
DeprecationWarning, stacklevel=2)
# Sockets will be deauthenticated as they are used.
self.... |
**DEPRECATED**: Evaluate a JavaScript expression in MongoDB.
:Parameters:
- `code`: string representation of JavaScript code to be
evaluated
- `args` (optional): additional positional arguments are
passed to the `code` being evaluated
.. warning:: the eval c... |
Get a list of the names of the functions stored in this database.
def list(self):
    """Get a list of the names of the functions stored in this database.

    Returns the ``_id`` value of each document in the ``system.js``
    collection (only ``_id`` is projected from the server).
    """
    cursor = self._db.system.js.find(projection=["_id"])
    return [doc["_id"] for doc in cursor]
Move _id to the front if it's there.
def transform_incoming(self, son, collection):
    """Move ``_id`` to the front of `son` if it's there.

    Returns `son` unchanged when it has no ``_id`` key; otherwise returns
    a new SON with ``_id`` first and all remaining keys in their original
    order.

    :Parameters:
      - `son`: the incoming document
      - `collection`: the collection the document is being saved in
        (unused here, part of the manipulator interface)
    """
    # Idiomatic membership test ("x not in y" rather than "not x in y").
    if "_id" not in son:
        return son
    # Seed the new document with _id, then update() appends the other
    # keys after it without duplicating _id.
    transformed = SON({"_id": son["_id"]})
    transformed.update(son)
    return transformed
Replace embedded documents with DBRefs.
def transform_incoming(self, son, collection):
"""Replace embedded documents with DBRefs.
"""
def transform_value(value):
if isinstance(value, collections.MutableMapping):
if "_id" in value and "_ns" in value:
... |
This function provides a convenient API wrapping subprocess.Popen. Captured output
is guaranteed not to deadlock, but will still reside in memory in the end.
:param command: The command to run in a subprocess.
:type command: ``str`` or ``list`` of ``str``
:param capture: The method of output capture:
... |
Get HDFS namenode metrics from JMX
def _hdfs_namenode_metrics(self, beans, metrics, tags):
"""
Get HDFS namenode metrics from JMX
"""
bean = next(iter(beans))
bean_name = bean.get('name')
if bean_name != bean_name:
raise Exception("Unexpected bean name {}".f... |
Set a metric
def _set_metric(self, metric_name, metric_type, value, tags=None):
"""
Set a metric
"""
if metric_type == self.GAUGE:
self.gauge(metric_name, value, tags=tags)
else:
self.log.error('Metric type "{}" unknown'.format(metric_type)) |
Query the given URL and return the JSON response
def _rest_request_to_json(self, instance, url, object_path, query_params, tags=None):
"""
Query the given URL and return the JSON response
"""
if object_path:
url = self._join_url_dir(url, object_path)
# Add query_par... |
Join a URL with multiple directories
def _join_url_dir(cls, url, *args):
"""
Join a URL with multiple directories
"""
for path in args:
url = url.rstrip('/') + '/'
url = urljoin(url, path.lstrip('/'))
return url |
Add $readPreference to spec when appropriate.
def _maybe_add_read_preference(spec, read_preference):
"""Add $readPreference to spec when appropriate."""
mode = read_preference.mode
tag_sets = read_preference.tag_sets
max_staleness = read_preference.max_staleness
# Only add $readPreference if it's s... |
Convert a legacy write result to write command format.
def _convert_write_result(operation, command, result):
"""Convert a legacy write result to write commmand format."""
# Based on _merge_legacy from bulk.py
affected = result.get("n", 0)
res = {"ok": 1, "n": affected}
errmsg = result.get("errms... |
Generate an explain command document.
def _gen_explain_command(
coll, spec, projection, skip, limit, batch_size,
options, read_concern):
"""Generate an explain command document."""
cmd = _gen_find_command(
coll, spec, projection, skip, limit, batch_size, options)
if read_concern.lev... |
Generate a find command document.
def _gen_find_command(coll, spec, projection, skip, limit, batch_size,
options, read_concern=DEFAULT_READ_CONCERN,
collation=None):
"""Generate a find command document."""
cmd = SON([('find', coll)])
if '$query' in spec:
... |
Generate a getMore command document.
def _gen_get_more_command(cursor_id, coll, batch_size, max_await_time_ms):
"""Generate a getMore command document."""
cmd = SON([('getMore', cursor_id),
('collection', coll)])
if batch_size:
cmd['batchSize'] = batch_size
if max_await_time_ms i... |
Data to send to do a lastError.
def __last_error(namespace, args):
    """Data to send to do a lastError.

    Builds a ``getlasterror`` command from `args` and wraps it in a query
    message against the ``$cmd`` collection of `namespace`'s database.
    """
    command = SON([("getlasterror", 1)])
    command.update(args)
    # Database name is everything before the first '.' in the namespace.
    database = namespace.split('.', 1)[0]
    return query(0, database + '.$cmd', 0, -1, command,
                 None, DEFAULT_CODEC_OPTIONS)
Takes message data and adds a message header based on the operation.
Returns the resultant message string.
def __pack_message(operation, data):
"""Takes message data and adds a message header based on the operation.
Returns the resultant message string.
"""
request_id = _randint()
message = s... |
Get an **insert** message.
def insert(collection_name, docs, check_keys,
safe, last_error_args, continue_on_error, opts):
"""Get an **insert** message."""
options = 0
if continue_on_error:
options += 1
data = struct.pack("<i", options)
data += bson._make_c_string(collection_name)... |
Get an **update** message.
def update(collection_name, upsert, multi,
spec, doc, safe, last_error_args, check_keys, opts):
"""Get an **update** message.
"""
options = 0
if upsert:
options += 1
if multi:
options += 2
data = _ZERO_32
data += bson._make_c_string(col... |
Get a **query** message.
def query(options, collection_name, num_to_skip,
num_to_return, query, field_selector, opts, check_keys=False):
"""Get a **query** message.
"""
data = struct.pack("<I", options)
data += bson._make_c_string(collection_name)
data += struct.pack("<i", num_to_skip)
... |
Get a **delete** message.
`opts` is a CodecOptions. `flags` is a bit vector that may contain
the SingleRemove flag or not:
http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/#op-delete
def delete(collection_name, spec, safe,
last_error_args, opts, flags=0):
"""Get a **... |
Get a **killCursors** message.
def kill_cursors(cursor_ids):
    """Get a **killCursors** message for the given cursor ids.

    Body layout: reserved zero int32, the count of cursors, then each
    cursor id as a little-endian int64.
    """
    data = _ZERO_32 + struct.pack("<i", len(cursor_ids))
    data += b"".join(struct.pack("<q", cid) for cid in cursor_ids)
    # 2007 is the opcode handed to the message-header packer.
    return __pack_message(2007, data)
Execute a batch of insert, update, or delete commands.
def _do_batched_write_command(namespace, operation, command,
docs, check_keys, opts, ctx):
"""Execute a batch of insert, update, or delete commands.
"""
max_bson_size = ctx.max_bson_size
max_write_batch_size = ctx.max_... |
Return a find command document for this query.
Should be called *after* get_message.
def as_command(self):
"""Return a find command document for this query.
Should be called *after* get_message.
"""
if '$explain' in self.spec:
self.name = 'explain'
retu... |
Get a query message, possibly setting the slaveOk bit.
def get_message(self, set_slave_ok, is_mongos, use_cmd=False):
"""Get a query message, possibly setting the slaveOk bit."""
if set_slave_ok:
# Set the slaveOk bit.
flags = self.flags | 4
else:
flags = sel... |
Return a getMore command document for this query.
def as_command(self):
"""Return a getMore command document for this query."""
return _gen_get_more_command(self.cursor_id, self.coll,
self.ntoreturn,
self.max_await_time_ms), self... |
Get a getmore message.
def get_message(self, dummy0, dummy1, use_cmd=False):
"""Get a getmore message."""
ns = _UJOIN % (self.db, self.coll)
if use_cmd:
ns = _UJOIN % (self.db, "$cmd")
spec = self.as_command()[0]
return query(0, ns, 0, -1, spec, None, self... |
A proxy for SocketInfo.legacy_write that handles event publishing.
def legacy_write(self, request_id, msg, max_doc_size, acknowledged, docs):
"""A proxy for SocketInfo.legacy_write that handles event publishing.
"""
if self.publish:
duration = datetime.datetime.now() - self.start_ti... |
A proxy for SocketInfo.write_command that handles event publishing.
def write_command(self, request_id, msg, docs):
"""A proxy for SocketInfo.write_command that handles event publishing.
"""
if self.publish:
duration = datetime.datetime.now() - self.start_time
self._star... |
Publish a CommandStartedEvent.
def _start(self, request_id, docs):
"""Publish a CommandStartedEvent."""
cmd = self.command.copy()
cmd[self.field] = docs
self.listeners.publish_command_start(
cmd, self.db_name,
request_id, self.sock_info.address, self.op_id)
... |
Publish a CommandSucceededEvent.
def _succeed(self, request_id, reply, duration):
"""Publish a CommandSucceededEvent."""
self.listeners.publish_command_success(
duration, reply, self.name,
request_id, self.sock_info.address, self.op_id) |
Publish a CommandFailedEvent.
def _fail(self, request_id, failure, duration):
"""Publish a CommandFailedEvent."""
self.listeners.publish_command_failure(
duration, failure, self.name,
request_id, self.sock_info.address, self.op_id) |
Get the machine portion of an ObjectId.
def _machine_bytes():
"""Get the machine portion of an ObjectId.
"""
machine_hash = hashlib.md5()
if PY3:
# gethostname() returns a unicode string in python 3.x
# while update() requires a byte string.
machine_hash.update(socket.gethostnam... |
Create a dummy ObjectId instance with a specific generation time.
This method is useful for doing range queries on a field
containing :class:`ObjectId` instances.
.. warning::
It is not safe to insert a document containing an ObjectId
generated using this method. This met... |
Checks if an `oid` string is valid or not.
:Parameters:
- `oid`: the object id to validate
.. versionadded:: 2.3
def is_valid(cls, oid):
"""Checks if a `oid` string is valid or not.
:Parameters:
- `oid`: the object id to validate
.. versionadded:: 2.3
... |
Generate a new value for this ObjectId.
def __generate(self):
"""Generate a new value for this ObjectId.
"""
# 4 bytes current time
oid = struct.pack(">i", int(time.time()))
# 3 bytes machine
oid += ObjectId._machine_bytes
# 2 bytes pid
oid += struct.p... |
Validate and use the given id for this ObjectId.
Raises TypeError if id is not an instance of
(:class:`basestring` (:class:`str` or :class:`bytes`
in python 3), ObjectId) and InvalidId if it is not a
valid ObjectId.
:Parameters:
- `oid`: a valid ObjectId
def __valida... |
Get the master address from the instance configuration
def _get_master_address(self, instance):
"""
Get the master address from the instance configuration
"""
master_address = instance.get(MASTER_ADDRESS)
if master_address is None:
master_address = instance.get(DEPR... |
Get the request address, build with proxy if necessary
def _get_request_url(self, instance, url):
"""
Get the request address, build with proxy if necessary
"""
parsed = urlparse(url)
_url = url
if not (parsed.netloc and parsed.scheme) and is_affirmative(instance.get('s... |
Determine what mode was specified
def _get_running_apps(self, instance, requests_config):
"""
Determine what mode was specified
"""
tags = instance.get('tags', [])
if tags is None:
tags = []
master_address = self._get_master_address(instance)
# Get th... |
Return a dictionary of {app_id: (app_name, tracking_url)} for the running Spark applications
def _standalone_init(self, spark_master_address, pre_20_mode, requests_config, tags):
"""
Return a dictionary of {app_id: (app_name, tracking_url)} for the running Spark applications
"""
metrics... |
Return a dictionary of {app_id: (app_name, tracking_url)} for running Spark applications.
def _mesos_init(self, instance, master_address, requests_config, tags):
"""
Return a dictionary of {app_id: (app_name, tracking_url)} for running Spark applications.
"""
running_apps = {}
... |
Return a dictionary of {app_id: (app_name, tracking_url)} for running Spark applications.
def _yarn_init(self, rm_address, requests_config, tags):
"""
Return a dictionary of {app_id: (app_name, tracking_url)} for running Spark applications.
"""
running_apps = self._yarn_get_running_spar... |
Return the application URL from the app info page on the Spark master.
Due to a bug, we need to parse the HTML manually because we cannot
fetch JSON data from the HTTP interface.
def _get_standalone_app_url(self, app_id, spark_master_address, requests_config, tags):
"""
Return the applicati... |
Return a dictionary of {app_id: (app_name, tracking_url)} for running Spark applications.
The `app_id` returned is that of the YARN application. This will eventually be mapped into
a Spark application ID.
def _yarn_get_running_spark_apps(self, rm_address, requests_config, tags):
"""
Re... |
Traverses the Spark application master in YARN to get a Spark application ID.
Return a dictionary of {app_id: (app_name, tracking_url)} for Spark applications
def _get_spark_app_ids(self, running_apps, requests_config, tags):
"""
Traverses the Spark application master in YARN to get a Spark ap... |
Get metrics for each Spark job.
def _spark_job_metrics(self, instance, running_apps, addl_tags, requests_config):
"""
Get metrics for each Spark job.
"""
for app_id, (app_name, tracking_url) in iteritems(running_apps):
base_url = self._get_request_url(instance, tracking_url... |
Get metrics for each Spark executor.
def _spark_executor_metrics(self, instance, running_apps, addl_tags, requests_config):
"""
Get metrics for each Spark executor.
"""
for app_id, (app_name, tracking_url) in iteritems(running_apps):
base_url = self._get_request_url(instanc... |
Get metrics for each Spark RDD.
def _spark_rdd_metrics(self, instance, running_apps, addl_tags, requests_config):
"""
Get metrics for each Spark RDD.
"""
for app_id, (app_name, tracking_url) in iteritems(running_apps):
base_url = self._get_request_url(instance, tracking_url... |
Get metrics for each application streaming statistics.
def _spark_streaming_statistics_metrics(self, instance, running_apps, addl_tags, requests_config):
"""
Get metrics for each application streaming statistics.
"""
for app_id, (app_name, tracking_url) in iteritems(running_apps):
... |
Set a metric
def _set_metric(self, metric_name, metric_type, value, tags=None):
"""
Set a metric
"""
if tags is None:
tags = []
if metric_type == GAUGE:
self.gauge(metric_name, value, tags=tags)
elif metric_type == COUNT:
self.count(me... |
Query the given URL and return the response
def _rest_request(self, url, object_path, service_name, requests_config, tags, *args, **kwargs):
"""
Query the given URL and return the response
"""
service_check_tags = ['url:%s' % self._get_url_base(url)] + tags
if object_path:
... |
Query the given URL and return the JSON response
def _rest_request_to_json(self, address, object_path, service_name, requests_config, tags, *args, **kwargs):
"""
Query the given URL and return the JSON response
"""
response = self._rest_request(address, object_path, service_name, reques... |
Return the base of a URL
def _get_url_base(cls, url):
"""
Return the base of a URL
"""
s = urlsplit(url)
return urlunsplit([s.scheme, s.netloc, '', '', '']) |
This parses tags from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
def parse_capacity_tags(dn):
"""
This parses tags from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
"""
tags = [... |
This grabs the event tags from the dn designator. They look like this:
uni/tn-DataDog/ap-DtDg-AP1-EcommerceApp/epg-DtDg-Ecomm/HDl2IngrPktsAg1h
def get_event_tags_from_dn(dn):
"""
This grabs the event tags from the dn designator. They look like this:
uni/tn-DataDog/ap-DtDg-AP1-EcommerceApp/epg-DtDg-Ecom... |
This parses the hostname from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
def get_hostname_from_dn(dn):
"""
This parses the hostname from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
... |
the json objects look like this:
{
"objType": {
"attributes": {
...
}
}
It always has the attributes nested below the object type
This helper provides a way of getting at the attributes
def get_attributes(obj):
"""
the json objects look like this:
{
"objType": {
... |
When a counter is reset, don't send a zero because it will look bad on the graphs
This checks if the zero makes sense or not
def check_metric_can_be_zero(metric_name, metric_value, json_attributes):
"""
When a counter is reset, don't send a zero because it will look bad on the graphs
This checks if the... |
Create scaffolding for a new integration.
def create(ctx, name, integration_type, location, non_interactive, quiet, dry_run):
"""Create scaffolding for a new integration."""
repo_choice = ctx.obj['repo_choice']
root = resolve_path(location) if location else get_root()
path_sep = os.path.sep
integr... |
Freezes any mutable object including dictionaries and lists for hashing.
Accepts nested dictionaries.
def freeze(o):
"""
Freezes any mutable object including dictionaries and lists for hashing.
Accepts nested dictionaries.
"""
if isinstance(o, (tuple, list)):
return tuple(sorted(freeze(... |
Converts a decimal.Decimal to BID (high bits, low bits).
:Parameters:
- `value`: An instance of decimal.Decimal
def _decimal_to_128(value):
"""Converts a decimal.Decimal to BID (high bits, low bits).
:Parameters:
- `value`: An instance of decimal.Decimal
"""
with decimal.localcontext(... |
Returns an instance of :class:`decimal.Decimal` for this
:class:`Decimal128`.
def to_decimal(self):
"""Returns an instance of :class:`decimal.Decimal` for this
:class:`Decimal128`.
"""
high = self.__high
low = self.__low
sign = 1 if (high & _SIGN) else 0
... |
Create an instance of :class:`Decimal128` from Binary Integer
Decimal string.
:Parameters:
- `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating
point in Binary Integer Decimal (BID) format).
def from_bid(cls, value):
"""Create an instance of :class:`Decimal12... |
Compile regex strings from queue_tag_re option and return list of compiled regex/tag pairs
def _compile_tag_re(self):
"""
Compile regex strings from queue_tag_re option and return list of compiled regex/tag pairs
"""
queue_tag_list = []
for regex_str, tags in iteritems(self._que... |
Determine the server type from an ismaster response.
def _get_server_type(doc):
"""Determine the server type from an ismaster response."""
if not doc.get('ok'):
return SERVER_TYPE.Unknown
if doc.get('isreplicaset'):
return SERVER_TYPE.RSGhost
elif doc.get('setName'):
if doc.get... |
List of hosts, passives, and arbiters known to this server.
def all_hosts(self):
"""List of hosts, passives, and arbiters known to this server."""
return set(imap(common.clean_node, itertools.chain(
self._doc.get('hosts', []),
self._doc.get('passives', []),
self._doc... |
Set up the gitlab_runner instance so it can be used in OpenMetricsBaseCheck
def _create_gitlab_runner_prometheus_instance(self, instance, init_config):
"""
Set up the gitlab_runner instance so it can be used in OpenMetricsBaseCheck
"""
# Mapping from Prometheus metrics names to Datadog ... |
Get namenode beans data from JMX endpoint
def _get_jmx_data(self, instance, jmx_address, tags):
"""
Get namenode beans data from JMX endpoint
"""
response = self._rest_request_to_json(
instance, jmx_address, self.JMX_PATH, {'qry': self.HDFS_DATANODE_BEAN_NAME}, tags=tags
... |
Process HDFS Datanode metrics from given beans
def _hdfs_datanode_metrics(self, beans, tags):
"""
Process HDFS Datanode metrics from given beans
"""
# Only get the first bean
bean = next(iter(beans))
bean_name = bean.get('name')
self.log.debug("Bean name retriev... |
Write the `requirements-agent-release.txt` file at the root of the repo
listing all the Agent-based integrations pinned at the version they currently
have in HEAD.
def requirements(ctx):
"""Write the `requirements-agent-release.txt` file at the root of the repo
listing all the Agent-based integrations ... |
Nest a payload in a dict under the keys contained in `keys`
def _nest_payload(self, keys, payload):
"""
Nest a payload in a dict under the keys contained in `keys`
"""
if len(keys) == 0:
return payload
return {keys[0]: self._nest_payload(keys[1:], payload)} |
Recursively flattens the nginx json object. Returns the following: [(metric_name, value, tags)]
def _flatten_json(cls, metric_base, val, tags):
"""
Recursively flattens the nginx json object. Returns the following: [(metric_name, value, tags)]
"""
output = []
if isinstance(val,... |
Return a list of tags from integrations-core representing an Agent release,
sorted by more recent first.
def get_agent_tags(since, to):
"""
Return a list of tags from integrations-core representing an Agent release,
sorted by more recent first.
"""
agent_tags = sorted(parse_version_info(t) for ... |
Run commands in the proper repo.
def run(ctx, args):
    """Run commands in the proper repo."""
    # No command given: show help instead of running anything.
    if not args:
        click.echo(ctx.get_help())
        return
    with chdir(get_root()):
        outcome = run_command(args)
    ctx.exit(outcome.code)
Check if the server URL starts with a HTTP or HTTPS scheme, fall back to http if not present
def _normalize_server_url(self, server):
"""
Check if the server URL starts with a HTTP or HTTPS scheme, fall back to http if not present
"""
server = server if server.startswith(("http://", "ht... |
Get the running version of elasticsearch.
def _get_es_version(self, config):
"""
Get the running version of elasticsearch.
"""
try:
data = self._get_data(config.url, config, send_sc=False)
# pre-release versions of elasticearch are suffixed with -rcX etc..
... |
overrides `urlparse.urljoin` since it removes base url path
https://docs.python.org/2/library/urlparse.html#urlparse.urljoin
def _join_url(self, base, url, admin_forwarder=False):
"""
overrides `urlparse.urljoin` since it removes base url path
https://docs.python.org/2/library/urlparse.... |
Compute the URLs we need to hit depending on the running ES version
def _get_urls(self, version, cluster_stats):
"""
Compute the URLs we need to hit depending on the running ES version
"""
pshard_stats_url = "/_stats"
health_url = "/_cluster/health"
if version >= [0, 90... |
Hit a given URL and return the parsed json
def _get_data(self, url, config, send_sc=True):
"""
Hit a given URL and return the parsed json
"""
# Load basic authentication configuration, if available.
if config.username and config.password:
auth = (config.username, con... |
data: dictionary containing all the stats
metric: datadog metric
path: corresponding path in data, flattened, e.g. thread_pool.bulk.queue
xform: a lambda to apply to the numerical value
def _process_metric(self, data, metric, xtype, path, xform=None, tags=None, hostname=None):
"""
... |
Resets state and uid set. To be called asap to free memory
def reset(self):
    """
    Resets state and uid set. To be called asap to free memory

    Clears the seen-uid set in place and zeroes the counter/flag.
    """
    self.seen.clear()
    self.reached_limit = False
    self.count = 0
is_reached is to be called for every object that counts towards the limit.
- When called with no uid, the Limiter assumes this is a new object and
unconditionally increments the counter (less CPU and memory usage).
- When a given object can be passed multiple times, a uid must be provided to
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.