text stringlengths 81 112k |
|---|
Get or Create a build job based on the params.
If a build job already exists, then we check if the build has already an image created.
If the image does not exist and the job is already done, we force-create a new job.
Returns:
tuple: (build_job, image_exists[bool], build_status[bool])
def creat... |
Scan a YAML stream and produce scanning tokens.
def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.

    Yields tokens one at a time; the loader is disposed once
    iteration finishes or the generator is closed.
    """
    token_source = Loader(stream)
    try:
        while token_source.check_token():
            yield token_source.get_token()
    finally:
        # Release loader resources regardless of how iteration ends.
        token_source.dispose()
Parse a YAML stream and produce parsing events.
def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.

    Yields events one at a time; the loader is disposed once
    iteration finishes or the generator is closed.
    """
    event_source = Loader(stream)
    try:
        while event_source.check_event():
            yield event_source.get_event()
    finally:
        # Release loader resources regardless of how iteration ends.
        event_source.dispose()
Parse the first YAML document in a stream
and produce the corresponding representation tree.
def compose(stream, Loader=Loader):
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
loader = Loader(stream)
try:
return loader.get_single... |
Parse all YAML documents in a stream
and produce corresponding representation trees.
def compose_all(stream, Loader=Loader):
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
loader = Loader(stream)
try:
while loader.check_node():
... |
Parse the first YAML document in a stream
and produce the corresponding Python object.
def load(stream, Loader=None):
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
"""
if Loader is None:
load_warning('load')
Loader = FullLoader
l... |
Parse all YAML documents in a stream
and produce corresponding Python objects.
def load_all(stream, Loader=None):
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
"""
if Loader is None:
load_warning('load_all')
Loader = FullLoader
loader = ... |
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
def emit(events, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None):
"""
Emit YAML parsing events into a stream.
If stream is None, retur... |
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
def serialize(node, stream=None, Dumper=Dumper, **kwds):
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
"""
return serialize... |
Serialize a Python object into a YAML stream.
If stream is None, return the produced string instead.
def dump(data, stream=None, Dumper=Dumper, **kwds):
"""
Serialize a Python object into a YAML stream.
If stream is None, return the produced string instead.
"""
return dump_all([data], stream, D... |
Serialize a sequence of Python objects into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
def safe_dump_all(documents, stream=None, **kwds):
"""
Serialize a sequence of Python objects into a YAML stream.
Produce only basic YAML tags.
If stre... |
Serialize a Python object into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
def safe_dump(data, stream=None, **kwds):
"""
Serialize a Python object into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced str... |
Add an implicit scalar detector.
If an implicit scalar value matches the given regexp,
the corresponding tag is assigned to the scalar.
first is a sequence of possible initial characters or None.
def add_implicit_resolver(tag, regexp, first=None,
Loader=Loader, Dumper=Dumper):
"""
Add an im... |
Add a path based resolver for the given tag.
A path is a list of keys that forms a path
to a node in the representation tree.
Keys can be string values, integers, or None.
def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
"""
Add a path based resolver for the given tag.
... |
Convert a Python object to a representation node.
def to_yaml(cls, dumper, data):
    """
    Convert a Python object to a representation node.

    Delegates to the dumper, passing the class's yaml_tag and
    yaml_flow_style attributes along with the data.
    """
    return dumper.represent_yaml_object(
        cls.yaml_tag, data, cls, flow_style=cls.yaml_flow_style
    )
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
def serialize_all(nodes, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, expl... |
Do not produce empty tokens.
def something(TokenClass):
    """Do not produce empty tokens."""
    def callback(lexer, match, context):
        matched = match.group()
        # Emit a token only for non-empty matches; an empty match
        # leaves context.pos untouched.
        if matched:
            yield match.start(), TokenClass, matched
            context.pos = match.end()
    return callback
Set the previously saved indentation level.
def set_indent(TokenClass, implicit=False):
"""Set the previously saved indentation level."""
def callback(lexer, match, context):
text = match.group()
if context.indent < context.next_indent:
context.indent_stack.append(context.indent)
... |
Process an empty line in a block scalar.
def parse_block_scalar_empty_line(IndentTokenClass, ContentTokenClass):
"""Process an empty line in a block scalar."""
def callback(lexer, match, context):
text = match.group()
if (context.block_scalar_indent is None or
len(text) <= conte... |
Process indentation spaces in a plain scalar.
def parse_plain_scalar_indent(TokenClass):
"""Process indentation spaces in a plain scalar."""
def callback(lexer, match, context):
text = match.group()
if len(text) <= context.indent:
context.stack.pop()
context.stack.pop()
... |
Collocation score
def score(count_bigram, count1, count2, n_words):
"""Collocation score"""
if n_words <= count1 or n_words <= count2:
# only one words appears in the whole document
return 0
N = n_words
c12 = count_bigram
c1 = count1
c2 = count2
p = c2 / N
p1 = c12 / c1
... |
Normalize cases and remove plurals.
Each word is represented by the most common case.
If a word appears with an "s" on the end and without an "s" on the end,
the version with "s" is assumed to be a plural and merged with the
version without "s" (except if the word ends with "ss").
Parameters
-... |
Returns a single_color_func associated with the word
def get_color_func(self, word):
"""Returns a single_color_func associated with the word"""
try:
color_func = next(
color_func for (color_func, words) in self.color_func_to_words
if word in words)
ex... |
Random hue color generation.
Default coloring method. This just picks a random hue with value 80% and
lumination 50%.
Parameters
----------
word, font_size, position, orientation : ignored.
random_state : random.Random object or None, (default=None)
If a random object is given, this ... |
Create a color function which returns a single hue and saturation with.
different values (HSV). Accepted values are color strings as usable by
PIL/Pillow.
>>> color_func1 = get_single_color_func('deepskyblue')
>>> color_func2 = get_single_color_func('#00b4d2')
def get_single_color_func(color):
"""... |
Create a word_cloud from words and frequencies.
Parameters
----------
frequencies : dict from string to float
        Contains words and their associated frequencies.
max_font_size : int
Use this font-size instead of self.max_font_size
Returns
-------
... |
Splits a long text into words, eliminates the stopwords.
Parameters
----------
text : string
The text to be processed.
Returns
-------
words : dict (string, int)
Word tokens with associated frequency.
..versionchanged:: 1.2.2
... |
Generate wordcloud from text.
The input "text" is expected to be a natural text. If you pass a sorted
list of words, words will appear in your output twice. To remove this
duplication, set ``collocations=False``.
Calls process_text and generate_from_frequencies.
..versionchang... |
Recolor existing layout.
Applying a new coloring is much faster than generating the whole
wordcloud.
Parameters
----------
random_state : RandomState, int, or None, default=None
If not None, a fixed random state is used. If an int is given, this
is used ... |
Export to image file.
Parameters
----------
filename : string
Location to write to.
Returns
-------
self
def to_file(self, filename):
"""Export to image file.
Parameters
----------
filename : string
Location to w... |
Cast to two dimensional boolean mask.
def _get_bolean_mask(self, mask):
"""Cast to two dimensional boolean mask."""
if mask.dtype.kind == 'f':
warnings.warn("mask image should be unsigned byte between 0"
" and 255. Got a float array")
if mask.ndim == 2:
... |
Draw mask contour on a pillow image.
def _draw_contour(self, img):
"""Draw mask contour on a pillow image."""
if self.mask is None or self.contour_width == 0:
return img
mask = self._get_bolean_mask(self.mask) * 255
contour = Image.fromarray(mask.astype(np.uint8))
c... |
Retrieves the leader-election annotation from a given object, and
submits metrics and a service check.
An integration warning is sent if the object is not retrievable,
or no record is found. Monitors on the service-check should have
no-data alerts enabled to account for this.
T... |
Python2.4 doesn't have a partition method so we provide
our own that mimics str.partition from later releases.
Split the string at the first occurrence of sep, and return a
3-tuple containing the part before the separator, the separator
itself, and the part after the separator. If the separator is not
... |
Python2.4 doesn't have an rpartition method so we provide
our own that mimics str.rpartition from later releases.
Split the string at the last occurrence of sep, and return a
3-tuple containing the part before the separator, the separator
itself, and the part after the separator. If the separator is no... |
Validates the format of user information in a MongoDB URI.
Reserved characters like ':', '/', '+' and '@' must be escaped
following RFC 3986.
Returns a 2-tuple containing the unescaped username followed
by the unescaped password.
    :Parameters:
- `userinfo`: A string of the form <username>:<... |
Validates an IPv6 literal host:port string.
Returns a 2-tuple of IPv6 literal followed by port where
port is default_port if it wasn't specified in entity.
:Parameters:
- `entity`: A string that represents an IPv6 literal enclosed
in braces (e.g. '[::1]' or '[::1]:27017').
... |
Validates a host string
Returns a 2-tuple of host followed by port where port is default_port
if it wasn't specified in the string.
:Parameters:
- `entity`: A host or host:port string where host could be a
hostname or IP address.
- `default_port`: The port number to use... |
Helper method for split_options which creates the options dict.
Also handles the creation of a list for the URI tag_sets/
readpreferencetags portion.
def _parse_options(opts, delim):
"""Helper method for split_options which creates the options dict.
Also handles the creation of a list for the URI tag_s... |
Takes a string of the form host1[:port],host2[:port]... and
splits it into (host, port) tuples. If [:port] isn't present the
default_port is used.
Returns a set of 2-tuples containing the host name (or IP) followed by
port number.
:Parameters:
- `hosts`: A string of the form host1[:port],h... |
Parse and validate a MongoDB URI.
Returns a dict of the form::
{
'nodelist': <list of (host, port) tuples>,
'username': <username> or None,
'password': <password> or None,
'database': <database name> or None,
'collection': <collection name> or No... |
Apply max_staleness, in seconds, to a Selection with a known primary.
def _with_primary(max_staleness, selection):
"""Apply max_staleness, in seconds, to a Selection with a known primary."""
primary = selection.primary
sds = []
for s in selection.server_descriptions:
if s.server_type == SERVER... |
Apply max_staleness, in seconds, to a Selection with no known primary.
def _no_primary(max_staleness, selection):
"""Apply max_staleness, in seconds, to a Selection with no known primary."""
# Secondary that's replicated the most recent writes.
smax = selection.secondary_with_max_last_write_date()
if n... |
Apply max_staleness, in seconds, to a Selection.
def select(max_staleness, selection):
"""Apply max_staleness, in seconds, to a Selection."""
if max_staleness == -1:
return selection
# Server Selection Spec: If the TopologyType is ReplicaSetWithPrimary or
# ReplicaSetNoPrimary, a client MUST r... |
Create an event with a message describing the replication
state of a mongo node
def create_event(self, state, server, agentConfig):
"""Create an event with a message describing the replication
state of a mongo node"""
def get_state_description(state):
if state == 0:... |
A decorator to timeout a function. Decorated method calls are executed in a separate new thread
with a specified timeout.
Also check if a thread for the same function already exists before creating a new one.
Note: Compatible with Windows (thread based).
def timeout(timeout):
"""
A decorator to tim... |
Create a set of pids of selected processes.
Search for search_string
def find_pids(self, name, search_string, exact_match, ignore_ad=True):
"""
Create a set of pids of selected processes.
Search for search_string
"""
if not self.should_refresh_pid_cache(name):
... |
A psutil wrapper that is calling
* psutil.method(*args, **kwargs) and returns the result
OR
* psutil.method(*args, **kwargs).accessor[i] for each accessors
given in a list, the result being indexed in a dictionary
by the accessor name
def psutil_wrapper(self, process, method, ac... |
Report a service check, for each process in search_string.
Report as OK if the process is in the warning thresholds
CRITICAL out of the critical thresholds
WARNING out of the warning thresholds
def _process_service_check(self, name, nb_procs, bound... |
Filter pids by their username.
:param user: string with name of system user
:param pids: set of pids to filter
:return: set of filtered pids
def _filter_by_user(self, user, pids):
"""
Filter pids by it's username.
:param user: string with name of system user
:para... |
A wrapper around wmi.from_time to get a WMI-formatted time from a
time struct.
def _dt_to_wmi(self, dt):
''' A wrapper around wmi.from_time to get a WMI-formatted time from a
time struct.
'''
return from_time(
year=dt.year,
month=dt.month,
... |
Generate the event's body to send to Datadog.
Consider `event_format` parameter:
* Only use the specified list of event properties.
* If unspecified, default to the EventLog's `Message` or `InsertionStrings`.
def _msg_text(self):
"""
Generate the event's body to send to Datadog... |
Compare this event's timestamp to a given timestamp.
def is_after(self, ts):
    ''' Compare this event's timestamp to a given timestamp.

    Returns True when this event occurred at or after `ts`,
    False otherwise. `ts` is converted to an epoch with
    calendar.timegm, which interprets its timetuple as UTC.
    '''
    # Return the comparison directly instead of if/return True/return False.
    return self.timestamp >= int(calendar.timegm(ts.timetuple()))
Convert a wmi formatted timestamp into an epoch.
def _wmi_to_ts(self, wmi_ts):
''' Convert a wmi formatted timestamp into an epoch.
'''
year, month, day, hour, minute, second, microsecond, tz = to_time(wmi_ts)
tz_delta = timedelta(minutes=int(tz))
if '+' in wmi_ts:
t... |
Inject additional tags into the list already supplied to LogEvent.
def _tags(self, tags, event_code):
''' Inject additional tags into the list already supplied to LogEvent.
'''
tags_list = []
if tags is not None:
tags_list += list(tags)
tags_list.append("event_id:{ev... |
Kill a cursor.
Raises TypeError if cursor_id is not an instance of (int, long).
:Parameters:
- `cursor_id`: cursor id to close
- `address`: the cursor's server's (host, port) pair
.. versionchanged:: 3.0
Now requires an `address` argument.
def close(self, curso... |
Don't send the "can connect" service check if we have troubles getting
the health status
def _get_health_status(self, url, ssl_params, timeout):
"""
Don't send the "can connect" service check if we have troubles getting
the health status
"""
try:
r = self._pe... |
Stop environments.
def stop(check, env):
"""Stop environments."""
all_checks = check == 'all'
checks = get_configured_checks() if all_checks else [check]
if all_checks:
env_indent = DEFAULT_INDENT
status_indent = DEFAULT_INDENT * 2
else:
env_indent = None
status_ind... |
Process both the istio_mesh instance and process_mixer instance associated with this instance
def check(self, instance):
"""
Process both the istio_mesh instance and process_mixer instance associated with this instance
"""
# Get the config for the istio_mesh instance
istio_mesh... |
Generalize each (single) Istio instance into two OpenMetricsBaseCheck instances
def create_generic_instances(self, instances):
"""
Generalize each (single) Istio instance into two OpenMetricsBaseCheck instances
"""
generic_instances = []
for instance in instances:
i... |
Grab the istio mesh scraper from the dict and return it if it exists,
otherwise create the scraper and add it to the dict
def _create_istio_mesh_instance(self, instance):
"""
Grab the istio mesh scraper from the dict and return it if it exists,
otherwise create the scraper and add it to... |
Grab the mixer scraper from the dict and return it if it exists,
otherwise create the scraper and add it to the dict
def _create_process_mixer_instance(self, instance):
"""
Grab the mixer scraper from the dict and return it if it exists,
otherwise create the scraper and add it to the di... |
The dictionary containing the apps is cached during collection and reset
at every `check()` call.
def get_apps_json(self, url, timeout, auth, acs_url, ssl_verify, tags, group):
"""
The dictionary containing the apps is cached during collection and reset
at every `check()` call.
... |
Ensure `marathon.queue.count` is reported as zero for apps without queued instances.
def ensure_queue_count(
self, queued, url, timeout, auth, acs_url, ssl_verify, tags=None, label_tags=None, group=None
):
"""
Ensure `marathon.queue.count` is reported as zero for apps without queued instanc... |
Verify if a custom check or integration can run on python 3. CHECK
can be an integration name or a valid path to a Python module or package folder.
def py3(check):
"""Verify if a custom check or integration can run on python 3. CHECK
can be an integration name or a valid path to a Python module or package ... |
Create a config object from an instance dictionary
def from_instance(instance, default_ca_certs=None):
"""
Create a config object from an instance dictionary
"""
method = instance.get('method', 'get')
data = instance.get('data', {})
tags = instance.get('tags', [])
ntlm_domain = instance.get... |
Change the Python code in the __about__.py module so that `__version__`
contains the new value.
def update_version_module(check_name, old_ver, new_ver):
"""
Change the Python code in the __about__.py module so that `__version__`
contains the new value.
"""
version_file = get_version_file(check_... |
Compose a text line to be used in a requirements.txt file to install a check
pinned to a specific version.
def get_agent_requirement_line(check, version):
"""
Compose a text line to be used in a requirements.txt file to install a check
pinned to a specific version.
"""
package_name = get_packag... |
Replace the requirements line for the given check
def update_agent_requirements(req_file, check, newline):
"""
Replace the requirements line for the given check
"""
package_name = get_package_name(check)
lines = read_file_lines(req_file)
for i, line in enumerate(lines):
current_package... |
Modified version of https://docs.python.org/3/library/os.html#os.scandir
that returns https://docs.python.org/3/library/os.html#os.DirEntry for files
directly to take advantage of possible cached os.stat calls.
def _walk(top):
"""Modified version of https://docs.python.org/3/library/os.html#os.scandir
... |
Get metrics related to YARN cluster
def _yarn_cluster_metrics(self, rm_address, instance, addl_tags):
"""
Get metrics related to YARN cluster
"""
metrics_json = self._rest_request_to_json(rm_address, instance, YARN_CLUSTER_METRICS_PATH, addl_tags)
if metrics_json:
... |
Get metrics for running applications
def _yarn_app_metrics(self, rm_address, instance, app_tags, addl_tags):
"""
Get metrics for running applications
"""
metrics_json = self._rest_request_to_json(
rm_address, instance, YARN_APPS_PATH, addl_tags, states=YARN_APPLICATION_STATE... |
Get metrics related to YARN nodes
def _yarn_node_metrics(self, rm_address, instance, addl_tags):
"""
Get metrics related to YARN nodes
"""
metrics_json = self._rest_request_to_json(rm_address, instance, YARN_NODES_PATH, addl_tags)
if metrics_json and metrics_json['nodes'] is no... |
Get metrics from YARN scheduler
def _yarn_scheduler_metrics(self, rm_address, instance, addl_tags, queue_blacklist):
"""
Get metrics from YARN scheduler
"""
metrics_json = self._rest_request_to_json(rm_address, instance, YARN_SCHEDULER_PATH, addl_tags)
try:
metrics_... |
Get metrics from YARN scheduler if it's type is capacityScheduler
def _yarn_capacity_scheduler_metrics(self, metrics_json, addl_tags, queue_blacklist):
"""
Get metrics from YARN scheduler if it's type is capacityScheduler
"""
tags = ['queue_name:{}'.format(metrics_json['queueName'])]
... |
Parse the JSON response and set the metrics
def _set_yarn_metrics_from_json(self, tags, metrics_json, yarn_metrics):
"""
Parse the JSON response and set the metrics
"""
for dict_path, metric in iteritems(yarn_metrics):
metric_name, metric_type = metric
metric_va... |
Get a value from a dictionary under N keys, represented as str("key1.key2...key{n}")
def _get_value_from_json(self, dict_path, metrics_json):
"""
Get a value from a dictionary under N keys, represented as str("key1.key2...key{n}")
"""
for key in dict_path.split('.'):
if key ... |
Set a metric
def _set_metric(self, metric_name, metric_type, value, tags=None, device_name=None):
"""
Set a metric
"""
if metric_type == GAUGE:
self.gauge(metric_name, value, tags=tags, device_name=device_name)
elif metric_type == INCREMENT:
self.incremen... |
Query the given URL and return the JSON response
def _rest_request_to_json(self, url, instance, object_path, tags, *args, **kwargs):
"""
Query the given URL and return the JSON response
"""
service_check_tags = ['url:{}'.format(self._get_url_base(url))] + tags
service_check_tags... |
Join a URL with multiple directories
def _join_url_dir(self, url, *args):
"""
Join a URL with multiple directories
"""
for path in args:
url = url.rstrip('/') + '/'
url = urljoin(url, path.lstrip('/'))
return url |
Return the base of a URL
def _get_url_base(self, url):
"""
Return the base of a URL
"""
s = urlsplit(url)
return urlunsplit([s.scheme, s.netloc, '', '', '']) |
Get the ntp server port
def _get_service_port(self, instance):
"""
Get the ntp server port
"""
host = instance.get('host', DEFAULT_HOST)
port = instance.get('port', DEFAULT_PORT)
# default port is the name of the service but lookup would fail
# if the /etc/servic... |
Create a copy of the instance and set default values.
This is so the base class can create a scraper_config with the proper values.
def _create_cadvisor_prometheus_instance(self, instance):
"""
Create a copy of the instance and set default values.
This is so the base class can create a ... |
Return whether a metric is about a container or not.
It can be about pods, or even higher levels in the cgroup hierarchy
and we don't want to report on that.
:param metric:
:return: bool
def _is_container_metric(labels):
"""
Return whether a metric is about a container o... |
Return whether a metric is about a pod or not.
It can be about containers, pods, or higher levels in the cgroup hierarchy
and we don't want to report on that.
:param metric
:return bool
def _is_pod_metric(labels):
"""
Return whether a metric is about a pod or not.
... |
Should only be called on a container-scoped metric
It gets the container id from the podlist using the metrics labels
:param labels
:return str or None
def _get_container_id(self, labels):
"""
Should only be called on a container-scoped metric
It gets the container id f... |
Checks the labels indicate a container metric,
then extract the container id from them.
:param labels
:return str or None
def _get_entity_id_if_container_metric(self, labels):
"""
Checks the labels indicate a container metric,
then extract the container id from them.
... |
Return the id of a pod
:param labels:
:return: str or None
def _get_pod_uid(self, labels):
"""
Return the id of a pod
:param labels:
:return: str or None
"""
namespace = CadvisorPrometheusScraperMixin._get_container_label(labels, "namespace")
pod_... |
Return if the pod is on host Network
Return False if the Pod isn't in the pod list
:param pod_uid: str
:return: bool
def _is_pod_host_networked(self, pod_uid):
"""
Return if the pod is on host Network
Return False if the Pod isn't in the pod list
:param pod_uid: ... |
:param labels: metric labels: iterable
:return:
def _get_pod_by_metric_label(self, labels):
    """
    Resolve metric labels to the matching pod in self.pod_list.

    :param labels: metric labels: iterable
    :return: the pod looked up by uid via get_pod_by_uid
    """
    return get_pod_by_uid(self._get_pod_uid(labels), self.pod_list)
Iterates over all metrics in a metric and sums the values
matching the same uid. Modifies the metric family in place.
:param metric: prometheus metric family
:param uid_from_labels: function mapping a metric.label to a unique context id
:return: dict with uid as keys, metric object refer... |
Takes a simple metric about a container, reports it as a rate or gauge.
If several series are found for a given container, values are summed before submission.
def _process_container_metric(self, type, metric_name, metric, scraper_config):
"""
Takes a simple metric about a container, reports it... |
Takes a simple metric about a pod, reports it as a rate.
If several series are found for a given pod, values are summed before submission.
def _process_pod_rate(self, metric_name, metric, scraper_config):
"""
Takes a simple metric about a pod, reports it as a rate.
If several series are... |
Takes a metric object, a metric name, and a cache dict where it will store
container_name --> (value, tags) so that _process_limit_metric can compute usage_pct
it also submit said value and tags as a gauge.
def _process_usage_metric(self, m_name, metric, cache, scraper_config):
"""
Take... |
Reports limit metrics if m_name is not an empty string,
and optionally checks in the given cache if there's a usage
for each sample in the metric and reports the usage_pct
def _process_limit_metric(self, m_name, metric, cache, scraper_config, pct_m_name=None):
"""
Reports limit metrics ... |
Number of bytes that are consumed by the container on this filesystem.
def container_fs_usage_bytes(self, metric, scraper_config):
"""
Number of bytes that are consumed by the container on this filesystem.
"""
metric_name = scraper_config['namespace'] + '.filesystem.usage'
if me... |
Number of bytes that can be consumed by the container on this filesystem.
This method is used by container_fs_usage_bytes, it doesn't report any metric
def container_fs_limit_bytes(self, metric, scraper_config):
"""
Number of bytes that can be consumed by the container on this filesystem.
... |
Report the member's replica set state
* Submit a service check.
* Create an event on state change.
def _report_replica_set_state(self, state, clean_server_name, replset_name):
"""
Report the member's replica set state
* Submit a service check.
* Create an event on state ... |
Return a reasonable hostname for a replset membership event to mention.
def hostname_for_event(self, clean_server_name):
"""Return a reasonable hostname for a replset membership event to mention."""
uri = urlsplit(clean_server_name)
if '@' in uri.netloc:
hostname = uri.netloc.split(... |
Create an event with a message describing the replication
state of a mongo node
def create_event(self, last_state, state, clean_server_name, replset_name):
"""Create an event with a message describing the replication
state of a mongo node"""
status = self.get_state_description(... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.