code — string column (lengths 81 to 3.79k characters) |
|---|
def wrap_function(func=None, error_threshold=None, reraise_exception=True, save_current_stack_trace=True):
if func:
return flawless.client.client._wrap_function_with_error_decorator(
func=func,
error_threshold=error_threshold,
reraise_exception=reraise_exception,
... |
def post(self, url, params=None, data=None, files=None, **kwargs):
    """Issue a POST request to *url* by delegating to ``call_api``.

    ``params``, ``data`` and ``files`` are forwarded unchanged; any
    additional keyword arguments are passed straight through.
    """
    return self.call_api(
        "POST", url, params=params, data=data, files=files, **kwargs)
def is_dark_rgb(r, g, b):
try:
midpoint = int(environ.get('TERMINAL_COLOR_MIDPOINT', None))
except:
pass
if not midpoint:
term = environ.get('TERM', None)
print("midpoint", midpoint, 'vs', (16*5 + 16*g + 16*b))
midpoint = 383 if term and term == 'xterm-256color' else... |
def stop(self):
    """Cancel the pending sync timer, if any, under the sync lock."""
    with self.synclock:
        thread = self.syncthread
        if thread is None:
            return
        thread.cancel()
        self.syncthread = None
def evaluate_marker(cls, text, extra=None):
    """Parse *text* with the ``parser`` module and evaluate the resulting
    expression tree via ``cls.interpret``.

    *extra* is accepted for interface compatibility but unused.
    """
    parse_tree = parser.expr(text).totuple(1)
    return cls.interpret(parse_tree[1])
def _get_job(self, project_id, job_id):
job_name = 'projects/{}/jobs/{}'.format(project_id, job_id)
request = self._mlengine.projects().jobs().get(name=job_name)
while True:
try:
return request.execute()
except HttpError as e:
if e.resp.sta... |
def pause(self):
for tracer in self.tracers:
tracer.stop()
stats = tracer.get_stats()
if stats:
print("\nCoverage.py tracer stats:")
for k in sorted(stats.keys()):
print("%16s: %s" % (k, stats[k]))
threading.settrace... |
def to_py(o, keyword_fn: Callable[[kw.Keyword], Any] = _kw_name):
if isinstance(o, ISeq):
return _to_py_list(o, keyword_fn=keyword_fn)
elif not isinstance(
o, (IPersistentList, IPersistentMap, IPersistentSet, IPersistentVector)
):
return o
else:
return _to_py_backup(o, ke... |
def indent(instr,nspaces=4, ntabs=0, flatten=False):
if instr is None:
return
ind = '\t'*ntabs+' '*nspaces
if flatten:
pat = re.compile(r'^\s*', re.MULTILINE)
else:
pat = re.compile(r'^', re.MULTILINE)
outstr = re.sub(pat, ind, instr)
if outstr.endswith(os.linesep+ind):
... |
def handleCONNACK(self, response):
    """Handle a CONNACK packet arriving in a state where none is expected.

    Only logs the anomaly; *response* is ignored.
    """
    # NOTE(review): the original bound self.__class__.__name__ to an unused
    # local; the logger's {log_source} placeholder already identifies the
    # source, so the dead assignment is removed.
    log.error("Unexpected {packet:7} packet received in {log_source}",
              packet="CONNACK")
def filter_bam(coverage_info, bam_file, min_coverage, output_bam):
contig_list = [x for x, vals in coverage_info.items()
if vals["cov"] >= min_coverage]
cli = [
"samtools",
"view",
"-bh",
"-F",
"4",
"-o",
output_bam,
"-@",
... |
def create_tfs_tfvc_client(url, token=None):
if token is None:
token = os.environ.get('TFS_API_TOKEN', None)
tfs_connection = create_tfs_connection(url, token)
tfs_tfvc_client = tfs_connection.get_client('vsts.tfvc.v4_1.tfvc_client.TfvcClient')
if tfs_tfvc_client is None:
msg = 'Unable t... |
def visualize_qualitative_analysis(inputs, model, samples=1, batch_size=3,
length=8):
average = lambda dist: tf.reduce_mean(
input_tensor=dist.mean(), axis=0)
with tf.compat.v1.name_scope("val_reconstruction"):
reconstruct = functools.partial(model.reconstruct, inputs=in... |
def debug_script(src, pm=False, globs=None):
"Debug a test script. `src` is the script, as a string."
import pdb
srcfilename = tempfile.mktemp(".py", "doctestdebug")
f = open(srcfilename, 'w')
f.write(src)
f.close()
try:
if globs:
globs = globs.copy()
else:
... |
def _dot_product(self, imgs_to_decode):
return np.dot(imgs_to_decode.T, self.feature_images).T |
def set_certificate_issuer(
self, vault_base_url, issuer_name, provider, credentials=None, organization_details=None, attributes=None, custom_headers=None, raw=False, **operation_config):
parameter = models.CertificateIssuerSetParameters(provider=provider, credentials=credentials, organization_detai... |
def remove_interval(self, time):
    """Drop every interval whose (inclusive) bounds contain *time*.

    Raises a plain ``Exception`` when this tier is not an IntervalTier.
    """
    if self.tier_type != 'IntervalTier':
        raise Exception('Tiertype must be IntervalTier.')
    kept = []
    for interval in self.intervals:
        if interval[0] <= time <= interval[1]:
            continue  # interval covers the requested time: remove it
        kept.append(interval)
    self.intervals = kept
def _run_valid(self, epoch, valid_set, dry_run=False, save_path=None):
costs = self.valid_step(valid_set)
_, J = costs[0]
new_best = False
if self.best_cost - J > self.best_cost * self.min_improvement:
self.best_params = self.copy_params()
new_best = True
... |
def Bin(self):
    """Run the low-level binning routine and raise on any error status."""
    status = _Bin(self.transit, self.limbdark, self.settings, self.arrays)
    if status != _ERR_NONE:
        RaiseError(status)
def get_namespaces(self, prefix=None):
    """GET the namespace listing, optionally filtered by *prefix*.

    Returns the decoded JSON response body.
    """
    response = self.request(method="get", params={"prefix": prefix})
    return response.json()
def geckoboard_rag_widget(request):
params = get_gecko_params(request)
print params['uids']
max_date = datetime.now()-timedelta(days=params['days_back'])
metrics = Metric.objects.filter(uid__in=params['uids'])
results = [(metric.latest_count(frequency=params['frequency'], count=not params['cumulativ... |
def _read_compressed_points_data(self, laszip_vlr, point_format):
offset_to_chunk_table = struct.unpack("<q", self.stream.read(8))[0]
size_of_point_data = offset_to_chunk_table - self.stream.tell()
if offset_to_chunk_table <= 0:
logger.warning(
"Strange offset to chun... |
def get_system_per_cpu_times():
    """Return a list of per-CPU (user, system, idle) time tuples."""
    return [
        _cputimes_ntuple(user, system, idle)
        for user, system, idle in _psutil_mswindows.get_system_cpu_times()
    ]
def _init_from_bool(self, z, x):
if z is None:
raise QiskitError("z vector must not be None.")
if x is None:
raise QiskitError("x vector must not be None.")
if len(z) != len(x):
raise QiskitError("length of z and x vectors must be "
... |
def _expand_default(self, option):
if self.parser is None or not self.default_tag:
return option.help
optname = option._long_opts[0][2:]
try:
provider = self.parser.options_manager._all_options[optname]
except KeyError:
value = None
else:
optdict = provider.get_option... |
def map(self, func, value_shape=None, dtype=None):
if value_shape is None or dtype is None:
try:
mapped = func(random.randn(*self.plan).astype(self.dtype))
except Exception:
first = self._rdd.first()
if first:
mapped = f... |
def get_data(self, cache=True, as_text=False, parse_form_data=False):
rv = getattr(self, '_cached_data', None)
if rv is None:
if parse_form_data:
self._load_form_data()
rv = self.stream.read()
if cache:
self._cached_data = rv
if... |
def main(mash_output, sample_id):
logger.info("Reading file : {}".format(mash_output))
read_mash_output = open(mash_output)
dic = {}
median_list = []
filtered_dic = {}
logger.info("Generating dictionary and list to pre-process the final json")
for line in read_mash_output:
tab_split ... |
def add_import(
self, sym: sym.Symbol, module: types.ModuleType, *aliases: sym.Symbol
) -> None:
self._imports.swap(lambda m: m.assoc(sym, module))
if aliases:
self._import_aliases.swap(
lambda m: m.assoc(
*itertools.chain.from_iterable([(alias... |
def delete_report(self, report):
    """Delete *report* via the accounts API.

    Always returns True; the DELETE response body is not inspected.
    """
    url = "{}/reports/{}/{}".format(
        ACCOUNTS_API.format(report.account_id),
        report.type, report.report_id)
    self._delete_resource(url)
    return True
def delete(self, blocksize=100):
from .columns import MODELS_REFERENCED
if not self._model._no_fk or self._model._namespace in MODELS_REFERENCED:
raise QueryError("Can't delete entities of models with foreign key relationships")
de = []
i = 0
for result in self.iter_r... |
def _ep_need_close(self):
    """Handle a session close request by closing every attached link.

    Iterates over a snapshot of ``self._links`` so links may remove
    themselves from the collection while being closed.
    """
    LOG.debug("Session %s close requested - closing...",
              self._name)
    for link in self._links.copy():
        link._session_closed()
def users(store):
user_objs = list(store.users())
total_events = store.user_events().count()
for user_obj in user_objs:
if user_obj.get('institutes'):
user_obj['institutes'] = [store.institute(inst_id) for inst_id in user_obj.get('institutes')]
else:
user_obj['institu... |
def linear_connection(plist, lane):
logger.debug(
"Establishing linear connection with processes: {}".format(plist))
res = []
previous = None
for p in plist:
if not previous:
previous = p
continue
res.append({
"input": {
"proces... |
def run_migrations_online():
connectable = settings.engine
with connectable.connect() as connection:
context.configure(
connection=connection,
transaction_per_migration=True,
target_metadata=target_metadata,
compare_type=COMPARE_TYPE,
)
wit... |
def map_generic(self, func):
def process_record(val):
newval = empty(1, dtype="object")
newval[0] = func(val)
return newval
rdd = self._rdd.mapValues(process_record)
nchunks = self.getnumber(self.plan, self.vshape)
newshape = tuple([int(s) for s in r_... |
def get_conn(self):
    """Return a cached Compute Engine API client, creating it with an
    authorized HTTP object on first use."""
    if self._conn:
        return self._conn
    self._conn = build(
        'compute', self.api_version,
        http=self._authorize(), cache_discovery=False)
    return self._conn
def parse_args(argv):
global g_new_messages_to_exclude
global g_old_messages_to_remove
global g_load_java_message_filename
global g_save_java_message_filename
global g_print_java_messages
if len(argv) < 2:
usage()
i = 1
while (i < len(argv)):
s = argv[i]
if (s == ... |
def expand_files(self, modules):
result, errors = utils.expand_modules(
modules, self.config.black_list, self.config.black_list_re
)
for error in errors:
message = modname = error["mod"]
key = error["key"]
self.set_current_module(modname)
... |
def _m(self):
    """Set this interface's direction to the interface-direction opposite
    of its master direction; returns self for chaining.

    Must be called before any sub-interfaces exist.
    """
    assert not hasattr(self, "_interfaces") or not self._interfaces, \
        "Too late to change direction of interface"
    flipped = DIRECTION.opposite(self._masterDir)
    self._direction = DIRECTION.asIntfDirection(flipped)
    return self
def __nn_filter_helper(R_data, R_indices, R_ptr, S, aggregate):
s_out = np.empty_like(S)
for i in range(len(R_ptr)-1):
targets = R_indices[R_ptr[i]:R_ptr[i+1]]
if not len(targets):
s_out[i] = S[i]
continue
neighbors = np.take(S, targets, axis=0)
if aggrega... |
def fit(self, Z, **fit_params):
    """Pre-transform *Z* through the pipeline, fit the final estimator on
    the result, then unpersist the intermediate RDD.

    Returns self.
    """
    Zt, final_params = self._pre_transform(Z, **fit_params)
    final_estimator = self.steps[-1][-1]
    final_estimator.fit(Zt, **final_params)
    Zt.unpersist()
    return self
def _make_content_item(node, mime_type=None, alternate_data=None):
raw = node.data
if getattr(node, 'encoding', None) == 'zlib':
try:
raw = zlib.decompress(node.data)
except Exception, exc:
if alternate_data is not None:
try:
raw = zlib... |
def simUnit(self, synthesisedUnit: Unit, until: float, extraProcesses=[]):
beforeSim = self.config.beforeSim
if beforeSim is not None:
beforeSim(self, synthesisedUnit)
add_proc = self.add_process
for p in extraProcesses:
add_proc(p(self))
self._initUnitSig... |
def parse(self, hcl, canonicalize=False):
    """POST job HCL to the parse endpoint and return the parsed JSON job
    specification."""
    payload = {"JobHCL": hcl, "Canonicalize": canonicalize}
    response = self.request(
        "parse", json=payload, method="post", allow_redirects=True)
    return response.json()
def has_no_unchecked_field(self, locator, **kwargs):
    """Assert absence of an *unchecked* field matching *locator*.

    Forces ``checked=False`` and delegates to ``has_no_selector``.
    """
    return self.has_no_selector(
        "field", locator, **dict(kwargs, checked=False))
def reconnect(self):
    """Begin the reconnect sequence: mark the connection as down, flag
    that a reconnect is required, and close the current socket if open."""
    # fixed typo in the original log message ("Initialzion")
    self.log.debug("reconnect(): Initializing reconnect sequence..")
    self.connected.clear()
    self.reconnect_required.set()
    if self.socket:
        self.socket.close()
def lowpass_filter(data, cutoff, fs, order=5):
    """Apply a Butterworth low-pass filter to *data*.

    Parameters
    ----------
    data : array_like
        Input signal.
    cutoff : float
        Cut-off frequency in Hz.
    fs : float
        Sampling frequency in Hz.
    order : int, optional
        Filter order (default 5).
    """
    normalized_cutoff = cutoff / (0.5 * fs)
    numerator, denominator = signal.butter(
        order, normalized_cutoff, btype='low', analog=False)
    return signal.lfilter(numerator, denominator, data)
def _add(self, to_add):
if PyFunceble.CONFIGURATION["mining"]:
if PyFunceble.INTERN["file_to_test"] not in PyFunceble.INTERN["mined"]:
PyFunceble.INTERN["mined"][PyFunceble.INTERN["file_to_test"]] = {}
for element in to_add:
if (
elemen... |
def square(duration: int, amp: complex, period: float = None,
           phase: float = 0, name: str = None) -> SamplePulse:
    """Generate a sampled square-wave pulse.

    When *period* is omitted it defaults to *duration* (one full cycle
    over the pulse).
    """
    effective_period = duration if period is None else period
    return _sampled_square_pulse(duration, amp, effective_period,
                                 phase=phase, name=name)
def size(self, train=False, valid=False, xval=False):
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k, v in tm.items():
m[k] = None if v is None else [v[2] for v in v._metric_json["centroid_stats"].cell_values]
return list(m.values())[0] if len(m) == 1 else... |
def find_files(filenames, recursive, exclude):
while filenames:
name = filenames.pop(0)
if recursive and os.path.isdir(name):
for root, directories, children in os.walk(name):
filenames += [os.path.join(root, f) for f in children
if match_fil... |
def is_literal_or_name(value):
    """Return True if *value* is a Python literal, an empty builtin
    constructor call, or a single bare name.

    The original returned the raw ``re.Match`` object in the name case;
    that is normalized to a plain bool here (truthiness for callers is
    unchanged).
    """
    try:
        ast.literal_eval(value)
        return True
    except (SyntaxError, ValueError):
        pass
    if value.strip() in {'dict()', 'list()', 'set()'}:
        return True
    # a single identifier, optionally followed by trailing whitespace
    return re.match(r'^\w+\s*$', value) is not None
def complete_restore(
self, location_name, operation_id, last_backup_name, custom_headers=None, raw=False, polling=True, **operation_config):
raw_result = self._complete_restore_initial(
location_name=location_name,
operation_id=operation_id,
last_backup_name=last... |
def post(self, headers={}, body=""):
code, message = self.command("POST")
if code != 340:
raise NNTPReplyError(code, message)
hdrs = utils.unparse_headers(headers)
self.socket.sendall(hdrs)
if isinstance(body, basestring):
body = cStringIO.StringIO(body)
... |
def has_context_loop(state, incorrect_msg, exact_names):
    """Check the loop's target variables against the solution.

    Falls back to ``MSG_INCORRECT_LOOP`` when no custom message is given.
    """
    message = incorrect_msg or MSG_INCORRECT_LOOP
    return _test(
        state,
        message,
        exact_names,
        tv_name="_target_vars",
        highlight_name="target",
    )
def imcrop(img, bboxes, scale=1.0, pad_fill=None):
chn = 1 if img.ndim == 2 else img.shape[2]
if pad_fill is not None:
if isinstance(pad_fill, (int, float)):
pad_fill = [pad_fill for _ in range(chn)]
assert len(pad_fill) == chn
_bboxes = bboxes[None, ...] if bboxes.ndim == 1 else... |
def validate(self, obj, value):
    """Validate that *value* is a subclass of ``self.klass``.

    Returns *value* when it is a suitable class, or when it is None and
    ``self._allow_none`` is set; otherwise reports the failure via
    ``self.error(obj, value)``.
    """
    try:
        if issubclass(value, self.klass):
            return value
    except TypeError:
        # value is not a class at all. The original used a bare
        # ``except:`` here, which also swallowed KeyboardInterrupt and
        # SystemExit; issubclass only raises TypeError for non-classes.
        if value is None and self._allow_none:
            return value
    self.error(obj, value)
def plot(
self, data, bbox=None, plot_type='scatter',
fig_kwargs=None, bmap_kwargs=None, plot_kwargs=None,
cbar_kwargs=None):
from mpl_toolkits.basemap import Basemap
fig_kwargs = fig_kwargs or {}
bmap_kwargs = bmap_kwargs or {}
plot_kwargs = plot_kwar... |
def update_configuration(cfgfile=None):
configobj.DEFAULT_INTERPOLATION = 'template'
cfgfile = configuration_file(cfgfile)
cfg = configobj.ConfigObj(cfgfile, configspec=cfgspec, encoding='utf-8')
validator = Validator()
val = cfg.validate(validator)
if val is not True:
raise ValueError('... |
def times_csv(path, times, annotations=None, delimiter=',', fmt='%0.3f'):
r
if annotations is not None and len(annotations) != len(times):
raise ParameterError('len(annotations) != len(times)')
with open(path, 'w') as output_file:
writer = csv.writer(output_file, delimiter=delimiter)
... |
def add_subgraph(self, info):
if not info.initialized:
return
graph = self._request_graph(info.ui.control)
if graph is not None:
subgraph = Subgraph()
retval = subgraph.edit_traits(parent = info.ui.control,
kind = "l... |
def _onDeviceStatus(self, client, userdata, pahoMessage):
try:
status = Status(pahoMessage)
self.logger.debug("Received %s action from %s" % (status.action, status.clientId))
if self.deviceStatusCallback:
self.deviceStatusCallback(status)
except Invali... |
def get_input(prompt, default=None, exit_msg='bye!'):
try:
response = six.moves.input(prompt)
except (KeyboardInterrupt, EOFError):
print()
print(exit_msg)
exit()
try:
return int(response)
except ValueError:
if response.strip() == "" and default is not Non... |
def encode(self):
    """Serialize this message: fixed header byte 0xB0, encoded remaining
    length, then the 16-bit message id as the variable header.

    Stores the buffer on ``self.encoded`` and returns it as bytes
    (str under Python 2).
    """
    variable_header = encode16Int(self.msgId)
    packet = bytearray(1)
    packet[0] = 0xB0
    packet.extend(encodeLength(len(variable_header)))
    packet.extend(variable_header)
    self.encoded = packet
    return str(packet) if PY2 else bytes(packet)
def set_piece_at(self, square, piece, from_hand=False, into_hand=False):
if from_hand:
self.remove_piece_from_hand(piece.piece_type, self.turn)
self.remove_piece_at(square, into_hand)
self.pieces[square] = piece.piece_type
mask = BB_SQUARES[square]
piece_type = piece.... |
def _check_relative_import(
self, modnode, importnode, importedmodnode, importedasname
):
if not self.linter.is_message_enabled("relative-import"):
return None
if importedmodnode.file is None:
return False
if modnode is importedmodnode:
return Fals... |
def sold_out_and_unregistered(context):
user = user_for_context(context)
if hasattr(user, "attendee") and user.attendee.completed_registration:
return None
ticket_category = settings.TICKET_PRODUCT_CATEGORY
categories = available_categories(context)
return ticket_category not in [cat.id for ... |
def is_token_from_emulator(auth_header: str) -> bool:
if not auth_header:
return False
parts = auth_header.split(' ')
if len(parts) != 2:
return False
auth_scheme = parts[0]
bearer_token = parts[1]
if auth_scheme != 'Bearer':
return Fal... |
def render_template(template_file, dst_file, **kwargs):
    """Render a simple ``{{key}}`` template file into *dst_file*.

    Each keyword argument replaces every occurrence of ``{{key}}`` in the
    template text with its string value.
    """
    # use context managers so the files are always closed;
    # dict.iteritems() was Python 2 only — items() works on both.
    with open(template_file) as f:
        dst_text = f.read()
    for key, value in kwargs.items():
        dst_text = dst_text.replace("{{" + key + "}}", value)
    with open(dst_file, "wt") as f:
        f.write(dst_text)
def is_subdomain(self, domain=None):
    """Check whether the subject is a subdomain.

    The subject is chosen in order of preference: the *domain* argument,
    ``self.element``, then the global ``PyFunceble.INTERN["to_test"]``.
    Delegates to ``is_domain_valid`` with ``subdomain_check=True``.
    """
    subject = domain or self.element or PyFunceble.INTERN["to_test"]
    return self.is_domain_valid(subject, subdomain_check=True)
def make_logging_handlers_and_tools(self, multiproc=False):
log_stdout = self.log_stdout
if sys.stdout is self._stdout_to_logger:
log_stdout = False
if self.log_config:
if multiproc:
proc_log_config = self._mp_config
else:
proc_... |
def _get_index_urls_locations(self, project_name):
def mkurl_pypi_url(url):
loc = posixpath.join(url, project_url_name)
if not loc.endswith('/'):
loc = loc + '/'
return loc
project_url_name = urllib_parse.quote(project_name.lower())
if self.ind... |
def _setup_logging(self, log_level: str):
    """Configure stderr logging at *log_level* for the aiohttp loggers
    and this application's own named logger."""
    level = getattr(logging, log_level)
    for logger_name in ('aiohttp.access', 'aiohttp.internal',
                        'aiohttp.server', 'aiohttp.web', self.name):
        setup_logger(name=logger_name, stream=sys.stderr, level=level)
def parse(self, selector):
log.debug(self.obj)
tokens = lex(selector)
if self.peek(tokens, 'operator') == '*':
self.match(tokens, 'operator')
results = list(object_iter(self.obj))
else:
results = self.selector_production(tokens)
results = [node... |
def bandpass_filter(data, low, high, fs, order=5):
    """Apply a Butterworth band-pass filter to *data*.

    *low* and *high* are the band edges in Hz; *fs* is the sampling
    frequency in Hz; *order* is the filter order (default 5).
    """
    nyquist = 0.5 * fs
    band = [low / nyquist, high / nyquist]
    numerator, denominator = signal.butter(order, band, btype='band')
    return signal.lfilter(numerator, denominator, data)
def build_schema(m, c_c):
schema = ET.Element('xs:schema')
schema.set('xmlns:xs', 'http://www.w3.org/2001/XMLSchema')
global_filter = lambda selected: ooaofooa.is_global(selected)
for s_dt in m.select_many('S_DT', global_filter):
datatype = build_type(s_dt)
if datatype is not None:
... |
def execute(option):
namelist_option = []
makefile_option = []
flags = ""
for entry in option:
key = entry.keys()[0]
if key == "Problem Size":
namelist_option.append({"SIZE": entry[key]})
elif key == "F90":
makefile_option.append(entry)
else:
... |
def img_from_vgg(x):
    """Convert a (C, H, W) array back to a displayable (H, W, C) image.

    Re-adds the per-channel offsets in place (mutating the caller's
    array through the transposed view, exactly like the original) and
    reverses the channel order.
    """
    img = x.transpose((1, 2, 0))
    # undo per-channel mean subtraction; these look like the standard
    # VGG BGR means — TODO confirm against the preprocessing side
    for channel, offset in enumerate((103.939, 116.779, 123.68)):
        img[:, :, channel] += offset
    return img[:, :, ::-1]
def remove_unique_identifiers(identifiers_to_tags, pipeline_links):
for index, val in enumerate(pipeline_links):
if val["input"]["process"] != "__init__":
val["input"]["process"] = identifiers_to_tags[
val["input"]["process"]]
if val["output"]["process"] != "__init__":
... |
def fetch_items(self, category, **kwargs):
from_date = kwargs['from_date']
if self.client.version[0] == 2 and self.client.version[1] == 8:
fetcher = self._fetch_gerrit28(from_date)
else:
fetcher = self._fetch_gerrit(from_date)
for review in fetcher:
yi... |
def _imported_module(self, node, mod_path, relative):
module = node.root()
context_name = module.name
if relative:
mod_path = "%s.%s" % (".".join(context_name.split(".")[:-1]), mod_path)
if self.compute_module(context_name, mod_path):
if not hasattr(module, "depen... |
def _get_existing_instance(self, query, value):
if self.columns:
result = query.filter_by(
**{prop.key: value.get(prop.key) for prop in self.related_keys}
).one()
else:
result = query.get([value.get(prop.key) for prop in self.related_keys])
... |
def deprecated(*args):
def wrap(func):
def wrapped_func(*args, **kwargs):
warnings.warn(msg, category=DeprecationWarning)
return func(*args, **kwargs)
return wrapped_func
if len(args) == 1 and callable(args[0]):
msg = "Function '%s' will be deprecated in future ve... |
def _get_required_args(fn):
    """Return the names of *fn*'s required positional arguments as a tuple.

    Drops the implicit first argument when *fn* is a class, and strips
    trailing arguments that have default values.
    """
    spec = tf_inspect.getfullargspec(fn)
    required = spec.args[1:] if tf_inspect.isclass(fn) else list(spec.args)
    if spec.defaults:
        required = required[:-len(spec.defaults)]
    return tuple(required)
def compute_lst(self):
if self.header[b'telescope_id'] == 6:
self.coords = gbt_coords
elif self.header[b'telescope_id'] == 4:
self.coords = parkes_coords
else:
raise RuntimeError("Currently only Parkes and GBT supported")
if HAS_SLALIB:
dut... |
def register_metric(metric_name: str) -> Callable[..., Any]:
def decorate(fn):
fn_name = fn.__module__ + ':' + fn.__name__
if metric_name in _REGISTRY and _REGISTRY[metric_name] != fn_name:
log.warning('"{}" is already registered as a metric name, the old function will be ignored'
... |
def _check_type(var, vtype):
if vtype is None:
return var is None
if isinstance(vtype, _primitive_type):
return var == vtype
if vtype is str:
return isinstance(var, _str_type)
if vtype is int:
return isinstance(var, _int_type)
if vtype is numeric:
return isins... |
def _basilisp_bytecode(
    mtime: int, source_size: int, code: List[types.CodeType]
) -> bytes:
    """Assemble a bytecode-cache payload: magic number, mtime word,
    source-size word, then the marshalled code objects."""
    payload = bytearray(MAGIC_NUMBER)
    for chunk in (_w_long(mtime), _w_long(source_size), marshal.dumps(code)):
        payload.extend(chunk)
    return payload
def select_name_pattern(source, pat):
    """Lazily select the elements of *source* whose ``xml_name`` matches
    the compiled pattern *pat* (returns a filter iterator, as before)."""
    def name_matches(element):
        return pat.match(element.xml_name) is not None
    return filter(name_matches, select_elements(source))
def get_overrides_filename(variable):
    """Read the overrides filename from the environment variable named
    *variable*.

    Raises EnvironmentError when the variable is not set.
    """
    filename = os.environ.get(variable)
    if filename is not None:
        return filename
    raise EnvironmentError(
        'Please set the {} environment variable.'.format(variable))
def get_order(self, order_id):
    """Fetch a single order by id and wrap the response in ``Order``."""
    path = '/orders/{}'.format(order_id)
    return Order(self.get(path))
def networkdays(from_date, to_date, locale='en-US'):
    """Count working days between two dates, excluding the holidays
    registered for *locale* (default ``'en-US'``)."""
    return workdays.networkdays(from_date, to_date, locales[locale])
def start_proxy(self):
self._download_sql_proxy_if_needed()
if self.sql_proxy_process:
raise AirflowException("The sql proxy is already running: {}".format(
self.sql_proxy_process))
else:
command_to_run = [self.sql_proxy_path]
command_to_run.ex... |
def init_role(self, role_name, role_vms, role_perms):
pvms = self.get_session.query(sqla_models.PermissionView).all()
pvms = [p for p in pvms if p.permission and p.view_menu]
role = self.find_role(role_name)
if not role:
role = self.add_role(role_name)
if len(role.per... |
def glm(interactive=True, echo=True, testing=False):
def demo_body(go):
go()
h2o.init()
go()
prostate = h2o.load_dataset("prostate")
go()
prostate.describe()
go()
train, test = prostate.split_frame(ratios=[0.70])
go()
train["CAPSULE"] =... |
def dsync_handler(self, args):
    """Handle the dsync command: validate the arguments and sync from
    ``args[1]`` (source) to ``args[2]`` (target)."""
    # dsync always implies recursive, checked, forced transfer
    self.opt.recursive = True
    self.opt.sync_check = True
    self.opt.force = True
    self.validate('cmd|s3,local|s3,local', args)
    source, target = args[1], args[2]
    self.s3handler().dsync_files(source, target)
def mkstemp(self, suffix, prefix, directory=None):
    """Create a closed, world-readable (0644) temporary file and return
    its path.

    Falls back to ``self.artifacts_dir`` when *directory* is falsy.
    """
    directory = directory or self.artifacts_dir
    handle, path = tempfile.mkstemp(suffix, prefix, directory)
    os.close(handle)
    os.chmod(path, 0o644)
    return path
def patch_protocol_for_agent(protocol):
old_makeConnection = protocol.makeConnection
old_connectionLost = protocol.connectionLost
def new_makeConnection(transport):
patch_transport_fake_push_producer(transport)
patch_transport_abortConnection(transport, protocol)
return old_makeConne... |
def cinder(*arg):
check_event_type(Openstack.Cinder, *arg)
event_type = arg[0]
def decorator(func):
if event_type.find("*") != -1:
event_type_pattern = pre_compile(event_type)
cinder_customer_process_wildcard[event_type_pattern] = func
else:
cinder_custome... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.