positive
stringlengths
100
30.3k
anchor
stringlengths
1
15k
def disable_jt_ha(self, active_name): """ Disable high availability for a MR JobTracker active-standby pair. @param active_name: name of the JobTracker that will be active after the disable operation. The other JobTracker and Failover Controllers will be remo...
Disable high availability for a MR JobTracker active-standby pair. @param active_name: name of the JobTracker that will be active after the disable operation. The other JobTracker and Failover Controllers will be removed. @return: Reference to the submitted comma...
def focus_next_sibling(self): """move focus to next sibling of currently focussed one""" w, focuspos = self.get_focus() sib = self._tree.next_sibling_position(focuspos) if sib is not None: self.set_focus(sib)
move focus to next sibling of currently focussed one
def retrieve_commands(self, module): """ Function smartly imports Command type classes given module Args ---- module (module): The module which Command classes will be extracted from Returns ------- commands (list): A list of ...
Function smartly imports Command type classes given module Args ---- module (module): The module which Command classes will be extracted from Returns ------- commands (list): A list of Command instances Note: This function ...
async def disconnect(self, conn_id): """Disconnect from a connected device. See :meth:`AbstractDeviceAdapter.disconnect`. """ adapter_id = self._get_property(conn_id, 'adapter') await self.adapters[adapter_id].disconnect(conn_id) self._teardown_connection(conn_id)
Disconnect from a connected device. See :meth:`AbstractDeviceAdapter.disconnect`.
def create_event_subscription(self, instance, on_data, timeout=60): """ Create a new subscription for receiving events of an instance. This method returns a future, then returns immediately. Stop the subscription by canceling the future. :param str instance: A Yamcs instance na...
Create a new subscription for receiving events of an instance. This method returns a future, then returns immediately. Stop the subscription by canceling the future. :param str instance: A Yamcs instance name :param on_data: Function that gets called on each :class:`.Event`. :...
def get_metric_group_infos(self): """ Get the faked metric group definitions for this context object that are to be returned from its create operation, in the format needed for the "Create Metrics Context" operation response. Returns: "metric-group-infos" JSON object ...
Get the faked metric group definitions for this context object that are to be returned from its create operation, in the format needed for the "Create Metrics Context" operation response. Returns: "metric-group-infos" JSON object as described for the "Create Metrics Conte...
def add_output(self, out_name, type_or_serialize=None, **kwargs): """ Declare an output """ if out_name not in self.engine.all_outputs(): raise ValueError("'%s' is not generated by the engine %s" % (out_name, self.engine.all_outputs())) if type_or_serialize is None: ...
Declare an output
def com_google_fonts_check_metadata_canonical_weight_value(font_metadata): """METADATA.pb: Check that font weight has a canonical value.""" first_digit = font_metadata.weight / 100 if (font_metadata.weight % 100) != 0 or \ (first_digit < 1 or first_digit > 9): yield FAIL, ("METADATA.pb: The weight is decl...
METADATA.pb: Check that font weight has a canonical value.
def GetMemActiveMB(self): '''Retrieves the amount of memory the virtual machine is actively using its estimated working set size.''' counter = c_uint() ret = vmGuestLib.VMGuestLib_GetMemActiveMB(self.handle.value, byref(counter)) if ret != VMGUESTLIB_ERROR_SUCCESS: raise VMGue...
Retrieves the amount of memory the virtual machine is actively using its estimated working set size.
def get_name_for(self, dynamic_part): """ Return the name for the current dynamic field, accepting a limpyd instance for the dynamic part """ dynamic_part = self.from_python(dynamic_part) return super(DynamicRelatedFieldMixin, self).get_name_for(dynamic_part)
Return the name for the current dynamic field, accepting a limpyd instance for the dynamic part
def accounts(self): """ Access the Accounts Twilio Domain :returns: Accounts Twilio Domain :rtype: twilio.rest.accounts.Accounts """ if self._accounts is None: from twilio.rest.accounts import Accounts self._accounts = Accounts(self) retur...
Access the Accounts Twilio Domain :returns: Accounts Twilio Domain :rtype: twilio.rest.accounts.Accounts
def do_break(self, arg, temporary = 0): """b(reak) [ ([filename:]lineno | function) [, condition] ] Without argument, list all breaks. With a line number argument, set a break at this line in the current file. With a function name, set a break at the first executable line of th...
b(reak) [ ([filename:]lineno | function) [, condition] ] Without argument, list all breaks. With a line number argument, set a break at this line in the current file. With a function name, set a break at the first executable line of that function. If a second argument is prese...
def register_custom_serializer(cls, use_pickle=False, use_dict=False, serializer=None, deserializer=None, local=False, driver_id=None,...
Enable serialization and deserialization for a particular class. This method runs the register_class function defined below on every worker, which will enable ray to properly serialize and deserialize objects of this class. Args: cls (type): The class that ray should use this custom serializer...
def find_servers(self, tags, running=True): """ Returns any servers in the region that have tags that match the key-value pairs in :attr:`tags`. :param Mapping tags: A mapping object in which the keys are the tag names and the values are the tag values. :param bool...
Returns any servers in the region that have tags that match the key-value pairs in :attr:`tags`. :param Mapping tags: A mapping object in which the keys are the tag names and the values are the tag values. :param bool running: A flag to limit server list to instances that are ...
def _pad(string, size): """ 'Pad' a string with leading zeroes to fit the given size, truncating if necessary. """ strlen = len(string) if strlen == size: return string if strlen < size: return _padding[0:size-strlen] + string return string[-size:]
'Pad' a string with leading zeroes to fit the given size, truncating if necessary.
def schedule_job(job_id, schedule_in, connection=None, **kwargs): """Schedules a job. :param job_id: unique identifier for this job :param schedule_in: number of seconds from now in which to schedule the job or timedelta object. :param **kwargs: parameters to attach to the job, key-value structure...
Schedules a job. :param job_id: unique identifier for this job :param schedule_in: number of seconds from now in which to schedule the job or timedelta object. :param **kwargs: parameters to attach to the job, key-value structure. >>> schedule_job('http://example.com/test', schedule_in=10, num_re...
def get_escape(self, c, i): """Get an escape.""" try: escaped = next(i) except StopIteration: escaped = '' return c + escaped if self.keep_escapes else escaped
Get an escape.
def is_default_port(self): """A check for default port. Return True if port is default for specified scheme, e.g. 'http://python.org' or 'http://python.org:80', False otherwise. """ if self.port is None: return False default = DEFAULT_PORTS.get(self....
A check for default port. Return True if port is default for specified scheme, e.g. 'http://python.org' or 'http://python.org:80', False otherwise.
def reverse_geocode(client, latlng, result_type=None, location_type=None, language=None): """ Reverse geocoding is the process of converting geographic coordinates into a human-readable address. :param latlng: The latitude/longitude value or place_id for which you wish to ob...
Reverse geocoding is the process of converting geographic coordinates into a human-readable address. :param latlng: The latitude/longitude value or place_id for which you wish to obtain the closest, human-readable address. :type latlng: string, dict, list, or tuple :param result_type: One or m...
def daemon_run(no_error, restart, record_path, keep_json, check_duplicate, use_polling, log_level): """ Run RASH index daemon. This daemon watches the directory ``~/.config/rash/data/record`` and translate the JSON files dumped by ``record`` command into sqlite3 DB at ``~/.config/ras...
Run RASH index daemon. This daemon watches the directory ``~/.config/rash/data/record`` and translate the JSON files dumped by ``record`` command into sqlite3 DB at ``~/.config/rash/data/db.sqlite``. ``rash init`` will start RASH automatically by default. But there are alternative ways to start da...
def bisect(f, a, b, args=(), xtol=_xtol, rtol=_rtol, maxiter=_iter, disp=True): """ Find root of a function within an interval adapted from Scipy's bisect. Basic bisection routine to find a zero of the function `f` between the arguments `a` and `b`. `f(a)` and `f(b)` cannot have the same sig...
Find root of a function within an interval adapted from Scipy's bisect. Basic bisection routine to find a zero of the function `f` between the arguments `a` and `b`. `f(a)` and `f(b)` cannot have the same signs. `f` must be jitted via numba. Parameters ---------- f : jitted and callable ...
def update(self, observable, handlers): """Toolbar ReaderObserver callback that is notified when readers are added or removed.""" addedreaders, removedreaders = handlers for reader in addedreaders: item = self.Append(str(reader)) self.SetClientData(item, reader) ...
Toolbar ReaderObserver callback that is notified when readers are added or removed.
def render(self, get_params, module, _if=None): """ render the block and return the output. """ enough = False output = [] valid = None if self.commands.show: valid = True if self.parent and self.commands.soft and _if is None: retu...
render the block and return the output.
def temp_shell_task(cls, inp, mpi_procs=1, workdir=None, manager=None): """ Build a Task with a temporary workdir. The task is executed via the shell with 1 MPI proc. Mainly used for invoking Abinit to get important parameters needed to prepare the real task. Args: mpi_procs...
Build a Task with a temporary workdir. The task is executed via the shell with 1 MPI proc. Mainly used for invoking Abinit to get important parameters needed to prepare the real task. Args: mpi_procs: Number of MPI processes to use.
def set_mass(self, masses, dataset_number=None, validated=None): """Sets the mass (masses) for the test (datasets). """ self._set_run_attribute("mass", masses, dataset_number=dataset_number, validated=validated)
Sets the mass (masses) for the test (datasets).
def kmodels(wordlen: int, k: int, input=None, output=None): """Return a circuit taking a wordlen bitvector where only k valuations return True. Uses encoding from [1]. Note that this is equivalent to (~x < k). - TODO: Add automated simplification so that the circuits are equiv. [1]: Ch...
Return a circuit taking a wordlen bitvector where only k valuations return True. Uses encoding from [1]. Note that this is equivalent to (~x < k). - TODO: Add automated simplification so that the circuits are equiv. [1]: Chakraborty, Supratik, et al. "From Weighted to Unweighted Model ...
def import_simple_cookie(cls, simple_cookie): """ Create cookie jar from SimpleCookie object :param simple_cookie: cookies to import :return: WHTTPCookieJar """ cookie_jar = WHTTPCookieJar() for cookie_name in simple_cookie.keys(): cookie_attrs = {} for attr_name in WHTTPCookie.cookie_attr_value_comp...
Create cookie jar from SimpleCookie object :param simple_cookie: cookies to import :return: WHTTPCookieJar
def compress_multiple_pdfs(source_directory, output_directory, ghostscript_binary): """Compress all PDF files in the current directory and place the output in the given output directory. This is a generator function that first yields the amount of files to be compressed, and then yields the output path of e...
Compress all PDF files in the current directory and place the output in the given output directory. This is a generator function that first yields the amount of files to be compressed, and then yields the output path of each file. Args: source_directory (str): Filepath to the source directory. ...
def phi_effect_mip(self, mechanism, purview): """Return the |small_phi| of the effect MIP. This is the distance between the unpartitioned effect repertoire and the MIP cause repertoire. """ mip = self.effect_mip(mechanism, purview) return mip.phi if mip else 0
Return the |small_phi| of the effect MIP. This is the distance between the unpartitioned effect repertoire and the MIP cause repertoire.
def func_args_as_dict(func, args, kwargs): """ Return given function's positional and key value arguments as an ordered dictionary. """ if six.PY2: _getargspec = inspect.getargspec else: _getargspec = inspect.getfullargspec arg_names = list( OrderedDict.fromkeys( ...
Return given function's positional and key value arguments as an ordered dictionary.
def is_reachable(host, port=23): """Check reachability for specified hostname/port. It tries to open TCP socket. It supports IPv6. :param host: hostname or ip address string :rtype: str :param port: tcp port number :rtype: number :return: True if host is reachable else false """ ...
Check reachability for specified hostname/port. It tries to open TCP socket. It supports IPv6. :param host: hostname or ip address string :rtype: str :param port: tcp port number :rtype: number :return: True if host is reachable else false
def serialize(self): ''' Return a JSON string of the serialized topology ''' return json.dumps(json_graph.node_link_data(self.__nxgraph), cls=Encoder)
Return a JSON string of the serialized topology
def SensorsGet(self, parameters = None, sensor_id = -1): """ Retrieve sensors from CommonSense, according to parameters, or by sensor id. If successful, result can be obtained by a call to getResponse(), and should be a json string. @param parameters (dicti...
Retrieve sensors from CommonSense, according to parameters, or by sensor id. If successful, result can be obtained by a call to getResponse(), and should be a json string. @param parameters (dictionary) (optional) - Dictionary containing the parameters for the api-call. ...
def hincrbyfloat(self, key, field, increment=1.0): """Increment the float value of a hash field by the given number.""" fut = self.execute(b'HINCRBYFLOAT', key, field, increment) return wait_convert(fut, float)
Increment the float value of a hash field by the given number.
def get_ad_url(self, ad_id, sandbox): """ get_ad_url: gets ad server thing """ if sandbox: return self.sandbox_ad_server + '/view/' + str(ad_id) else: return self.ad_server + '/view/' + str(ad_id)
get_ad_url: gets ad server thing
def read_nodes(rows, source_id=1): """ Return an iterator of rows ready to insert into table "nodes". * rows - iterator of lists (eg, output from read_archive or read_dmp) """ ncbi_keys = ['tax_id', 'parent_id', 'rank', 'embl_code', 'division_id'] extra_keys = ['source_id', 'is_valid'] is_...
Return an iterator of rows ready to insert into table "nodes". * rows - iterator of lists (eg, output from read_archive or read_dmp)
def blackbox(blackbox): """Validate a macro blackboxing.""" if tuple(sorted(blackbox.output_indices)) != blackbox.output_indices: raise ValueError('Output indices {} must be ordered'.format( blackbox.output_indices)) partition(blackbox.partition) for part in blackbox.partition: ...
Validate a macro blackboxing.
def setup(sphinx): """Setup Sphinx object.""" from flask import has_app_context from invenio_base.factory import create_app PACKAGES = ['invenio_base', 'invenio.modules.accounts', 'invenio.modules.records', 'invenio_knowledge'] if not has_app_context(): app = create_app(PACK...
Setup Sphinx object.
def get_charm_url(self): """Get charm URL for the bracket this rank is in Returns ------- :class:`str` the URL for the charm """ if self.rank_id <= 4: return self.RANK_CHARMS[0] if self.rank_id <= 8: return self.RANK_CHARMS[1] if self.rank_id...
Get charm URL for the bracket this rank is in Returns ------- :class:`str` the URL for the charm
def _logtrick_gen(bounds): """Generate warping functions and new bounds for the log trick.""" # Test which parameters we can apply the log trick too ispos = np.array([isinstance(b, bt.Positive) for b in bounds], dtype=bool) nispos = ~ispos # Functions that implement the log trick def logx(x): ...
Generate warping functions and new bounds for the log trick.
def _get_animation_frames(self, all_datasets, shape, fill_value=None, ignore_missing=False): """Create enhanced image frames to save to a file.""" for idx, ds in enumerate(all_datasets): if ds is None and ignore_missing: continue elif...
Create enhanced image frames to save to a file.
def program_page(self, address, bytes): """! @brief Flash one or more pages. @exception FlashProgramFailure """ assert self._active_operation == self.Operation.PROGRAM # prevent security settings from locking the device bytes = self.override_security_bit...
! @brief Flash one or more pages. @exception FlashProgramFailure
def plot_rh(self, rh, plot_range=None): """ Required input: RH: Relative humidity (%) Optional Input: plot_range: Data range for making figure (list of (min,max,step)) """ # PLOT RELATIVE HUMIDITY if not plot_range: plot_range = [0, 100...
Required input: RH: Relative humidity (%) Optional Input: plot_range: Data range for making figure (list of (min,max,step))
def gather_file_data(config): """ Gather policy information from files """ file_regex = re.compile(config['file_regex']) category_regex = re.compile(config['category_regex']) policies = {} for root, dirs, files in os.walk(config['c7n_policy_directory']): for file in files: i...
Gather policy information from files
def has_class(self, classname): """Test if an element has a specific classname @type classname: str @param classname: Classname to test for; cannot contain spaces @rtype: bool @return: True if classname exists; false otherwise """ def element_has_class(): ...
Test if an element has a specific classname @type classname: str @param classname: Classname to test for; cannot contain spaces @rtype: bool @return: True if classname exists; false otherwise
def _upload_s3(self, zip_file): ''' Uploads the lambda package to s3 ''' s3_client = self._aws_session.client('s3') transfer = boto3.s3.transfer.S3Transfer(s3_client) transfer.upload_file(zip_file, self._config.s3_bucket, self._config.s3_packa...
Uploads the lambda package to s3
def get_arguments(context): """Parse arguments for pype from context and assign default values. Args: context: pypyr.context.Context. context is mandatory. Returns: tuple (pipeline_name, #str use_parent_context, #bool pipe_arg, #str skip_parse, ...
Parse arguments for pype from context and assign default values. Args: context: pypyr.context.Context. context is mandatory. Returns: tuple (pipeline_name, #str use_parent_context, #bool pipe_arg, #str skip_parse, #bool raise_error #b...
def normalize_pattern(pattern): """Converts backslashes in path patterns to forward slashes. Doesn't normalize regular expressions - they may contain escapes. """ if not (pattern.startswith('RE:') or pattern.startswith('!RE:')): pattern = _slashes.sub('/', pattern) if len(pattern) > 1: ...
Converts backslashes in path patterns to forward slashes. Doesn't normalize regular expressions - they may contain escapes.
def update(self, docs, golds, drop=0.0, sgd=None, losses=None, component_cfg=None): """Update the models in the pipeline. docs (iterable): A batch of `Doc` objects. golds (iterable): A batch of `GoldParse` objects. drop (float): The droput rate. sgd (callable): An optimizer. ...
Update the models in the pipeline. docs (iterable): A batch of `Doc` objects. golds (iterable): A batch of `GoldParse` objects. drop (float): The droput rate. sgd (callable): An optimizer. RETURNS (dict): Results from the update. DOCS: https://spacy.io/api/language#upda...
def find(file_pattern, top_dir, max_depth=None, path_pattern=None): """Generator function to find files recursively. Usage:: for filename in find("*.properties", "/var/log/foobar"): print filename """ if max_depth: base_depth = os.path.dirname(top_dir).count(os.path.sep) ...
Generator function to find files recursively. Usage:: for filename in find("*.properties", "/var/log/foobar"): print filename
def _void_array_to_list(restuple, _func, _args): """ Convert the FFI result to Python data structures """ shape = (restuple.e.len, 1) array_size = np.prod(shape) mem_size = 8 * array_size array_str_e = string_at(restuple.e.data, mem_size) array_str_n = string_at(restuple.n.data, mem_size) ...
Convert the FFI result to Python data structures
def backprop(self, input_data, df_output, cache=None): """ Backpropagate through the hidden layer **Parameters:** input_data : ``GPUArray`` Inpute data to perform dropout on. df_output : ``GPUArray`` Gradients with respect to the output of this layer ...
Backpropagate through the hidden layer **Parameters:** input_data : ``GPUArray`` Inpute data to perform dropout on. df_output : ``GPUArray`` Gradients with respect to the output of this layer (received from the layer above). cache : list of ``GPUAr...
def convert_timezone(date_str, tz_from, tz_to="UTC", fmt=None): """ get timezone as tz_offset """ tz_offset = datetime_to_timezone( datetime.datetime.now(), tz=tz_from).strftime('%z') tz_offset = tz_offset[:3] + ':' + tz_offset[3:] date = parse_date(str(date_str) + tz_offset) if tz_from != ...
get timezone as tz_offset
def finish(self): """Clean up the JLigier controlhost connection""" log.debug("Disconnecting from JLigier.") self.client.socket.shutdown(socket.SHUT_RDWR) self.client._disconnect()
Clean up the JLigier controlhost connection
def format_field(self, value, format_spec): """When field missing, return original spec.""" if isinstance(value, MissingField): if format_spec is not None: value.format_spec = format_spec return str(value) return super(FormatterWrapper, self).format_field...
When field missing, return original spec.
def preview(file): """Render appropiate template with embed flag.""" file_info = validate_csv(file) return render_template( 'invenio_previewer/csv_bar.html', file=file, delimiter=file_info['delimiter'], encoding=file_info['encoding'], js_bundles=current_previewer.js_b...
Render appropiate template with embed flag.
def register(coordinator): """Registers this module as a worker with the given coordinator.""" if FLAGS.phantomjs_script: utils.verify_binary('phantomjs_binary', ['--version']) assert os.path.exists(FLAGS.phantomjs_script) else: utils.verify_binary('capture_binary', ['--version']) ...
Registers this module as a worker with the given coordinator.
def add_trunk_ports(self): """SDN Controller enable trunk ports :rtype: list[tuple[str, str]] """ ports = self.attributes.get("{}Enable Full Trunk Ports".format(self.namespace_prefix), None) return self._parse_ports(ports=ports)
SDN Controller enable trunk ports :rtype: list[tuple[str, str]]
def create_postgresql_psycopg2(username, password, host, port, database, **kwargs): # pragma: no cover """ create an engine connected to a postgresql database using psycopg2. """ return create_engine( _create_postgresql_psycopg2(username, password, host, port, database), **kwargs )
create an engine connected to a postgresql database using psycopg2.
def publish_results(self, view, submitters, commenters): """Submit the results to the subreddit. Has no return value (None).""" def timef(timestamp, date_only=False): """Return a suitable string representaation of the timestamp.""" dtime = datetime.fromtimestamp(timestamp) ...
Submit the results to the subreddit. Has no return value (None).
def increment(key, delta=1, host=DEFAULT_HOST, port=DEFAULT_PORT): ''' Increment the value of a key CLI Example: .. code-block:: bash salt '*' memcached.increment <key> salt '*' memcached.increment <key> 2 ''' conn = _connect(host, port) _check_stats(conn) cur = get(ke...
Increment the value of a key CLI Example: .. code-block:: bash salt '*' memcached.increment <key> salt '*' memcached.increment <key> 2
def model_field_attr(model, model_field, attr): """ Returns the specified attribute for the specified field on the model class. """ fields = dict([(field.name, field) for field in model._meta.fields]) return getattr(fields[model_field], attr)
Returns the specified attribute for the specified field on the model class.
def get_lock (name, debug=False): """Get a new lock. @param debug: if True, acquire() and release() will have debug messages @ptype debug: boolean, default is False @return: a lock object @rtype: threading.Lock or DebugLock """ lock = threading.Lock() # for thread debugging, use the Debu...
Get a new lock. @param debug: if True, acquire() and release() will have debug messages @ptype debug: boolean, default is False @return: a lock object @rtype: threading.Lock or DebugLock
def serialize(self, include_class=True, save_dynamic=False, **kwargs): """Serialize nested HasUID instances to a flat dictionary **Parameters**: * **include_class** - If True (the default), the name of the class will also be saved to the serialized dictionary under key :cod...
Serialize nested HasUID instances to a flat dictionary **Parameters**: * **include_class** - If True (the default), the name of the class will also be saved to the serialized dictionary under key :code:`'__class__'` * **save_dynamic** - If True, dynamic properties are writt...
def agg_autocorrelation(x, param): r""" Calculates the value of an aggregation function :math:`f_{agg}` (e.g. the variance or the mean) over the autocorrelation :math:`R(l)` for different lags. The autocorrelation :math:`R(l)` for lag :math:`l` is defined as .. math:: R(l) = \frac{1}{(n-l)\sig...
r""" Calculates the value of an aggregation function :math:`f_{agg}` (e.g. the variance or the mean) over the autocorrelation :math:`R(l)` for different lags. The autocorrelation :math:`R(l)` for lag :math:`l` is defined as .. math:: R(l) = \frac{1}{(n-l)\sigma^{2}} \sum_{t=1}^{n-l}(X_{t}-\mu )(X_...
def list_dcm_datain(datain): ''' List all DICOM file paths in the datain dictionary of input data. ''' if not isinstance(datain, dict): raise ValueError('The input is not a dictionary!') dcmlst = [] # list of mu-map DICOM files if 'mumapDCM' in datain: dcmump = os.listdir(datai...
List all DICOM file paths in the datain dictionary of input data.
def data(self): """ Returns raw data representation of the document or document segment. Mappings are rendered as ordered dicts, sequences as lists and scalar values as whatever the validator returns (int, string, etc.). If no validators are used, scalar values are always retur...
Returns raw data representation of the document or document segment. Mappings are rendered as ordered dicts, sequences as lists and scalar values as whatever the validator returns (int, string, etc.). If no validators are used, scalar values are always returned as strings.
def raw_content(self, output=None, str_output=None): """Searches for `output` regex match within content of page, regardless of mimetype.""" return self._search_page(output, str_output, self.response.data, lambda regex, content: regex.search(content.decode()))
Searches for `output` regex match within content of page, regardless of mimetype.
def sec_project_community(self, project=None): """ Generate the data for the Communication section in a Project report :return: """ def create_csv(metric1, csv_labels, file_label): esfilters = None csv_labels = csv_labels.replace("_", "") # LaTeX not sup...
Generate the data for the Communication section in a Project report :return:
def sample(self, x0, nsteps, nskip=1): r"""generate nsteps sample points""" x = np.zeros(shape=(nsteps + 1,)) x[0] = x0 for t in range(nsteps): q = x[t] for s in range(nskip): q = self.step(q) x[t + 1] = q return x
r"""generate nsteps sample points
def extractfields(data, commdct, objkey, fieldlists): """get all the objects of objkey. fieldlists will have a fieldlist for each of those objects. return the contents of those fields""" # TODO : this assumes that the field list identical for # each instance of the object. This is not true. # So...
get all the objects of objkey. fieldlists will have a fieldlist for each of those objects. return the contents of those fields
def external2internal(xe, bounds): """ Convert a series of external variables to internal variables""" xi = np.empty_like(xe) for i, (v, bound) in enumerate(zip(xe, bounds)): a = bound[0] # minimum b = bound[1] # maximum if a == None and b == None: # No constraints ...
Convert a series of external variables to internal variables
def parse_content_encoding(self, response_headers, response_data): """ Parses a response that contains Content-Encoding to retrieve response_data """ if response_headers['content-encoding'] == 'gzip': buf = StringIO.StringIO(response_data) zipbuf = gzip.Gz...
Parses a response that contains Content-Encoding to retrieve response_data
def _pull_and_tag_image(self, image, build_json, nonce): """Docker pull the image and tag it uniquely for use by this build""" image = image.copy() first_library_exc = None for _ in range(20): # retry until pull and tag is successful or definitively fails. # shoul...
Docker pull the image and tag it uniquely for use by this build
def find_command(self, argv): """Given an argument list, find a command and return the processor and any remaining arguments. """ search_args = argv[:] name = '' while search_args: if search_args[0].startswith('-'): name = '%s %s' % (name, sear...
Given an argument list, find a command and return the processor and any remaining arguments.
def get_tag(self, tagname, tagidx): """ :returns: the tag associated to the given tagname and tag index """ return '%s=%s' % (tagname, decode(getattr(self, tagname)[tagidx]))
:returns: the tag associated to the given tagname and tag index
def QA_fetch_future_min_adv( code, start, end=None, frequence='1min', if_drop_index=True, collections=DATABASE.future_min): ''' '获取股票分钟线' :param code: :param start: :param end: :param frequence: :param if_drop_index: :param collections: :return...
'获取股票分钟线' :param code: :param start: :param end: :param frequence: :param if_drop_index: :param collections: :return:
def _speak_as_literal_punctuation_inherit(self, element): """ Speak the punctuation for elements and descendants. :param element: The element. :type element: hatemile.util.html.htmldomelement.HTMLDOMElement """ self._reverse_speak_as(element, 'literal-punctuation') ...
Speak the punctuation for elements and descendants. :param element: The element. :type element: hatemile.util.html.htmldomelement.HTMLDOMElement
def list_clusters( self, project_id, region, filter_=None, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Lists all regions/{region}/clusters in a pr...
Lists all regions/{region}/clusters in a project. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.ClusterControllerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >...
def _predicted(self): """The predicted values of y ('yhat').""" return np.squeeze( np.matmul(self.xwins, np.expand_dims(self.solution, axis=-1)) )
The predicted values of y ('yhat').
def unauthorized(self): ''' This is called when the user is required to log in. If you register a callback with :meth:`LoginManager.unauthorized_handler`, then it will be called. Otherwise, it will take the following actions: - Flash :attr:`LoginManager.login_message` to the...
This is called when the user is required to log in. If you register a callback with :meth:`LoginManager.unauthorized_handler`, then it will be called. Otherwise, it will take the following actions: - Flash :attr:`LoginManager.login_message` to the user. - If the app is using bl...
def unregister(self, *model_list): """ Unregisters the given model(s). If a model isn't already registered, this will raise NotRegistered. """ for model in model_list: if model not in self.registry: raise NotRegistered('The model %s is not registered'...
Unregisters the given model(s). If a model isn't already registered, this will raise NotRegistered.
def set_pos(self, pos, check=False): '''Set a chess''' self.validate_pos(pos) x, y = pos user = self.get_player() self.history[self._game_round] = copy.deepcopy(self.pos) self.pos[x][y] = user pos_str = self._cal_key(pos) self._pos_dict[pos_str] = user ...
Set a chess
def plugin_method(*plugin_names): """Plugin Method decorator. Signs a web handler function with the plugins to be applied as attributes. Args: plugin_names (list): A list of plugin callable names Returns: A wrapped handler callable. Examples: >>> @plugin_method('json', 'bi...
Plugin Method decorator. Signs a web handler function with the plugins to be applied as attributes. Args: plugin_names (list): A list of plugin callable names Returns: A wrapped handler callable. Examples: >>> @plugin_method('json', 'bill') ... def method(): .....
def cwms_process_text():
    """Process text with CWMS and return INDRA Statements."""
    # CORS preflight requests carry no payload to process.
    if request.method == 'OPTIONS':
        return {}
    raw_body = request.body.read().decode('utf-8')
    payload = json.loads(raw_body)
    processor = cwms.process_text(payload.get('text'))
    return _stmts_from_proc(processor)
Process text with CWMS and return INDRA Statements.
def update_data(ctx, models=True, pickles=False, f=False): """ Updates local django db projects and pickle files using salic database from MinC Pickles are saved in /data/raw/ from sql queries in /data/scripts/ Models are created from /data/scripts/models/ """ if pickles: save_sql_to...
Updates local django db projects and pickle files using salic database from MinC Pickles are saved in /data/raw/ from sql queries in /data/scripts/ Models are created from /data/scripts/models/
def on_message(self, message): """ Process received message """ if message.address != self._address: return if isinstance(message, velbus.ChannelNamePart1Message) or isinstance(message, velbus.ChannelNamePart1Message2): self._process_channel_name_message(1...
Process received message
def move_to(self, folder): """Moves the email to the folder specified by the folder parameter. Args: folder: A string containing the folder ID the message should be moved to, or a Folder instance """ if isinstance(folder, Folder): self.move_to(folder.id) ...
Moves the email to the folder specified by the folder parameter. Args: folder: A string containing the folder ID the message should be moved to, or a Folder instance
def render_to_template(self): """ Render the current menu instance to a template and return a string """ context_data = self.get_context_data() template = self.get_template() context_data['current_template'] = template.template.name return template.render(context...
Render the current menu instance to a template and return a string
def _set_scores(self): """ Set anomaly scores using a weighted sum. """ anom_scores_ema = self.exp_avg_detector.run() anom_scores_deri = self.derivative_detector.run() anom_scores = {} for timestamp in anom_scores_ema.timestamps: # Compute a weighted a...
Set anomaly scores using a weighted sum.
def parse_result(result): """parse_result(json result) -- print the web query according to the type of result from duckduckgo. """ if(result['Type'] == 'D'): print """There is more than one answer for this. Try making your query\ more specific. For example, if you want to learn about apple the company\ and ...
parse_result(json result) -- print the web query according to the type of result from duckduckgo.
def blockreplace(path, marker_start='#-- start managed zone --', marker_end='#-- end managed zone --', content='', append_if_not_found=False, prepend_if_not_found=False, backup='.bak', dry_run=False, show_changes=True, append_newline=False, ...
.. versionadded:: 2014.1.0 Replace content of a text block in a file, delimited by line markers A block of content delimited by comments can help you manage several lines entries without worrying about old entries removal. .. note:: This function will store two copies of the file in-memory (...
def intent(self, intent_name): """Decorator routes an Rogo IntentRequest. Functions decorated as an intent are registered as the view function for the Intent's URL, and provide the backend responses to give your Skill its functionality. @ask.intent('WeatherIntent') def weather(ci...
Decorator that routes a Rogo IntentRequest. Functions decorated as an intent are registered as the view function for the Intent's URL, and provide the backend responses to give your Skill its functionality. @ask.intent('WeatherIntent') def weather(city): return statement('I predi...
def pkgconfig(*packages, **kw): """Based on http://code.activestate.com/recipes/502261-python-distutils-pkg-config/#c2""" flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries'} output = sp.Popen(["pkg-config", "--libs", "--cflags"] + list(packages), stdout=sp.PIPE)...
Based on http://code.activestate.com/recipes/502261-python-distutils-pkg-config/#c2
def one_of(s):
    """Parse a single character that is contained in ``s``."""
    @Parser
    def one_of_parser(text, index=0):
        # Guard against reading past the end of the input first.
        in_range = index < len(text)
        if in_range and text[index] in s:
            return Value.success(index + 1, text[index])
        return Value.failure(index, 'one of {}'.format(s))
    return one_of_parser
Parse a char from the specified string.
def draw_sparse_matrix( array_filename, output_image, vmax=DEFAULT_SATURATION_THRESHOLD, max_size_matrix=DEFAULT_MAX_SIZE_MATRIX, ): """Draw a quick preview of a sparse matrix with automated binning and normalization. """ matrix = np.loadtxt(array_filename, dtype=np.int32, skiprows=1) ...
Draw a quick preview of a sparse matrix with automated binning and normalization.
def query(song_name): """CLI: $ iquery -l song_name """ r = requests_get(SONG_SEARCH_URL.format(song_name)) try: # Get the first result. song_url = re.search(r'(http://www.xiami.com/song/\d+)', r.text).group(0) except AttributeError: exit_after_echo(SONG_NOT_FOUND) ...
CLI: $ iquery -l song_name
def timex_starts(self):
    """The list of start positions of ``timexes`` layer elements."""
    # Lazily build the timexes layer before reading positions from it.
    tagged = self.is_tagged(TIMEXES)
    if not tagged:
        self.tag_timexes()
    return self.starts(TIMEXES)
The list of start positions of ``timexes`` layer elements.
def render(self, data, accepted_media_type=None, renderer_context=None): """Convert native data to JSON API Tries each of the methods in `wrappers`, using the first successful one, or raises `WrapperNotApplicable`. """ wrapper = None success = False for wrapper...
Convert native data to JSON API Tries each of the methods in `wrappers`, using the first successful one, or raises `WrapperNotApplicable`.
def _multi_take(self, tup): """ Create the indexers for the passed tuple of keys, and execute the take operation. This allows the take operation to be executed all at once - rather than once for each dimension - improving efficiency. Parameters ---------- tup : t...
Create the indexers for the passed tuple of keys, and execute the take operation. This allows the take operation to be executed all at once - rather than once for each dimension - improving efficiency. Parameters ---------- tup : tuple Tuple of indexers, one per axis...