Returns the name of the main module, or None when run from an interactive shell.
def main_module_name() -> str:
"""Returns the name of the main module, or None when run from an interactive shell."""
if not hasattr(main_module, '__file__'):
# running from interactive shell
return None
main_filename = os.path.basename(main_module.__file__)
module_name, ext = os.pa... |
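A minimal runnable sketch of the same idea, reconstructing the truncated body under the assumption that main_module is sys.modules['__main__']:

import os
import sys

def main_module_name():
    """Return the main module's name, or None in an interactive shell."""
    main_module = sys.modules['__main__']
    if not hasattr(main_module, '__file__'):
        # Interactive shells have no backing file to take a name from.
        return None
    main_filename = os.path.basename(main_module.__file__)
    module_name, _ext = os.path.splitext(main_filename)
    return module_name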
Write the file-like src_file object to the file named by destination_path
:param src_file: file-like data to be written
:param destination_path: string path of the destination file
def write_stream(src_file, destination_path):
"""
Write the file-like src_file object to the file named by destination_path
:param src_file: file-like... |
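A hedged sketch of such a helper using only the standard library; shutil.copyfileobj streams in chunks, so large files are never read into memory at once:

import shutil

def write_stream(src_file, destination_path):
    """Write the file-like src_file object to the file named by destination_path."""
    with open(destination_path, 'wb') as dest:
        shutil.copyfileobj(src_file, dest)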
Build necessary directories based on a list of file paths
def build_dirs(files):
'''
Build necessary directories based on a list of file paths
'''
for i in files:
if isinstance(i, list):
build_dirs(i)
elif len(i['path']) > 1:
... |
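A self-contained sketch of the directory-building step, assuming each non-list entry is a dict whose 'path' key holds a list of path components (the BitTorrent metainfo convention); everything but the last component is a directory:

import os

def build_dirs(files):
    for item in files:
        if isinstance(item, list):
            build_dirs(item)
        elif len(item['path']) > 1:
            # All components except the final file name are directories.
            os.makedirs(os.path.join(*item['path'][:-1]), exist_ok=True)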
Ask the user which files in file_list he or she is interested in.
Return indices for the files inside file_list
def get_want_file_pos(file_list):
'''
Ask the user which files in file_list he or she is interested in.
Return indices for the files inside file_list
'''
want_file_pos = []
print ... |
Return the starting position (in bytes) of a list of files by
iteratively summing their lengths
def get_file_starts(file_list):
'''
Return the starting position (in bytes) of a list of files by
iteratively summing their lengths
'''
starts = []
total = 0
for i in file_list:
start... |
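The running-sum idea in full, assuming each file dict carries a 'length' key as in the metainfo format:

def get_file_starts(file_list):
    starts = []
    total = 0
    for f in file_list:
        starts.append(total)
        total += f['length']
    return starts

# Files of length 4, 2 and 5 start at byte offsets 0, 4 and 6.
assert get_file_starts([{'length': 4}, {'length': 2}, {'length': 5}]) == [0, 4, 6]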
Retrieve the highest-indexed file that starts at or before byte_index.
def get_rightmost_index(byte_index=0, file_starts=[0]):
'''
Retrieve the highest-indexed file that starts at or before byte_index.
'''
i = 1
while i <= len(file_starts):
start = file_starts[-i]
if start <= byte_... |
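Because file_starts is sorted, the same lookup can be done in O(log n) with bisect; a sketch equivalent to the backwards scan above:

import bisect

def get_rightmost_index(byte_index=0, file_starts=(0,)):
    # bisect_right gives the insertion point; the file containing
    # byte_index is the entry immediately to its left.
    return bisect.bisect_right(file_starts, byte_index) - 1

assert get_rightmost_index(5, (0, 4, 6)) == 1  # byte 5 lies in the file starting at 4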
Returns the leftmost file in the user's list of wanted files
(want_file_pos). If the first file it finds isn't in the list,
it will keep searching until the length of 'block' is exceeded.
def get_next_want_file(self, byte_index, block):
'''
Returns the leftmost file in the user's list o... |
Sends the state of the BitTorrent client (BTC) at the time the visualizer
connects, initializing it.
def vis_init(self):
'''
Sends the state of the BitTorrent client (BTC) at the time the visualizer
connects, initializing it.
'''
init_dict = {}
init_dict['kind'] = 'init'
assert len(self.want_fi... |
Send to the visualizer (if there is one) or enqueue for later
def broadcast(self, data_dict):
'''
Send to the visualizer (if there is one) or enqueue for later
'''
self.queued_messages.append(data_dict)
if self.vis_socket:
self.send_all_updates() |
Turns a dictionary into a bencoded str with alphabetized keys
e.g., {'spam': 'eggs', 'cow': 'moo'} --> d3:cow3:moo4:spam4:eggse
def bencode(canonical):
'''
Turns a dictionary into a bencoded str with alphabetized keys
e.g., {'spam': 'eggs', 'cow': 'moo'} --> d3:cow3:moo4:spam4:eggse
'''... |
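A compact sketch of the encoder for the four bencode types; real clients work on bytes rather than str, which is glossed over here:

def bencode(value):
    if isinstance(value, str):
        return '%d:%s' % (len(value), value)
    if isinstance(value, int):
        return 'i%de' % value
    if isinstance(value, list):
        return 'l%se' % ''.join(bencode(v) for v in value)
    if isinstance(value, dict):
        # Keys must be emitted in sorted order for a canonical encoding.
        return 'd%se' % ''.join(bencode(k) + bencode(v)
                                for k, v in sorted(value.items()))
    raise TypeError('cannot bencode %r' % type(value))

assert bencode({'spam': 'eggs', 'cow': 'moo'}) == 'd3:cow3:moo4:spam4:eggse'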
Bdecodes a bencoded string
e.g., d3:cow3:moo4:spam4:eggse -> {'cow': 'moo', 'spam': 'eggs'}
def bdecode(bstring):
'''
Bdecodes a bencoded string
e.g., d3:cow3:moo4:spam4:eggse -> {'cow': 'moo', 'spam': 'eggs'}
'''
def get_val():
i = next(reader)
if i.isdigit():
... |
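The matching decoder, written as a recursive-descent sketch over string indices rather than the iterator the original uses:

def bdecode(bstring):
    def parse(i):
        c = bstring[i]
        if c.isdigit():                      # string: <len>:<data>
            colon = bstring.index(':', i)
            length = int(bstring[i:colon])
            start = colon + 1
            return bstring[start:start + length], start + length
        if c == 'i':                         # integer: i<number>e
            end = bstring.index('e', i)
            return int(bstring[i + 1:end]), end + 1
        if c in 'ld':                        # list or dict: l...e / d...e
            items, i = [], i + 1
            while bstring[i] != 'e':
                item, i = parse(i)
                items.append(item)
            if c == 'l':
                return items, i + 1
            return dict(zip(items[::2], items[1::2])), i + 1
        raise ValueError('bad bencoding at index %d' % i)
    value, _ = parse(0)
    return value

assert bdecode('d3:cow3:moo4:spam4:eggse') == {'cow': 'moo', 'spam': 'eggs'}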
Builds the payload that will be sent in tracker_request
def build_payload(self):
'''
Builds the payload that will be sent in tracker_request
'''
payload = {}
hashed_info = hashlib.sha1(tparser.bencode(self.torrent_dict['info']))
self.hash_string = hashed_info.digest()
... |
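The info hash that identifies the torrent to the tracker is the SHA-1 digest of the bencoded info dict; a sketch reusing the str-based bencode example above (so the result is encoded before hashing):

import hashlib

def info_hash(torrent_dict):
    return hashlib.sha1(bencode(torrent_dict['info']).encode()).digest()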
Sends the initial request to the tracker, compiling list of all peers
announcing to the tracker
def tracker_request(self):
'''
Sends the initial request to the tracker, compiling list of all peers
announcing to the tracker
'''
assert self.torrent_dict['info']
pa... |
Generates the list of peer IPs from the tracker response. Note: not all of
these IPs might be good, which is why we only init peer objects for
the subset that respond to a handshake
def get_peer_ips(self):
'''
Generates the list of peer IPs from the tracker response. Note: not all of
these IPs mi... |
pstrlen = length of pstr as one byte
pstr = BitTorrent protocol
reserved = chr(0)*8
info_hash = 20-byte hash above (aka self.hash_string)
peer_id = 20-byte string
def handshake_peers(self):
'''
pstrlen = length of pstr as one byte
pstr = BitTorrent protocol
... |
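The handshake layout described in the docstring packs into exactly 68 bytes; a sketch using struct (the peer_id shown is a made-up example):

import struct

def build_handshake(info_hash, peer_id):
    pstr = b'BitTorrent protocol'
    # <pstrlen><pstr><8 reserved zero bytes><20-byte info_hash><20-byte peer_id>
    return struct.pack('!B19s8x20s20s', len(pstr), pstr, info_hash, peer_id)

msg = build_handshake(b'\x00' * 20, b'-XX0001-123456789012')
assert len(msg) == 68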
Creates a new peer object for a valid socket and adds it to the reactor's
listen list
def initpeer(self, sock):
'''
Creates a new peer object for a valid socket and adds it to the reactor's
listen list
'''
location_json = requests.request("GET", "http://freegeoip.net/json/"
... |
Chain of events:
- process_input
- check save_state and read length, id, and message accordingly
- if we have a piece (really a block), we piece.save it out
inside call to ppiece
- If we've completed a piece we:
- Tell... |
Process a piece that we've received from a peer, writing it out to
one or more files
def ppiece(self, content):
'''
Process a piece that we've received from a peer, writing it out to
one or more files
'''
piece_index, byte_begin = struct.unpack('!ii', content[0:8])
... |
Returns the connection status of the data store.
Returns:
bool: ``True`` if the data store is connected to the MongoDB server.
def is_connected(self):
""" Returns the connection status of the data store.
Returns:
bool: ``True`` if the data store is connected to the Mon... |
Establishes a connection to the MongoDB server.
Use the MongoProxy library in order to automatically handle AutoReconnect
exceptions in a graceful and reliable way.
def connect(self):
""" Establishes a connection to the MongoDB server.
Use the MongoProxy library in order to automatica... |
Checks whether a document with the specified workflow id already exists.
Args:
workflow_id (str): The workflow id that should be checked.
Raises:
DataStoreNotConnected: If the data store is not connected to the server.
Returns:
bool: ``True`` if a document ... |
Adds a new document to the data store and returns its id.
Args:
payload (dict): Dictionary of initial data that should be stored
in the new document in the meta section.
Raises:
DataStoreNotConnected: If the data store is not connected to the server.
Re... |
Removes a document specified by its id from the data store.
All associated GridFS documents are deleted as well.
Args:
workflow_id (str): The id of the document that represents a workflow run.
Raises:
DataStoreNotConnected: If the data store is not connected to the ser... |
Returns the document for the given workflow id.
Args:
workflow_id (str): The id of the document that represents a workflow run.
Raises:
DataStoreNotConnected: If the data store is not connected to the server.
Returns:
DataStoreDocument: The document for the... |
Return the field specified by its key from the specified section.
This method accesses the specified section of the workflow document and returns the
value for the given key.
Args:
key (str): The key pointing to the value that should be retrieved. It supports
MongoDB'... |
Store a value under the specified key in the given section of the document.
This method stores a value into the specified section of the workflow data store
document. Any existing value is overridden. Before storing a value, any linked
GridFS document under the specified key is deleted.
... |
Appends a value to a list in the specified section of the document.
Args:
key (str): The key pointing to the value that should be stored/updated.
It supports MongoDB's dot notation for nested fields.
value: The value that should be appended to a list in the data store.
... |
Extends a list in the data store with the elements of values.
Args:
key (str): The key pointing to the value that should be stored/updated.
It supports MongoDB's dot notation for nested fields.
values (list): A list of the values that should be used to extend the list
... |
Returns the MongoDB data from a key using dot notation.
Args:
key (str): The key to the field in the workflow document. Supports MongoDB's
dot notation for embedded fields.
default (object): The default value that is returned if the key
does not exist.
... |
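Dot-notation lookup reduces to walking nested dicts one key at a time; an illustrative sketch (not the library's actual implementation):

from functools import reduce

def get_by_dot_notation(document, key, default=None):
    try:
        return reduce(lambda d, k: d[k], key.split('.'), document)
    except (KeyError, TypeError):
        return default

doc = {'meta': {'name': 'run-42'}}
assert get_by_dot_notation(doc, 'meta.name') == 'run-42'
assert get_by_dot_notation(doc, 'meta.missing', default=0) == 0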
Encodes the value such that it can be stored into MongoDB.
Any primitive types are stored directly into MongoDB, while non-primitive types
are pickled and stored as GridFS objects. The id pointing to a GridFS object
replaces the original value.
Args:
value (object): The obj... |
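The split between primitive and non-primitive types might look like this sketch, using pymongo's gridfs module (the names here are illustrative, not the library's actual code):

import pickle
import gridfs

def encode_value(value, db):
    # Primitives go straight into the document; everything else is pickled
    # into GridFS and replaced by the ObjectId pointing at the blob.
    if isinstance(value, (int, float, str, bool, type(None))):
        return value
    fs = gridfs.GridFS(db)
    return fs.put(pickle.dumps(value))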
Decodes the value by turning any binary data back into Python objects.
The method searches for ObjectId values, loads the associated binary data from
GridFS and returns the decoded Python object.
Args:
value (object): The value that should be decoded.
Raises:
D... |
Delete all GridFS data that is linked by fields in the specified data.
Args:
data: The data that is parsed for MongoDB ObjectIds. The linked GridFS object
for any ObjectId is deleted.
def _delete_gridfs_data(self, data):
""" Delete all GridFS data that is linked by fields i... |
Strips off country prefixes (HC2/DH1TW) and activity suffixes (DH1TW/P).
Args:
callsign (str): Amateur Radio callsign
Returns:
str: callsign without country/activity pre/suffixes
Raises:
ValueError: No callsign found in string
Example:
... |
Truncate the callsign until it corresponds to a prefix in the database
def _iterate_prefix(self, callsign, timestamp=timestamp_now):
"""Truncate the callsign until it corresponds to a prefix in the database"""
prefix = callsign
if re.search('(VK|AX|VI)9[A-Z]{3}', callsign): #special rule for VK9 calls
... |
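The truncation loop itself is simple; a sketch that ignores the VK9 special case and the timestamp-dependent lookup, with a made-up prefix set:

def iterate_prefix(callsign, prefixes):
    prefix = callsign
    while prefix:
        if prefix in prefixes:
            return prefix
        prefix = prefix[:-1]   # chop one character off the right and retry
    raise KeyError('no prefix found for %s' % callsign)

assert iterate_prefix('DH1TW', {'DH', 'DL'}) == 'DH'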
Try to identify the callsign by analyzing it in the following order:
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Raises:
KeyError: Callsign could not be identified
def _dismantle_callsign... |
Lookup a callsign and return all data available from the underlying database
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
dict: Dictionary containing the callsign specific data
Raise... |
Checks if a callsign is valid
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
bool: True / False
Example:
The following checks if "DH1TW" is a valid callsign
>>... |
Returns Latitude and Longitude for a callsign
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
dict: Containing Latitude and Longitude
Raises:
KeyError: No data found for cal... |
Returns CQ Zone of a callsign
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
int: containing the callsign's CQ Zone
Raises:
KeyError: no CQ Zone found for callsign
def get... |
Returns ITU Zone of a callsign
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
int: containing the callsign's ITU Zone
Raises:
KeyError: No ITU Zone found for callsign
... |
Returns the country name where the callsign is located
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
str: name of the Country
Raises:
KeyError: No Country found for callsi... |
Returns ADIF id of a callsign's country
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
int: containing the country ADIF id
Raises:
KeyError: No Country found for callsign
... |
Returns the continent identifier of a callsign
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
Returns:
str: continent identifier
Raises:
KeyError: No Continent found for callsign
... |
Returns the indices for all occurrences of 'element' in 'lst'.
Args:
lst (list): List to search.
element: Element to find.
Returns:
list: List of indices
def find_indices(lst, element):
""" Returns the indices for all occurrences of 'element' in 'lst'.
Args:
... |
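The whole function is a one-line comprehension over enumerate:

def find_indices(lst, element):
    return [i for i, x in enumerate(lst) if x == element]

assert find_indices(['a', 'b', 'a'], 'a') == [0, 2]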
Create a workflow object from a workflow script.
Args:
name (str): The name of the workflow script.
queue (str): Name of the queue the workflow should be scheduled to.
clear_data_store (bool): Remove any documents created during the workflow
... |
Import the workflow script and load all known objects.
The workflow script is treated like a module and imported
into the Python namespace. After the import, the method looks
for instances of known classes and stores a reference for further
use in the workflow object.
Args:
... |
Run all autostart dags in the workflow.
Only the dags that are flagged as autostart are started.
Args:
config (Config): Reference to the configuration object from which the
settings for the workflow are retrieved.
data_store (DataStore): A DataStore... |
Add a new dag to the queue.
If the stop workflow flag is set, no new dag can be queued.
Args:
name (str): The name of the dag that should be queued.
data (MultiTaskData): The data that should be passed on to the new dag.
Raises:
DagNameUnknown: If the speci... |
Handle an incoming request by forwarding it to the appropriate method.
Args:
request (Request): Reference to a request object containing the
incoming request.
Raises:
RequestActionUnknown: If the action specified in the request is not known.
... |
The handler for the start_dag request.
The start_dag request creates a new dag and adds it to the queue.
Args:
request (Request): Reference to a request object containing the
incoming request. The payload has to contain the
foll... |
The handler for the stop_workflow request.
The stop_workflow request adds all running dags to the list of dags
that should be stopped and prevents new dags from being started. The dags will
then stop queueing new tasks, which will terminate the dags and in turn the
workflow.
Ar... |
The handler for the join_dags request.
If dag names are given in the payload, only return a valid Response if none of
the dags specified by the names are running anymore. If no dag names are given,
wait for all dags except one, which by design is the one that issued the request,
to be fi... |
The handler for the stop_dag request.
The stop_dag request adds a dag to the list of dags that should be stopped.
The dag will then stop queueing new tasks and will eventually stop running.
Args:
request (Request): Reference to a request object containing the
... |
The handler for the dag_stopped request.
The dag_stopped request checks whether a dag is flagged to be terminated.
Args:
request (Request): Reference to a request object containing the
incoming request. The payload has to contain the
... |
This function is called when the worker receives a request to terminate.
Upon the termination of the worker, the workflows for all running jobs are
stopped gracefully.
Args:
consumer (Consumer): Reference to the consumer object that handles messages
... |
Schedule the execution of a dag by sending a signal to the workflow.
Args:
dag (Dag, str): The dag object or the name of the dag that should be started.
data (MultiTaskData): The data that should be passed on to the new dag.
Returns:
str: The name of the successfull... |
Wait for the specified dags to terminate.
This function blocks until the specified dags terminate. If no dags are specified,
wait for all dags of the workflow, except the dag of the task calling this signal,
to terminate.
Args:
names (list): The names of the dags that have t... |
Send a stop signal to the specified dag or the dag that hosts this task.
Args:
name (str): The name of the dag that should be stopped. If no name is given the
dag that hosts this task is stopped.
Upon receiving the stop signal, the dag will not queue any new tasks and w... |
Check whether the task received a stop signal from the workflow.
Tasks can use the stop flag to gracefully terminate their work. This is
particularly important for long running tasks and tasks that employ an
infinite loop, such as trigger tasks.
Returns:
bool: True if the t... |
Generator function that returns celery events.
This function turns the callback based celery event handling into a generator.
Args:
app: Reference to a celery application object.
filter_by_prefix (str): If not None, only allow events that have a type that
start... |
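One way to turn celery's callback-based event capture into a generator is to run the receiver in a background thread and hand events over a queue; a hedged sketch assuming a configured Celery app:

import queue
import threading

def event_stream(app, filter_by_prefix=None):
    events = queue.Queue()

    def on_event(event):
        if filter_by_prefix is None or event['type'].startswith(filter_by_prefix):
            events.put(event)

    def capture():
        with app.connection() as connection:
            receiver = app.events.Receiver(connection, handlers={'*': on_event})
            receiver.capture(limit=None, timeout=None, wakeup=True)

    threading.Thread(target=capture, daemon=True).start()
    while True:
        yield events.get()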
Factory function that turns a celery event into an event object.
Args:
event (dict): A dictionary that represents a celery event.
Returns:
object: An event object representing the received event.
Raises:
JobEventTypeUnsupported: If an unsupported celery job event was received.
... |
Decorator that checks whether a configuration file was set.
def config_required(f):
""" Decorator that checks whether a configuration file was set. """
def new_func(obj, *args, **kwargs):
if 'config' not in obj:
click.echo(_style(obj.get('show_color', False),
'... |
Ingest the configuration object into the click context.
def ingest_config_obj(ctx, *, silent=True):
""" Ingest the configuration object into the click context. """
try:
ctx.obj['config'] = Config.from_file(ctx.obj['config_path'])
except ConfigLoadError as err:
click.echo(_style(ctx.obj['sho... |
Command line client for lightflow. A lightweight, high-performance pipeline
system for synchrotrons.
Lightflow is being developed at the Australian Synchrotron.
def cli(ctx, config, no_color):
""" Command line client for lightflow. A lightweight, high-performance pipeline
system for synchrotrons.
... |
Create a default configuration file.
\b
DEST: Path or file name for the configuration file.
def config_default(dest):
""" Create a default configuration file.
\b
DEST: Path or file name for the configuration file.
"""
conf_path = Path(dest).resolve()
if conf_path.is_dir():
con... |
List the current configuration.
def config_list(ctx):
""" List the current configuration. """
ingest_config_obj(ctx, silent=False)
click.echo(json.dumps(ctx.obj['config'].to_dict(), indent=4)) |
Copy the example workflows to a directory.
\b
DEST: Path to which the examples should be copied.
def config_examples(dest, user_dir):
""" Copy the example workflows to a directory.
\b
DEST: Path to which the examples should be copied.
"""
examples_path = Path(lightflow.__file__).parents[1... |
List all available workflows.
def workflow_list(obj):
""" List all available workflows. """
try:
for wf in list_workflows(config=obj['config']):
click.echo('{:23} {}'.format(
_style(obj['show_color'], wf.name, bold=True),
wf.docstring.split('\n')[0] if wf.doc... |
Send a workflow to the queue.
\b
NAME: The name of the workflow that should be started.
WORKFLOW_ARGS: Workflow arguments in the form key1=value1 key2=value2.
def workflow_start(obj, queue, keep_data, name, workflow_args):
""" Send a workflow to the queue.
\b
NAME: The name of the workflow th... |
Stop one or more running workflows.
\b
NAMES: The names, ids or job ids of the workflows that should be stopped.
Leave empty to stop all running workflows.
def workflow_stop(obj, names):
""" Stop one or more running workflows.
\b
NAMES: The names, ids or job ids of the workflows that s... |
Show the status of the workflows.
def workflow_status(obj, details):
""" Show the status of the workflows. """
show_colors = obj['show_color']
config_cli = obj['config'].cli
if details:
temp_form = '{:>{}} {:20} {:25} {:25} {:38} {}'
else:
temp_form = '{:>{}} {:20} {:25} {} {} {}... |
Start a worker process.
\b
CELERY_ARGS: Additional Celery worker command line arguments.
def worker_start(obj, queues, name, celery_args):
""" Start a worker process.
\b
CELERY_ARGS: Additional Celery worker command line arguments.
"""
try:
start_worker(queues=queues.split(','),
... |
Stop running workers.
\b
WORKER_IDS: The IDs of the workers that should be stopped, or none to stop them all.
def worker_stop(obj, worker_ids):
""" Stop running workers.
\b
WORKER_IDS: The IDs of the workers that should be stopped, or none to stop them all.
"""
if len(worker_ids) == 0:
... |
Show the status of all running workers.
def worker_status(obj, filter_queues, details):
""" Show the status of all running workers. """
show_colors = obj['show_color']
f_queues = filter_queues.split(',') if filter_queues is not None else None
workers = list_workers(config=obj['config'], filter_by_que... |
Show the worker and workflow event stream.
def monitor(ctx, details):
""" Show the worker and workflow event stream. """
ingest_config_obj(ctx, silent=False)
show_colors = ctx.obj['show_color']
event_display = {
JobEventName.Started: {'color': 'blue', 'label': 'started'},
JobEventName... |
Run an extension by its name.
\b
EXT_NAME: The name of the extension.
EXT_ARGS: Arguments that are passed to the extension.
def ext(obj, ext_name, ext_args):
""" Run an extension by its name.
\b
EXT_NAME: The name of the extension.
EXT_ARGS: Arguments that are passed to the extension.
... |
Helper function to enable/disable styled output text.
Args:
enable (bool): Turn on or off styling.
text (string): The string that should be styled.
kwargs (dict): Parameters that are passed through to click.style
Returns:
string: The input with either the styling applied (enabl... |
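The helper boils down to a conditional wrapper around click.style:

import click

def _style(enable, text, **kwargs):
    return click.style(text, **kwargs) if enable else text

# Styled only when color output is enabled.
print(_style(True, 'done', fg='green', bold=True))
print(_style(False, 'done', fg='green', bold=True))  # plain text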
Converts a frequency [kHz] into the band and mode according to the IARU bandplan
Note:
**DEPRECATION NOTICE**
This function has been moved to pyhamtools.frequency with PyHamTools 0.4.1
Please don't use this module/function anymore. It will be removed soon.
def freq_to_band(... |
Create a fully configured Celery application object.
Args:
config (Config): A reference to a lightflow configuration object.
Returns:
Celery: A fully configured Celery application object.
def create_app(config):
""" Create a fully configured Celery application object.
Args:
c... |
Clean up the results of a workflow when it has finished.
Connects to the postrun signal of Celery. If the signal was sent by a workflow,
remove the result from the result backend.
Args:
task_id (str): The id of the task.
args (tuple): The arguments the task was started with.
**kwargs: K... |
Celery task (aka job) that runs a workflow on a worker.
This celery task starts, manages and monitors the dags that make up a workflow.
Args:
self (Task): Reference to itself, the celery task object.
workflow (Workflow): Reference to the workflow object that is being used to
... |
Celery task that runs a single dag on a worker.
This celery task starts, manages and monitors the individual tasks of a dag.
Args:
self (Task): Reference to itself, the celery task object.
dag (Dag): Reference to a Dag object that is being used to start, manage and
monitor t... |
Celery task that runs a single task on a worker.
Args:
self (Task): Reference to itself, the celery task object.
task (BaseTask): Reference to the task object that performs the work
in its run() method.
workflow_id (string): The unique ID of the workflow run that st... |
Create a BrokerStats object from the dictionary returned by celery.
Args:
broker_dict (dict): The dictionary as returned by celery.
Returns:
BrokerStats: A fully initialized BrokerStats object.
def from_celery(cls, broker_dict):
""" Create a BrokerStats object from the... |
Return a dictionary of the broker stats.
Returns:
dict: Dictionary of the stats.
def to_dict(self):
""" Return a dictionary of the broker stats.
Returns:
dict: Dictionary of the stats.
"""
return {
'hostname': self.hostname,
'por... |
Create a WorkerStats object from the dictionary returned by celery.
Args:
name (str): The name of the worker.
worker_dict (dict): The dictionary as returned by celery.
queues (list): A list of QueueStats objects that represent the queues this
worker is listen... |
Return a dictionary of the worker stats.
Returns:
dict: Dictionary of the stats.
def to_dict(self):
""" Return a dictionary of the worker stats.
Returns:
dict: Dictionary of the stats.
"""
return {
'name': self.name,
'broker': se... |
Create a JobStats object from the dictionary returned by celery.
Args:
worker_name (str): The name of the worker this jobs runs on.
job_dict (dict): The dictionary as returned by celery.
celery_app: Reference to a celery application object.
Returns:
JobS... |
Return a dictionary of the job stats.
Returns:
dict: Dictionary of the stats.
def to_dict(self):
""" Return a dictionary of the job stats.
Returns:
dict: Dictionary of the stats.
"""
return {
'name': self.name,
'id': self.id,
... |
Create a JobEvent object from the event dictionary returned by celery.
Args:
event (dict): The dictionary as returned by celery.
Returns:
JobEvent: A fully initialized JobEvent object.
def from_event(cls, event):
""" Create a JobEvent object from the event dictionary r... |
Start a single workflow by sending it to the workflow queue.
Args:
name (str): The name of the workflow that should be started. Refers to the
name of the workflow file without the .py extension.
config (Config): Reference to the configuration object from which the
settings f... |
Stop one or more workflows.
Args:
config (Config): Reference to the configuration object from which the
settings for the workflow are retrieved.
names (list): List of workflow names, workflow ids or workflow job ids for the
workflows that should be stopped. If all workflows ... |
List all available workflows.
Returns a list of all workflows that are available from the paths specified
in the config. A workflow is defined as a Python file with at least one DAG.
Args:
config (Config): Reference to the configuration object from which the
settings are retrieved.
... |
Return a list of Celery jobs.
Args:
config (Config): Reference to the configuration object from which the
settings are retrieved.
status (JobStatus): The status of the jobs that should be returned.
filter_by_type (list): Restrict the returned jobs to the types in this list.
... |
Return a generator that yields workflow events.
For every workflow event that is sent from celery this generator yields an event
object.
Args:
config (Config): Reference to the configuration object from which the
settings are retrieved.
Returns:
generator: A generator that... |
Drain the process output streams.
def run(self):
""" Drain the process output streams. """
read_stdout = partial(self._read_output, stream=self._process.stdout,
callback=self._callback_stdout,
output_file=self._stdout_file)
read_stder... |
Read the output of the process, execute the callback and save the output.
Args:
stream: A file object pointing to the output stream that should be read.
callback(callable, None): A callback function that is called for each new
line of output.
output_file: A ... |
The main run method of the Python task.
Args:
data (:class:`.MultiTaskData`): The data object that has been passed from the
predecessor task.
store (:class:`.DataStoreDocument`): The persistent data store object that allows the
task to store data for acce... |
Function wrapper that sets the user and group for the process
def _run_as(user, group):
""" Function wrapper that sets the user and group for the process """
def wrapper():
if group is not None:
os.setgid(group)
if user is not None:
os.setuid(user... |
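A self-contained sketch of how such a wrapper is used as a subprocess preexec_fn; note the group must be dropped before the user, since setgid is no longer permitted once the uid has been given up (the ids below are made up, and dropping privileges requires running as root):

import os
import subprocess

def run_as(user, group):
    def wrapper():
        if group is not None:
            os.setgid(group)  # group first; setgid fails after the uid is dropped
        if user is not None:
            os.setuid(user)
    return wrapper

# subprocess.Popen(['id'], preexec_fn=run_as(1000, 1000))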
Convert the specified value to the type of the option.
Args:
value: The value that should be converted.
Returns:
The value with the type given by the option.
def convert(self, value):
""" Convert the specified value to the type of the option.
Args:
... |