text stringlengths 81 112k |
|---|
Read content. See file.read
def read(self, limit=-1):
    """Read up to ``limit`` bytes from this slice of the parent file.

    Mirrors ``file.read`` semantics: any negative ``limit`` (not just -1)
    and ``None`` mean "read the remainder of the slice".

    limit -- maximum number of bytes to read (default: rest of slice)
    """
    # Bytes left between the parent's current position and the slice end.
    remaining = self.len - self.parent_fd.tell() + self.offset
    # file.read treats every negative size and None as "read everything";
    # the original only special-cased -1.  Also clamp oversized requests
    # so we never read past the end of the slice.
    if limit is None or limit < 0 or limit > remaining:
        limit = remaining
    return self.parent_fd.read(limit)
Seek to position in stream, see file.seek
def seek(self, offset, whence=os.SEEK_SET):
"""Seek to position in stream, see file.seek"""
pos = None
if whence == os.SEEK_SET:
pos = self.offset + offset
elif whence == os.SEEK_CUR:
pos = self.tell() + offset
e... |
Close file, see file.close
def close(self):
    """Close file, see file.close"""
    # A parent without a real OS-level descriptor is a shared/reused
    # stream, so we must not close it out from under other users.
    try:
        self.parent_fd.fileno()
    except io.UnsupportedOperation:
        logger.debug("Not closing parent_fd - reusing existing")
        return
    self.parent_fd.close()
Prepare query string
def _build_query(self, uri, params=None, action_token_type=None):
"""Prepare query string"""
if params is None:
params = QueryParams()
params['response_format'] = 'json'
session_token = None
if action_token_type in self._action_tokens:
... |
Perform request to MediaFire API
action -- "category/name" of method to call
params -- dict of parameters or query string
action_token_type -- action token to use: None, "upload", "image"
upload_info -- in case of upload, dict of "fd" and "filename"
headers -- additional headers... |
Parse response
def _process_response(self, response):
"""Parse response"""
forward_raw = False
content_type = response.headers['Content-Type']
if content_type != 'application/json':
logger.debug("headers: %s", response.headers)
# API BUG: text/xml content-type w... |
Regenerate secret key
http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature
def _regenerate_secret_key(self):
"""Regenerate secret key
http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature
"""
# Don't regenerate the key if ... |
Set session token
value -- dict returned by user/get_session_token
def session(self, value):
"""Set session token
value -- dict returned by user/get_session_token"""
# unset session token
if value is None:
self._session = None
return
if not is... |
Set action tokens
type_ -- either "upload" or "image"
action_token -- string obtained from user/get_action_token,
set None to remove the token
def set_action_token(self, type_=None, action_token=None):
"""Set action tokens
type_ -- either "upload" or "image"
... |
user/get_session_token
http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token
def user_get_session_token(self, app_id=None, email=None, password=None,
ekey=None, fb_access_token=None,
tw_oauth_token=None,
... |
user/set_avatar
http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar
def user_set_avatar(self, action=None, quick_key=None, url=None):
"""user/set_avatar
http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar
"""
return self.request("user/set_avatar", Q... |
user/update
http://www.mediafire.com/developers/core_api/1.3/user/#update
def user_update(self, display_name=None, first_name=None, last_name=None,
email=None, password=None, current_password=None,
birth_date=None, gender=None, website=None, subdomain=None,
... |
folder/get_info
http://www.mediafire.com/developers/core_api/1.3/folder/#get_info
def folder_get_info(self, folder_key=None, device_id=None, details=None):
"""folder/get_info
http://www.mediafire.com/developers/core_api/1.3/folder/#get_info
"""
return self.request('folder/get_... |
folder/get_content
http://www.mediafire.com/developers/core_api/1.3/folder/#get_content
def folder_get_content(self, folder_key=None, content_type=None,
filter_=None, device_id=None, order_by=None,
order_direction=None, chunk=None, details=None,
... |
folder/update
http://www.mediafire.com/developers/core_api/1.3/folder/#update
def folder_update(self, folder_key, foldername=None, description=None,
privacy=None, privacy_recursive=None, mtime=None):
"""folder/update
http://www.mediafire.com/developers/core_api/1.3/folde... |
folder/create
http://www.mediafire.com/developers/core_api/1.3/folder/#create
def folder_create(self, foldername=None, parent_key=None,
action_on_duplicate=None, mtime=None):
"""folder/create
http://www.mediafire.com/developers/core_api/1.3/folder/#create
"""
... |
upload/check
http://www.mediafire.com/developers/core_api/1.3/upload/#check
def upload_check(self, filename=None, folder_key=None, filedrop_key=None,
size=None, hash_=None, path=None, resumable=None):
"""upload/check
http://www.mediafire.com/developers/core_api/1.3/upload... |
upload/simple
http://www.mediafire.com/developers/core_api/1.3/upload/#simple
def upload_simple(self, fd, filename, folder_key=None, path=None,
filedrop_key=None, action_on_duplicate=None,
mtime=None, file_size=None, file_hash=None):
"""upload/simple
... |
upload/resumable
http://www.mediafire.com/developers/core_api/1.3/upload/#resumable
def upload_resumable(self, fd, filesize, filehash, unit_hash, unit_id,
unit_size, quick_key=None, action_on_duplicate=None,
mtime=None, version_control=None, folder_key=None,
... |
upload/instant
http://www.mediafire.com/developers/core_api/1.3/upload/#instant
def upload_instant(self, filename, size, hash_, quick_key=None,
folder_key=None, filedrop_key=None, path=None,
action_on_duplicate=None, mtime=None,
version_cont... |
file/update
http://www.mediafire.com/developers/core_api/1.3/file/#update
def file_update(self, quick_key, filename=None, description=None,
mtime=None, privacy=None):
"""file/update
http://www.mediafire.com/developers/core_api/1.3/file/#update
"""
return se... |
file/update_file
http://www.mediafire.com/developers/core_api/1.3/file/#update_file
def file_update_file(self, quick_key, file_extension=None, filename=None,
description=None, mtime=None, privacy=None,
timezone=None):
"""file/update_file
http:... |
file/zip
http://www.mediafire.com/developers/core_api/1.3/file/#zip
def file_zip(self, keys, confirm_download=None, meta_only=None):
"""file/zip
http://www.mediafire.com/developers/core_api/1.3/file/#zip
"""
return self.request('file/zip', QueryParams({
'keys': key... |
Reset all of our stateful variables
def _reset(self):
'''Reset all of our stateful variables'''
self._socket = None
# The pending messages we have to send, and the current buffer we're
# sending
self._pending = deque()
self._out_buffer = ''
# Our read buffer
... |
Establish a connection
def connect(self, force=False):
'''Establish a connection'''
# Don't re-establish existing connections
if not force and self.alive():
return True
self._reset()
# Otherwise, try to connect
with self._socket_lock:
try:
... |
Close our connection
def close(self):
'''Close our connection'''
# Flush any unsent message
try:
while self.pending():
self.flush()
except socket.error:
pass
with self._socket_lock:
try:
if self._socket:
... |
Blockingly yield the socket
def socket(self, blocking=True):
'''Blockingly yield the socket'''
# If the socket is available, then yield it. Otherwise, yield nothing
if self._socket_lock.acquire(blocking):
try:
yield self._socket
finally:
s... |
Handle a response to our 'identify' command. Returns response
def identified(self, res):
'''Handle a response to our 'identify' command. Returns response'''
# If they support it, they should give us a JSON blob which we should
# inspect.
try:
res.data = json.loads(res.data)
... |
Set whether or not this message is blocking
def setblocking(self, blocking):
    '''Set whether or not this message is blocking'''
    # socket() yields the connection's socket while holding the lock;
    # record the requested mode once it has been applied.
    for conn_sock in self.socket():
        conn_sock.setblocking(blocking)
        self._blocking = blocking
Flush some of the waiting messages, returns count written
def flush(self):
'''Flush some of the waiting messages, returns count written'''
# When profiling, we found that while there was some efficiency to be
# gained elsewhere, the big performance hit is sending lots of small
# message... |
Send a command over the socket with length encoded
def send(self, command, message=None):
'''Send a command over the socket with length endcoded'''
if message:
joined = command + constants.NL + util.pack(message)
else:
joined = command + constants.NL
if self._bl... |
Send an identification message
def identify(self, data):
    '''Send an identification message'''
    # The IDENTIFY body is a JSON blob describing this client.
    blob = json.dumps(data)
    return self.send(constants.IDENTIFY, blob)
Subscribe to a topic/channel
def sub(self, topic, channel):
    '''Subscribe to a topic/channel'''
    command = ' '.join((constants.SUB, topic, channel))
    return self.send(command)
Publish to a topic
def pub(self, topic, message):
    '''Publish to a topic'''
    command = ' '.join((constants.PUB, topic))
    return self.send(command, message)
Publish multiple messages to a topic
def mpub(self, topic, *messages):
    '''Publish multiple messages to a topic'''
    # The messages tuple is handed to send() whole; packing happens there.
    command = ' '.join((constants.MPUB, topic))
    return self.send(command, messages)
Indicate that you're ready to receive
def rdy(self, count):
    '''Indicate that you're ready to receive'''
    # Track both the live ready count and the value last sent upstream.
    self.ready = count
    self.last_ready_sent = count
    command = ' '.join((constants.RDY, str(count)))
    return self.send(command)
Re-queue a message
def req(self, message_id, timeout):
    '''Re-queue a message'''
    command = ' '.join((constants.REQ, message_id, str(timeout)))
    return self.send(command)
Return all the responses read
def _read(self, limit=1000):
'''Return all the responses read'''
# It's important to know that it may return no responses or multiple
# responses. It depends on how the buffering works out. First, read from
# the socket
for sock in self.socket():
... |
Responses from an established socket
def read(self):
'''Responses from an established socket'''
responses = self._read()
# Determine the number of messages in here and decrement our ready
# count appropriately
self.ready -= sum(
map(int, (r.frame_type == Message.FRAM... |
Run the discovery mechanism
def discover(self, topic):
'''Run the discovery mechanism'''
logger.info('Discovering on topic %s', topic)
producers = []
for lookupd in self._lookupd:
logger.info('Discovering on %s', lookupd)
try:
# Find all the curre... |
Connect to all the appropriate instances
def check_connections(self):
'''Connect to all the appropriate instances'''
logger.info('Checking connections')
if self._lookupd:
self.discover(self._topic)
# Make sure we're connected to all the prescribed hosts
for hostspec... |
Run periodic reconnection checks
def connection_checker(self):
'''Run periodic reconnection checks'''
thread = ConnectionChecker(self)
logger.info('Starting connection-checker thread')
thread.start()
try:
yield thread
finally:
logger.info('Stoppin... |
Connect to the provided host, port
def connect(self, host, port):
'''Connect to the provided host, port'''
conn = connection.Connection(host, port,
reconnection_backoff=self._reconnection_backoff,
auth_secret=self._auth_secret,
timeout=self._connect_timeout,
... |
Add a connection
def add(self, connection):
'''Add a connection'''
key = (connection.host, connection.port)
with self._lock:
if key not in self._connections:
self._connections[key] = connection
self.added(connection)
return connection
... |
Remove a connection
def remove(self, connection):
'''Remove a connection'''
key = (connection.host, connection.port)
with self._lock:
found = self._connections.pop(key, None)
try:
self.close_connection(found)
except Exception as exc:
logger.wa... |
Read from any of the connections that need it
def read(self):
'''Read from any of the connections that need it'''
# We'll check all living connections
connections = [c for c in self.connections() if c.alive()]
if not connections:
# If there are no connections, obviously we ... |
Pick a random living connection
def random_connection(self):
'''Pick a random living connection'''
# While at the moment there's no need for this to be a context manager
# per se, I would like to use that interface since I anticipate
# adding some wrapping around it at some point.
... |
Wait for a response
def wait_response(self):
    '''Wait for a response'''
    # Busy-poll read() until it yields at least one response.
    while True:
        found = self.read()
        if found:
            return found
Publish the provided message to the provided topic
def pub(self, topic, message):
    '''Publish the provided message to the provided topic'''
    # Send on any one living connection, then wait for the ack.
    with self.random_connection() as conn:
        conn.pub(topic, message)
    return self.wait_response()
Publish messages to a topic
def mpub(self, topic, *messages):
    '''Publish messages to a topic'''
    # Fan the batch out on one living connection, then wait for the ack.
    with self.random_connection() as conn:
        conn.mpub(topic, *messages)
    return self.wait_response()
Create a socket for the daemon, depending on the directory location.
Args:
config_dir (str): The absolute path to the config directory used by the daemon.
Returns:
socket.socket: The daemon socket. Clients connect to this socket.
def create_socket(self):
"""Create a so... |
Create all directories needed for logs and configs.
def initialize_directories(self, root_dir):
"""Create all directories needed for logs and configs."""
if not root_dir:
root_dir = os.path.expanduser('~')
# Create config directory, if it doesn't exist
self.config_dir = os.... |
Send an answer to the client.
def respond_client(self, answer, socket):
    """Send an answer to the client."""
    # Highest pickle protocol (-1) for the wire format.
    payload = pickle.dumps(answer, -1)
    socket.sendall(payload)
    # One-shot response: stop polling this client and hang up.
    self.read_list.remove(socket)
    socket.close()
Read a previous configuration file or create a new with default values.
def read_config(self):
"""Read a previous configuration file or create a new with default values."""
config_file = os.path.join(self.config_dir, 'pueue.ini')
self.config = configparser.ConfigParser()
# Try to get co... |
Write the current configuration to the config file.
def write_config(self):
    """Write the current configuration to the config file."""
    target = os.path.join(self.config_dir, 'pueue.ini')
    with open(target, 'w') as handle:
        self.config.write(handle)
The main function containing the loop for communication and process management.
This function is the heart of the daemon.
It is responsible for:
- Client communication
- Executing commands from clients
- Update the status of processes by polling the ProcessHandler.
- Log... |
Kill current processes and initiate daemon shutdown.
The daemon will shut down after a last check on all killed processes.
def stop_daemon(self, payload=None):
"""Kill current processes and initiate daemon shutdown.
The daemon will shut down after a last check on all killed processes.
... |
Update the current config depending on the payload and save it.
def set_config(self, payload):
"""Update the current config depending on the payload and save it."""
self.config['default'][payload['option']] = str(payload['value'])
if payload['option'] == 'maxProcesses':
self.proces... |
Send something to stdin of a specific process.
def pipe_to_process(self, payload):
"""Send something to stdin of a specific process."""
message = payload['input']
key = payload['key']
if not self.process_handler.is_running(key):
return {'message': 'No running process for thi... |
Send the daemon status and the current queue for displaying.
def send_status(self, payload):
"""Send the daemon status and the current queue for displaying."""
answer = {}
data = []
# Get daemon status
if self.paused:
answer['status'] = 'paused'
else:
... |
Kill all processes, delete the queue and clean everything up.
def reset_everything(self, payload):
"""Kill all processes, delete the queue and clean everything up."""
kill_signal = signals['9']
self.process_handler.kill_all(kill_signal, True)
self.process_handler.wait_for_finish()
... |
Clear queue from any `done` or `failed` entries.
The log will be rotated once. Otherwise we would lose all logs from
those finished processes.
def clear(self, payload):
"""Clear queue from any `done` or `failed` entries.
The log will be rotated once. Otherwise we would loose all logs... |
Start the daemon and all processes or only specific processes.
def start(self, payload):
"""Start the daemon and all processes or only specific processes."""
# Start specific processes, if `keys` is given in the payload
if payload.get('keys'):
succeeded = []
failed = []
... |
Pause the daemon and all processes or only specific processes.
def pause(self, payload):
"""Start the daemon and all processes or only specific processes."""
# Pause specific processes, if `keys` is given in the payload
if payload.get('keys'):
succeeded = []
failed = []
... |
Edit the command of a specific entry.
def edit_command(self, payload):
"""Edit the command of a specific entry."""
key = payload['key']
command = payload['command']
if self.queue[key]:
if self.queue[key]['status'] in ['queued', 'stashed']:
self.queue[key]['co... |
Stash the specified processes.
def stash(self, payload):
"""Stash the specified processes."""
succeeded = []
failed = []
for key in payload['keys']:
if self.queue.get(key) is not None:
if self.queue[key]['status'] == 'queued':
self.queue[k... |
Pause the daemon and kill all processes or kill a specific process.
def kill_process(self, payload):
"""Pause the daemon and kill all processes or kill a specific process."""
# Kill specific processes, if `keys` is given in the payload
kill_signal = signals[payload['signal'].lower()]
ki... |
Remove specified entries from the queue.
def remove(self, payload):
"""Remove specified entries from the queue."""
succeeded = []
failed = []
for key in payload['keys']:
running = self.process_handler.is_running(key)
if not running:
removed = self... |
Switch the two specified entry positions in the queue.
def switch(self, payload):
"""Switch the two specified entry positions in the queue."""
first = payload['first']
second = payload['second']
running = self.process_handler.is_running(first) or self.process_handler.is_running(second)
... |
Restart the specified entries.
def restart(self, payload):
"""Restart the specified entries."""
succeeded = []
failed = []
for key in payload['keys']:
restarted = self.queue.restart(key)
if restarted:
succeeded.append(str(key))
else:
... |
Same as socket.sendall
def sendall(self, data, flags=0):
    '''Same as socket.sendall'''
    # Keep calling send() until the whole buffer has been written;
    # each call reports how many bytes it actually accepted.
    remaining = data
    while len(remaining):
        written = self.send(remaining, flags)
        remaining = remaining[written:]
List directory
def do_ls(client, args):
"""List directory"""
for item in client.get_folder_contents_iter(args.uri):
# privacy flag
if item['privacy'] == 'public':
item['pf'] = '@'
else:
item['pf'] = '-'
if isinstance(item, Folder):
# type fl... |
Upload files
def do_file_upload(client, args):
"""Upload files"""
# Sanity check
if len(args.paths) > 1:
# destination must be a directory
try:
resource = client.get_resource_by_uri(args.dest_uri)
except ResourceNotFoundError:
resource = None
if res... |
Download file
def do_file_download(client, args):
"""Download file"""
# Sanity check
if not os.path.isdir(args.dest_path) and not args.dest_path.endswith('/'):
print("file-download: "
"target '{}' is not a directory".format(args.dest_path))
if not os.path.exists(args.dest_pat... |
Output file contents to stdout
def do_file_show(client, args):
    """Output file contents to stdout"""
    for uri in args.uris:
        # Write through the byte buffer so binary content survives intact.
        client.download_file(uri, sys.stdout.buffer)
    return True
Create directory
def do_folder_create(client, args):
    """Create directory"""
    # recursive=True creates any missing parent folders as well.
    for uri in args.uris:
        client.create_folder(uri, recursive=True)
    return True
Remove resource
def do_resource_delete(client, args):
    """Remove resource"""
    # Delete each listed resource and report it as we go.
    for uri in args.uris:
        client.delete_resource(uri, purge=args.purge)
        print("Deleted {}".format(uri))
    return True
Update file metadata
def do_file_update_metadata(client, args):
    """Update file metadata"""
    # Forward every metadata field from the parsed CLI arguments.
    client.update_file_metadata(
        args.uri,
        filename=args.filename,
        description=args.description,
        mtime=args.mtime,
        privacy=args.privacy,
    )
    return True
Update folder metadata
def do_folder_update_metadata(client, args):
"""Update file metadata"""
client.update_folder_metadata(args.uri, foldername=args.foldername,
description=args.description,
mtime=args.mtime, privacy=args.privacy,
... |
Main entry point
def main(): # pylint: disable=too-many-statements
"""Main entry point"""
parser = argparse.ArgumentParser(prog='mediafire-cli',
description=__doc__)
parser.add_argument('--debug', dest='debug', action='store_true',
default=Fals... |
Publish a message to a topic
def pub(self, topic, message):
    '''Publish a message to a topic'''
    # HTTP endpoint: topic rides in the query string, body is the message.
    query = {'topic': topic}
    return self.post('pub', params=query, data=message)
Send multiple messages to a topic. Optionally pack the messages
def mpub(self, topic, messages, binary=True):
'''Send multiple messages to a topic. Optionally pack the messages'''
if binary:
# Pack and ship the data
return self.post('mpub', data=pack(messages)[4:],
... |
Stats with topics and channels keyed on topic and channel names
def clean_stats(self):
'''Stats with topics and channels keyed on topic and channel names'''
stats = self.stats()
if 'topics' in stats: # pragma: no branch
topics = stats['topics']
topics = dict((t.pop('top... |
Add a new command to the daemon queue.
Args:
args['command'] (list(str)): The actual programm call. Something like ['ls', '-a'] or ['ls -al']
root_dir (string): The path to the root directory the daemon is running in.
def execute_add(args, root_dir=None):
"""Add a new command to the daemon que... |
Edit an existing queue command in the daemon.
Args:
args['key'] int: The key of the queue entry to be edited
root_dir (string): The path to the root directory the daemon is running in.
def execute_edit(args, root_dir=None):
"""Edit a existing queue command in the daemon.
Args:
args... |
A factory which returns functions for direct daemon communication.
This factory will create a function which sends a payload to the daemon
and returns the unpickled object which is returned by the daemon.
Args:
command (string): The type of payload this should be. This determines
as wh... |
Create file descriptors for process output.
def get_descriptor(self, number):
"""Create file descriptors for process output."""
# Create stdout file and get file descriptor
stdout_path = os.path.join(self.config_dir,
'pueue_process_{}.stdout'.format(number))
... |
Close file descriptor and remove underlying files.
def clean_descriptor(self, number):
"""Close file descriptor and remove underlying files."""
self.descriptors[number]['stdout'].close()
self.descriptors[number]['stderr'].close()
if os.path.exists(self.descriptors[number]['stdout_path'... |
Poll all processes and handle any finished processes.
def check_finished(self):
"""Poll all processes and handle any finished processes."""
changed = False
for key in list(self.processes.keys()):
# Poll process and check if it finshed
process = self.processes[key]
... |
Check if we can start a new process.
def check_for_new(self):
    """Check if we can start a new process.

    Spawns up to ``max_processes - len(processes)`` entries pulled from
    the queue; a ``None`` key means the queue had nothing runnable for
    that slot.
    """
    free_slots = self.max_processes - len(self.processes)
    # Loop index is unused — use the conventional `_`.  range() on a
    # non-positive count simply yields nothing, so no guard is needed.
    for _ in range(free_slots):
        key = self.queue.next()
        if key is not None:
            self.spawn_new(key)
Spawn a new task and save it to the queue.
def spawn_new(self, key):
"""Spawn a new task and save it to the queue."""
# Check if path exists
if not os.path.exists(self.queue[key]['path']):
self.queue[key]['status'] = 'failed'
error_msg = "The directory for this command d... |
Kill all running processes.
def kill_all(self, kill_signal, kill_shell=False):
    """Kill all running processes.

    kill_signal -- the signal to deliver to each process
    kill_shell  -- forwarded to ``kill_process`` for each entry
    """
    # Snapshot the keys before iterating: kill_process may mutate
    # self.processes, and iterating a dict while it changes raises
    # RuntimeError.  check_finished already uses this list() pattern.
    for key in list(self.processes.keys()):
        self.kill_process(key, kill_signal, kill_shell)
Start a specific processes.
def start_process(self, key):
"""Start a specific processes."""
if key in self.processes and key in self.paused:
os.killpg(os.getpgid(self.processes[key].pid), signal.SIGCONT)
self.queue[key]['status'] = 'running'
self.paused.remove(key)
... |
Pause a specific processes.
def pause_process(self, key):
"""Pause a specific processes."""
if key in self.processes and key not in self.paused:
os.killpg(os.getpgid(self.processes[key].pid), signal.SIGSTOP)
self.queue[key]['status'] = 'paused'
self.paused.append(key... |
Create a closure which creates a running daemon.
We need to create a closure that contains the correct path the daemon should
be started with. This is needed as the `Daemonize` library
requires a callable function for daemonization and doesn't accept any arguments.
This function cleans up sockets and o... |
Execute entry function.
def main():
"""Execute entry function."""
args = parser.parse_args()
args_dict = vars(args)
root_dir = args_dict['root'] if 'root' in args else None
# If a root directory is specified, get the absolute path and
# check if it exists. Abort if it doesn't exist!
if roo... |
Register a pdb handler for signal 'signum'.
The handler sets pdb to listen on the ('host', 'port') internet address
and to start a remote debugging session on accepting a socket connection.
def register(host=DFLT_ADDRESS[0], port=DFLT_ADDRESS[1],
signum=signal.SIGUSR1):
"""Register a pdb hand... |
Return the handler as a named tuple.
The named tuple attributes are 'host', 'port', 'signum'.
Return None when no handler has been registered.
def get_handler():
"""Return the handler as a named tuple.
The named tuple attributes are 'host', 'port', 'signum'.
Return None when no handler has been r... |
Wait for the provided time to elapse
def wait(self, timeout):
    '''Wait for the provided time to elapse'''
    logger.debug('Waiting for %fs', timeout)
    # Returns whatever the underlying event reports (True if set).
    elapsed = self._event.wait(timeout)
    return elapsed
How long to wait before the next check
def delay(self):
    '''How long to wait before the next check'''
    # Never checked before: wait a full interval.
    if not self._last_checked:
        return self._interval
    # Otherwise subtract the time already spent since the last check.
    elapsed = time.time() - self._last_checked
    return self._interval - elapsed
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.