text stringlengths 81 112k |
|---|
Does the python representation of this cell requires an explicit
start of cell marker?
def explicit_start_marker(self, source):
"""Does the python representation of this cell requires an explicit
start of cell marker?"""
if not self.use_cell_markers:
return False
if ... |
Remove end of cell marker when next cell has an explicit start marker
def remove_eoc_marker(self, text, next_text):
"""Remove end of cell marker when next cell has an explicit start marker"""
if self.cell_marker_start:
return text
if self.is_code() and text[-1] == self.comment + ' ... |
Simplify start of cell marker when previous line is blank
def simplify_soc_marker(self, text, prev_text):
"""Simplify start of cell marker when previous line is blank"""
if self.cell_marker_start:
return text
if self.is_code() and text and text[0] == self.comment + ' + {}':
... |
Return the text representation of a code cell
def code_to_text(self):
"""Return the text representation of a code cell"""
active = is_active(self.ext, self.metadata)
source = copy(self.source)
escape_code_start(source, self.ext, self.language)
if active:
comment_mag... |
Return the text representation for the cell
def cell_to_text(self):
"""Return the text representation for the cell"""
if self.cell_type != 'code':
self.metadata['cell_type'] = self.cell_type
active = is_active('py', self.metadata)
if self.language != self.default_language a... |
Return the text representation for the cell
def cell_to_text(self):
"""Return the text representation for the cell"""
if self.cell_type == 'code':
source = copy(self.source)
return comment_magic(source, self.language, self.comment_magics)
if 'cell_marker' in self.metada... |
Execute pandoc with the given arguments
def pandoc(args, filein=None, fileout=None):
"""Execute pandoc with the given arguments"""
cmd = [u'pandoc']
if filein:
cmd.append(filein)
if fileout:
cmd.append('-o')
cmd.append(fileout)
cmd.extend(args.split())
proc = subproc... |
Pandoc's version number
def pandoc_version():
    """Return the installed pandoc's version number as a string.

    Raises PandocError when the installed pandoc is older than 2.7.2.
    """
    # First line of `pandoc --version` looks like "pandoc X.Y.Z"
    first_line = pandoc(u'--version').splitlines()[0]
    version = first_line.split()[1]
    if parse_version(version) < parse_version('2.7.2'):
        raise PandocError('Please install pandoc>=2.7.2 (found version {})'.format(version))
    return version
Convert a Markdown text to a Jupyter notebook, using Pandoc
def md_to_notebook(text):
"""Convert a Markdown text to a Jupyter notebook, using Pandoc"""
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(text.encode('utf-8'))
tmp_file.close()
pandoc(u'--from markdown --to ipynb -s ... |
Convert a notebook to its Markdown representation, using Pandoc
def notebook_to_md(notebook):
"""Convert a notebook to its Markdown representation, using Pandoc"""
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(ipynb_writes(notebook).encode('utf-8'))
tmp_file.close()
pandoc(u'... |
Return the smallest size int that can store the value
def _int_size(x):
"""Return the smallest size int that can store the value"""
if -0x80 <= x <= 0x7F:
return 1
elif -0x8000 <= x <= 0x7FFF:
return 2
elif -0x80000000 <= x <= 0x7FFFFFFF:
return 4
elif long(-0x80000000000000... |
Deserialize a BSER-encoded blob.
@param buf: The buffer to deserialize.
@type buf: bytes
@param mutable: Whether to return mutable results.
@type mutable: bool
@param value_encoding: Optional codec to use to decode values. If
unspecified or None, return values as bytest... |
The stdout of most fbcode_builder utilities is meant to be parsed.
def run_command(*cmd, **kwargs):
    """Run `cmd`, redirecting its stdout to stderr.

    The stdout of most fbcode_builder utilities is meant to be parsed, so
    subprocess output is sent to stderr to keep stdout machine-readable.

    Raises:
        subprocess.CalledProcessError: if the command exits nonzero.
    """
    # Lazy %-args: the message is only formatted when DEBUG logging is on,
    # instead of eagerly building the string on every call.
    logging.debug('Running: %s with %s', cmd, kwargs)
    kwargs['stdout'] = sys.stderr
    subprocess.check_call(cmd, **kwargs)
Helper to read a named config file.
The grossness with the global is a workaround for this python bug:
https://bugs.python.org/issue21591
The bug prevents us from defining either a local function or a lambda
in the scope of read_fbcode_builder_config below.
def _inner_read_config(path):
'''
Hel... |
Sets `builder` configuration, and returns all the builder steps
necessary to build `spec` and its dependencies.
Traverses the dependencies in depth-first order, honoring the sequencing
in each 'depends_on' list.
def steps_for_spec(builder, spec, processed_modules=None):
'''
Sets `builder` configur... |
Figure out where vcpkg is installed.
vcpkg-exported is populated in some flavors of FB internal builds.
C:/tools/vcpkg is the appveyor location.
C:/open/vcpkg is my local location.
def vcpkg_dir():
""" Figure out where vcpkg is installed.
vcpkg-exported is populated in some flavors of FB internal b... |
Synthesize a capability enabled version response
This is a very limited emulation for relatively recent feature sets
def synthesize(vers, opts):
""" Synthesize a capability enabled version response
This is a very limited emulation for relatively recent feature sets
"""
parsed_version = pars... |
Quotes a string if it is not already quoted
def shell_quote(s):
    """Quote a value for the shell unless it is already a ShellQuoted."""
    if isinstance(s, ShellQuoted):
        return s
    # Standard POSIX single-quote escaping: close, escaped quote, reopen.
    escaped = str(s).replace("'", "'\\''")
    return ShellQuoted("'" + escaped + "'")
Not a member of ShellQuoted so we get a useful error for raw strings
def raw_shell(s):
    """Extract the raw string held by a ShellQuoted value.

    Deliberately not a member of ShellQuoted so that passing a plain raw
    string produces a useful error instead of silently succeeding.
    """
    if not isinstance(s, ShellQuoted):
        raise RuntimeError('{0} should have been ShellQuoted'.format(s))
    return s.do_not_use_raw_str
Joins an iterable of ShellQuoted with a delimiter between each two
def shell_join(delim, it):
    """Join an iterable of ShellQuoted values with `delim` between each two."""
    pieces = [raw_shell(item) for item in it]
    return ShellQuoted(delim.join(pieces))
Joins ShellQuoted and raw pieces of paths to make a shell-quoted path
def path_join(*args):
    """Join ShellQuoted and raw path pieces into one shell-quoted path."""
    parts = [raw_shell(shell_quote(piece)) for piece in args]
    return ShellQuoted(os.path.join(*parts))
Do not shell-escape raw strings in comments, but do handle line breaks.
def shell_comment(c):
'Do not shell-escape raw strings in comments, but do handle line breaks.'
return ShellQuoted('# {c}').format(c=ShellQuoted(
(raw_shell(c) if isinstance(c, ShellQuoted) else c)
.replace('\n', '\n# '... |
Locate dep in the search path; if found, return its path.
If not found in the search path, and the dep is not a system-provided
dep, raise an error
def resolve_dep(self, depname):
""" Locate dep in the search path; if found, return its path.
If not found in the search path, and the dep ... |
If we can find the dep in the PATH, then we consider it to
be a system dependency that we should not bundle in the package
def resolve_dep_from_path(self, depname):
""" If we can find the dep in the PATH, then we consider it to
be a system dependency that we should not bundle in the package """... |
Parse the BSER packet
def _loads(self, response):
    """Parse a BSER packet into mutable Python values."""
    # Decode byte values with the local encoding so callers get strings.
    return bser.loads(
        response,
        True,
        value_encoding=encoding.get_local_encoding(),
        value_errors=encoding.default_local_errors)
Receive the response to a request made to the Watchman service.
async def receive_bilateral_response(self):
"""Receive the response to a request made to the Watchman service."""
self._check_receive_loop()
resp = await self.bilateral_response_queue.get()
self._check_error(resp)
... |
Send a query to the Watchman service and return the response.
async def query(self, *args):
"""Send a query to the Watchman service and return the response."""
self._check_receive_loop()
try:
await self.connection.send(args)
return await self.receive_bilateral_response(... |
Perform a server capability check.
async def capability_check(self, optional=None, required=None):
"""Perform a server capability check."""
self._check_receive_loop()
# If the returned response is an error, self.query will raise an error
await self.query(
"version", {"optio... |
Retrieve the data associated with a named subscription
Returns None if there is no data associated with `name`
If root is not None, then only return the subscription
data that matches both root and name. When used in this way,
remove processing impacts both the unscoped and scoped sto... |
Get one log from the log queue.
async def pop_log(self):
    """Get one log entry from the log queue.

    Raises if the receive loop is not running, or if the queued item is an
    error response.
    """
    self._check_receive_loop()
    # BUG FIX: asyncio.Queue.get() is a coroutine and must be awaited;
    # the original returned the un-awaited coroutine object instead of the
    # log entry (compare receive_bilateral_response, which does await).
    res = await self.log_queue.get()
    self._check_error(res)
    return res
Close the underlying connection.
def close(self):
    """Close the underlying connection and cancel the receive-loop task."""
    self._closed = True
    task = self.receive_task
    if task:
        task.cancel()
    conn = self.connection
    if conn:
        conn.close()
Schedules the receive loop to run on the given loop.
def enable_receiving(self, loop=None):
"""Schedules the receive loop to run on the given loop."""
self.receive_task = asyncio.ensure_future(self._receive_loop(), loop=loop)
def do_if_done(fut):
try:
fut.result()
... |
Create a new AIOClient using Unix transport and BSER Codec
connecting to the specified socket. If the specified socket is None,
then resolve the socket path automatically.
This method also schedules the receive loop to run on the event loop.
This method is a coroutine.
async def from_... |
Receive the response to a request made to the Watchman service.
Note that when trying to receive a PDU from the Watchman service,
we might get a unilateral response to a subscription or log, so these
are processed and queued up for later retrieval. This function only
returns when a non-... |
expand a win32 error code into a human readable message
def _win32_strerror(err):
""" expand a win32 error code into a human readable message """
# FormatMessage will allocate memory and assign it here
buf = ctypes.c_char_p()
FormatMessage(
FORMAT_MESSAGE_FROM_SYSTEM
| FORMAT_MESSAGE_A... |
Windows 7 and earlier does not support GetOverlappedResultEx. The
alternative is to use GetOverlappedResult and wait for read or write
operation to complete. This is done be using CreateEvent and
WaitForSingleObjectEx. CreateEvent, WaitForSingleObjectEx
and GetOverlappedResult are all part of Windows AP... |
read a line
Maintains its own buffer, callers of the transport should not mix
calls to readBytes and readLine.
def readLine(self):
""" read a line
Maintains its own buffer, callers of the transport should not mix
calls to readBytes and readLine.
"""
if self.buf i... |
A read can block for an unbounded amount of time, even if the
kernel reports that the pipe handle is signalled, so we need to
always perform our reads asynchronously
def readBytes(self, size):
""" A read can block for an unbounded amount of time, even if the
kernel reports t... |
establish transport connection
def _connect(self):
""" establish transport connection """
if self.recvConn:
if self.pid != os.getpid():
raise UseAfterFork(
"do not re-use a connection after fork; open a new client instead"
)
r... |
receive the next PDU from the watchman service
If the client has activated subscriptions or logs then
this PDU may be a unilateral PDU sent by the service to
inform the client of a log event or subscription change.
It may also simply be the response portion of a request
initiat... |
Retrieve buffered log data
If remove is true the data will be removed from the buffer.
Otherwise it will be left in the buffer
def getLog(self, remove=True):
""" Retrieve buffered log data
If remove is true the data will be removed from the buffer.
Otherwise it will be left in... |
Retrieve the data associated with a named subscription
If remove is True (the default), the subscription data is removed
from the buffer. Otherwise the data is returned but left in
the buffer.
Returns None if there is no data associated with `name`
If root is not None, then o... |
Send a query to the watchman service and return the response
This call will block until the response is returned.
If any unilateral responses are sent by the service in between
the request-response they will be buffered up in the client object
and NOT returned via this method.
def quer... |
Perform a server capability check
def capabilityCheck(self, optional=None, required=None):
""" Perform a server capability check """
res = self.query(
"version", {"optional": optional or [], "required": required or []}
)
if not self._hasprop(res, "capabilities"):
... |
Read bytes from a file-like object
@param fp: File-like object that implements read(int)
@type fp: file
@param buf: Buffer to read into
@type buf: bytes
@return: buf
def _read_bytes(fp, buf):
"""Read bytes from a file-like object
@param fp: File-like object that implements read(int)
... |
Deserialize a BSER-encoded blob.
@param fp: The file-object to deserialize.
@type file:
@param mutable: Whether to return mutable results.
@type mutable: bool
@param value_encoding: Optional codec to use to decode values. If
unspecified or None, return values as bytestr... |
Returns a path to the Docker context directory. See parse_args.py.
Helper for making a command-line utility that writes your project's
Dockerfile and associated data into a (temporary) directory. Your main
program might look something like this:
print(make_docker_context(
lambda build... |
Log some system diagnostics before/after setup for ease of debugging
def diagnostics(self):
'Log some system diagnostics before/after setup for ease of debugging'
# The builder's repr is not used in a command to avoid pointlessly
# invalidating Docker's build cache.
return self.step('Di... |
This helper lets Facebook-internal CI special-cases FB projects
def fb_github_project_workdir(self, project_and_path, github_org='facebook'):
'This helper lets Facebook-internal CI special-cases FB projects'
project, path = project_and_path.split('/', 1)
return self.github_project_workdir(githu... |
Functions that perform Gaussian process regression.
cov_func has signature (cov_params, x, x')
def make_gp_funs(cov_func, num_cov_params):
"""Functions that perform Gaussian process regression.
cov_func has signature (cov_params, x, x')"""
def unpack_kernel_params(params):
mean = ... |
Flags that a function is linear wrt all args
def def_linear(fun):
    """Flag that `fun` is linear wrt all of its positional args."""
    def _jvp(argnum, g, ans, args, kwargs):
        # Linearity: the JVP along argnum is fun with g substituted there.
        return fun(*subval(args, argnum, g), **kwargs)
    defjvp_argnum(fun, _jvp)
Project the velocity field to be approximately mass-conserving,
using a few iterations of Gauss-Seidel.
def project(vx, vy):
"""Project the velocity field to be approximately mass-conserving,
using a few iterations of Gauss-Seidel."""
p = np.zeros(vx.shape)
h = 1.0/vx.shape[0]
div = -0.5 ... |
Wraps a function so that its gradient can be specified and its invocation
can be recorded. For examples, see the docs.
def primitive(f_raw):
"""
Wraps a function so that its gradient can be specified and its invocation
can be recorded. For examples, see the docs."""
@wraps(f_raw)
def f_wrapped(... |
Numerically stable log(sum(exp(x))), also defined in scipy.misc
def logsumexp(x):
    """Numerically stable log(sum(exp(x)))."""
    # Shift by the max so the exponentials cannot overflow.
    shift = np.max(x)
    return shift + np.log(np.sum(np.exp(x - shift)))
Build a (weights, biases) tuples for all layers.
def init_net_params(scale, layer_sizes, rs=npr.RandomState(0)):
"""Build a (weights, biases) tuples for all layers."""
return [(scale * rs.randn(m, n), # weight matrix
scale * rs.randn(n)) # bias vector
for m, n in zip(layer_sizes... |
Loads a text file, and turns each line into an encoded sequence.
def build_dataset(filename, max_lines=-1):
"""Loads a text file, and turns each line into an encoded sequence."""
encodings = dict(list(map(reversed, enumerate(string.printable))))
digitize = lambda char: encodings[char] if char in encodings ... |
Project the velocity field to be approximately mass-conserving,
using a few iterations of Gauss-Seidel.
def project(vx, vy, occlusion):
"""Project the velocity field to be approximately mass-conserving,
using a few iterations of Gauss-Seidel."""
p = np.zeros(vx.shape)
div = -0.5 * (np.roll(vx... |
Move field f according to x and y velocities (u and v)
using an implicit Euler integrator.
def advect(f, vx, vy):
"""Move field f according to x and y velocities (u and v)
using an implicit Euler integrator."""
rows, cols = f.shape
cell_xs, cell_ys = np.meshgrid(np.arange(cols), np.arange(row... |
Takes an optimizer that operates on flat 1D numpy arrays and returns a
wrapped version that handles trees of nested containers (lists/tuples/dicts)
with arrays/scalars at the leaves.
def unflatten_optimizer(optimize):
"""Takes an optimizer that operates on flat 1D numpy arrays and returns a
wrapped ver... |
Stochastic gradient descent with momentum.
grad() must have signature grad(x, i), where i is the iteration number.
def sgd(grad, x, callback=None, num_iters=200, step_size=0.1, mass=0.9):
"""Stochastic gradient descent with momentum.
grad() must have signature grad(x, i), where i is the iteration number.""... |
Root mean squared prop: See Adagrad paper for details.
def rmsprop(grad, x, callback=None, num_iters=100,
step_size=0.1, gamma=0.9, eps=10**-8):
"""Root mean squared prop: See Adagrad paper for details."""
avg_sq_grad = np.ones(len(x))
for i in range(num_iters):
g = grad(x, i)
i... |
Adam as described in http://arxiv.org/pdf/1412.6980.pdf.
It's basically RMSprop with momentum and some correction terms.
def adam(grad, x, callback=None, num_iters=100,
step_size=0.001, b1=0.9, b2=0.999, eps=10**-8):
"""Adam as described in http://arxiv.org/pdf/1412.6980.pdf.
It's basically RMSpro... |
These functions implement independent component analysis.
The model is:
latents are drawn i.i.d. for each data point from a product of student-ts.
weights are the same across all datapoints.
each data = latents * weghts + noise.
def make_ica_funs(observed_dimension, latent_dimension):
"""These fun... |
Implements a deep neural network for classification.
params is a list of (weights, bias) tuples.
inputs is an (N x D) matrix.
returns normalized class log-probabilities.
def neural_net_predict(params, inputs):
"""Implements a deep neural network for classification.
params is a list of (... |
Computes l2 norm of params by flattening them into a vector.
def l2_norm(params):
    """Return the squared l2 norm of params, flattened into one vector."""
    vec, _ = flatten(params)
    return np.dot(vec, vec)
These functions implement a standard multi-layer perceptron,
vectorized over both training examples and weight samples.
def make_nn_funs(layer_sizes, L2_reg, noise_variance, nonlinearity=np.tanh):
"""These functions implement a standard multi-layer perceptron,
vectorized over both training examples and wei... |
Params is a list of (weights, bias) tuples.
inputs is an (N x D) matrix.
def neural_net_predict(params, inputs):
"""Params is a list of (weights, bias) tuples.
inputs is an (N x D) matrix."""
inpW, inpb = params[0]
inputs = relu(np.dot(inputs, inpW) + inpb)
for W, b in params[1:-1]:
... |
Adam modified to do minimiax optimization, for instance to help with
training generative adversarial networks.
def adam_minimax(grad_both, init_params_max, init_params_min, callback=None, num_iters=100,
step_size_max=0.001, step_size_min=0.001, b1=0.9, b2=0.999, eps=10**-8):
"""Adam modified to do min... |
Returns a function that computes the sum of each column of the Jacobian of
`fun`, in one pass. If the Jacobian is diagonal, then this is the diagonal
of the Jacobian.
def elementwise_grad(fun, x):
"""
Returns a function that computes the sum of each column of the Jacobian of
`fun`, in one pass. If ... |
Returns a function which computes the Jacobian of `fun` with respect to
positional argument number `argnum`, which must be a scalar or array. Unlike
`grad` it is not restricted to scalar-output functions, but also it cannot
take derivatives with respect to some argument types (like lists or dicts).
If t... |
Takes gradients with respect to a named argument.
Doesn't work on *args or **kwargs.
def grad_named(fun, argname):
'''Takes gradients with respect to a named argument.
Doesn't work on *args or **kwargs.'''
arg_index = getargspec(fun).args.index(argname)
return grad(fun, arg_index) |
Builds a function that returns the exact Hessian-tensor product.
The returned function has arguments (*args, tensor, **kwargs), and for
vectors takes roughly 4x as long to evaluate as the original function.
def hessian_tensor_product(fun, argnum=0):
"""Builds a function that returns the exact Hessian-tenso... |
Builds a function that returns the exact tensor-Jacobian product, that
is the Jacobian matrix left-multiplied by tensor. The returned function
has arguments (*args, tensor, **kwargs).
def tensor_jacobian_product(fun, argnum=0):
"""Builds a function that returns the exact tensor-Jacobian product, that
i... |
Builds a function for evaluating the Jacobian-vector product at a
point. Roughly 1.5x more FLOPs than forward-mode, plus memory requirements
that scale with the number of primitives applied in the evaluation of f, as
well as other overheads. See j-towns.github.io/2017/06/12/A-new-trick.html.
def make_jvp_r... |
Builds a function for evaluating generalized-Gauss-Newton-vector products
at a point. Slightly more expensive than mixed-mode.
def make_ggnvp(f, g=lambda x: 1./2*np.sum(x**2, axis=-1), f_argnum=0):
"""Builds a function for evaluating generalized-Gauss-Newton-vector products
at a point. Slightly more expens... |
Returns a function that returns both value and gradient. Suitable for use
in scipy.optimize
def value_and_grad(fun, x):
"""Returns a function that returns both value and gradient. Suitable for use
in scipy.optimize"""
vjp, ans = _make_vjp(fun, x)
if not vspace(ans).size == 1:
raise TypeErro... |
Builds a function that returns the gradient of the first output and the
(unmodified) second output of a function that returns two outputs.
def grad_and_aux(fun, x):
"""Builds a function that returns the gradient of the first output and the
(unmodified) second output of a function that returns two outputs."... |
Takes gradients wrt all arguments simultaneously,
def multigrad_dict(fun):
"Takes gradients wrt all arguments simultaneously,"
"returns a dict mapping 'argname' to 'gradval'"
import funcsigs
sig = funcsigs.signature(fun)
def select(preds, lst):
idx = lambda item: next(
(i for ... |
Returns a checkpointed version of `fun`, where intermediate values
computed during the forward pass of `fun` are discarded and then recomputed
for the backward pass. Useful to save memory, effectively trading off time
and memory. See e.g. arxiv.org/abs/1604.06174.
def checkpoint(fun):
"""Returns a chec... |
Converts an ASCII string to a one-of-k encoding.
def string_to_one_hot(string, maxchar):
    """Convert an ASCII string to a one-of-k (one-hot) encoded matrix."""
    codes = np.array([ord(ch) for ch in string])
    # Broadcast each character code against 0..maxchar-1 to get one row
    # per character with a single 1 at that character's code.
    one_hot = codes[:, None] == np.arange(maxchar)[None, :]
    return one_hot.astype(int)
Loads a text file, and turns each line into an encoded sequence.
def build_dataset(filename, sequence_length, alphabet_size, max_lines=-1):
"""Loads a text file, and turns each line into an encoded sequence."""
with open(filename) as f:
content = f.readlines()
content = content[:max_lines]
cont... |
Build a list of (weights, biases) tuples, one for each layer.
def init_nn_params(scale, layer_sizes, rs=npr.RandomState(0)):
"""Build a list of (weights, biases) tuples, one for each layer."""
return [(rs.randn(insize, outsize) * scale, # weight matrix
rs.randn(outsize) * scale) # bias... |
make the compression factors and compute the normalization
for irfft and rfft.
def make_rfft_factors(axes, resshape, facshape, normshape, norm):
""" make the compression factors and compute the normalization
for irfft and rfft.
"""
N = 1.0
for n in normshape: N = N * n
# inplace mo... |
Provides a stochastic estimate of the variational lower bound,
for any variational family and model density.
def variational_lower_bound(params, t, logprob, sampler, log_density,
num_samples, rs):
"""Provides a stochastic estimate of the variational lower bound,
for any va... |
Gradient for eigenvalues and vectors of a symmetric matrix.
def grad_eigh(ans, x, UPLO='L'):
"""Gradient for eigenvalues and vectors of a symmetric matrix."""
N = x.shape[-1]
w, v = ans # Eigenvalues, eigenvectors.
def vjp(g):
wg, vg = g # Gradient w.r.t. eigenvalues, eige... |
Implements http://arxiv.org/abs/1401.0118, and uses the
local reparameterization trick from http://arxiv.org/abs/1506.02557
def black_box_variational_inference(logprob, D, num_samples):
"""Implements http://arxiv.org/abs/1401.0118, and uses the
local reparameterization trick from http://arxiv.org/abs/1506.... |
Returns the array g repeated along axis to fit vector space vs.
Also returns the number of repetitions of the array.
def repeat_to_match_shape(g, shape, dtype, axis, keepdims):
"""Returns the array g repeated along axis to fit vector space vs.
Also returns the number of repetitions of the array."""
... |
Images should be a (N_images x pixels) matrix.
def plot_images(images, ax, ims_per_row=5, padding=5, digit_dimensions=(28, 28),
cmap=matplotlib.cm.binary, vmin=None, vmax=None):
"""Images should be a (N_images x pixels) matrix."""
N_images = images.shape[0]
N_rows = (N_images - 1) // ims_pe... |
Based on code by Ryan P. Adams.
def make_pinwheel(radial_std, tangential_std, num_classes, num_per_class, rate,
rs=npr.RandomState(0)):
"""Based on code by Ryan P. Adams."""
rads = np.linspace(0, 2*np.pi, num_classes, endpoint=False)
features = rs.randn(num_classes*num_per_class, 2) \
... |
Borrowed from http://blog.dkbza.org/2007/05/scanning-data-for-entropy-anomalies.html
def shannon_entropy(data, iterator):
"""
Borrowed from http://blog.dkbza.org/2007/05/scanning-data-for-entropy-anomalies.html
"""
if not data:
return 0
entropy = 0
for x in iterator:
p_x = float... |
Serialize a list of subtitles according to the SRT format, with optional time padding.
def srt_formatter(subtitles, padding_before=0, padding_after=0):
"""
Serialize a list of subtitles according to the SRT format, with optional time padding.
"""
sub_rip_file = pysrt.SubRipFile()
for i, ((start, en... |
Serialize a list of subtitles according to the VTT format, with optional time padding.
def vtt_formatter(subtitles, padding_before=0, padding_after=0):
"""
Serialize a list of subtitles according to the VTT format, with optional time padding.
"""
text = srt_formatter(subtitles, padding_before, padding_... |
Serialize a list of subtitles as a JSON blob.
def json_formatter(subtitles):
"""
Serialize a list of subtitles as a JSON blob.
"""
subtitle_dicts = [
{
'start': start,
'end': end,
'content': text,
}
for ((start, end), text)
in subtitle... |
Calculate the given percentile of arr.
def percentile(arr, percent):
"""
Calculate the given percentile of arr.
"""
arr = sorted(arr)
index = (len(arr) - 1) * percent
floor = math.floor(index)
ceil = math.ceil(index)
if floor == ceil:
return arr[int(index)]
low_value = arr[i... |
Extract audio from an input file to a temporary WAV file.
def extract_audio(filename, channels=1, rate=16000):
"""
Extract audio from an input file to a temporary WAV file.
"""
temp = tempfile.NamedTemporaryFile(suffix='.wav', delete=False)
if not os.path.isfile(filename):
print("The given ... |
Perform voice activity detection on a given audio file.
def find_speech_regions(filename, frame_width=4096, min_region_size=0.5, max_region_size=6): # pylint: disable=too-many-locals
"""
Perform voice activity detection on a given audio file.
"""
reader = wave.open(filename)
sample_width = reader.g... |
Given an input audio/video file, generate subtitles in the specified language and format.
def generate_subtitles( # pylint: disable=too-many-locals,too-many-arguments
source_path,
output=None,
concurrency=DEFAULT_CONCURRENCY,
src_language=DEFAULT_SRC_LANGUAGE,
dst_language=DEFAU... |
Check that the CLI arguments passed to autosub are valid.
def validate(args):
"""
Check that the CLI arguments passed to autosub are valid.
"""
if args.format not in FORMATTERS:
print(
"Subtitle format not supported. "
"Run with --list-formats to see all supported format... |
Run autosub as a command-line program.
def main():
"""
Run autosub as a command-line program.
"""
parser = argparse.ArgumentParser()
parser.add_argument('source_path', help="Path to the video or audio file to subtitle",
nargs='?')
parser.add_argument('-C', '--concurrency... |
Plugin interface to pyls linter.
Args:
document: The document to be linted.
is_saved: Whether or not the file has been saved to disk.
flags: Additional flags to pass to pylint. Not exposed to
pyls_lint, but used for testing.
Returns:
A li... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.