Get status on recording/not recording.
Returns
-------
Current state of recording.
def is_recording():
"""Get status on recording/not recording.
Returns
-------
Current state of recording.
"""
curr = ctypes.c_bool()
check_call(_LIB.MXAutogradIsRecording(ctypes.byref(curr)))
... |
Get status on training/predicting.
Returns
-------
Current state of training/predicting.
def is_training():
"""Get status on training/predicting.
Returns
-------
Current state of training/predicting.
"""
curr = ctypes.c_bool()
check_call(_LIB.MXAutogradIsTraining(ctypes.byref(... |
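A minimal usage sketch for these two status helpers, using the public mxnet.autograd frontend:
from mxnet import autograd

print(autograd.is_recording())      # False outside a record scope
print(autograd.is_training())       # False: predict mode by default
with autograd.record(train_mode=True):
    print(autograd.is_recording())  # True inside the scope
    print(autograd.is_training())   # True: train mode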
Mark NDArrays as variables to compute gradient for autograd.
Parameters
----------
variables: NDArray or list of NDArray
gradients: NDArray or list of NDArray
grad_reqs: str or list of str
def mark_variables(variables, gradients, grad_reqs='write'):
"""Mark NDArrays as variables to compute gra... |
parse head gradient for backward and grad.
def _parse_head(heads, head_grads):
"""parse head gradient for backward and grad."""
if isinstance(heads, NDArray):
heads = [heads]
if isinstance(head_grads, NDArray):
head_grads = [head_grads]
head_handles = c_handle_array(heads)
if head... |
Compute the gradients of heads w.r.t previously marked variables.
Parameters
----------
heads: NDArray or list of NDArray
Output NDArray(s)
head_grads: NDArray or list of NDArray or None
Gradients with respect to heads.
train_mode: bool, optional
Whether to do backward for t... |
Compute the gradients of heads w.r.t variables. Gradients will be
returned as new NDArrays instead of being stored into `variable.grad`.
Supports recording gradient graph for computing higher order gradients.
.. note::
Currently only a very limited set of operators support higher order \
gradients.
... |
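A hedged sketch of the higher-order use case described above; sin is among the operators known to support second-order gradients:
from mxnet import autograd, nd

x = nd.array([1.0])
x.attach_grad()
with autograd.record():
    y = nd.sin(x)
    # create_graph=True records the backward pass itself so it can be
    # differentiated again
    dy_dx = autograd.grad(y, x, create_graph=True)[0]   # cos(x)
dy_dx.backward()
print(x.grad)   # -sin(x), the second derivative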
Retrieve recorded computation history as `Symbol`.
Parameters
----------
x : NDArray
Array representing the head of computation graph.
Returns
-------
Symbol
The retrieved Symbol.
def get_symbol(x):
"""Retrieve recorded computation history as `Symbol`.
Parameters
... |
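A small sketch of retrieving the recorded history, assuming the computation was captured under autograd.record():
from mxnet import autograd, nd

x = nd.array([1.0, 2.0])
x.attach_grad()
with autograd.record():
    y = nd.exp(x) + x
sym = autograd.get_symbol(y)
print(sym.list_arguments())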
Not particularly fast code to parse the text file and load it into three NDArrays
and produce an NDArrayIter
def load_mldataset(filename):
"""Not particularly fast code to parse the text file and load it into three NDArrays
and produce an NDArrayIter
"""
user = []
item = []
score = []
... |
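Once the three columns are parsed, wrapping them in an NDArrayIter looks roughly like this (the toy arrays below are illustrative stand-ins for the parsed user/item/score data):
import numpy as np
import mxnet as mx

user = np.array([0, 1, 2, 3])
item = np.array([5, 3, 9, 1])
score = np.array([4.0, 3.0, 5.0, 2.0])
data_iter = mx.io.NDArrayIter(data={'user': user, 'item': item},
                              label={'score': score}, batch_size=2)
for batch in data_iter:
    print(batch.data, batch.label)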
MXNET_DLL int MXSymbolListAtomicSymbolCreators(mx_uint *out_size,
AtomicSymbolCreator **out_array);
MXNET_DLL int MXSymbolGetAtomicSymbolInfo(AtomicSymbolCreator creator,
const char **name,
... |
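Such C API entry points are reached from Python through ctypes; a hedged sketch that counts the registered operators via the declaration above:
import ctypes
from mxnet.base import _LIB, check_call, mx_uint

out_size = mx_uint()
out_array = ctypes.POINTER(ctypes.c_void_p)()
check_call(_LIB.MXSymbolListAtomicSymbolCreators(ctypes.byref(out_size),
                                                 ctypes.byref(out_array)))
print(out_size.value, 'operators registered')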
Read .caffemodel path and .params path as input from command line
and use CaffeModelConverter to do the conversion
def main():
"""Read .caffemodel path and .params path as input from command line
and use CaffeModelConverter to do the conversion"""
parser = argparse.ArgumentParser(description='.caffemod... |
Add a param to the .params file
def add_param(self, param_name, layer_index, blob_index):
"""Add a param to the .params file"""
blobs = self.layers[layer_index].blobs
self.dict_param[param_name] = mx.nd.array(caffe.io.blobproto_to_array(blobs[blob_index])) |
Add an arg param to .params file. Example: weights of a fully connected layer.
def add_arg_param(self, param_name, layer_index, blob_index):
"""Add an arg param to .params file. Example: weights of a fully connected layer."""
self.add_param('arg:%s' % param_name, layer_index, blob_index) |
Add an aux param to .params file. Example: moving_mean in BatchNorm layer
def add_aux_param(self, param_name, layer_index, blob_index):
"""Add an aux param to .params file. Example: moving_mean in BatchNorm layer """
self.add_param('aux:%s' % param_name, layer_index, blob_index) |
Add an arg param. If there is no such param in the .caffemodel file, silently ignore it.
def add_optional_arg_param(self, param_name, layer_index, blob_index):
"""Add an arg param. If there is no such param in the .caffemodel file, silently ignore it."""
blobs = self.layers[layer_index].blobs
if blob_ind... |
Convert a Caffe .caffemodel file to MXNet .params file
def convert(self, caffemodel_path, outmodel_path):
"""Convert a Caffe .caffemodel file to MXNet .params file"""
net_param = caffe_pb2.NetParameter()
with open(caffemodel_path, 'rb') as caffe_model_file:
net_param.ParseFromString... |
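The dict the converter builds uses 'arg:'/'aux:' key prefixes and is written with mx.nd.save; a minimal sketch with illustrative names and shapes:
import mxnet as mx

dict_param = {'arg:fc1_weight': mx.nd.zeros((10, 5)),
              'aux:bn1_moving_mean': mx.nd.zeros((5,))}
mx.nd.save('converted.params', dict_param)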
generate random sample of ROIs comprising foreground and background examples
:param rois: [n, 5] (batch_index, x1, y1, x2, y2)
:param gt_boxes: [n, 5] (x1, y1, x2, y2, cls)
:param num_classes: number of classes
:param rois_per_image: total roi number
:param fg_rois_per_image: foreground roi number
... |
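A hedged numpy sketch of the fg/bg sampling idea, assuming precomputed max overlaps and a hypothetical 0.5 foreground threshold as in the usual Fast R-CNN setup:
import numpy as np

def sample_rois_sketch(overlaps, fg_rois_per_image, rois_per_image, fg_thresh=0.5):
    # pick foreground ROIs by overlap, fill the rest with background
    fg_inds = np.where(overlaps >= fg_thresh)[0]
    fg_inds = np.random.choice(fg_inds, size=min(fg_rois_per_image, fg_inds.size),
                               replace=False)
    bg_inds = np.where(overlaps < fg_thresh)[0]
    bg_needed = rois_per_image - fg_inds.size
    bg_inds = np.random.choice(bg_inds, size=min(bg_needed, bg_inds.size),
                               replace=False)
    return np.concatenate([fg_inds, bg_inds])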
Register a subclass of CustomOpProp to the registry with name reg_name.
def register(reg_name):
"""Register a subclass of CustomOpProp to the registry with name reg_name."""
def do_register(prop_cls):
"""Register a subclass of CustomOpProp to the registry."""
fb_functype = CFUNCTYPE(c_int, c_in... |
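The registry is normally used through the mx.operator.register decorator; a compact identity-op sketch (names are illustrative):
import mxnet as mx

@mx.operator.register("my_identity")
class MyIdentityProp(mx.operator.CustomOpProp):
    def list_arguments(self):
        return ['data']
    def infer_shape(self, in_shape):
        return in_shape, [in_shape[0]], []
    def create_operator(self, ctx, shapes, dtypes):
        return MyIdentity()

class MyIdentity(mx.operator.CustomOp):
    def forward(self, is_train, req, in_data, out_data, aux):
        self.assign(out_data[0], req[0], in_data[0])
    def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
        self.assign(in_grad[0], req[0], out_grad[0])

# invoked as mx.nd.Custom(x, op_type='my_identity')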
Declare dependencies of this operator for backward pass.
Parameters
----------
out_grad : list of int
ids of out_grad blobs.
in_data : list of int
ids of in_data blobs.
out_data: list of int
ids of out_data blobs.
Returns
----... |
Helper function for assigning into dst depending on requirements.
def assign(self, dst, req, src):
"""Helper function for assigning into dst depending on requirements."""
if req == 'null':
return
elif req in ('write', 'inplace'):
dst[:] = src
elif req == 'add':
... |
infer_type interface. Override to create new operators.
Parameters
----------
in_type : list of np.dtype
list of argument types in the same order as
declared in list_arguments.
Returns
-------
in_type : list
list of argument types. Can... |
infer_storage_type interface. Used to infer storage type of
inputs and outputs in the forward pass. When this interface is not implemented,
all stypes will be inferred as default.
Parameters
----------
in_stype : list of stypes, valid stypes are default, row_sparse and
... |
infer_storage_type_backward interface. Used to infer storage
type of inputs and outputs in the backward pass.
Will raise an error if undefined storage type is returned.
Returned lists have to be the same size as the input lists to infer_storage_type_backward,
otherwise an exception will... |
Declare dependencies of this operator for backward pass.
Parameters
----------
out_grad : list of int
ids of out_grad blobs.
in_data : list of int
ids of in_data blobs.
out_data: list of int
ids of out_data blobs.
Returns
----... |
Get index for new entry.
def inc(self):
"""Get index for new entry."""
self.lock.acquire()
cur = self.counter
self.counter += 1
self.lock.release()
return cur |
Closes the record and index files.
def close(self):
"""Closes the record and index files."""
if not self.is_open:
return
super(IndexCreator, self).close()
self.fidx.close() |
Returns the current position of read head.
def tell(self):
"""Returns the current position of read head.
"""
pos = ctypes.c_size_t()
check_call(_LIB.MXRecordIOReaderTell(self.handle, ctypes.byref(pos)))
return pos.value |
Creates the index file from open record file
def create_index(self):
"""Creates the index file from open record file
"""
self.reset()
counter = 0
pre_time = time.time()
while True:
if counter % 1000 == 0:
cur_time = time.time()
... |
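After create_index() has produced the .idx file, records can be read back randomly; a short sketch with illustrative file names:
import mxnet as mx

record = mx.recordio.MXIndexedRecordIO('data.idx', 'data.rec', 'r')
item = record.read_idx(0)                 # random access through the index
header, payload = mx.recordio.unpack(item)
print(header)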
Run commands, raise exception if failed
def _run_cmd(cmds):
"""Run commands, raise exception if failed"""
if not isinstance(cmds, str):
cmds = "".join(cmds)
print("Execute \"%s\"" % cmds)
try:
subprocess.check_call(cmds, shell=True)
except subprocess.CalledProcessError as err:
... |
Run the doxygen make commands
def generate_doxygen(app):
"""Run the doxygen make commands"""
_run_cmd("cd %s/.. && make doxygen" % app.builder.srcdir)
_run_cmd("cp -rf doxygen/html %s/doxygen" % app.builder.outdir) |
Build mxnet .so lib
def build_mxnet(app):
"""Build mxnet .so lib"""
if not os.path.exists(os.path.join(app.builder.srcdir, '..', 'config.mk')):
_run_cmd("cd %s/.. && cp make/config.mk config.mk && make -j$(nproc) USE_MKLDNN=0 USE_CPP_PACKAGE=1 " %
app.builder.srcdir)
else:
_... |
build r pdf
def build_r_docs(app):
"""build r pdf"""
r_root = app.builder.srcdir + '/../R-package'
pdf_path = app.builder.srcdir + '/api/r/mxnet-r-reference-manual.pdf'
_run_cmd('cd ' + r_root +
'; R -e "roxygen2::roxygenize()"; R CMD Rd2pdf . --no-preview -o ' + pdf_path)
dest_path = ... |
build scala for scala docs, java docs, and clojure docs to use
def build_scala(app):
"""build scala for scala docs, java docs, and clojure docs to use"""
if any(v in _BUILD_VER for v in ['1.2.', '1.3.', '1.4.']):
_run_cmd("cd %s/.. && make scalapkg" % app.builder.srcdir)
_run_cmd("cd %s/.. && m... |
build scala doc and then move the outdir
def build_scala_docs(app):
"""build scala doc and then move the outdir"""
scala_path = app.builder.srcdir + '/../scala-package'
scala_doc_sources = 'find . -type f -name "*.scala" | egrep \"\.\/core|\.\/infer\" | egrep -v \"\/javaapi\" | egrep -v \"Suite\"'
sca... |
build java docs and then move the outdir
def build_java_docs(app):
"""build java docs and then move the outdir"""
java_path = app.builder.srcdir + '/../scala-package'
java_doc_sources = 'find . -type f -name "*.scala" | egrep \"\.\/core|\.\/infer\" | egrep \"\/javaapi\" | egrep -v \"Suite\"'
java_doc_c... |
build clojure doc and then move the outdir
def build_clojure_docs(app):
"""build clojure doc and then move the outdir"""
clojure_path = app.builder.srcdir + '/../contrib/clojure-package'
_run_cmd('cd ' + clojure_path + '; lein codox')
dest_path = app.builder.outdir + '/api/clojure/docs'
_run_cmd('r... |
Convert a markdown table to rst format
def _convert_md_table_to_rst(table):
"""Convert a markdown table to rst format"""
if len(table) < 3:
return ''
out = '```eval_rst\n.. list-table::\n :header-rows: 1\n\n'
for i,l in enumerate(table):
cols = l.split('|')[1:-1]
if i == 0:
... |
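A tiny usage sketch of the helper above, assuming a well-formed three-line markdown table:
table = ['| op | arity |',
         '|----|-------|',
         '| add | 2 |']
print(_convert_md_table_to_rst(table))   # emits an eval_rst list-table block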
Find tables in a markdown file and then convert them into the rst format
def convert_table(app, docname, source):
"""Find tables in a markdown file and then convert them into the rst format"""
num_tables = 0
for i,j in enumerate(source):
table = []
output = ''
in_table = False
for l i... |
An iterator that returns whether a line is within a code block
Returns
-------
iterator of (str, bool, str, int)
- line: the line
- in_code: if this line is in a code block
- lang: the code block language
- indent: the code indent
def _parse_code_lines(lines):
"""An iterator ... |
split lines into code and non-code blocks
Returns
-------
iterator of (bool, str, list of str)
- if it is a code block
- source language
- lines of source
def _get_blocks(lines):
"""split lines into code and non-code blocks
Returns
-------
iterator of (bool, str, list of... |
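A small usage sketch, assuming the (is_code, lang, lines) contract stated in the docstring:
lines = ['Some prose.',
         '```python',
         'print(1 + 1)',
         '```',
         'More prose.']
for is_code, lang, block in _get_blocks(lines):
    print(is_code, lang, block)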
Evaluate Python source code
Returns
(bool, str):
- True if success
- output
def _get_python_block_output(src, global_dict, local_dict):
"""Evaluate Python source code
Returns
(bool, str):
- True if success
- output
"""
src = '\n'.join([l for l in src.split('\n')
... |
Copies artifacts needed for website presentation
def copy_artifacts(app):
"""Copies artifacts needed for website presentation"""
dest_path = app.builder.outdir + '/error'
source_path = app.builder.srcdir + '/build_version_doc/artifacts'
_run_cmd('cd ' + app.builder.srcdir)
_run_cmd('rm -rf ' + dest... |
Download a Caffe model to disk using the given meta info
def download_caffe_model(model_name, meta_info, dst_dir='./model'):
"""Download a Caffe model to disk using the given meta info"""
if not os.path.isdir(dst_dir):
os.mkdir(dst_dir)
model_name = os.path.join(dst_dir, model_name)
assert 'prototx... |
Download, convert and save a caffe model
def convert_caffe_model(model_name, meta_info, dst_dir='./model'):
"""Download, convert and save a caffe model"""
(prototxt, caffemodel, mean) = download_caffe_model(model_name, meta_info, dst_dir)
model_name = os.path.join(dst_dir, model_name)
convert_model(pr... |
Run _func across multiple processes using params.
def multi_p_run(tot_num, _func, worker, params, n_process):
"""
Run _func across multiple processes using params.
"""
from multiprocessing import Process, Queue
out_q = Queue()
procs = []
split_num = split_seq(list(range(0, tot_num)), n_process)
pri... |
Split the sequence sam_num into n_tile chunks
def split_seq(sam_num, n_tile):
"""
Split the sequence sam_num into n_tile chunks
"""
import math
print(sam_num)
print(n_tile)
start_num = sam_num[0::int(math.ceil(len(sam_num) / (n_tile)))]
end_num = start_num[1::]
end_num.append(... |
Worker that runs func over an index range and puts the (succ, fail) result into out_q
def put_worker(func, from_idx, to_idx, params, out_q):
"""
Worker that runs func over an index range and puts the (succ, fail) result into out_q
"""
succ, fail = func(from_idx, to_idx, params)
return out_q.put({'succ': succ, 'fail': fail}) |
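A usage sketch of the worker contract: func takes (from_idx, to_idx, params) and returns a (succ, fail) pair, which put_worker forwards to the queue:
from multiprocessing import Process, Queue

def work(from_idx, to_idx, params):
    return list(range(from_idx, to_idx)), []   # (succ, fail)

if __name__ == '__main__':
    out_q = Queue()
    procs = [Process(target=put_worker, args=(work, i * 5, (i + 1) * 5, None, out_q))
             for i in range(2)]
    for p in procs:
        p.start()
    results = [out_q.get() for _ in procs]
    for p in procs:
        p.join()
    print(results)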
create a namedtuple with default values
def namedtuple_with_defaults(typename, field_names, default_values=()):
""" create a namedtuple with default values """
T = collections.namedtuple(typename, field_names)
T.__new__.__defaults__ = (None, ) * len(T._fields)
if isinstance(default_values, collections.... |
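A quick usage sketch, assuming the standard recipe's support for a dict of default values:
Point = namedtuple_with_defaults('Point', ['x', 'y'], dict(y=0))
print(Point(x=1))   # Point(x=1, y=0)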
merge dict a, b, with b overriding keys in a
def merge_dict(a, b):
""" merge dict a, b, with b overriding keys in a """
c = a.copy()
c.update(b)
return c |
accept list of namedtuple, return a dict of zipped fields
def zip_namedtuple(nt_list):
""" accept list of namedtuple, return a dict of zipped fields """
if not nt_list:
return dict()
if not isinstance(nt_list, list):
nt_list = [nt_list]
for nt in nt_list:
assert type(nt) == type... |
convert raw configuration to unified dictionary
def config_as_dict(cfg):
""" convert raw configuration to unified dictionary """
ret = cfg.__dict__.copy()
# random cropping params
del ret['rand_crop_samplers']
assert isinstance(cfg.rand_crop_samplers, list)
ret = merge_dict(ret, zip_namedtuple(... |
Imports the ONNX model file, passed as a parameter, into MXNet symbol and parameters.
Operator support and coverage -
https://cwiki.apache.org/confluence/display/MXNET/MXNet-ONNX+Integration
Parameters
----------
model_file : str
ONNX model file name
Returns
-------
sym : :clas... |
Returns the name and shape information of input and output tensors of the given ONNX model file.
Notes
-----
This method is available when you ``import mxnet.contrib.onnx``
Parameters
----------
model_file : str
ONNX model file name
Returns
-------
model_metadata : dict
... |
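The two entry points above are used together like this ('model.onnx' is an illustrative file name):
from mxnet.contrib import onnx as onnx_mxnet

sym, arg_params, aux_params = onnx_mxnet.import_model('model.onnx')
metadata = onnx_mxnet.get_model_metadata('model.onnx')
print(metadata['input_tensor_data'])   # [(name, shape), ...]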
wrapper for a small Convolution group
Parameters:
----------
from_layer : mx.symbol
continue on which layer
name : str
base name of the new layers
num_filter : int
how many filters to use in Convolution layer
kernel : tuple (int, int)
kernel size (h, w)
pad :... |
wrapper for a small Convolution group
Parameters:
----------
from_layer : mx.symbol
continue on which layer
name : str
base name of the new layers
num_filter : int
how many filters to use in Convolution layer
kernel : tuple (int, int)
kernel size (h, w)
pad :... |
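A hedged sketch of the small Convolution group such a wrapper builds (Convolution followed by Activation; layer names are illustrative):
import mxnet as mx

def conv_act_sketch(from_layer, name, num_filter, kernel=(1, 1), pad=(0, 0),
                    stride=(1, 1), act_type='relu'):
    conv = mx.symbol.Convolution(data=from_layer, kernel=kernel, pad=pad,
                                 stride=stride, num_filter=num_filter,
                                 name='{}_conv'.format(name))
    return mx.symbol.Activation(data=conv, act_type=act_type,
                                name='{}_{}'.format(name, act_type))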
Wrapper function to extract features from base network, attaching extra
layers and SSD specific layers
Parameters
----------
from_layers : list of str
feature extraction layers; use '' to add extra layers
For example:
from_layers = ['relu4_3', 'fc7', '', '', '', '']
whi... |
the basic aggregation module for SSD detection. Takes in multiple layers,
generates multiple object detection targets by customized layers
Parameters:
----------
from_layers : list of mx.symbol
generate multibox detection from layers
num_classes : int
number of classes excluding back... |
Apply weighting to loss.
Parameters
----------
loss : Symbol
The loss to be weighted.
weight : float or None
Global scalar weight for loss.
sample_weight : Symbol or None
Per sample weighting. Must be broadcastable to
the same shape as loss. For example, if loss has
... |
Reshapes x to the same shape as y.
def _reshape_like(F, x, y):
"""Reshapes x to the same shape as y."""
return x.reshape(y.shape) if F is ndarray else F.reshape_like(x, y) |
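A minimal sketch of the weighting helper described above, assuming broadcastable sample weights:
from mxnet import nd

def apply_weighting_sketch(F, loss, weight=None, sample_weight=None):
    if sample_weight is not None:
        loss = F.broadcast_mul(loss, sample_weight)  # per-sample weights
    if weight is not None:
        loss = loss * weight                         # global scalar weight
    return loss

print(apply_weighting_sketch(nd, nd.ones((2, 3)), weight=0.5))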
create TV gradient executor with input bound to img
def get_tv_grad_executor(img, ctx, tv_weight):
"""create TV gradient executor with input bound to img
"""
if tv_weight <= 0.0:
return None
nchannel = img.shape[1]
simg = mx.sym.Variable("img")
skernel = mx.sym.Variable("kernel")
... |
Train a neural style network.
Args are from argparse and control input, output, hyper-parameters.
callback allows for display of training progress.
def train_nstyle(args, callback=None):
"""Train a neural style network.
Args are from argparse and control input, output, hyper-parameters.
callback al... |
Load data/label from dataset
def _get_batch(self):
"""
Load data/label from dataset
"""
batch_data = mx.nd.zeros((self.batch_size, 3, self._data_shape[0], self._data_shape[1]))
batch_label = []
for i in range(self.batch_size):
if (self._current + i) >= self._... |
perform data augmentations: crop, mirror, resize, sub mean, swap channels...
def _data_augmentation(self, data, label):
"""
perform data augmentations: crop, mirror, resize, sub mean, swap channels...
"""
if self.is_train and self._rand_samplers:
rand_crops = []
... |
Gets MNIST dataset
def get_mnist():
""" Gets MNIST dataset """
np.random.seed(1234) # set seed for deterministic ordering
mnist_data = mx.test_utils.get_mnist()
X = np.concatenate([mnist_data['train_data'], mnist_data['test_data']])
Y = np.concatenate([mnist_data['train_label'], mnist_data['test_l... |
Get input slice from the input shape.
Parameters
----------
batch_size : int
The number of samples in a mini-batch.
work_load_list : list of float or int, optional
The list of work load for different devices,
in the same order as `ctx`.
Returns
-------
slices : list... |
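A short usage sketch of the slicing helper (it lives in mxnet.executor_manager):
from mxnet.executor_manager import _split_input_slice

# split a batch of 10 samples across two equally loaded devices
print(_split_input_slice(10, [1, 1]))   # [slice(0, 5), slice(5, 10)]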
Check the argument names of symbol.
This function checks the duplication of arguments in Symbol.
The check is done for feedforward net for now.
Parameters
----------
symbol : Symbol
The network configuration.
def _check_arguments(symbol):
"""Check the argument names of symbol.
This... |
Load a list of arrays into a list of arrays specified by slices.
def _load_general(data, targets):
"""Load a list of arrays into a list of arrays specified by slices."""
for d_src, d_targets in zip(data, targets):
if isinstance(d_targets, nd.NDArray):
d_src.copyto(d_targets)
else:
... |
bind executor for bucketing, potentially sharing data with an existing executor.
def _bind_exec(sym, ctx, input_shapes, param_names, need_grad=False,
base_exec=None, shared_data_arrays=None, input_types=None, logger=logging):
"""bind executor for bucketing, potentially sharing data with an existing ... |
Load data and labels into arrays.
def load_data_batch(self, data_batch):
"""Load data and labels into arrays."""
_load_data(data_batch, self.data_arrays)
_load_label(data_batch, self.label_arrays) |
Perform a forward pass on each executor.
def forward(self, is_train=False):
"""Perform a forward pass on each executor."""
for texec in self.train_execs:
texec.forward(is_train=is_train) |
Update evaluation metric with label and current outputs.
def update_metric(self, metric, labels, pre_sliced=False):
"""Update evaluation metric with label and current outputs."""
for current_exec, (texec, islice) in enumerate(zip(self.train_execs, self.slices)):
if not pre_sliced:
... |
Install monitor on all executors.
def install_monitor(self, monitor):
"""Install monitor on all executors."""
if self.sym_gen is not None:
raise NotImplementedError("Monitoring is not implemented for bucketing")
for train_exec in self.execgrp.train_execs:
monitor.instal... |
Set parameter and aux values.
Parameters
----------
arg_params : list of NDArray
Source parameter arrays
aux_params : list of NDArray
Source aux arrays.
def set_params(self, arg_params, aux_params):
"""Set parameter and aux values.
Parameters
... |
Load data and labels into arrays.
def load_data_batch(self, data_batch):
"""Load data and labels into arrays."""
if self.sym_gen is not None:
key = data_batch.bucket_key
if key not in self.execgrp_bucket:
# create new bucket entry
symbol = self.sy... |
Update metric with the current executor.
def update_metric(self, metric, labels, pre_sliced=False):
"""Update metric with the current executor."""
self.curr_execgrp.update_metric(metric, labels, pre_sliced) |
Clear all contents in the replay memory
def clear(self):
"""
Clear all contents in the replay memory
"""
self.states[:] = 0
self.actions[:] = 0
self.rewards[:] = 0
self.terminate_flags[:] = 0
self.top = 0
self.size = 0 |
Get Header Guard Convention for DMLC Projects.
For headers in include, directly use the path
For headers in src, use project name plus path
Examples: with project-name = dmlc
include/dmlc/timer.h -> DMLC_TIMER_H_
src/io/libsvm_parser.h -> DMLC_IO_LIBSVM_PARSER_H_
def get_header_guard_dmlc(... |
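A hedged re-implementation sketch of the convention the docstring describes:
def header_guard_sketch(fname, project_name='dmlc'):
    # strip include/, map src/ to the project name, then uppercase
    fname = fname.replace('include/', '').replace('src/', project_name + '/')
    return fname.upper().replace('/', '_').replace('.', '_') + '_'

print(header_guard_sketch('include/dmlc/timer.h'))     # DMLC_TIMER_H_
print(header_guard_sketch('src/io/libsvm_parser.h'))   # DMLC_IO_LIBSVM_PARSER_H_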
Process a file.
def process(fname, allow_type):
"""Process a file."""
fname = str(fname)
# HACK: ignore op.h which is automatically generated
if fname.endswith('op.h'):
return
arr = fname.rsplit('.', 1)
if fname.find('#') != -1 or arr[-1] not in allow_type:
return
if arr[-1] i... |
Main entry function.
def main():
"""Main entry function."""
if len(sys.argv) < 3:
print('Usage: <project-name> <filetype> <list-of-path to traverse>')
print('\tfiletype can be python/cpp/all')
exit(-1)
_HELPER.project_name = sys.argv[1]
file_type = sys.argv[2]
allow_type = [... |
Print summary of certain result map.
def _print_summary_map(strm, result_map, ftype):
"""Print summary of certain result map."""
if len(result_map) == 0:
return 0
npass = len([x for k, x in result_map.items() if len(x) == 0])
strm.write('=====%d/%d %s files passed check=... |
Process a cpp file.
def process_cpp(self, path, suffix):
"""Process a cpp file."""
_cpplint_state.ResetErrorCounts()
cpplint.ProcessFile(str(path), _cpplint_state.verbose_level)
_cpplint_state.PrintErrorCounts()
errors = _cpplint_state.errors_by_category.copy()
if suffi... |
Process a python file.
def process_python(self, path):
"""Process a python file."""
(pylint_stdout, pylint_stderr) = epylint.py_run(
' '.join([str(path)] + self.pylint_opts), return_std=True)
emap = {}
print(pylint_stderr.read())
for line in pylint_stdout:
... |
Print summary of lint.
def print_summary(self, strm):
"""Print summary of lint."""
nerr = 0
nerr += LintHelper._print_summary_map(strm, self.cpp_header_map, 'cpp-header')
nerr += LintHelper._print_summary_map(strm, self.cpp_src_map, 'cpp-source')
nerr += LintHelper._print_summar... |
Start server/scheduler.
def _init_kvstore_server_module():
"""Start server/scheduler."""
is_worker = ctypes.c_int()
check_call(_LIB.MXKVStoreIsWorkerNode(ctypes.byref(is_worker)))
if is_worker.value == 0:
kvstore = create('dist')
server = KVStoreServer(kvstore)
server.run()
... |
Return the server controller.
def _controller(self):
"""Return the server controller."""
def server_controller(cmd_id, cmd_body, _):
"""Server controller."""
if not self.init_logginig:
# the reason we put the code here is that we cannot get
# kvst... |
Run the server, whose behavior is like.
>>> while receive(x):
... if is_command x: controller(x)
... else if is_key_value x: updater(x)
def run(self):
"""Run the server, whose behavior is like.
>>> while receive(x):
... if is_command x: controller(x)
... |
Generate function for ndarray op by handle and function name.
def _generate_ndarray_function_code(handle, name, func_name, signature_only=False):
"""Generate function for ndarray op by handle and function name."""
real_name = ctypes.c_char_p()
desc = ctypes.c_char_p()
num_args = mx_uint()
arg_names... |
Create a NDArray function from the FunctionHandle.
def _make_ndarray_function(handle, name, func_name):
"""Create a NDArray function from the FunctionHandle."""
code, doc_str = _generate_ndarray_function_code(handle, name, func_name)
local = {}
exec(code, None, local) # pylint: disable=exec-used
... |
Counts tokens in the specified string.
For token_delim='<td>' and seq_delim='<sd>', a specified string of two sequences of
tokens may look like::
<td>token1<td>token2<td>token3<td><sd><td>token4<td>token5<td><sd>
<td> and <sd> are regular expressions. Make use of \\ to allow special characters ... |
Return a new array of given shape and type, filled with zeros.
Parameters
----------
shape : int or tuple of int
The shape of the empty array
ctx : Context, optional
An optional device context (default is the current default context)
dtype : str or numpy.dtype, optional
An o... |
Returns a new array of given shape and type, without initializing entries.
Parameters
----------
shape : int or tuple of int
The shape of the empty array.
ctx : Context, optional
An optional device context (default is the current default context).
dtype : str or numpy.dtype, optiona... |
Creates an array from any object exposing the array interface.
Parameters
----------
source_array : array_like
An object exposing the array interface, an object whose `__array__`
method returns an array, or any (nested) sequence.
ctx : Context, optional
Device context (default i... |
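The three creation routines side by side:
import numpy as np
import mxnet as mx

a = mx.nd.zeros((2, 3), dtype='float32')       # zero-filled
b = mx.nd.empty((2, 3))                        # uninitialized entries
c = mx.nd.array(np.arange(6).reshape(2, 3))    # from an array-like source
print(a.shape, b.shape, c.dtype)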
Loads an array from file.
See more details in ``save``.
Parameters
----------
fname : str
The filename.
Returns
-------
list of NDArray, RowSparseNDArray or CSRNDArray, or \
dict of str to NDArray, RowSparseNDArray or CSRNDArray
Loaded data.
def load(fname):
"""Lo... |
Loads an array dictionary or list from a buffer
See more details in ``save``.
Parameters
----------
buf : str
Buffer containing contents of a file as a string or bytes.
Returns
-------
list of NDArray, RowSparseNDArray or CSRNDArray, or \
dict of str to NDArray, RowSparseNDArr... |
Saves a list of arrays or a dict of str->array to file.
Examples of filenames:
- ``/path/to/file``
- ``s3://my-bucket/path/to/file`` (if compiled with AWS S3 support)
- ``hdfs://path/to/file`` (if compiled with HDFS support)
Parameters
----------
fname : str
The filename.
da... |
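A round-trip sketch for save/load (the file name is illustrative):
import mxnet as mx

params = {'weight': mx.nd.ones((2, 2)), 'bias': mx.nd.zeros((2,))}
mx.nd.save('params.nd', params)
loaded = mx.nd.load('params.nd')
print(loaded['weight'])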
Get the common prefix for all names
def _common_prefix(names):
"""Get the common prefix for all names"""
if not names:
return ''
prefix = names[0]
for name in names:
i = 0
while i < len(prefix) and i < len(name) and prefix[i] == name[i]:
i += 1
prefix = prefi... |
Utility function that helps in inferring the DType of arg and aux params
from the given input params.
Parameters
----------
in_params: List of Symbol
List of input symbol variables.
out_params: Symbol
Output symbol variable.
arg_params: List of Str
List of names of argument par... |
Creates prefix and params for new `Block`.
def create(prefix, params, hint):
"""Creates prefix and params for new `Block`."""
current = getattr(_BlockScope._current, "value", None)
if current is None:
if prefix is None:
if not hasattr(_name.NameManager._current, "val... |
Returns a :py:class:`ParameterDict` containing this :py:class:`Block`'s and all of its
children's Parameters (default); it can also return a selected :py:class:`ParameterDict`
that matches some given regular expressions.
For example, collect the specified parameters in ['conv1_weight', 'conv1_bias', ... |
[Deprecated] Please use save_parameters. Note that if you want to load
from SymbolBlock later, please use export instead.
Save parameters to file.
filename : str
Path to file.
def save_params(self, filename):
"""[Deprecated] Please use save_parameters. Note that if you want l... |
Load parameters from file previously saved by `save_parameters`.
Parameters
----------
filename : str
Path to parameter file.
ctx : Context or list of Context, default cpu()
Context(s) to initialize loaded parameters on.
allow_missing : bool, default Fals... |
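A gluon round-trip covering collect_params, save_parameters, and load_parameters (file name and shapes are illustrative):
import mxnet as mx
from mxnet import gluon, nd

net = gluon.nn.Dense(4)
net.initialize()
net(nd.ones((1, 3)))                    # trigger deferred shape inference
print(net.collect_params('.*weight'))   # select parameters by regex
net.save_parameters('dense.params')

net2 = gluon.nn.Dense(4, in_units=3)
net2.load_parameters('dense.params', ctx=mx.cpu())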