Performs cleaning steps on the data so various type comparisons can
be performed correctly.
def _sanitize_value(x):
"""
Performs cleaning steps on the data so various type comparisons can
be performed correctly.
"""
if isinstance(x, _six.string_types + _six.integer_types + (float,)):
re... |
Performs a robust equality test between elements.
def _element_equal(x, y):
"""
Performs a robust equality test between elements.
"""
if isinstance(x, _np.ndarray) or isinstance(y, _np.ndarray):
try:
return (abs(_np.asarray(x) - _np.asarray(y)) < 1e-5).all()
except:
... |
Evaluate a transformer specification for testing.
Parameters
----------
spec: [str | MLModel]
File from which to load the model (OR) a loaded
instance of MLModel.
input_data: list[dict]
Test data on which to evaluate the models.
reference_output: list[dict]
Exp... |
Returns a list of the names of the inputs to this model.
:param spec: The model protobuf specification
:return: [str] A list of input feature names
def _get_input_names(spec):
"""
Returns a list of the names of the inputs to this model.
:param spec: The model protobuf specification
:return: [st... |
Compute the in degree, out degree and total degree of each vertex.
Parameters
----------
graph : SGraph
The graph on which to compute degree counts.
verbose : bool, optional
If True, print progress updates.
Returns
-------
out : DegreeCountingModel
Examples
------... |
replace the index'th emphasized text with s
def replace_emphasis(self, s, index = 0):
"""replace the index'th emphasized text with s"""
e = self.emphasized[index]
self.body[e[0]:e[1]] = [s]
del self.emphasized[index] |
Override of litre._execute; sets up variable context before
evaluating code
def _execute(self, code):
"""Override of litre._execute; sets up variable context before
evaluating code
"""
self.globals['example'] = self.example
eval(code, self.globals) |
Compile examples on the stack, whose topmost item is the last example
seen but not yet handled so far.
:howmany: How many of the topmost examples on the stack to compile.
You can pass a number, or 'all' to indicate that all examples should
be compiled.
:pop: How many of t... |
Loads the Jamfile at the given location. After loading, the project global
file and any Jamfiles needed by the loaded one will be loaded recursively.
If the Jamfile at that location is already loaded, does nothing.
Returns the project module for the Jamfile.
def load (self, jamfile_location):
"""Loa... |
Loads parent of Jamfile at 'location'.
Issues an error if nothing is found.
def load_parent(self, location):
"""Loads parent of Jamfile at 'location'.
Issues an error if nothing is found."""
assert isinstance(location, basestring)
found = b2.util.path.glob_in_parents(
... |
Given 'name' which can be project-id or plain directory name,
return project module corresponding to that id or directory.
Returns nothing if the project is not found.
def find(self, name, current_location):
"""Given 'name' which can be project-id or plain directory name,
return project mod... |
Returns the name of the module corresponding to 'jamfile-location'.
If no module corresponds to the location yet, associates a default
module name with that location.
def module_name(self, jamfile_location):
"""Returns the name of module corresponding to 'jamfile-location'.
If no module correspon... |
Find the Jamfile at the given location. This returns the
exact names of all the Jamfiles in the given directory. The optional
parent-root argument causes the search to happen not in the given directory
but in the directories above it, up to the directory given by parent-root.
def find_jamfile (self, dir, parent_root=0, no_... |
Load a Jamfile at the given directory. Returns nothing.
Will attempt to load the file as indicated by the JAMFILE patterns.
The effect of calling this rule twice with the same 'dir' is undefined.
def load_jamfile(self, dir, jamfile_module):
"""Load a Jamfile at the given directory. Returns nothing... |
Loads 'file' as a standalone project that has no location
associated with it. This is mostly useful for user-config.jam,
which should be able to define targets; although the file has
some location in the filesystem, we do not want any build to
happen in the user's HOME, for example.
The ca... |
Initialize the module for a project.
module-name is the name of the project module.
location is the location (directory) of the project to initialize.
If not specified, a standalone project will be initialized
standalone_path is the path to the source-location.
... |
Make 'project-module' inherit attributes of project
root and parent module.
def inherit_attributes(self, project_module, parent_module):
"""Make 'project-module' inherit attributes of project
root and parent module."""
assert isinstance(project_module, basestring)
assert isinsta... |
Associate the given id with the given project module.
def register_id(self, id, module):
"""Associate the given id with the given project module."""
assert isinstance(id, basestring)
assert isinstance(module, basestring)
self.id2module[id] = module |
Temporarily changes the current project to 'project'. Should
be followed by 'pop-current'.
def push_current(self, project):
"""Temporary changes the current project to 'project'. Should
be followed by 'pop-current'."""
if __debug__:
from .targets import ProjectTarget
... |
Returns the value of the specified attribute in the
specified jamfile module.
def attribute(self, project, attribute):
"""Returns the value of the specified attribute in the
specified jamfile module."""
assert isinstance(project, basestring)
assert isinstance(attribute, basestri... |
Returns the value of the specified attribute in the
specified jamfile module, or 'default' if the attribute is not set.
def attributeDefault(self, project, attribute, default):
"""Returns the value of the specified attribute in the
specified jamfile module, or 'default' if the attribute is not set."""
assert isinstance(project, basestring)
assert isinstance(att... |
Returns the project target corresponding to the 'project-module'.
def target(self, project_module):
"""Returns the project target corresponding to the 'project-module'."""
assert isinstance(project_module, basestring)
if project_module not in self.module2target:
self.module2target[p... |
Makes rule 'name' available to all subsequently loaded Jamfiles.
Calling that rule will relay to 'callable'.
def add_rule(self, name, callable_):
"""Makes rule 'name' available to all subsequently loaded Jamfiles.
Calling that rule will relay to 'callable'."""
assert isinstance(name, ba... |
Recursively walks through the b2/src subdirectories and
creates an index of base module name to package name. The
index is stored within self.__python_module_cache and allows
for an O(1) module lookup.
For example, given the base module name `toolset`,
self.__python_module_cache... |
Load a Python module that should be usable from Jamfiles.
There are generally two types of modules Jamfiles might want to
use:
- Core Boost.Build. Those are imported using plain names, e.g.
'toolset', so this function checks if we have module named
b2.package.module already.
... |
Set the named attribute from the specification given by the user.
The value actually set may be different.
def set(self, attribute, specification, exact=False):
"""Set the named attribute from the specification given by the user.
The value actually set may be different."""
assert isinst... |
Prints the project attributes.
def dump(self):
"""Prints the project attributes."""
id = self.get("id")
if not id:
id = "(none)"
else:
id = id[0]
parent = self.get("parent")
if not parent:
parent = "(none)"
else:
p... |
Given a free-standing function 'callable', return a new
callable that will call 'callable' and report all exceptions,
using 'call_and_report_errors'.
def make_wrapper(self, callable_):
"""Given a free-standing function 'callable', return a new
callable that will call 'callable' and repor... |
Declare and set a project global constant.
Project global constants are normal variables but should
not be changed. They are applied to every child Jamfile.
def constant(self, name, value):
"""Declare and set a project global constant.
Project global constants are normal variables but s... |
Declare and set a project global constant, whose value is a path. The
path is adjusted to be relative to the invocation directory. The given
value path is taken to be either absolute, or relative to this project
root.
def path_constant(self, name, value):
"""Declare and set a project gl... |
Calculates conditional requirements for multiple requirements
at once. This is a shorthand to reduce duplication and to
keep an inline declarative syntax. For example:
lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
<define>DEBUG_EXCEPTION <define>DEBUG_TRACE ... |
Creates a feature extractor from an input array feature.
input_features is a list of one (name, array) tuple.
extract_indices is either an integer or a list. If it's an integer,
the output type is by default a double (but may also be an integer).
If a list, the output type is an array.
def cr... |
Add a single build XML output file to our data.
def add_input(self, input):
'''
Add a single build XML output file to our data.
'''
events = xml.dom.pulldom.parse(input)
context = []
for (event,node) in events:
if event == xml.dom.pulldom.START_ELEMENT:
... |
Process the target dependency DAG into an ancestry tree so we can look up
which top-level library and test targets specific build actions correspond to.
def x_build_targets_target( self, node ):
'''
Process the target dependency DAG into an ancestry tree so we can look up
which top-leve... |
Given a build action log, process into the corresponding test log and
specific test log sub-part.
def x_build_action( self, node ):
'''
Given a build action log, process into the corresponding test log and
specific test log sub-part.
'''
action_node = node
name =... |
The time-stamp goes to the corresponding attribute in the result.
def x_build_timestamp( self, node ):
'''
The time-stamp goes to the corresponding attribute in the result.
'''
self.timestamps.append(self.get_data(node).strip())
return None |
Print the detailed info for failed tests, or for tests that always print their output.
def print_action(self, test_succeed, action):
'''
Print the detailed info for failed tests, or for tests that always print their output.
'''
#self.info_print(">>> {0}",action.keys())
if not test_succeed or action['info']['always_show_run_output']:
... |
Get a summary of _NeuralNetwork_pb2.WeightParams
Args:
wp : _NeuralNetwork_pb2.WeightParams - the _NeuralNetwork_pb2.WeightParams message to display
Returns:
a str summary for wp
def _get_weight_param_summary(wp):
"""Get a summary of _NeuralNetwork_pb2.WeightParams
Args:
wp : _NeuralNetwork... |
Args:
layer - an MLModel NeuralNetwork Layer protobuf message
Returns:
layer_type : str - type of layer
layer_name : str - name of the layer
layer_inputs : list[str] - a list of strings representing input blobs of the layer
layer_outputs : list[str] - a list of strings representing output blobs ... |
Summarize network into the following structure.
Args:
mlmodel_spec : mlmodel spec
Returns:
inputs : list[(str, str)] - a list of (name, descriptor) two-tuples, one for each input blob.
outputs : list[(str, str)] - a list of (name, descriptor) two-tuples, one for each output blob.
layers : list[(str, list[str... |
Print the network information summary.
Args:
mlmodel_spec : the mlmodel spec
interface_only : Shows only the input and output of the network
def print_network_spec(mlmodel_spec, interface_only=False):
""" Print the network information summary.
Args:
mlmodel_spec : the mlmodel spec
interface... |
Takes an SVM classifier and produces a starting spec using the parts that are
shared between all SVMs.
def _generate_base_svm_classifier_spec(model):
"""
Takes an SVM classifier and produces a starting spec using the parts that are
shared between all SVMs.
"""
if not(_HAS_SKLEARN):
raise Ru... |
Convert a Support Vector Classification (SVC) model to the protobuf spec.
Parameters
----------
model: SVC
A trained SVC model.
feature_names: [str], optional (default=None)
Name of the input columns.
target: str, optional (default=None)
Name of the output column.
R... |
Extract the ordering of the input layers.
def make_input_layers(self):
"""
Extract the ordering of the input layers.
"""
self.input_layers = []
if hasattr(self.model, 'input_layers'):
input_keras_layers = self.model.input_layers[:]
self.input_layers = [No... |
Extract the ordering of output layers.
def make_output_layers(self):
"""
Extract the ordering of output layers.
"""
# TODO
# use successors == 0 as the criteria for output layer
# will fail when some intermediate layers also generate output.
# However, because th... |
Generate blob names for each one of the edges. At this time, Keras does not
support "fork" operation (a layer with more than 1 blob output). So we just
use names of the src layer to identify a blob. We also assume all neural
networks are singly-connected graphs - which should be the case.
def ... |
remove the layer and its input/output edges
def _remove_layer(self, layer):
"""
remove the layer and its input/output edges
"""
successors = self.get_successors(layer)
predecessors = self.get_predecessors(layer)
# remove all edges
for succ in successors:
... |
Insert the new_layer after layer, whose position is layer_idx. The new layer's
parameter is stored in a Keras layer called new_keras_layer
def _insert_layer_after(self, layer_idx, new_layer, new_keras_layer):
"""
Insert the new_layer after layer, whose position is layer_idx. The new layer's
... |
Insert the new_layer between the src and snk layers. The new layer's
parameter is stored in a Keras layer called new_keras_layer
def _insert_layer_between(self, src, snk, new_layer, new_keras_layer):
"""
Insert the new_layer between the src and snk layers. The new layer's
... |
Defuse the fused activation layers in the network.
def defuse_activation(self):
"""
Defuse the fused activation layers in the network.
"""
idx, nb_layers = 0, len(self.layer_list)
while idx < nb_layers:
layer = self.layer_list[idx]
k_layer = self.keras_la... |
Get edges that represents transition from not 1D to 1D, and 1D to not 1D
An 'in_edge e(u,v)' means u operates on non-1D blobs, but v operates on 1D blobs.
An 'out_edge e(u,v)' means u operates on 1D blobs, but v operates on non-1D blobs.
def _get_1d_interface_edges(self):
"""
Get edges t... |
Insert permutation layers before a 1D start point or after 1D end point
def insert_1d_permute_layers(self):
"""
Insert permutation layers before a 1D start point or after 1D end point
"""
idx, nb_layers = 0, len(self.layer_list)
in_edges, out_edges = self._get_1d_interface_edges... |
Replace the old node with the new one.
Old must be an indirect child of root
:param root: ast node that contains an indirect reference to old
:param old: node to replace
:param new: node to replace `old` with
def replace_nodes(root, old, new):
'''
Replace the old node with the new one. ... |
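A minimal sketch of how the replacement described above could be done with the standard ast module; the _Replacer class and the use of NodeTransformer are illustrative assumptions, not the project's actual implementation (ast.unparse needs Python 3.9+).

import ast

class _Replacer(ast.NodeTransformer):
    # Swap one specific node (compared by identity) for another while walking the tree.
    def __init__(self, old, new):
        self.old, self.new = old, new
    def visit(self, node):
        if node is self.old:
            return self.new
        return self.generic_visit(node)

def replace_nodes(root, old, new):
    return _Replacer(old, new).visit(root)

tree = ast.parse("x = 1 + 2")
old = tree.body[0].value                              # the BinOp `1 + 2`
replace_nodes(tree, old, ast.Constant(value=3))
print(ast.unparse(ast.fix_missing_locations(tree)))   # x = 3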
Report something about the component configuration that the user should be aware of.
def log_component_configuration(component, message):
"""Report something about component configuration that the user should better know."""
assert isinstance(component, basestring)
assert isinstance(message, basestring)
__... |
Create a Transformer object to transform data for feature engineering.
Parameters
----------
dataset : SFrame
The dataset to use for training the model.
transformers: Transformer | list[Transformer]
A Transformer or a list of Transformers.
See Also
--------
turicreate.to... |
Preprocess each example, breaking it up into frames.
Returns two numpy arrays: the preprocessed frames and their indexes
def _preprocess_data(audio_data, verbose=True):
'''
Preprocess each example, breaking it up into frames.
Returns two numpy arrays: the preprocessed frames and their indexes
... |
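A hedged numpy sketch of the framing idea described above; the frame and hop lengths are arbitrary stand-ins, and the real preprocessing (resampling, windowing, VGGish-specific steps) is not shown.

import numpy as np

def frame_signal(signal, frame_length, hop_length):
    # Split a 1-D signal into fixed-length frames, dropping any ragged tail.
    n_frames = 1 + (len(signal) - frame_length) // hop_length
    return np.stack([signal[i * hop_length:i * hop_length + frame_length]
                     for i in range(n_frames)])

signals = [np.random.randn(16000), np.random.randn(24000)]   # stand-in audio examples
frames, row_ids = [], []
for idx, sig in enumerate(signals):
    f = frame_signal(sig, frame_length=8000, hop_length=8000)
    frames.append(f)
    row_ids.extend([idx] * len(f))       # remember which example each frame came from

frames = np.concatenate(frames)
row_ids = np.array(row_ids)
print(frames.shape, row_ids)             # (5, 8000) [0 0 1 1 1]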
Parameters
----------
preprocessed_data : SArray
Returns
-------
numpy array containing the deep features
def _extract_features(self, preprocessed_data, verbose=True):
"""
Parameters
----------
preprocessed_data : SArray
Returns
... |
Performs both audio preprocessing and VGGish deep feature extraction.
def get_deep_features(self, audio_data, verbose):
'''
Performs both audio preprocessing and VGGish deep feature extraction.
'''
preprocessed_data, row_ids = self._preprocess_data(audio_data, verbose)
deep_feat... |
Return the Core ML spec
def get_spec(self):
"""
Return the Core ML spec
"""
if _mac_ver() >= (10, 14):
return self.vggish_model.get_spec()
else:
vggish_model_file = VGGish()
coreml_model_path = vggish_model_file.get_model_path(format='coreml')... |
Remove redundant statements.
The statement `a = 1` will be removed::
a = 1
a = 2
The statement `a = 1` will not be removed because `b` depends on it::
a = 1
b = a + 2
a = 2
:param root: ast node
def remove_trivial(root):
'''
R... |
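An illustrative sketch of the dead-assignment idea, not the project's ast-based implementation: an assignment is dropped when the same name is reassigned later in the block without being read in between (ast.unparse needs Python 3.9+).

import ast

def remove_trivial_assigns(source):
    tree = ast.parse(source)
    kept = []
    for i, stmt in enumerate(tree.body):
        if (isinstance(stmt, ast.Assign) and len(stmt.targets) == 1
                and isinstance(stmt.targets[0], ast.Name)):
            name = stmt.targets[0].id
            dead = False
            for later in tree.body[i + 1:]:
                # Any read of the name keeps the assignment alive.
                if any(isinstance(n, ast.Name) and n.id == name
                       and isinstance(n.ctx, ast.Load) for n in ast.walk(later)):
                    break
                # Reassigned before any read: the earlier assignment is dead.
                if (isinstance(later, ast.Assign) and len(later.targets) == 1
                        and isinstance(later.targets[0], ast.Name)
                        and later.targets[0].id == name):
                    dead = True
                    break
            if dead:
                continue
        kept.append(stmt)
    tree.body = kept
    return ast.unparse(tree)

print(remove_trivial_assigns("a = 1\na = 2"))              # a = 2
print(remove_trivial_assigns("a = 1\nb = a + 2\na = 2"))   # all three lines kept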
To prevent circular imports, this extends isinstance()
by also checking whether `value` has a particular class name (or inherits from a
particular class name). This check is safe in that an AttributeError is not
raised in case `value` doesn't have a __class__ attribute.
def safe_isinstance(value, types=None, cl... |
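A minimal sketch of the class-name check described above; the class_names parameter and its handling are assumptions, since the real signature is truncated here.

def safe_isinstance(value, types=None, class_names=None):
    if types is not None and isinstance(value, types):
        return True
    if class_names is not None:
        cls = getattr(value, '__class__', None)    # no AttributeError if missing
        if cls is not None:
            if isinstance(class_names, str):
                class_names = [class_names]
            mro_names = {c.__name__ for c in cls.__mro__}
            return any(name in mro_names for name in class_names)
    return False

print(safe_isinstance(3, types=(int,)))            # True
print(safe_isinstance(True, class_names='int'))    # True: bool inherits from int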
Makes a token to refer to a Python value inside Jam language code.
The token is merely a string that can be passed around in Jam code and
eventually passed back. For example, we might want to pass PropertySet
instance to a tag function and it might eventually call back
to virtual_target.add_suffix_and_... |
Abbreviates each part of string that is delimited by a '-'.
def abbreviate_dashed(s):
"""Abbreviates each part of string that is delimited by a '-'."""
r = []
for part in s.split('-'):
r.append(abbreviate(part))
return '-'.join(r) |
Apply a set of standard transformations to string to produce an
abbreviation no more than 4 characters long.
def abbreviate(s):
"""Apply a set of standard transformations to string to produce an
abbreviation no more than 4 characters long.
"""
if not s:
return ''
# check the cache
i... |
Get the decision from this node to a child node.
Parameters
----------
child: Node
A child node of this node.
Returns
-------
dict: A dictionary that describes how to get from this node to the
child node.
def get_decision(self, child, is_missing = F... |
Return the node as a dictionary.
Returns
-------
dict: All the attributes of this node as a dictionary (minus the left
and right).
def to_dict(self):
"""
Return the node as a dictionary.
Returns
-------
dict: All the attributes of this nod... |
Recursive function to dump this tree as a json blob.
Parameters
----------
root_id: Root id of the sub-tree
output: Carry over output from the previous sub-trees.
Returns
-------
dict: A tree in JSON format. Starts at the root node and recursively
repres... |
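A hedged sketch of the recursive dump: the node attributes and child pointers below are illustrative stand-ins for the tree structure the toolkit actually stores.

import json

def tree_to_dict(nodes, root_id=0):
    # Copy the node's attributes, then recurse into its children.
    node = dict(nodes[root_id])
    left, right = node.pop('left_id', None), node.pop('right_id', None)
    if left is not None:
        node['left'] = tree_to_dict(nodes, left)
    if right is not None:
        node['right'] = tree_to_dict(nodes, right)
    return node

nodes = {
    0: {'id': 0, 'feature': 'x', 'threshold': 1.5, 'left_id': 1, 'right_id': 2},
    1: {'id': 1, 'value': 0.2},
    2: {'id': 2, 'value': 0.8},
}
print(json.dumps(tree_to_dict(nodes), indent=2))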
Return the prediction score (if leaf node) or None if it's an
intermediate node.
Parameters
----------
node_id: id of the node to get the prediction value.
Returns
-------
float or None: returns float value of prediction if leaf node and None
if not.
... |
Return the prediction path from this node to the parent node.
Parameters
----------
node_id : id of the node to get the prediction path.
missing_id : Additional info that contains nodes with missing features.
Returns
-------
list: The list of decisions (top t... |
Given a weighted graph with observed class labels of a subset of vertices,
infer the label probability for the unobserved vertices using the
"label propagation" algorithm.
The algorithm iteratively updates the label probability of current vertex
as a weighted sum of label probability of self and the ne... |
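A hedged numpy sketch of one possible label-propagation update on a tiny weighted graph; the self-weight and normalization details are assumptions, and the toolkit's SGraph-based implementation differs.

import numpy as np

W = np.array([[0.0, 1.0, 0.0],
              [1.0, 0.0, 2.0],
              [0.0, 2.0, 0.0]])              # symmetric edge weights
labels = np.array([[1.0, 0.0],                # vertex 0 observed as class 0
                   [0.5, 0.5],                # vertex 1 unobserved
                   [0.0, 1.0]])               # vertex 2 observed as class 1
observed = np.array([True, False, True])
self_weight = 1.0

for _ in range(20):
    # New probability = weighted sum of own and neighbors' probabilities.
    new = self_weight * labels + W @ labels
    new /= new.sum(axis=1, keepdims=True)
    new[observed] = labels[observed]          # observed labels stay fixed
    labels = new

print(labels.round(3))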
Check if a Turi Create model is pickle safe.
The function does it by checking that _CustomModel is the base class.
Parameters
----------
obj_class : Class to be checked.
Returns
----------
True if the GLC class is a model and is pickle safe.
def _is_not_pickle_safe_gl_model_class(obj_... |
Check if a class is a Turi Create model.
The function does it by checking the method resolution order (MRO) of the
class and verifies that _Model is the base class.
Parameters
----------
obj_class : Class to be checked.
Returns
----------
True if the class is a GLC Model.
def _is_no... |
Internal util to get the type of the GLC class. The pickle file stores
this name so that it knows how to construct the object on unpickling.
Parameters
----------
obj_class : Class which has to be categorized.
Returns
----------
A class type for the pickle file to save.
def _get_gl_cla... |
Internal util to get a GLC object from a persistent ID in the pickle file.
Parameters
----------
type_tag : The name of the glc class as saved in the GLC pickler.
gl_archive_abs_path: An absolute path to the GLC archive where the
object was saved.
Returns
----------
... |
Provide a persistent ID for "saving" GLC objects by reference. Return
None for all non-GLC objects.
Parameters
----------
obj: Name of the object whose persistent ID is extracted.
Returns
--------
None if the object is not a GLC object. (ClassName, relative pat... |
Close the pickle file, and the zip archive file. The single zip archive
file can now be shipped around to be loaded by the unpickler.
def close(self):
"""
Close the pickle file, and the zip archive file. The single zip archive
file can now be shipped around to be loaded by the unpickler... |
Reconstruct a GLC object using the persistent ID.
This method should not be used externally. It is required by the unpickler super class.
Parameters
----------
pid : The persistent ID used in pickle file to save the GLC object.
Returns
----------
The GLC o... |
Clean up files that were created.
def close(self):
"""
Clean up files that were created.
"""
if self.file:
self.file.close()
self.file = None
# If temp_file is a folder, we do not remove it because we may
# still need it after the unpickler is di... |
Convert scikit-learn pipeline, classifier, or regressor to Core ML format.
Parameters
----------
sk_obj: model | [model] of scikit-learn format.
Scikit learn model(s) to convert to a Core ML format.
The input model may be a single scikit learn model, a scikit learn
pipeline model, ... |
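A hedged usage sketch for the converter described above; the argument names follow the public coremltools API as commonly documented and may differ between versions, and the output file name is illustrative.

from sklearn.linear_model import LinearRegression
import coremltools

sk_model = LinearRegression().fit([[0.0], [1.0], [2.0]], [0.0, 1.0, 2.0])
coreml_model = coremltools.converters.sklearn.convert(
    sk_model, input_features=['x'], output_feature_names='y')
coreml_model.save('linear_regression.mlmodel')   # illustrative file name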
Generate a new Message instance from this Descriptor and a byte string.
Args:
descriptor: Protobuf Descriptor object
byte_str: Serialized protocol buffer byte string
Returns:
Newly created protobuf Message object.
def ParseMessage(descriptor, byte_str):
"""Generate a new Message instance from this ... |
Construct a class object for a protobuf described by descriptor.
Composite descriptors are handled by defining the new class as a member of the
parent class, recursing as deep as necessary.
This is the dynamic equivalent to:
class Parent(message.Message):
__metaclass__ = GeneratedProtocolMessageType
D... |
Loads images from a directory. JPEG and PNG images are supported.
Parameters
----------
url : str
The string of the path where all the images are stored.
format : {'PNG' | 'JPG' | 'auto'}, optional
The format of the images in the directory. The default 'auto' parameter
value tr... |
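A hedged usage sketch, assuming the turicreate.image_analysis.load_images entry point; the directory path is illustrative and the keyword arguments are from memory, so they may vary by release.

import turicreate as tc

# './images' is an illustrative path to a directory of JPEG/PNG files.
image_sframe = tc.image_analysis.load_images('./images', format='auto',
                                             with_path=True, recursive=True)
print(image_sframe.head())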
Internal helper function for decoding a single Image or an SArray of Images
def _decode(image_data):
"""
Internal helper function for decoding a single Image or an SArray of Images
"""
from ...data_structures.sarray import SArray as _SArray
from ... import extensions as _extensions
if type(imag... |
Resizes the image or SArray of Images to a specific width, height, and
number of channels.
Parameters
----------
image : turicreate.Image | SArray
The image or SArray of images to be resized.
width : int
The width the image is resized to.
height : int
The height the ima... |
Convert bit array to byte array.
:param arr: list
Bits as a list where each element is an integer of 0 or 1
Returns
-------
numpy.array
1D numpy array of type uint8
def _convert_1bit_array_to_byte_array(arr):
"""
Convert bit array to byte array.
:param arr: list
B... |
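numpy's packbits covers the conversion described above; a short hedged equivalent of the helper:

import numpy as np

bits = [1, 0, 1, 1, 0, 0, 1, 0, 1]                  # 9 bits, zero-padded to 2 bytes
byte_arr = np.packbits(np.array(bits, dtype=np.uint8))
print(byte_arr)                                     # [178 128]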
Unpack bytes to bits
:param arr: list
Byte Stream, as a list of uint8 values
Returns
-------
bit_arr: list
Decomposed bit stream as a list of 0/1s of length (len(arr) * 8)
def _decompose_bytes_to_bit_arr(arr):
"""
Unpack bytes to bits
:param arr: list
Byte Stream,... |
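The reverse direction, again with numpy; the result has length len(arr) * 8 as the docstring states.

import numpy as np

byte_stream = np.array([178, 128], dtype=np.uint8)
bit_arr = np.unpackbits(byte_stream)
print(bit_arr)         # [1 0 1 1 0 0 1 0 1 0 0 0 0 0 0 0]
print(len(bit_arr))    # 16 == len(byte_stream) * 8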
Generate a linear lookup table.
:param nbits: int
Number of bits to represent a quantized weight value
:param wp: numpy.array
Weight blob to be quantized
Returns
-------
lookup_table: numpy.array
Lookup table of shape (2^nbits, )
qw: numpy.array
Decomposed bit ... |
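A hedged numpy sketch of the linear lookup-table idea: evenly spaced centers between the blob's minimum and maximum, with each weight mapped to the index of its nearest center. The real helper's return types and edge-case handling may differ.

import numpy as np

def linear_lut(wp, nbits):
    lookup_table = np.linspace(wp.min(), wp.max(), 1 << nbits)
    # Index of the nearest lookup-table entry for every weight value.
    qw = np.argmin(np.abs(wp.reshape(-1, 1) - lookup_table.reshape(1, -1)), axis=1)
    return lookup_table, qw.astype(np.uint8)

w = np.array([-1.0, -0.1, 0.0, 0.4, 1.0])
lut, qw = linear_lut(w, nbits=2)
print(lut)   # 4 evenly spaced centers between -1.0 and 1.0
print(qw)    # nearest-center index per weight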
Generate K-Means lookup table given a weight parameter field
:param nbits:
Number of bits for quantization
:param w:
Weight as numpy array
Returns
-------
lut: numpy.array
Lookup table, numpy array of shape (1 << nbits, );
wq: numpy.array
Quantized weight of ty... |
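A hedged sketch of the k-means variant, using scikit-learn for the clustering (the toolkit may cluster differently): the cluster centers form the lookup table and the cluster labels are the quantized weights.

import numpy as np
from sklearn.cluster import KMeans

def kmeans_lut(w, nbits):
    n_clusters = min(1 << nbits, len(np.unique(w)))
    km = KMeans(n_clusters=n_clusters, n_init=10).fit(w.reshape(-1, 1))
    lut = np.zeros(1 << nbits)                  # table padded to 2^nbits entries
    lut[:n_clusters] = km.cluster_centers_.ravel()
    return lut, km.labels_.astype(np.uint8)

w = np.random.randn(64)
lut, wq = kmeans_lut(w, nbits=3)
print(lut.shape, wq.shape)                      # (8,) (64,)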
Linearly quantize weight blob.
:param weight: numpy.array
Weight to be quantized.
:param nbits: int
Number of bits per weight element
:param axis: int
Axis of the weight blob to compute channel-wise quantization, can be 0 or 1
Returns
-------
quantized_weight: numpy.a... |
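A hedged numpy sketch of channel-wise linear quantization along `axis`: each channel gets its own scale and offset so its values map onto the 2^nbits integer levels. The real routine's output layout (scale/bias shapes) may differ.

import numpy as np

def linear_quantize(weight, nbits, axis=0):
    levels = (1 << nbits) - 1
    reduce_axes = tuple(i for i in range(weight.ndim) if i != axis)
    w_min = weight.min(axis=reduce_axes, keepdims=True)
    w_max = weight.max(axis=reduce_axes, keepdims=True)
    scale = (w_max - w_min) / levels
    # uint8 storage assumes nbits <= 8.
    quantized_weight = np.round((weight - w_min) / scale).astype(np.uint8)
    return quantized_weight, scale, w_min

w = np.random.randn(4, 8).astype(np.float32)
q, scale, bias = linear_quantize(w, nbits=8, axis=0)
print(q.dtype, q.shape, scale.shape)            # uint8 (4, 8) (4, 1)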
Quantize the weight blob
:param wp: numpy.array
Weight parameters
:param nbits: int
Number of bits
:param qm:
Quantization mode
:param lut_function: (``callable function``)
Python callable representing a look-up table
Returns
-------
scale: numpy.array
... |
Quantize WeightParam field in Neural Network Protobuf
:param wp: MLModel.NeuralNetwork.WeightParam
WeightParam field
:param nbits: int
Number of bits to be quantized
:param qm: str
Quantization mode
:param shape: tuple
Tensor shape held by wp
:param axis: int
... |
Utility function to compare the performance of a full precision vs quantized model
:param full_precision_model: MLModel
The full precision model with float32 weights
:param quantized_model: MLModel
Quantized version of the model with quantized weights
:param sample_data: str | [dict]
... |
Utility function to convert a full precision (float) MLModel to an
nbit quantized MLModel (float16).
:param full_precision_model: MLModel
Model which will be converted to half precision. Currently conversion
for only neural network models is supported. If a pipeline model is
passed in th... |
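A hedged usage sketch, assuming the coremltools quantization utilities; keyword names can differ across coremltools versions, and the model paths are illustrative.

import coremltools
from coremltools.models.neural_network import quantization_utils

model = coremltools.models.MLModel('my_model.mlmodel')          # illustrative path
quantized_model = quantization_utils.quantize_weights(model, nbits=16)
quantized_model.save('my_model_fp16.mlmodel')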
Create a recommender that uses item-item similarities based on
users in common.
Parameters
----------
observation_data : SFrame
The dataset to use for training the model. It must contain a column of
user ids and a column of item ids. Each row represents an observed
interaction b... |
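A hedged usage sketch for the recommender described above; the column names are illustrative and the call follows the public turicreate API as commonly documented.

import turicreate as tc

observation_data = tc.SFrame({'user_id': ['a', 'a', 'b', 'c'],
                              'item_id': ['x', 'y', 'x', 'z']})
model = tc.recommender.item_similarity_recommender.create(
    observation_data, user_id='user_id', item_id='item_id')
print(model.recommend(users=['b'], k=2))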
Get the keras layer name from the activation name.
def _get_elementwise_name_from_keras_layer(keras_layer):
"""
Get the keras layer name from the activation name.
"""
if isinstance(keras_layer, _keras.layers.Add):
return 'ADD'
elif isinstance(keras_layer, _keras.layers.Multiply):
re... |
Convert a dense layer from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
def convert_dense(builder, layer, input_names, output_names, keras_layer):
"""
Convert a dense layer from k... |
Convert an embedding layer from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
def convert_embedding(builder, layer, input_names, output_names, keras_layer):
"""Convert a dense layer from ke... |
Convert an activation layer from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
def convert_activation(builder, layer, input_names, output_names, keras_layer):
"""
Convert an activa... |
Convert a ReLU layer with maximum value from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
def convert_advanced_relu(builder, layer, input_names, output_names, keras_layer):
"""
C... |
Convert convolution layer from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object.
def convert_convolution(builder, layer, input_names, output_names, keras_layer):
"""
Convert convolutio... |