Dataset Viewer
Auto-converted to Parquet Duplicate
prompt_id
int64
0
941
project
stringclasses
24 values
module
stringlengths
7
49
class
stringlengths
0
32
method
stringlengths
2
37
focal_method_txt
stringlengths
43
41.5k
focal_method_lines
listlengths
2
2
in_stack
bool
2 classes
globals
listlengths
0
16
type_context
stringlengths
79
41.9k
has_branch
bool
2 classes
total_branches
int64
0
3
0
apimd
apimd.loader
walk_packages
def walk_packages(name: str, path: str) -> Iterator[tuple[str, str]]: """Walk packages without import them.""" path = abspath(path) + sep valid = (path + name, path + name + PEP561_SUFFIX) for root, _, fs in walk(path): for f in fs: if not f.endswith(('.py', '.pyi')): ...
[ 43, 59 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "PEP561_SUFFIX" ]
from typing import Optional from collections.abc import Sequence, Iterator from sys import path as sys_path from os import mkdir, walk from os.path import isdir, isfile, abspath, join, sep, dirname from importlib.abc import Loader from importlib.machinery import EXTENSION_SUFFIXES from importlib.util import find_spec, ...
true
2
1
apimd
apimd.loader
loader
def loader(root: str, pwd: str, link: bool, level: int, toc: bool) -> str: """Package searching algorithm.""" p = Parser.new(link, level, toc) for name, path in walk_packages(root, pwd): # Load its source or stub pure_py = False for ext in [".py", ".pyi"]: path_ext = path...
[ 78, 105 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "PEP561_SUFFIX" ]
from typing import Optional from collections.abc import Sequence, Iterator from sys import path as sys_path from os import mkdir, walk from os.path import isdir, isfile, abspath, join, sep, dirname from importlib.abc import Loader from importlib.machinery import EXTENSION_SUFFIXES from importlib.util import find_spec, ...
true
2
2
apimd
apimd.parser
is_public_family
def is_public_family(name: str) -> bool: """Check the name is come from public modules or not.""" for n in name.split('.'): # Magic name if is_magic(n): continue # Local or private name if n.startswith('_'): return False return True
[ 61, 70 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
3
apimd
apimd.parser
walk_body
def walk_body(body: Sequence[stmt]) -> Iterator[stmt]: """Traverse around body and its simple definition scope.""" for node in body: if isinstance(node, If): yield from walk_body(node.body) yield from walk_body(node.orelse) elif isinstance(node, Try): yield fr...
[ 73, 86 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
4
apimd
apimd.parser
esc_underscore
def esc_underscore(doc: str) -> str: """Escape underscore in names.""" if doc.count('_') > 1: return doc.replace('_', r"\_") else: return doc
[ 100, 105 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
5
apimd
apimd.parser
doctest
def doctest(doc: str) -> str: """Wrap doctest as markdown Python code.""" keep = False docs = [] lines = doc.splitlines() for i, line in enumerate(lines): signed = line.startswith(">>> ") if signed: if not keep: docs.append("```python") kee...
[ 108, 126 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
6
apimd
apimd.parser
table
def table(*titles: str, items: Iterable[Union[str, Iterable[str]]]) -> str: """Create multi-column table with the titles. Usage: >>> table('a', 'b', [['c', 'd'], ['e', 'f']]) | a | b | |:---:|:---:| | c | d | | e | f | """ return '\n'.join([_table_cell(titles), _table_split(titles),...
[ 140, 150 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
false
0
7
apimd
apimd.parser
const_type
def const_type(node: expr) -> str: """Constant type inference.""" if isinstance(node, Constant): return _type_name(node.value) elif isinstance(node, (Tuple, List, Set)): return _type_name(node).lower() + _e_type(node.elts) elif isinstance(node, Dict): return 'dict' + _e_type(node...
[ 181, 194 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
8
apimd
apimd.parser
Resolver
visit_Constant
def visit_Constant(self, node: Constant) -> AST: """Check string is a name.""" if not isinstance(node.value, str): return node try: e = cast(Expr, parse(node.value).body[0]) except SyntaxError: return node else: return self.visi...
[ 207, 216 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
9
apimd
apimd.parser
Resolver
visit_Name
def visit_Name(self, node: Name) -> AST: """Replace global names with its expression recursively.""" if node.id == self.self_ty: return Name("Self", Load()) name = _m(self.root, node.id) if name in self.alias and name not in self.alias[name]: e = cast(Expr, pa...
[ 218, 233 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
10
apimd
apimd.parser
Resolver
visit_Subscript
def visit_Subscript(self, node: Subscript) -> AST: """Implementation of PEP585 and PEP604.""" if not isinstance(node.value, Name): return node name = node.value.id idf = self.alias.get(_m(self.root, name), name) if idf == 'typing.Union': if not isinsta...
[ 235, 256 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
11
apimd
apimd.parser
Resolver
visit_Attribute
def visit_Attribute(self, node: Attribute) -> AST: """Remove `typing.*` prefix of annotation.""" if not isinstance(node.value, Name): return node if node.value.id == 'typing': return Name(node.attr, Load()) else: return node
[ 258, 265 ]
false
[ "__author__", "__copyright__", "__license__", "__email__", "_I", "_G", "_API", "ANY" ]
from typing import cast, TypeVar, Union, Optional from types import ModuleType from collections.abc import Sequence, Iterable, Iterator from itertools import chain from dataclasses import dataclass, field from inspect import getdoc from ast import ( parse, unparse, get_docstring, AST, FunctionDef, AsyncFunctionDef,...
true
2
12
codetiming
codetiming._timers
Timers
apply
def apply(self, func: Callable[[List[float]], float], name: str) -> float: """Apply a function to the results of one named timer""" if name in self._timings: return func(self._timings[name]) raise KeyError(name)
[ 41, 45 ]
false
[]
import collections import math import statistics from typing import TYPE_CHECKING, Any, Callable, Dict, List class Timers(UserDict): def __init__(self, *args: Any, **kwargs: Any) -> None: """Add a private dictionary keeping track of all timings""" super().__init__(*args, **kwargs) self._...
true
2
13
codetiming
codetiming._timers
Timers
min
def min(self, name: str) -> float: """Minimal value of timings""" return self.apply(lambda values: min(values or [0]), name=name)
[ 55, 57 ]
false
[]
import collections import math import statistics from typing import TYPE_CHECKING, Any, Callable, Dict, List class Timers(UserDict): def __init__(self, *args: Any, **kwargs: Any) -> None: """Add a private dictionary keeping track of all timings""" super().__init__(*args, **kwargs) self._...
false
0
14
codetiming
codetiming._timers
Timers
max
def max(self, name: str) -> float: """Maximal value of timings""" return self.apply(lambda values: max(values or [0]), name=name)
[ 59, 61 ]
false
[]
import collections import math import statistics from typing import TYPE_CHECKING, Any, Callable, Dict, List class Timers(UserDict): def __init__(self, *args: Any, **kwargs: Any) -> None: """Add a private dictionary keeping track of all timings""" super().__init__(*args, **kwargs) self._...
false
0
15
codetiming
codetiming._timers
Timers
mean
def mean(self, name: str) -> float: """Mean value of timings""" return self.apply(lambda values: statistics.mean(values or [0]), name=name)
[ 63, 65 ]
false
[]
import collections import math import statistics from typing import TYPE_CHECKING, Any, Callable, Dict, List class Timers(UserDict): def __init__(self, *args: Any, **kwargs: Any) -> None: """Add a private dictionary keeping track of all timings""" super().__init__(*args, **kwargs) self._...
false
0
16
codetiming
codetiming._timers
Timers
median
def median(self, name: str) -> float: """Median value of timings""" return self.apply(lambda values: statistics.median(values or [0]), name=name)
[ 67, 69 ]
false
[]
import collections import math import statistics from typing import TYPE_CHECKING, Any, Callable, Dict, List class Timers(UserDict): def __init__(self, *args: Any, **kwargs: Any) -> None: """Add a private dictionary keeping track of all timings""" super().__init__(*args, **kwargs) self._...
false
0
17
codetiming
codetiming._timers
Timers
stdev
def stdev(self, name: str) -> float: """Standard deviation of timings""" if name in self._timings: value = self._timings[name] return statistics.stdev(value) if len(value) >= 2 else math.nan raise KeyError(name)
[ 71, 76 ]
false
[]
import collections import math import statistics from typing import TYPE_CHECKING, Any, Callable, Dict, List class Timers(UserDict): def __init__(self, *args: Any, **kwargs: Any) -> None: """Add a private dictionary keeping track of all timings""" super().__init__(*args, **kwargs) self._...
true
2
18
cookiecutter
cookiecutter.find
find_template
def find_template(repo_dir): """Determine which child directory of `repo_dir` is the project template. :param repo_dir: Local directory of newly cloned repo. :returns project_template: Relative path to project template. """ logger.debug('Searching %s for the project template.', repo_dir) repo_...
[ 9, 30 ]
false
[ "logger" ]
import logging import os from cookiecutter.exceptions import NonTemplatedInputDirException logger = logging.getLogger(__name__) def find_template(repo_dir): """Determine which child directory of `repo_dir` is the project template. :param repo_dir: Local directory of newly cloned repo. :returns project_te...
true
2
19
cookiecutter
cookiecutter.prompt
read_user_choice
def read_user_choice(var_name, options): """Prompt the user to choose from several options for the given variable. The first item will be returned if no input happens. :param str var_name: Variable as specified in the context :param list options: Sequence of options that are available to select from ...
[ 43, 77 ]
false
[]
import json from collections import OrderedDict import click from jinja2.exceptions import UndefinedError from cookiecutter.environment import StrictEnvironment from cookiecutter.exceptions import UndefinedVariableInTemplate def read_user_choice(var_name, options): """Prompt the user to choose from several optio...
true
2
20
cookiecutter
cookiecutter.prompt
process_json
def process_json(user_value): """Load user-supplied value as a JSON dict. :param str user_value: User-supplied value to load as a JSON dict """ try: user_dict = json.loads(user_value, object_pairs_hook=OrderedDict) except Exception: # Leave it up to click to ask the user again ...
[ 80, 95 ]
false
[]
import json from collections import OrderedDict import click from jinja2.exceptions import UndefinedError from cookiecutter.environment import StrictEnvironment from cookiecutter.exceptions import UndefinedVariableInTemplate def process_json(user_value): """Load user-supplied value as a JSON dict. :param st...
true
2
21
cookiecutter
cookiecutter.prompt
read_user_dict
def read_user_dict(var_name, default_value): """Prompt the user to provide a dictionary of data. :param str var_name: Variable as specified in the context :param default_value: Value that will be returned if no input is provided :return: A Python dictionary to use in the context. """ # Please s...
[ 98, 118 ]
false
[]
import json from collections import OrderedDict import click from jinja2.exceptions import UndefinedError from cookiecutter.environment import StrictEnvironment from cookiecutter.exceptions import UndefinedVariableInTemplate def read_user_dict(var_name, default_value): """Prompt the user to provide a dictionary ...
true
2
22
cookiecutter
cookiecutter.prompt
render_variable
def render_variable(env, raw, cookiecutter_dict): """Render the next variable to be displayed in the user prompt. Inside the prompting taken from the cookiecutter.json file, this renders the next variable. For example, if a project_name is "Peanut Butter Cookie", the repo_name could be be rendered with...
[ 121, 155 ]
false
[]
import json from collections import OrderedDict import click from jinja2.exceptions import UndefinedError from cookiecutter.environment import StrictEnvironment from cookiecutter.exceptions import UndefinedVariableInTemplate def render_variable(env, raw, cookiecutter_dict): """Render the next variable to be disp...
true
2
23
cookiecutter
cookiecutter.prompt
prompt_choice_for_config
def prompt_choice_for_config(cookiecutter_dict, env, key, options, no_input): """Prompt user with a set of options to choose from. Each of the possible choices is rendered beforehand. """ rendered_options = [render_variable(env, raw, cookiecutter_dict) for raw in options] if no_input: retu...
[ 158, 167 ]
false
[]
import json from collections import OrderedDict import click from jinja2.exceptions import UndefinedError from cookiecutter.environment import StrictEnvironment from cookiecutter.exceptions import UndefinedVariableInTemplate def prompt_choice_for_config(cookiecutter_dict, env, key, options, no_input): """Prompt ...
true
2
24
cookiecutter
cookiecutter.prompt
prompt_for_config
def prompt_for_config(context, no_input=False): """Prompt user to enter a new config. :param dict context: Source for field names and sample values. :param no_input: Prompt the user at command line for manual configuration? """ cookiecutter_dict = OrderedDict([]) env = StrictEnvironment(context...
[ 170, 228 ]
false
[]
import json from collections import OrderedDict import click from jinja2.exceptions import UndefinedError from cookiecutter.environment import StrictEnvironment from cookiecutter.exceptions import UndefinedVariableInTemplate def prompt_for_config(context, no_input=False): """Prompt user to enter a new config. ...
true
2
25
cookiecutter
cookiecutter.replay
get_file_name
def get_file_name(replay_dir, template_name): """Get the name of file.""" suffix = '.json' if not template_name.endswith('.json') else '' file_name = '{}{}'.format(template_name, suffix) return os.path.join(replay_dir, file_name)
[ 11, 15 ]
false
[]
import json import os from cookiecutter.utils import make_sure_path_exists def get_file_name(replay_dir, template_name): """Get the name of file.""" suffix = '.json' if not template_name.endswith('.json') else '' file_name = '{}{}'.format(template_name, suffix) return os.path.join(replay_dir, file_na...
false
0
26
cookiecutter
cookiecutter.replay
dump
def dump(replay_dir, template_name, context): """Write json data to file.""" if not make_sure_path_exists(replay_dir): raise IOError('Unable to create replay dir at {}'.format(replay_dir)) if not isinstance(template_name, str): raise TypeError('Template name is required to be of type str') ...
[ 18, 35 ]
false
[]
import json import os from cookiecutter.utils import make_sure_path_exists def dump(replay_dir, template_name, context): """Write json data to file.""" if not make_sure_path_exists(replay_dir): raise IOError('Unable to create replay dir at {}'.format(replay_dir)) if not isinstance(template_name,...
true
2
27
cookiecutter
cookiecutter.replay
load
def load(replay_dir, template_name): """Read json data from file.""" if not isinstance(template_name, str): raise TypeError('Template name is required to be of type str') replay_file = get_file_name(replay_dir, template_name) with open(replay_file, 'r') as infile: context = json.load(i...
[ 38, 51 ]
false
[]
import json import os from cookiecutter.utils import make_sure_path_exists def load(replay_dir, template_name): """Read json data from file.""" if not isinstance(template_name, str): raise TypeError('Template name is required to be of type str') replay_file = get_file_name(replay_dir, template_n...
true
2
28
cookiecutter
cookiecutter.repository
expand_abbreviations
def expand_abbreviations(template, abbreviations): """Expand abbreviations in a template name. :param template: The project template name. :param abbreviations: Abbreviation definitions. """ if template in abbreviations: return abbreviations[template] # Split on colon. If there is no c...
[ 30, 45 ]
false
[ "REPO_REGEX" ]
import os import re from cookiecutter.exceptions import RepositoryNotFound from cookiecutter.vcs import clone from cookiecutter.zipfile import unzip REPO_REGEX = re.compile( r""" # something like git:// ssh:// file:// etc. ((((git|hg)\+)?(git|ssh|file|https?):(//)?) | # or (\...
true
2
29
cookiecutter
cookiecutter.repository
repository_has_cookiecutter_json
def repository_has_cookiecutter_json(repo_directory): """Determine if `repo_directory` contains a `cookiecutter.json` file. :param repo_directory: The candidate repository directory. :return: True if the `repo_directory` is valid, else False. """ repo_directory_exists = os.path.isdir(repo_directory...
[ 48, 59 ]
false
[ "REPO_REGEX" ]
import os import re from cookiecutter.exceptions import RepositoryNotFound from cookiecutter.vcs import clone from cookiecutter.zipfile import unzip REPO_REGEX = re.compile( r""" # something like git:// ssh:// file:// etc. ((((git|hg)\+)?(git|ssh|file|https?):(//)?) | # or (\...
false
0
30
cookiecutter
cookiecutter.repository
determine_repo_dir
def determine_repo_dir( template, abbreviations, clone_to_dir, checkout, no_input, password=None, directory=None, ): """ Locate the repository directory from a template reference. Applies repository abbreviations to the template reference. If the template refers to a reposit...
[ 62, 126 ]
false
[ "REPO_REGEX" ]
import os import re from cookiecutter.exceptions import RepositoryNotFound from cookiecutter.vcs import clone from cookiecutter.zipfile import unzip REPO_REGEX = re.compile( r""" # something like git:// ssh:// file:// etc. ((((git|hg)\+)?(git|ssh|file|https?):(//)?) | # or (\...
true
2
31
cookiecutter
cookiecutter.zipfile
unzip
def unzip(zip_uri, is_url, clone_to_dir='.', no_input=False, password=None): """Download and unpack a zipfile at a given URI. This will download the zipfile to the cookiecutter repository, and unpack into a temporary directory. :param zip_uri: The URI for the zipfile. :param is_url: Is the zip URI...
[ 12, 111 ]
false
[]
import os import tempfile from zipfile import BadZipFile, ZipFile import requests from cookiecutter.exceptions import InvalidZipRepository from cookiecutter.prompt import read_repo_password from cookiecutter.utils import make_sure_path_exists, prompt_and_delete def unzip(zip_uri, is_url, clone_to_dir='.', no_input=F...
true
2
32
dataclasses_json
dataclasses_json.cfg
config
def config(metadata: dict = None, *, # TODO: these can be typed more precisely # Specifically, a Callable[A, B], where `B` is bound as a JSON type encoder: Callable = None, decoder: Callable = None, mm_field: MarshmallowField = None, letter_case: Callabl...
[ 43, 96 ]
false
[ "T", "global_config" ]
import functools from typing import Callable, Dict, Optional, TypeVar, Union from marshmallow.fields import Field as MarshmallowField from dataclasses_json.undefined import Undefined, UndefinedParameterError T = TypeVar("T") global_config = _GlobalConfig() def config(metadata: dict = None, *, # TODO: these...
true
2
33
dataclasses_json
dataclasses_json.core
_ExtendedEncoder
default
def default(self, o) -> Json: result: Json if _isinstance_safe(o, Collection): if _isinstance_safe(o, Mapping): result = dict(o) else: result = list(o) elif _isinstance_safe(o, datetime): result = o.timestamp() elif ...
[ 32, 49 ]
false
[ "Json", "confs", "FieldOverride" ]
import copy import json import warnings from collections import defaultdict, namedtuple from dataclasses import (MISSING, _is_dataclass_instance, fields, is_dataclass # type: ignore ) from datetime import datetime, time...
true
2
34
dataclasses_json
dataclasses_json.mm
build_type
def build_type(type_, options, mixin, field, cls): def inner(type_, options): while True: if not _is_new_type(type_): break type_ = type_.__supertype__ if is_dataclass(type_): if _issubclass_safe(type_, mixin): options['field_many...
[ 226, 274 ]
false
[ "TYPES", "A", "JsonData", "TEncoded", "TOneOrMulti", "TOneOrMultiEncoded" ]
import typing import warnings import sys from copy import deepcopy from dataclasses import MISSING, is_dataclass, fields as dc_fields from datetime import datetime from decimal import Decimal from uuid import UUID from enum import Enum from typing_inspect import is_union_type from marshmallow import fields, Schema, pos...
true
2
35
dataclasses_json
dataclasses_json.mm
schema
def schema(cls, mixin, infer_missing): schema = {} overrides = _user_overrides_or_exts(cls) # TODO check the undefined parameters and add the proper schema action # https://marshmallow.readthedocs.io/en/stable/quickstart.html for field in dc_fields(cls): metadata = (field.metadata or {}).ge...
[ 277, 314 ]
false
[ "TYPES", "A", "JsonData", "TEncoded", "TOneOrMulti", "TOneOrMultiEncoded" ]
import typing import warnings import sys from copy import deepcopy from dataclasses import MISSING, is_dataclass, fields as dc_fields from datetime import datetime from decimal import Decimal from uuid import UUID from enum import Enum from typing_inspect import is_union_type from marshmallow import fields, Schema, pos...
true
2
36
dataclasses_json
dataclasses_json.mm
build_schema
def build_schema(cls: typing.Type[A], mixin, infer_missing, partial) -> typing.Type[SchemaType]: Meta = type('Meta', (), {'fields': tuple(field.name for field in dc_fields(cls) if ...
[ 317, 368 ]
false
[ "TYPES", "A", "JsonData", "TEncoded", "TOneOrMulti", "TOneOrMultiEncoded" ]
import typing import warnings import sys from copy import deepcopy from dataclasses import MISSING, is_dataclass, fields as dc_fields from datetime import datetime from decimal import Decimal from uuid import UUID from enum import Enum from typing_inspect import is_union_type from marshmallow import fields, Schema, pos...
true
2
37
dataclasses_json
dataclasses_json.undefined
_UndefinedParameterAction
handle_from_dict
@staticmethod @abc.abstractmethod def handle_from_dict(cls, kvs: Dict[Any, Any]) -> Dict[str, Any]: """ Return the parameters to initialize the class with. """ pass
[ 19, 23 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
false
0
38
dataclasses_json
dataclasses_json.undefined
_UndefinedParameterAction
handle_to_dict
@staticmethod def handle_to_dict(obj, kvs: Dict[Any, Any]) -> Dict[Any, Any]: """ Return the parameters that will be written to the output dict """ return kvs
[ 26, 30 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
false
0
39
dataclasses_json
dataclasses_json.undefined
_UndefinedParameterAction
handle_dump
@staticmethod def handle_dump(obj) -> Dict[Any, Any]: """ Return the parameters that will be added to the schema dump. """ return {}
[ 33, 37 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
false
0
40
dataclasses_json
dataclasses_json.undefined
_UndefinedParameterAction
create_init
@staticmethod def create_init(obj) -> Callable: return obj.__init__
[ 40, 41 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
false
0
41
dataclasses_json
dataclasses_json.undefined
_RaiseUndefinedParameters
handle_from_dict
@staticmethod def handle_from_dict(cls, kvs: Dict) -> Dict[str, Any]: known, unknown = \ _UndefinedParameterAction._separate_defined_undefined_kvs( cls=cls, kvs=kvs) if len(unknown) > 0: raise UndefinedParameterError( f"Received undefined i...
[ 65, 72 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
true
2
42
dataclasses_json
dataclasses_json.undefined
_IgnoreUndefinedParameters
handle_from_dict
@staticmethod def handle_from_dict(cls, kvs: Dict) -> Dict[str, Any]: known_given_parameters, _ = \ _UndefinedParameterAction._separate_defined_undefined_kvs( cls=cls, kvs=kvs) return known_given_parameters
[ 86, 90 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
false
0
43
dataclasses_json
dataclasses_json.undefined
_IgnoreUndefinedParameters
create_init
@staticmethod def create_init(obj) -> Callable: original_init = obj.__init__ init_signature = inspect.signature(original_init) @functools.wraps(obj.__init__) def _ignore_init(self, *args, **kwargs): known_kwargs, _ = \ _CatchAllUndefinedParameters._se...
[ 93, 117 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
false
0
44
dataclasses_json
dataclasses_json.undefined
_CatchAllUndefinedParameters
handle_from_dict
@staticmethod def handle_from_dict(cls, kvs: Dict) -> Dict[str, Any]: known, unknown = _UndefinedParameterAction \ ._separate_defined_undefined_kvs(cls=cls, kvs=kvs) catch_all_field = _CatchAllUndefinedParameters._get_catch_all_field( cls=cls) if catch_all_field....
[ 133, 166 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
true
2
45
dataclasses_json
dataclasses_json.undefined
_CatchAllUndefinedParameters
handle_to_dict
@staticmethod def handle_to_dict(obj, kvs: Dict[Any, Any]) -> Dict[Any, Any]: catch_all_field = \ _CatchAllUndefinedParameters._get_catch_all_field(obj) undefined_parameters = kvs.pop(catch_all_field.name) if isinstance(undefined_parameters, dict): kvs.update( ...
[ 193, 200 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
true
2
46
dataclasses_json
dataclasses_json.undefined
_CatchAllUndefinedParameters
handle_dump
@staticmethod def handle_dump(obj) -> Dict[Any, Any]: catch_all_field = _CatchAllUndefinedParameters._get_catch_all_field( cls=obj) return getattr(obj, catch_all_field.name)
[ 203, 206 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
false
0
47
dataclasses_json
dataclasses_json.undefined
_CatchAllUndefinedParameters
create_init
@staticmethod def create_init(obj) -> Callable: original_init = obj.__init__ init_signature = inspect.signature(original_init) @functools.wraps(obj.__init__) def _catch_all_init(self, *args, **kwargs): known_kwargs, unknown_kwargs = \ _CatchAllUndefin...
[ 209, 240 ]
false
[ "KnownParameters", "UnknownParameters", "CatchAll" ]
import abc import dataclasses import functools import inspect from dataclasses import Field, fields from typing import Any, Callable, Dict, Optional, Tuple from enum import Enum from marshmallow import ValidationError from dataclasses_json.utils import CatchAllVar KnownParameters = Dict[str, Any] UnknownParameters = D...
true
2
48
docstring_parser
docstring_parser.common
DocstringMeta
__init__
def __init__(self, args: T.List[str], description: str) -> None: """Initialize self. :param args: list of arguments. The exact content of this variable is dependent on the kind of docstring; it's used to distinguish between custom docstring meta information...
[ 32, 41 ]
false
[ "PARAM_KEYWORDS", "RAISES_KEYWORDS", "RETURNS_KEYWORDS", "YIELDS_KEYWORDS" ]
import typing as T PARAM_KEYWORDS = { "param", "parameter", "arg", "argument", "attribute", "key", "keyword", } RAISES_KEYWORDS = {"raises", "raise", "except", "exception"} RETURNS_KEYWORDS = {"return", "returns"} YIELDS_KEYWORDS = {"yield", "yields"} class DocstringMeta: def __init__...
false
0
49
docstring_parser
docstring_parser.common
DocstringParam
__init__
def __init__( self, args: T.List[str], description: T.Optional[str], arg_name: str, type_name: T.Optional[str], is_optional: T.Optional[bool], default: T.Optional[str], ) -> None: """Initialize self.""" super().__init__(args, description) ...
[ 47, 61 ]
false
[ "PARAM_KEYWORDS", "RAISES_KEYWORDS", "RETURNS_KEYWORDS", "YIELDS_KEYWORDS" ]
import typing as T PARAM_KEYWORDS = { "param", "parameter", "arg", "argument", "attribute", "key", "keyword", } RAISES_KEYWORDS = {"raises", "raise", "except", "exception"} RETURNS_KEYWORDS = {"return", "returns"} YIELDS_KEYWORDS = {"yield", "yields"} class DocstringParam(DocstringMeta): ...
false
0
50
docstring_parser
docstring_parser.common
DocstringReturns
__init__
def __init__( self, args: T.List[str], description: T.Optional[str], type_name: T.Optional[str], is_generator: bool, return_name: T.Optional[str] = None, ) -> None: """Initialize self.""" super().__init__(args, description) self.type_name =...
[ 67, 79 ]
false
[ "PARAM_KEYWORDS", "RAISES_KEYWORDS", "RETURNS_KEYWORDS", "YIELDS_KEYWORDS" ]
import typing as T PARAM_KEYWORDS = { "param", "parameter", "arg", "argument", "attribute", "key", "keyword", } RAISES_KEYWORDS = {"raises", "raise", "except", "exception"} RETURNS_KEYWORDS = {"return", "returns"} YIELDS_KEYWORDS = {"yield", "yields"} class DocstringReturns(DocstringMeta):...
false
0
51
docstring_parser
docstring_parser.common
DocstringRaises
__init__
def __init__( self, args: T.List[str], description: T.Optional[str], type_name: T.Optional[str], ) -> None: """Initialize self.""" super().__init__(args, description) self.type_name = type_name self.description = description
[ 85, 94 ]
false
[ "PARAM_KEYWORDS", "RAISES_KEYWORDS", "RETURNS_KEYWORDS", "YIELDS_KEYWORDS" ]
import typing as T PARAM_KEYWORDS = { "param", "parameter", "arg", "argument", "attribute", "key", "keyword", } RAISES_KEYWORDS = {"raises", "raise", "except", "exception"} RETURNS_KEYWORDS = {"return", "returns"} YIELDS_KEYWORDS = {"yield", "yields"} class DocstringRaises(DocstringMeta): ...
false
0
52
docstring_parser
docstring_parser.common
DocstringDeprecated
__init__
def __init__( self, args: T.List[str], description: T.Optional[str], version: T.Optional[str], ) -> None: """Initialize self.""" super().__init__(args, description) self.version = version self.description = description
[ 100, 109 ]
false
[ "PARAM_KEYWORDS", "RAISES_KEYWORDS", "RETURNS_KEYWORDS", "YIELDS_KEYWORDS" ]
import typing as T PARAM_KEYWORDS = { "param", "parameter", "arg", "argument", "attribute", "key", "keyword", } RAISES_KEYWORDS = {"raises", "raise", "except", "exception"} RETURNS_KEYWORDS = {"return", "returns"} YIELDS_KEYWORDS = {"yield", "yields"} class DocstringDeprecated(DocstringMet...
false
0
53
docstring_parser
docstring_parser.common
Docstring
__init__
def __init__(self) -> None: """Initialize self.""" self.short_description = None # type: T.Optional[str] self.long_description = None # type: T.Optional[str] self.blank_after_short_description = False self.blank_after_long_description = False self.meta = []
[ 115, 121 ]
false
[ "PARAM_KEYWORDS", "RAISES_KEYWORDS", "RETURNS_KEYWORDS", "YIELDS_KEYWORDS" ]
import typing as T PARAM_KEYWORDS = { "param", "parameter", "arg", "argument", "attribute", "key", "keyword", } RAISES_KEYWORDS = {"raises", "raise", "except", "exception"} RETURNS_KEYWORDS = {"return", "returns"} YIELDS_KEYWORDS = {"yield", "yields"} class Docstring: def __init__(sel...
false
0
54
docstring_parser
docstring_parser.google
GoogleParser
add_section
def add_section(self, section: Section): """Add or replace a section. :param section: The new section. """ self.sections[section.title] = section self._setup()
[ 174, 181 ]
false
[ "GOOGLE_TYPED_ARG_REGEX", "GOOGLE_ARG_DESC_REGEX", "MULTIPLE_PATTERN", "DEFAULT_SECTIONS" ]
import inspect import re import typing as T from collections import namedtuple, OrderedDict from enum import IntEnum from .common import ( PARAM_KEYWORDS, RAISES_KEYWORDS, RETURNS_KEYWORDS, YIELDS_KEYWORDS, Docstring, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ...
false
0
55
docstring_parser
docstring_parser.google
GoogleParser
parse
def parse(self, text: str) -> Docstring: """Parse the Google-style docstring into its components. :returns: parsed docstring """ ret = Docstring() if not text: return ret # Clean according to PEP-0257 text = inspect.cleandoc(text) # Find...
[ 183, 265 ]
false
[ "GOOGLE_TYPED_ARG_REGEX", "GOOGLE_ARG_DESC_REGEX", "MULTIPLE_PATTERN", "DEFAULT_SECTIONS" ]
import inspect import re import typing as T from collections import namedtuple, OrderedDict from enum import IntEnum from .common import ( PARAM_KEYWORDS, RAISES_KEYWORDS, RETURNS_KEYWORDS, YIELDS_KEYWORDS, Docstring, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ...
true
2
56
docstring_parser
docstring_parser.numpydoc
parse
def parse(text: str) -> Docstring: """Parse the numpy-style docstring into its components. :returns: parsed docstring """ return NumpydocParser().parse(text)
[ 325, 330 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
false
0
57
docstring_parser
docstring_parser.numpydoc
Section
__init__
def __init__(self, title: str, key: str) -> None: self.title = title self.key = key
[ 57, 59 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
false
0
58
docstring_parser
docstring_parser.numpydoc
Section
parse
def parse(self, text: str) -> T.Iterable[DocstringMeta]: """Parse ``DocstringMeta`` objects from the body of this section. :param text: section body text. Should be cleaned with ``inspect.cleandoc`` before parsing. """ yield DocstringMeta([self.key], description...
[ 70, 76 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
false
0
59
docstring_parser
docstring_parser.numpydoc
_KVSection
parse
def parse(self, text: str) -> T.Iterable[DocstringMeta]: for match, next_match in _pairwise(KV_REGEX.finditer(text)): start = match.end() end = next_match.start() if next_match is not None else None value = text[start:end] yield self._parse_item( ...
[ 93, 98 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
true
2
60
docstring_parser
docstring_parser.numpydoc
DeprecationSection
parse
def parse(self, text: str) -> T.Iterable[DocstringDeprecated]: version, desc, *_ = text.split(sep="\n", maxsplit=1) + [None, None] if desc is not None: desc = _clean_str(inspect.cleandoc(desc)) yield DocstringDeprecated( args=[self.key], description=desc, version=_c...
[ 209, 215 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
true
2
61
docstring_parser
docstring_parser.numpydoc
NumpydocParser
__init__
def __init__(self, sections: T.Optional[T.Dict[str, Section]] = None): """Setup sections. :param sections: Recognized sections or None to defaults. """ sections = sections or DEFAULT_SECTIONS self.sections = {s.title: s for s in sections} self._setup()
[ 256, 263 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
false
0
62
docstring_parser
docstring_parser.numpydoc
NumpydocParser
add_section
def add_section(self, section: Section): """Add or replace a section. :param section: The new section. """ self.sections[section.title] = section self._setup()
[ 271, 278 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
false
0
63
docstring_parser
docstring_parser.numpydoc
NumpydocParser
parse
def parse(self, text: str) -> Docstring: """Parse the numpy-style docstring into its components. :returns: parsed docstring """ ret = Docstring() if not text: return ret # Clean according to PEP-0257 text = inspect.cleandoc(text) # Find ...
[ 280, 322 ]
false
[ "KV_REGEX", "PARAM_KEY_REGEX", "PARAM_OPTIONAL_REGEX", "PARAM_DEFAULT_REGEX", "RETURN_KEY_REGEX", "DEFAULT_SECTIONS" ]
import inspect import itertools import re import typing as T from .common import ( Docstring, DocstringDeprecated, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ) KV_REGEX = re.compile(r"^[^\s].*$", flags=re.M) PARAM_KEY_REGEX = re.compile(r"^(?P<name>.*?)(?:\s*:\s*(?P<type>...
true
2
64
docstring_parser
docstring_parser.parser
parse
def parse(text: str, style: Style = Style.auto) -> Docstring: """Parse the docstring into its components. :param text: docstring text to parse :param style: docstring style :returns: parsed docstring representation """ if style != Style.auto: return STYLES[style](text) rets = [] ...
[ 6, 24 ]
false
[]
from docstring_parser.common import Docstring, ParseError from docstring_parser.styles import STYLES, Style def parse(text: str, style: Style = Style.auto) -> Docstring: """Parse the docstring into its components. :param text: docstring text to parse :param style: docstring style :returns: parsed do...
true
2
65
docstring_parser
docstring_parser.rest
parse
def parse(text: str) -> Docstring: """Parse the ReST-style docstring into its components. :returns: parsed docstring """ ret = Docstring() if not text: return ret text = inspect.cleandoc(text) match = re.search("^:", text, flags=re.M) if match: desc_chunk = text[: match...
[ 85, 131 ]
false
[]
import inspect import re import typing as T from .common import ( PARAM_KEYWORDS, RAISES_KEYWORDS, RETURNS_KEYWORDS, YIELDS_KEYWORDS, Docstring, DocstringMeta, DocstringParam, DocstringRaises, DocstringReturns, ParseError, ) def parse(text: str) -> Docstring: """Parse the ...
true
2
66
flutes
flutes.iterator
chunk
def chunk(n: int, iterable: Iterable[T]) -> Iterator[List[T]]: r"""Split the iterable into chunks, with each chunk containing no more than ``n`` elements. .. code:: python >>> list(chunk(3, range(10))) [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] :param n: The maximum number of elements in one ...
[ 22, 43 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
67
flutes
flutes.iterator
take
def take(n: int, iterable: Iterable[T]) -> Iterator[T]: r"""Take the first :attr:`n` elements from an iterable. .. code:: python >>> list(take(5, range(1000000))) [0, 1, 2, 3, 4] :param n: The number of elements to take. :param iterable: The iterable. :return: An iterator returnin...
[ 46, 65 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
68
flutes
flutes.iterator
drop
def drop(n: int, iterable: Iterable[T]) -> Iterator[T]: r"""Drop the first :attr:`n` elements from an iterable, and return the rest as an iterator. .. code:: python >>> next(drop(5, range(1000000))) 5 :param n: The number of elements to drop. :param iterable: The iterable. :return...
[ 68, 88 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
69
flutes
flutes.iterator
drop_until
def drop_until(pred_fn: Callable[[T], bool], iterable: Iterable[T]) -> Iterator[T]: r"""Drop elements from the iterable until an element that satisfies the predicate is encountered. Similar to the built-in :py:func:`filter` function, but only applied to a prefix of the iterable. .. code:: python >...
[ 91, 110 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
70
flutes
flutes.iterator
split_by
def split_by(iterable: Iterable[A], empty_segments: bool = False, *, criterion=None, separator=None) \ -> Iterator[List[A]]: r"""Split a list into sub-lists by dropping certain elements. Exactly one of ``criterion`` and ``separator`` must be specified. For example: .. code:: python >>> lis...
[ 123, 156 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
71
flutes
flutes.iterator
scanl
def scanl(func, iterable, *args): r"""Computes the intermediate results of :py:func:`~functools.reduce`. Equivalent to Haskell's ``scanl``. For example: .. code:: python >>> list(scanl(operator.add, [1, 2, 3, 4], 0)) [0, 1, 3, 6, 10] >>> list(scanl(lambda s, x: x + s, ['a', 'b', 'c...
[ 167, 196 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
72
flutes
flutes.iterator
LazyList
__iter__
def __iter__(self): if self.exhausted: return iter(self.list) return self.LazyListIterator(self)
[ 257, 260 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
73
flutes
flutes.iterator
LazyList
__getitem__
def __getitem__(self, idx): if isinstance(idx, slice): self._fetch_until(idx.stop) else: self._fetch_until(idx) return self.list[idx]
[ 280, 285 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
74
flutes
flutes.iterator
LazyList
__len__
def __len__(self): if self.exhausted: return len(self.list) else: raise TypeError("__len__ is not available before the iterable is depleted")
[ 287, 291 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
75
flutes
flutes.iterator
Range
__next__
def __next__(self) -> int: if self.val >= self.r: raise StopIteration result = self.val self.val += self.step return result
[ 332, 337 ]
false
[ "__all__", "T", "A", "B", "R" ]
import weakref from typing import Callable, Generic, Iterable, Iterator, List, Optional, Sequence, TypeVar, overload __all__ = [ "chunk", "take", "drop", "drop_until", "split_by", "scanl", "scanr", "LazyList", "Range", "MapList", ] T = TypeVar('T') A = TypeVar('A') B = TypeVar('...
true
2
End of preview. Expand in Data Studio

⚠️ Note: The dataset symprompt_supp.jsonl was not created by us. We have only supplemented it with additional branch-level metadata (e.g., has_branch, total_branches) to enable coverage testing.

This metadata lets users determine at a glance whether a focal method contains branches, which simplifies branch-coverage calculation in their workflows.

It originates from the paper:

Code-Aware Prompting: A Study of Coverage Guided Test Generation in Regression Setting using LLM

— Gabriel Ryan, Siddhartha Jain, Mingyue Shang, Shiqi Wang, Xiaofei Ma, Murali Krishna Ramanathan, Baishakhi Ray.

Downloads last month
17