Columns:
repo_id             string (length 15 to 89)
file_path           string (length 27 to 180)
content             string (length 1 to 2.23M)
__index_level_0__   int64 (constant 0)
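Each record below pairs a file's path with its contents (the previews are truncated). The constant __index_level_0__ column is typically a leftover of a pandas-to-Arrow conversion, which stores the DataFrame index under that name; it carries no information here. As a minimal sketch, a dataset with this schema could be loaded and inspected with the Hugging Face datasets library; the dataset path "user/hf_public_repos" is a hypothetical placeholder:

# Minimal inspection sketch; "user/hf_public_repos" is a placeholder, not a real dataset id.
from datasets import load_dataset

ds = load_dataset("user/hf_public_repos", split="train")
print(ds.column_names)  # ['repo_id', 'file_path', 'content', '__index_level_0__']

# Each row maps a repository file to its full text.
for row in ds.select(range(3)):
    print(row["repo_id"], row["file_path"], len(row["content"]))

# The constant-zero pandas index column can simply be dropped.
ds = ds.remove_columns("__index_level_0__")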
repo_id: hf_public_repos/tokenizers/bindings/python/tests
hf_public_repos/tokenizers/bindings/python/tests/bindings/test_normalizers.py
import pickle import pytest from tokenizers import NormalizedString, Tokenizer from tokenizers.models import BPE from tokenizers.normalizers import BertNormalizer, Lowercase, Normalizer, Sequence, Strip, Prepend class TestBertNormalizer: def test_instantiate(self): assert isinstance(BertNormalizer(), No...
hf_public_repos/tokenizers/bindings/python/tests/bindings/test_tokenizer.py
import pickle import numpy as np import pytest from tokenizers import AddedToken, Encoding, Tokenizer from tokenizers.implementations import BertWordPieceTokenizer from tokenizers.models import BPE, Model, WordPiece, Unigram from tokenizers.normalizers import Lowercase from tokenizers.pre_tokenizers import ByteLevel ...
hf_public_repos/tokenizers/bindings/python/tests/bindings/test_pre_tokenizers.py
import json import pickle import pytest from tokenizers.pre_tokenizers import ( BertPreTokenizer, ByteLevel, CharDelimiterSplit, Digits, Metaspace, PreTokenizer, Punctuation, Sequence, Split, UnicodeScripts, Whitespace, WhitespaceSplit, ) class TestByteLevel: def ...
hf_public_repos/tokenizers/bindings/python/tests/bindings/test_encoding.py
import pytest from tokenizers import BertWordPieceTokenizer from ..utils import bert_files, data_dir class TestEncoding: @pytest.fixture(scope="class") def encodings(self, bert_files): tokenizer = BertWordPieceTokenizer.from_file(bert_files["vocab"]) single_encoding = tokenizer.encode("I lov...
hf_public_repos/tokenizers/bindings/python/tests/bindings/test_decoders.py
import json import pickle import pytest from tokenizers.decoders import ( CTC, BPEDecoder, ByteLevel, Decoder, Metaspace, Sequence, WordPiece, ByteFallback, Replace, Strip, Fuse, ) class TestByteLevel: def test_instantiate(self): assert ByteLevel() is not None...
hf_public_repos/tokenizers/bindings/python/tests/bindings/test_models.py
import pickle import pytest from tokenizers.models import BPE, Model, WordLevel, WordPiece from ..utils import bert_files, data_dir, roberta_files class TestBPE: def test_instantiate(self, roberta_files): assert isinstance(BPE(), Model) assert isinstance(BPE(), BPE) vocab = {"a": 0, "b...
hf_public_repos/tokenizers/bindings/python/tests/bindings/test_trainers.py
import copy import os import pickle import pytest from tokenizers import ( AddedToken, SentencePieceUnigramTokenizer, Tokenizer, models, normalizers, pre_tokenizers, trainers, ) from ..utils import data_dir, train_files class TestBpeTrainer: def test_can_modify(self): traine...
repo_id: hf_public_repos/tokenizers/bindings/python
hf_public_repos/tokenizers/bindings/python/.cargo/config.toml
[target.x86_64-apple-darwin] rustflags = [ "-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup", "-C", "link-arg=-mmacosx-version-min=10.11", ] [target.aarch64-apple-darwin] rustflags = [ "-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup", "-C", "link-arg=-mmacosx-version-min=10.11", ]
hf_public_repos/tokenizers/bindings/python/examples/train_with_datasets.py
import datasets from tokenizers import Tokenizer, models, normalizers, pre_tokenizers, trainers # Build a tokenizer bpe_tokenizer = Tokenizer(models.BPE()) bpe_tokenizer.pre_tokenizer = pre_tokenizers.Whitespace() bpe_tokenizer.normalizer = normalizers.Lowercase() # Initialize a dataset dataset = datasets.load_data...
hf_public_repos/tokenizers/bindings/python/examples/train_bert_wordpiece.py
import argparse import glob from tokenizers import BertWordPieceTokenizer parser = argparse.ArgumentParser() parser.add_argument( "--files", default=None, metavar="path", type=str, required=True, help="The files to use as training; accept '**/*.txt' type of patterns \ ...
hf_public_repos/tokenizers/bindings/python/examples/train_bytelevel_bpe.py
import argparse import glob from os.path import join from tokenizers import ByteLevelBPETokenizer parser = argparse.ArgumentParser() parser.add_argument( "--files", default=None, metavar="path", type=str, required=True, help="The files to use as training; accept '**/*.txt' type of patterns \ ...
hf_public_repos/tokenizers/bindings/python/examples/example.py
import argparse import logging import time from tqdm import tqdm logging.getLogger("transformers").disabled = True logging.getLogger("transformers.tokenization_utils").disabled = True from tokenizers import Tokenizer, decoders, pre_tokenizers from tokenizers.models import BPE, WordPiece from tokenizers.normalizers ...
hf_public_repos/tokenizers/bindings/python/examples/using_the_visualizer.ipynb
from tokenizers import BertWordPieceTokenizer from tokenizers.tools import EncodingVisualizer EncodingVisualizer.unk_token_regex.search("aaa[udsnk]aaa")text = """Mathias Bynens 'Z͑ͫ̓ͪ̂ͫ̽͏̴̙̤̞͉͚̯̞̠͍A̴̵̜̰͔ͫ͗͢L̠ͨͧͩ͘G̴̻͈͍͔̹̑͗̎̅͛́Ǫ̵̹̻̝̳͂̌̌͘!͖̬̰̙̗̿̋ͥͥ̂ͣ̐́́͜͞': Whenever you’re working on a piece of JavaScript code that deals...
hf_public_repos/tokenizers/bindings/python/examples/custom_components.py
from typing import List import jieba from tokenizers import NormalizedString, PreTokenizedString, Regex, Tokenizer from tokenizers.decoders import Decoder from tokenizers.models import BPE from tokenizers.normalizers import Normalizer from tokenizers.pre_tokenizers import PreTokenizer class JiebaPreTokenizer: de...
hf_public_repos/tokenizers/bindings/python/scripts/spm_parity_check.py
import tokenizers from argparse import ArgumentParser import sentencepiece as spm from collections import Counter import json import os import datetime try: from termcolor import colored has_color = True except Exception: has_color = False def main(): parser = ArgumentParser("SentencePiece parity ch...
hf_public_repos/tokenizers/bindings/python/scripts/convert.py
import transformers from tokenizers.implementations import SentencePieceUnigramTokenizer, BaseTokenizer from tokenizers.processors import TemplateProcessing from tokenizers.models import Unigram, BPE from tokenizers import decoders from tokenizers import Tokenizer, Regex from tokenizers.normalizers import ( StripAc...
hf_public_repos/tokenizers/bindings/python/scripts/sentencepiece_extractor.py
from argparse import ArgumentParser from json import dump from logging import basicConfig, getLogger from os import linesep, remove from os.path import exists from tempfile import NamedTemporaryFile from typing import Dict, List, Tuple from requests import get from sentencepiece import SentencePieceProcessor from tqdm...
hf_public_repos/tokenizers/bindings/python/src/normalizers.rs
use std::sync::{Arc, RwLock}; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use crate::error::ToPyResult; use crate::utils::{PyNormalizedString, PyNormalizedStringRefMut, PyPattern}; use serde::ser::SerializeStruct; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use tk::normalizers::...
hf_public_repos/tokenizers/bindings/python/src/error.rs
use pyo3::exceptions; use pyo3::prelude::*; use pyo3::type_object::PyTypeInfo; use std::fmt::{Display, Formatter, Result as FmtResult}; use tokenizers::tokenizer::Result; #[derive(Debug)] pub struct PyError(pub String); impl PyError { #[allow(dead_code)] pub fn from(s: &str) -> Self { PyError(String::f...
hf_public_repos/tokenizers/bindings/python/src/encoding.rs
use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use tk::tokenizer::{Offsets, PaddingDirection}; use tk::utils::truncation::TruncationDirection; use tokenizers as tk; use crate::error::{deprecation_warning, PyError}; /// The :class:`~tokenizers.Encoding` represents the output of a :class:`~tokenizers.T...
hf_public_repos/tokenizers/bindings/python/src/token.rs
use pyo3::prelude::*; use tk::Token; #[pyclass(module = "tokenizers", name = "Token")] #[derive(Clone)] pub struct PyToken { token: Token, } impl From<Token> for PyToken { fn from(token: Token) -> Self { Self { token } } } impl From<PyToken> for Token { fn from(token: PyToken) -> Self { ...
hf_public_repos/tokenizers/bindings/python/src/lib.rs
#![warn(clippy::all)] #![allow(clippy::upper_case_acronyms)] // Many false positives with pyo3 it seems &str, and &PyAny get flagged #![allow(clippy::borrow_deref_ref)] extern crate tokenizers as tk; mod decoders; mod encoding; mod error; mod models; mod normalizers; mod pre_tokenizers; mod processors; mod token; mod...
hf_public_repos/tokenizers/bindings/python/src/decoders.rs
use std::sync::{Arc, RwLock}; use crate::utils::PyChar; use crate::utils::PyPattern; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use serde::de::Error; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use tk::decoders::bpe::BPEDecoder; use tk::decoders::byte_fallback::ByteFallback; use...
hf_public_repos/tokenizers/bindings/python/src/models.rs
use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock}; use crate::token::PyToken; use crate::trainers::PyTrainer; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use serde::{Deserialize, Serialize}; use tk::models::bpe::{BpeBuilder, Merges, Vocab, BPE}; use tk::mod...
hf_public_repos/tokenizers/bindings/python/src/pre_tokenizers.rs
use std::sync::{Arc, RwLock}; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use serde::ser::SerializeStruct; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use tk::normalizer::SplitDelimiterBehavior; use tk::pre_tokenizers::bert::BertPreTokenizer; use tk::pre_tokenizers::byte_level::...
hf_public_repos/tokenizers/bindings/python/src/processors.rs
use std::convert::TryInto; use std::sync::Arc; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use crate::encoding::PyEncoding; use crate::error::ToPyResult; use serde::{Deserialize, Serialize}; use tk::processors::bert::BertProcessing; use tk::processors::byte_level::ByteLevel; use tk::processors::ro...
hf_public_repos/tokenizers/bindings/python/src/trainers.rs
use std::sync::{Arc, RwLock}; use crate::models::PyModel; use crate::tokenizer::PyAddedToken; use crate::utils::PyChar; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use serde::{Deserialize, Serialize}; use tk::models::TrainerWrapper; use tk::Trainer; use tokenizers as tk; /// Base class for all tra...
hf_public_repos/tokenizers/bindings/python/src/tokenizer.rs
use std::collections::{hash_map::DefaultHasher, HashMap}; use std::hash::{Hash, Hasher}; use numpy::{npyffi, PyArray1}; use pyo3::class::basic::CompareOp; use pyo3::exceptions; use pyo3::intern; use pyo3::prelude::*; use pyo3::types::*; use pyo3::AsPyPointer; use tk::models::bpe::BPE; use tk::tokenizer::{ Model, P...
repo_id: hf_public_repos/tokenizers/bindings/python/src
hf_public_repos/tokenizers/bindings/python/src/utils/regex.rs
use onig::Regex; use pyo3::exceptions; use pyo3::prelude::*; /// Instantiate a new Regex with the given pattern #[pyclass(module = "tokenizers", name = "Regex")] pub struct PyRegex { pub inner: Regex, pub pattern: String, } #[pymethods] impl PyRegex { #[new] #[pyo3(text_signature = "(self, pattern)")]...
hf_public_repos/tokenizers/bindings/python/src/utils/mod.rs
use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use std::marker::PhantomData; use std::sync::{Arc, Mutex}; mod iterators; mod normalization; mod pretokenization; mod regex; pub use iterators::*; pub use normalization::*; pub use pretokenization::*; pub use regex::*; // PyChar // This type is a tempor...
hf_public_repos/tokenizers/bindings/python/src/utils/normalization.rs
use super::regex::PyRegex; use super::{DestroyPtr, RefMutContainer, RefMutGuard}; use crate::error::ToPyResult; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use tk::normalizer::{char_to_bytes, NormalizedString, Range, SplitDelimiterBehavior}; use tk::pattern::Pattern; /// Represents a Pattern as use...
hf_public_repos/tokenizers/bindings/python/src/utils/pretokenization.rs
use tokenizers as tk; use pyo3::exceptions; use pyo3::prelude::*; use pyo3::types::*; use super::{ DestroyPtr, PyNormalizedString, PyNormalizedStringRefMut, RefMutContainer, RefMutGuard, }; use crate::encoding::PyEncoding; use crate::error::ToPyResult; use crate::token::PyToken; use tk::{OffsetReferential, Offset...
hf_public_repos/tokenizers/bindings/python/src/utils/iterators.rs
use pyo3::prelude::*; use pyo3::AsPyPointer; use std::collections::VecDeque; /// An simple iterator that can be instantiated with a specified length. /// We use this with iterators that don't have a size_hint but we might /// know its size. This is useful with progress bars for example. pub struct MaybeSizedIterator<I...
repo_id: hf_public_repos/tokenizers/bindings
hf_public_repos/tokenizers/bindings/node/Cargo.toml
[package] authors = ["Nicolas Patry <nicolas@huggingface.co>"] edition = "2021" name = "node" version = "0.15.1-dev.0" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [lib] crate-type = ["cdylib"] [dependencies] napi = "2" napi-derive = "2" serde = { v...
hf_public_repos/tokenizers/bindings/node/.eslintrc.yml
parser: '@typescript-eslint/parser' parserOptions: ecmaFeatures: jsx: true ecmaVersion: latest sourceType: module project: ./tsconfig.json env: browser: true es6: true node: true jest: true ignorePatterns: ['index.js', 'target/'] plugins: - import - '@typescript-eslint' extends: - eslint:...
hf_public_repos/tokenizers/bindings/node/rustfmt.toml
tab_spaces = 2
hf_public_repos/tokenizers/bindings/node/LICENSE
MIT License Copyright (c) 2020 N-API for Rust Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, di...
hf_public_repos/tokenizers/bindings/node/Makefile
.PHONY: style check-style test DATA_DIR = data dir_guard=@mkdir -p $(@D) # Format source code automatically style: npm run lint # Check the source code is formatted correctly check-style: npm run lint-check TESTS_RESOURCES = $(DATA_DIR)/small.txt $(DATA_DIR)/roberta.json $(DATA_DIR)/tokenizer-wiki.json $(DATA_DI...
hf_public_repos/tokenizers/bindings/node/.yarnrc.yml
nodeLinker: node-modules npmAuditRegistry: 'https://registry.npmjs.org' yarnPath: .yarn/releases/yarn-3.5.1.cjs
hf_public_repos/tokenizers/bindings/node/jest.config.js
/* eslint-disable prettier/prettier */ // For a detailed explanation regarding each configuration property, visit: // https://jestjs.io/docs/en/configuration.html module.exports = { // All imported modules in your tests should be mocked automatically // automock: false, // Stop running tests after `n` failures ...
hf_public_repos/tokenizers/bindings/node/build.rs
extern crate napi_build; fn main() { napi_build::setup(); }
hf_public_repos/tokenizers/bindings/node/index.js
/* tslint:disable */ /* eslint-disable */ /* prettier-ignore */ /* auto-generated by NAPI-RS */ const { existsSync, readFileSync } = require('fs') const { join } = require('path') const { platform, arch } = process let nativeBinding = null let localFileExisted = false let loadError = null function isMusl() { // ...
hf_public_repos/tokenizers/bindings/node/.prettierignore
target .yarn
hf_public_repos/tokenizers/bindings/node/.editorconfig
# EditorConfig helps developers define and maintain consistent # coding styles between different editors or IDEs # http://editorconfig.org root = true [*] indent_style = space indent_size = 2 end_of_line = lf charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true [*.md] trim_trailing_whitespace =...
hf_public_repos/tokenizers/bindings/node/README.md
<p align="center"> <br> <img src="https://huggingface.co/landing/assets/tokenizers/tokenizers-logo.png" width="600"/> <br> <p> <p align="center"> <a href="https://badge.fury.io/js/tokenizers"> <img alt="Build" src="https://badge.fury.io/js/tokenizers.svg"> </a> <a href="https://github.com/huggingface/to...
hf_public_repos/tokenizers/bindings/node/types.ts
export type TextInputSequence = string export type PreTokenizedInputSequence = string[] export type InputSequence = TextInputSequence | PreTokenizedInputSequence export type TextEncodeInput = TextInputSequence | [TextInputSequence, TextInputSequence] export type PreTokenizedEncodeInput = PreTokenizedInputSequence | [P...
hf_public_repos/tokenizers/bindings/node/yarn.lock
# This file is generated by running "yarn install" inside your project. # Manual changes might be lost - proceed with caution! __metadata: version: 6 cacheKey: 8 "@aashutoshrathi/word-wrap@npm:^1.2.3": version: 1.2.6 resolution: "@aashutoshrathi/word-wrap@npm:1.2.6" checksum: ada901b9e7c680d190f1d012c84217c...
hf_public_repos/tokenizers/bindings/node/package.json
{ "name": "tokenizers", "version": "0.14.0-dev0", "repository": { "type": "git", "url": "git+https://github.com/huggingface/tokenizers.git" }, "bugs": { "url": "https://github.com/huggingface/tokenizers/issues" }, "homepage": "https://github.com/huggingface/tokenizers/tree/master/bindings/node...
hf_public_repos/tokenizers/bindings/node/tsconfig.json
{ "compilerOptions": { "target": "ES2018", "strict": true, "moduleResolution": "node", "module": "CommonJS", "noUnusedLocals": true, "noUnusedParameters": true, "esModuleInterop": true, "allowSyntheticDefaultImports": true }, "include": ["."], "exclude": ["node_modules"] }
hf_public_repos/tokenizers/bindings/node/index.d.ts
/* tslint:disable */ /* eslint-disable */ /* auto-generated by NAPI-RS */ export function bpeDecoder(suffix?: string | undefined | null): Decoder export function byteFallbackDecoder(): Decoder export function ctcDecoder( padToken?: string = '<pad>', wordDelimiterToken?: string | undefined | null, cleanup?: bool...
hf_public_repos/tokenizers/bindings/node/.taplo.toml
exclude = ["node_modules/**/*.toml"] # https://taplo.tamasfe.dev/configuration/formatter-options.html [formatting] align_entries = true indent_tables = true reorder_keys = true
repo_id: hf_public_repos/tokenizers/bindings/node/.yarn
hf_public_repos/tokenizers/bindings/node/.yarn/releases/yarn-3.5.1.cjs
#!/usr/bin/env node /* eslint-disable */ //prettier-ignore (()=>{var Sge=Object.create;var lS=Object.defineProperty;var vge=Object.getOwnPropertyDescriptor;var xge=Object.getOwnPropertyNames;var Pge=Object.getPrototypeOf,Dge=Object.prototype.hasOwnProperty;var J=(r=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy...
repo_id: hf_public_repos/tokenizers/bindings/node
hf_public_repos/tokenizers/bindings/node/.cargo/config.toml
[target.aarch64-unknown-linux-musl] linker = "aarch64-linux-musl-gcc" rustflags = ["-C", "target-feature=-crt-static"]
repo_id: hf_public_repos/tokenizers/bindings/node/examples
hf_public_repos/tokenizers/bindings/node/examples/documentation/pipeline.test.ts
/* eslint-disable */ var globRequire = require; describe("pipelineExample", () => { // This is a hack to let us require using path similar to what the user has to use function require(mod: string) { if (mod.startsWith("tokenizers")) { // let path = mod.slice("tokenizers".length); ...
hf_public_repos/tokenizers/bindings/node/examples/documentation/quicktour.test.ts
/* eslint-disable */ var globRequire = require console.log = (..._args: any[]) => {} describe('quicktourExample', () => { function require(mod: string) { if (mod.startsWith('tokenizers')) { return globRequire('../../') } else { return globRequire(mod) } } it.skip('trains the tokenizer',...
repo_id: hf_public_repos/tokenizers/bindings/node/lib
hf_public_repos/tokenizers/bindings/node/lib/bindings/utils.test.ts
// import { promisify } from 'util' import { BPE, Tokenizer, mergeEncodings, slice } from '../../' describe('slice', () => { const text = 'My name is John 👋' const sliceText = slice.bind({}, text) it('returns the full text when no params', () => { const sliced = sliceText() expect(sliced).toEqual(text...
hf_public_repos/tokenizers/bindings/node/lib/bindings/models.test.ts
/* eslint-disable @typescript-eslint/no-empty-function */ /* eslint-disable @typescript-eslint/no-explicit-any */ import { BPE, Unigram, WordPiece } from '../../' const MOCKS_DIR = __dirname + '/__mocks__' describe('WordPiece', () => { describe('fromFile', () => { it('throws if called with only one argument', ...
hf_public_repos/tokenizers/bindings/node/lib/bindings/pre-tokenizers.test.ts
import { byteLevelPreTokenizer, metaspacePreTokenizer, punctuationPreTokenizer, sequencePreTokenizer, splitPreTokenizer, whitespaceSplitPreTokenizer, } from '../../' describe('byteLevelPreTokenizer', () => { it('instantiates correctly', () => { const processor = byteLevelPreTokenizer() expect(pro...
hf_public_repos/tokenizers/bindings/node/lib/bindings/encoding.test.ts
import { PaddingDirection, WordPiece, punctuationPreTokenizer, sequencePreTokenizer, whitespacePreTokenizer, Encoding, EncodeOptions, Tokenizer, } from '../../' import { InputSequence } from '../../types' const MOCKS_DIR = __dirname + '/__mocks__' describe('Can modify pretokenizers on the fly', () => ...
hf_public_repos/tokenizers/bindings/node/lib/bindings/tokenizer.test.ts
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-empty-function */ import { TruncationStrategy, BPE, Encoding, AddedToken, Tokenizer } from '../../' // jest.mock('../../bindings/tokenizer'); // jest.mock('../../bindings/models', () => ({ // __esModule: true, // Model...
hf_public_repos/tokenizers/bindings/node/lib/bindings/decoders.test.ts
import { bpeDecoder, byteFallbackDecoder, ctcDecoder, fuseDecoder, metaspaceDecoder, replaceDecoder, sequenceDecoder, stripDecoder, wordPieceDecoder, } from '../../' describe('wordPieceDecoder', () => { it('accepts `undefined` as first parameter', () => { expect(wordPieceDecoder(undefined)).toB...
hf_public_repos/tokenizers/bindings/node/lib/bindings/normalizers.test.ts
import { prependNormalizer, stripAccentsNormalizer, stripNormalizer } from '../../' describe('stripNormalizer', () => { it('instantiates with no parameters', () => { const normalizer = stripNormalizer() expect(normalizer.constructor.name).toEqual('Normalizer') }) it('accepts `undefined` as first paramet...
hf_public_repos/tokenizers/bindings/node/lib/bindings/post-processors.test.ts
/* eslint-disable @typescript-eslint/no-explicit-any */ import { bertProcessing, byteLevelProcessing, robertaProcessing, sequenceProcessing, templateProcessing } from '../../' describe('bertProcessing', () => { it('instantiates correctly with only two parameters', () => { const processor = bertProcessing(['sep'...
repo_id: hf_public_repos/tokenizers/bindings/node/lib/bindings
hf_public_repos/tokenizers/bindings/node/lib/bindings/__mocks__/vocab.txt
my name is jo ##hn what yours pair [UNK]
hf_public_repos/tokenizers/bindings/node/lib/bindings/__mocks__/vocab.json
{}
repo_id: hf_public_repos/tokenizers/bindings/node/npm
hf_public_repos/tokenizers/bindings/node/npm/win32-arm64-msvc/README.md
# `tokenizers-win32-arm64-msvc` This is the **aarch64-pc-windows-msvc** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/win32-arm64-msvc/package.json
{ "name": "tokenizers-win32-arm64-msvc", "version": "0.13.4-rc1", "os": [ "win32" ], "cpu": [ "arm64" ], "main": "tokenizers.win32-arm64-msvc.node", "files": [ "tokenizers.win32-arm64-msvc.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", ...
hf_public_repos/tokenizers/bindings/node/npm/linux-arm-gnueabihf/README.md
# `tokenizers-linux-arm-gnueabihf` This is the **armv7-unknown-linux-gnueabihf** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/linux-arm-gnueabihf/package.json
{ "name": "tokenizers-linux-arm-gnueabihf", "version": "0.13.4-rc1", "os": [ "linux" ], "cpu": [ "arm" ], "main": "tokenizers.linux-arm-gnueabihf.node", "files": [ "tokenizers.linux-arm-gnueabihf.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-r...
hf_public_repos/tokenizers/bindings/node/npm/darwin-x64/README.md
# `tokenizers-darwin-x64` This is the **x86_64-apple-darwin** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/darwin-x64/package.json
{ "name": "tokenizers-darwin-x64", "version": "0.13.4-rc1", "os": [ "darwin" ], "cpu": [ "x64" ], "main": "tokenizers.darwin-x64.node", "files": [ "tokenizers.darwin-x64.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NAPI", "N-API...
hf_public_repos/tokenizers/bindings/node/npm/linux-arm64-musl/README.md
# `tokenizers-linux-arm64-musl` This is the **aarch64-unknown-linux-musl** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/linux-arm64-musl/package.json
{ "name": "tokenizers-linux-arm64-musl", "version": "0.13.4-rc1", "os": [ "linux" ], "cpu": [ "arm64" ], "main": "tokenizers.linux-arm64-musl.node", "files": [ "tokenizers.linux-arm64-musl.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", ...
hf_public_repos/tokenizers/bindings/node/npm/linux-x64-gnu/README.md
# `tokenizers-linux-x64-gnu` This is the **x86_64-unknown-linux-gnu** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/linux-x64-gnu/package.json
{ "name": "tokenizers-linux-x64-gnu", "version": "0.13.4-rc1", "os": [ "linux" ], "cpu": [ "x64" ], "main": "tokenizers.linux-x64-gnu.node", "files": [ "tokenizers.linux-x64-gnu.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NAPI", ...
hf_public_repos/tokenizers/bindings/node/npm/android-arm-eabi/README.md
# `tokenizers-android-arm-eabi` This is the **armv7-linux-androideabi** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/android-arm-eabi/package.json
{ "name": "tokenizers-android-arm-eabi", "version": "0.13.4-rc1", "os": [ "android" ], "cpu": [ "arm" ], "main": "tokenizers.android-arm-eabi.node", "files": [ "tokenizers.android-arm-eabi.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", ...
hf_public_repos/tokenizers/bindings/node/npm/android-arm64/README.md
# `tokenizers-android-arm64` This is the **aarch64-linux-android** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/android-arm64/package.json
{ "name": "tokenizers-android-arm64", "version": "0.13.4-rc1", "os": [ "android" ], "cpu": [ "arm64" ], "main": "tokenizers.android-arm64.node", "files": [ "tokenizers.android-arm64.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NAPI"...
hf_public_repos/tokenizers/bindings/node/npm/win32-ia32-msvc/README.md
# `tokenizers-win32-ia32-msvc` This is the **i686-pc-windows-msvc** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/win32-ia32-msvc/package.json
{ "name": "tokenizers-win32-ia32-msvc", "version": "0.13.4-rc1", "os": [ "win32" ], "cpu": [ "ia32" ], "main": "tokenizers.win32-ia32-msvc.node", "files": [ "tokenizers.win32-ia32-msvc.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NA...
hf_public_repos/tokenizers/bindings/node/npm/freebsd-x64/README.md
# `tokenizers-freebsd-x64` This is the **x86_64-unknown-freebsd** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/freebsd-x64/package.json
{ "name": "tokenizers-freebsd-x64", "version": "0.13.4-rc1", "os": [ "freebsd" ], "cpu": [ "x64" ], "main": "tokenizers.freebsd-x64.node", "files": [ "tokenizers.freebsd-x64.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NAPI", "N...
hf_public_repos/tokenizers/bindings/node/npm/linux-x64-musl/README.md
# `tokenizers-linux-x64-musl` This is the **x86_64-unknown-linux-musl** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/linux-x64-musl/package.json
{ "name": "tokenizers-linux-x64-musl", "version": "0.13.4-rc1", "os": [ "linux" ], "cpu": [ "x64" ], "main": "tokenizers.linux-x64-musl.node", "files": [ "tokenizers.linux-x64-musl.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NAPI",...
hf_public_repos/tokenizers/bindings/node/npm/darwin-arm64/README.md
# `tokenizers-darwin-arm64` This is the **aarch64-apple-darwin** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/darwin-arm64/package.json
{ "name": "tokenizers-darwin-arm64", "version": "0.13.4-rc1", "os": [ "darwin" ], "cpu": [ "arm64" ], "main": "tokenizers.darwin-arm64.node", "files": [ "tokenizers.darwin-arm64.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NAPI", ...
hf_public_repos/tokenizers/bindings/node/npm/linux-arm64-gnu/README.md
# `tokenizers-linux-arm64-gnu` This is the **aarch64-unknown-linux-gnu** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/linux-arm64-gnu/package.json
{ "name": "tokenizers-linux-arm64-gnu", "version": "0.13.4-rc1", "os": [ "linux" ], "cpu": [ "arm64" ], "main": "tokenizers.linux-arm64-gnu.node", "files": [ "tokenizers.linux-arm64-gnu.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "N...
hf_public_repos/tokenizers/bindings/node/npm/win32-x64-msvc/README.md
# `tokenizers-win32-x64-msvc` This is the **x86_64-pc-windows-msvc** binary for `tokenizers`
hf_public_repos/tokenizers/bindings/node/npm/win32-x64-msvc/package.json
{ "name": "tokenizers-win32-x64-msvc", "version": "0.13.4-rc1", "os": [ "win32" ], "cpu": [ "x64" ], "main": "tokenizers.win32-x64-msvc.node", "files": [ "tokenizers.win32-x64-msvc.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-rs", "NAPI",...
repo_id: hf_public_repos/tokenizers/bindings
hf_public_repos/tokenizers/bindings/node/src/normalizers.rs
use crate::arc_rwlock_serde; use napi::bindgen_prelude::*; use napi_derive::napi; use serde::{Deserialize, Serialize}; use std::sync::{Arc, RwLock}; use tk::normalizers::NormalizerWrapper; use tk::NormalizedString; use tokenizers as tk; /// Normalizer #[derive(Debug, Clone, Serialize, Deserialize)] #[napi] pub struct ...
hf_public_repos/tokenizers/bindings/node/src/encoding.rs
use crate::tokenizer::PaddingOptions; use napi::bindgen_prelude::*; use napi_derive::napi; use tokenizers::utils::truncation::TruncationDirection; use tokenizers::Encoding; #[napi(js_name = "Encoding")] #[derive(Clone, Default)] pub struct JsEncoding { pub(crate) encoding: Option<Encoding>, } impl From<Encoding> fo...
hf_public_repos/tokenizers/bindings/node/src/lib.rs
#![deny(clippy::all)] pub const VERSION: &str = env!("CARGO_PKG_VERSION"); mod arc_rwlock_serde; pub mod decoders; pub mod encoding; pub mod models; pub mod normalizers; pub mod pre_tokenizers; pub mod processors; pub mod tasks; pub mod tokenizer; pub mod trainers; pub mod utils;
hf_public_repos/tokenizers/bindings/node/src/decoders.rs
use crate::arc_rwlock_serde; use serde::{Deserialize, Serialize}; extern crate tokenizers as tk; use napi::bindgen_prelude::*; use napi_derive::napi; use std::sync::{Arc, RwLock}; use tk::decoders::DecoderWrapper; /// Decoder #[derive(Clone, Serialize, Deserialize)] #[napi] pub struct Decoder { #[serde(flatten, wi...
hf_public_repos/tokenizers/bindings/node/src/models.rs
use crate::arc_rwlock_serde; use crate::tasks::models::{BPEFromFilesTask, WordLevelFromFilesTask, WordPieceFromFilesTask}; use crate::trainers::Trainer; use napi::bindgen_prelude::*; use napi_derive::napi; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::sync:...
hf_public_repos/tokenizers/bindings/node/src/pre_tokenizers.rs
use crate::arc_rwlock_serde; use napi::bindgen_prelude::*; use napi_derive::napi; use serde::{Deserialize, Serialize}; use std::sync::{Arc, RwLock}; use tk::pre_tokenizers::PreTokenizerWrapper; use tk::PreTokenizedString; use tk::SplitDelimiterBehavior; use tokenizers as tk; #[napi(string_enum)] pub enum JsSplitDelimi...
hf_public_repos/tokenizers/bindings/node/src/processors.rs
use crate::arc_rwlock_serde; use serde::{Deserialize, Serialize}; extern crate tokenizers as tk; use napi::bindgen_prelude::*; use napi_derive::napi; use std::sync::{Arc, RwLock}; use tk::processors::PostProcessorWrapper; use tk::Encoding; #[derive(Clone, Serialize, Deserialize)] #[napi] pub struct Processor { #[se...
hf_public_repos/tokenizers/bindings/node/src/trainers.rs
use crate::models::Model; use napi_derive::napi; use std::sync::{Arc, RwLock}; use tokenizers as tk; use tokenizers::models::TrainerWrapper; #[napi] pub struct Trainer { trainer: Option<Arc<RwLock<TrainerWrapper>>>, } impl From<TrainerWrapper> for Trainer { fn from(trainer: TrainerWrapper) -> Self { Self { ...
hf_public_repos/tokenizers/bindings/node/src/arc_rwlock_serde.rs
use serde::de::Deserializer; use serde::ser::Serializer; use serde::{Deserialize, Serialize}; use std::sync::{Arc, RwLock}; pub fn serialize<S, T>(val: &Option<Arc<RwLock<T>>>, s: S) -> Result<S::Ok, S::Error> where S: Serializer, T: Serialize, { T::serialize(&*(val.clone().unwrap()).read().unwrap(), s) } pub f...