Add files using upload-large-folder tool
- KakoIndex.py +308 -0
- README.md +35 -0
- data/newdump-ai_bouhan.jsonl.zst +3 -0
- data/newdump-ai_galileo.jsonl.zst +3 -0
- data/newdump-ai_gameswf-1.jsonl.zst +3 -0
- data/newdump-ai_hanryu.jsonl.zst +3 -0
- data/newdump-ai_jsdf.jsonl.zst +3 -0
- data/newdump-ai_kinoko.jsonl.zst +3 -0
- data/newdump-ai_life.jsonl.zst +3 -0
- data/newdump-ai_rradio.jsonl.zst +3 -0
- data/newdump-ai_sepia.jsonl.zst +3 -0
- data/newdump-ai_venture.jsonl.zst +3 -0
- data/newdump-ai_wc.jsonl.zst +3 -0
- data/newdump-anago_119.jsonl.zst +3 -0
- data/newdump-anago_diplomacy.jsonl.zst +3 -0
- data/newdump-anago_jan.jsonl.zst +3 -0
- data/newdump-anago_koukoku.jsonl.zst +3 -0
- data/newdump-anago_news5plus.jsonl.zst +3 -0
- data/newdump-anago_scienceplus.jsonl.zst +3 -0
- data/newdump-awabi_movie.jsonl.zst +3 -0
- data/newdump-awabi_net-1.jsonl.zst +3 -0
- data/newdump-awabi_pedagogy.jsonl.zst +3 -0
- data/newdump-awabi_radio.jsonl.zst +3 -0
- data/newdump-awabi_techno.jsonl.zst +3 -0
- data/newdump-hayabusa2_liveabema.jsonl.zst +3 -0
- data/newdump-hayabusa2_liveyonmoji.jsonl.zst +3 -0
- data/newdump-hayabusa5_oonna.jsonl.zst +3 -0
- data/newdump-ikura_ice.jsonl.zst +3 -0
- data/newdump-ikura_out.jsonl.zst +3 -0
- data/newdump-ikura_recipe.jsonl.zst +3 -0
- data/newdump-ikura_wine.jsonl.zst +3 -0
- data/newdump-maguro_dog.jsonl.zst +3 -0
- data/newdump-maguro_jyudo.jsonl.zst +3 -0
- data/newdump-maguro_kcar.jsonl.zst +3 -0
- data/newdump-maguro_keirin.jsonl.zst +3 -0
- data/newdump-maguro_offreg.jsonl.zst +3 -0
- data/newdump-maguro_sports.jsonl.zst +3 -0
- data/newdump-maguro_usedcar.jsonl.zst +3 -0
- data/newdump-nozomi_basket.jsonl.zst +3 -0
- data/newdump-nozomi_lovelive-1.jsonl.zst +3 -0
- data/newdump-nozomi_os.jsonl.zst +3 -0
- data/newdump-nozomi_sec.jsonl.zst +3 -0
- data/newdump-nozomi_ymag.jsonl.zst +3 -0
- data/newdump-tarte_starwars.jsonl.zst +3 -0
- data/newdump-tarte_world48.jsonl.zst +3 -0
- data/newdump-toro_antispam.jsonl.zst +3 -0
- data/newdump-toro_cg.jsonl.zst +3 -0
- data/newdump-toro_hack.jsonl.zst +3 -0
- data/newdump-toro_offevent.jsonl.zst +3 -0
- data/newdump-toro_tech.jsonl.zst +3 -0
KakoIndex.py
ADDED
@@ -0,0 +1,308 @@
import asyncio
from io import BytesIO
import pathlib
import re
from urllib.parse import urljoin, urlparse

import httpx
import msgspec
import typer
import tqdm
from bs4 import BeautifulSoup
from loguru import logger
from dateutil.parser import parse
from dateutil import tz
import aiofile

app = typer.Typer()

JST = tz.gettz("Asia/Tokyo")


class ThreadData(msgspec.Struct):
    id: int
    title: str
    region: str  # Root server (e.g. "anago")
    city: str  # Board / channel / category (e.g. "scienceplus")
    replies: int


class Message(msgspec.Struct):
    name: str
    mail: str | None
    dateid: str | None
    body: str
    title: str | None


class BrokenMessage(msgspec.Struct):
    blocks: list[str]


class FilledThreadData(ThreadData):
    messages: list[Message]


date_refixer = re.compile(
    r"\((?:[月火水木金土日]|Sat|Sun|Mon|Tue|Wed|Thu|Thr|Fri)\)", flags=re.IGNORECASE
)


def parse_date_id(dateid_string: str) -> tuple[float, str | None]:
    """
    Parses a string to extract a timestamp and message IDs.

    Args:
        dateid_string: The input string potentially containing date, ID, and BE ID.

    Returns:
        A tuple containing the Unix timestamp (float, -1 if parsing failed) and
        the formatted message ID string or None.
    """
    # Standardize the input string
    cleaned_id = dateid_string.removesuffix(".net").strip()

    # Use regular expressions to find all ID parts.
    # This is more robust than splitting.
    id_parts = re.findall(r"(ID|BE):([\w\.\/]+)", cleaned_id)
    message_ids = [f"{kind}:{value}" for kind, value in id_parts]
    message_id_str = " | ".join(message_ids) if message_ids else None

    # Extract the date part by removing the ID parts
    date_part = re.sub(r"\s*(ID|BE):[\w\.\/+-=?!()★]+", "", cleaned_id).strip()

    timestamp = -1
    if date_part:
        # Strip the day-of-week token, e.g. "(土)" or "(Sat)", before parsing.
        try:
            clean_date_str = date_refixer.sub("", date_part) + " JST"
            timestamp = parse(clean_date_str, tzinfos={"JST": JST}).timestamp()
        except ValueError as e:
            logger.error(f"{clean_date_str}|{dateid_string}|{e}")
            # Handle cases where parsing might fail
            timestamp = -1

    return timestamp, message_id_str


def parse_splitter(line: str):
    if not line:
        return
    blocks = line.split("<>")
    if len(blocks) > 5:
        logger.warning(f"{blocks}\nBlock count validation failed. Please check.")
        return BrokenMessage(blocks)
    name, email, dateid, body, *rest = blocks
    if body == "<em>■ このスレッドは過去ログ倉庫に格納されています</em>":
        return
    rest = [i for i in rest if i]

    if dateid.lower().startswith(("停止", "あぼーん", "over 1000 ", "移転")):
        return
    # XXX: date/ID parsing is not used here; it is to be processed separately,
    # because as it turns out, it's kind of hard!
    # _, _ = parse_date_id(dateid)
    # logger.debug(f"{jp_date, message_id}")
    return Message(
        name,
        email if email else None,
        dateid,
        body,
        "<>".join(rest) if rest else None,
    )


def parse_messages(text: str) -> list[Message]:
    messages = []
    for message in text.split("\n"):
        try:
            specmsg = parse_splitter(message)
            if specmsg is not None:
                messages.append(specmsg)
        except Exception as e:
            logger.exception(e)
    return messages


async def retry_error(session: httpx.AsyncClient, url: str):
    while True:
        try:
            response = await session.get(url)
            response.raise_for_status()
            return response
        except Exception as e:
            logger.warning(f"Failed to fetch {url} | {e} | Trying again")
            await asyncio.sleep(5)


async def main(output_dump: pathlib.Path):
    session = httpx.AsyncClient()
    session.headers["user-agent"] = ""

    loop = asyncio.get_running_loop()

    boards = await session.get("https://2ch.sc/bbsmenu.html")
    boards.raise_for_status()
    soup = BeautifulSoup(boards.text, "lxml")

    encoder = msgspec.json.Encoder()

    # with BytesIO() as fout:
    for links in soup.find_all("a", attrs={"href": True}):
        link = links["href"]
        if (
            "/be/" in link
            or "//be." in link
            or link
            in [
                "//2ch.sc/",
                "//info.2ch.sc/guide/",
                "//sp.2ch.sc/",
                "//sweet.2ch.sc/headline/",
                "//find.2ch.sc/",
                "//irc.2ch.sc/",
            ]
            or link.endswith(".html")
        ):
            continue
        if "2ch.sc" not in link:
            continue
        if not isinstance(link, str):
            raise TypeError(f"Unexpected href type: {link!r}")

        decomposed = urlparse(link)
        city = decomposed.path.strip("/")
        region = decomposed.hostname.replace(".2ch.sc", "")

        dumpFile = output_dump.with_stem(f"{output_dump.stem}-{region}_{city}")
        if dumpFile.is_file() or dumpFile.with_suffix(".jsonl.zst").is_file():
            continue

        resolved_base = urljoin("https://", link)
        warehouse_catalog = urljoin(resolved_base, "kako/subject.txt")
        warehouses = await session.get(warehouse_catalog, timeout=None)
        if warehouses.status_code == 404:
            logger.warning(f"{warehouse_catalog} returned 404")
            continue

        warehouses.raise_for_status()
        warehouse_ids = [
            i.split("<>")[0] for i in warehouses.text.split("\n") if i.startswith("o")
        ]
        threadsDump = []
        concurrent = asyncio.Semaphore(128)

        async def fetch_subjects(warehouse_id: str):
            async with concurrent:
                subject_id = urljoin(resolved_base, f"kako/{warehouse_id}/subject.txt")
                subjects = [
                    i
                    for i in (await retry_error(session, subject_id)).text.split("\n")
                    if i
                ]
                for subject in subjects:
                    subject = subject.strip()
                    thread_dat, *rest = subject.split("<>")
                    # The reply count is the trailing "(NNN)"; rejoin any "(" that
                    # belonged to the title itself.
                    *thread_title, replies = "<>".join(rest).split("(")

                    thread = ThreadData(
                        int(thread_dat.split(".")[0]),
                        "(".join(thread_title).rstrip(" "),
                        region,
                        city,
                        int(replies.rstrip(")")),
                    )
                    threadsDump.append(thread)

        threads = [
            asyncio.create_task(fetch_subjects(subject_id))
            for subject_id in warehouse_ids
        ]
        for completed in tqdm.tqdm(
            asyncio.as_completed(threads), total=len(warehouse_ids)
        ):
            await completed

        if len(threadsDump) <= 1_000_000:
            logger.warning(
                f"{warehouse_catalog} has less than 1,000,000 archived threads. Skipping."
            )
            continue

        logger.info("Requesting threads.")
        taskQueue = asyncio.Queue(maxsize=1024 * 128)
        responseQueue = asyncio.Queue(maxsize=1024 * 128)
        totalThreads = len(threadsDump)
        logger.info(f"total: {totalThreads}")

        async def writer_task():
            nonlocal dumpFile
            rootStem = dumpFile.stem
            i = 0
            with tqdm.tqdm(total=totalThreads) as pbar:
                fout = await aiofile.async_open(dumpFile, "wb")
                while True:
                    tid = await responseQueue.get()
                    if tid is None:
                        break
                    data = await loop.run_in_executor(None, encoder.encode_lines, [tid])
                    # logger.debug(data)
                    await fout.write(data)
                    pbar.update(1)
                    # Roll over to a new file once the current one exceeds 10 GiB.
                    if fout.tell() > (1024**3) * 10:
                        await fout.close()
                        i += 1
                        dumpFile = dumpFile.with_stem(f"{rootStem}-{i}")
                        fout = await aiofile.async_open(dumpFile, "wb")
                await fout.close()

        async def fetch_thread_task():
            while True:
                thread_data = await taskQueue.get()
                if thread_data is None:
                    break
                thread_dat = urljoin(resolved_base, f"dat/{thread_data.id}.dat")
                response = None
                tries = 10
                while tries > 0:
                    try:
                        response = await session.get(thread_dat)
                        response.raise_for_status()
                        break
                    except Exception as e:
                        tries -= 1
                        logger.warning(
                            f"Failed to fetch {thread_dat} | {e} | Trying again"
                        )
                if response is None:
                    # Every attempt failed with a transport error; give up on this thread.
                    continue
                await responseQueue.put(
                    FilledThreadData(
                        thread_data.id,
                        thread_data.title,
                        thread_data.region,
                        thread_data.city,
                        thread_data.replies,
                        await asyncio.to_thread(parse_messages, response.text),
                    )
                )

        logger.info("Starting workers.")

        workers: list[asyncio.Task] = [
            loop.create_task(fetch_thread_task()) for _ in range(128)
        ]
        writer = loop.create_task(writer_task())

        for thread in threadsDump:
            await taskQueue.put(thread)
        for _ in range(128):
            await taskQueue.put(None)
        while workers:
            workers = [i for i in workers if not i.done()]
            await asyncio.sleep(0.01)
        await responseQueue.put(None)
        await writer


@app.command()
def dump_2ch_kako(output_catalogs: pathlib.Path):
    asyncio.run(main(output_catalogs))


if __name__ == "__main__":
    app()
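Usage note (inferred from the Typer command above, not stated elsewhere in this upload): the script exposes a single CLI command, so it can be run as "python KakoIndex.py OUTPUT_PATH". The per-board stems such as newdump-ai_bouhan suggest the output path was something like newdump.jsonl; the .zst compression appears to be a separate step, since the script writes raw .jsonl and only checks whether a matching .jsonl.zst already exists.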
README.md
ADDED
@@ -0,0 +1,35 @@
# 2ch.sc

## Processing
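
The board dumps were produced by KakoIndex.py (included in this repo), which walks bbsmenu.html, each board's kako/ archive indexes, and the per-thread .dat files, then writes one newdump-<server>_<board>.jsonl file per board; judging by the file names, the zstd compression to .jsonl.zst was applied afterwards. Each line of a data/*.jsonl.zst file is one FilledThreadData record (id, title, region, city, replies, messages). A minimal reading sketch, assuming the third-party `zstandard` package (not used by KakoIndex.py itself):

```python
# Stream FilledThreadData records from one board dump.
# Field names follow the msgspec structs in KakoIndex.py.
import io
import json

import zstandard  # pip install zstandard


def iter_threads(path: str):
    with open(path, "rb") as raw:
        reader = zstandard.ZstdDecompressor().stream_reader(raw)
        for line in io.TextIOWrapper(reader, encoding="utf-8"):
            if line.strip():
                yield json.loads(line)


for thread in iter_threads("data/newdump-ai_bouhan.jsonl.zst"):
    print(thread["id"], thread["title"], len(thread["messages"]))
    break
```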
## Licence

...Hard to say. I think this is the first ever 2ch.sc dataset on HF.co.

The original license allows for the following (translated from Japanese):

**"Print Out / Copy / Free Distribution" OK Mark**
* A mark indicating permission for "printing out," "copying," and "free distribution" **only**.
* This permission is limited to using the work **as is**. It does not include:
  * Changing, altering, processing, cutting, partial use, summarizing, translating, transforming, or adapting the work.
* Use for commercial purposes is permitted as long as the distribution is free. (For example, you can copy it for distribution in a company pamphlet if the pamphlet is given away for free.)

**"Non-profit Use for People with Disabilities" OK Mark**
* A mark indicating permission for all forms of non-profit use, such as copying, transmitting, and distributing, **only when the purpose is for use by people with disabilities**.
* This permission **includes** activities such as changing, altering, processing, cutting, partial use, summarizing, translating, transforming, or adapting the work.

**"Non-profit Use for School Education" OK Mark**
* A mark indicating permission for all forms of non-profit use, such as copying, transmitting, and distributing, **only when the purpose is for use in various school activities**.
* This permission **includes** activities such as changing, altering, processing, cutting, partial use, summarizing, translating, transforming, or adapting the work.

## お詫び (Apology)

> *負荷が掛かるので、事前相談の無いクローリングも禁止です。*
> *Crawling without prior consultation is also prohibited as it places a load on the system.*

Although we did not consult beforehand, we applied the following crawling limits:

1. At most 128 connections read from the server concurrently (see the sketch below).
   - While this might sound like a lot, in practice the connection tops out at around 100 Mbps.
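
For reference, the limit is enforced in KakoIndex.py with an `asyncio.Semaphore(128)` around the archive-index fetches and a fixed pool of 128 worker tasks for the thread downloads. A stripped-down sketch of that pattern (names simplified; not the full script):

```python
import asyncio

import httpx

CONCURRENCY = 128  # the limit stated above


async def crawl(urls: list[str]) -> list[httpx.Response]:
    # A semaphore caps how many requests are in flight at once, mirroring
    # the Semaphore(128) / 128-worker pattern used in KakoIndex.py.
    limit = asyncio.Semaphore(CONCURRENCY)

    async with httpx.AsyncClient() as session:

        async def fetch(url: str) -> httpx.Response:
            async with limit:
                response = await session.get(url)
                response.raise_for_status()
                return response

        return await asyncio.gather(*(fetch(u) for u in urls))


if __name__ == "__main__":
    asyncio.run(crawl(["https://2ch.sc/bbsmenu.html"]))
```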
data/newdump-ai_bouhan.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:89273c102c7b8824681d24210a6f4e119359d608c06401da4c1a71ea13e41bc8
size 57355264
data/newdump-ai_galileo.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cc70d3192a868b6c47ba10fe3113bfbb4590eceaf6db40934576d1f9642f8a96
size 32364265
data/newdump-ai_gameswf-1.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:85a345411ef5755e58348929ce3c03d6fd04c918e9d1d5e202894cc0404d29f7
size 740895165
data/newdump-ai_hanryu.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d486e2fa22800827bc4750caa34ad36ef1f3a6df5bd9cbbf0d968d40ffb17b56
size 41840534
data/newdump-ai_jsdf.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:02b2368f91f16ee0bc552856f219e610f1e77a3794b9f7f9522eac8cbf146a71
size 101706702
data/newdump-ai_kinoko.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fde4e872d312581324b9eade1dfe799160b6025dca86c3b10191f77207ce3fa7
size 9239775
data/newdump-ai_life.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3a742a9324603962b32dc4542232f715cb3c9a61ae010cb1831b18a91633b587
size 113297079
data/newdump-ai_rradio.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:591e2ab33aa0a9953c5c29dacf14e42916d787ab2d5246c270395a93a095c82c
size 25185645
data/newdump-ai_sepia.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cb02e36939afa8aa8ef408a1e13216d549338c847d56c7b97e23b709fda79f5a
size 36733176
data/newdump-ai_venture.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b89c37cda74c5ad0fe17540e664523354bf6057f8beffa813277054ef6179c8b
size 41748644
data/newdump-ai_wc.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7b5c76f514f2d370e607c9c0b3269aeb77c9191660183dde19178c497401bc7e
size 401344291
data/newdump-anago_119.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ed008944f2543ad260f9b404865b6dcd9e295be8eae9eea311a211c07521a2af
size 4091785
data/newdump-anago_diplomacy.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6ff04d9b18d7c736cdd2ac7c3adba6638cab7f130dead404d7e5a8ef1f7ab9d1
size 15910765
data/newdump-anago_jan.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c4d9ceb184cd3fee45a95ec442432286cdcc39961de6ddba7ef602c685f46e6b
size 110190498
data/newdump-anago_koukoku.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8264ea3daea94dc76046c5118eaf92dba800e32b63220a45ca7d357f9be00d40
size 7402191
data/newdump-anago_news5plus.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:23c99e91a9eb07d00690d008bd71d22a888f029ef7da23e21018f00846c59677
size 123079380
data/newdump-anago_scienceplus.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:88031e6a8086aa45aa9ab66f3df1b4d262c4dfcbf8676ae562e879d8c6b9488e
size 188810827
data/newdump-awabi_movie.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4e54c7e1b760374208037219939282be1b94457d28777e5f13253e3b6cb867c8
size 621184276
data/newdump-awabi_net-1.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:43263c12b37b09e3ff771c4aa58e278195a0f109eace4e28811ce5a46a164918
size 692998657
data/newdump-awabi_pedagogy.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:44a55f57e96f3ab7594d6e8a1e764b32caf0637e2fdf42a7d24d15c3bd5886ba
size 3781125
data/newdump-awabi_radio.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:55f9a59401568fd7cc4143c6f5cad8a72d1a47fcd5197fc5f1cc17fb786c9baf
size 146443537
data/newdump-awabi_techno.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:52a85505722b99c2e5bad4a3f3f8df8268c05d9b47c7287ee38dda34f9690aa4
size 24647993
data/newdump-hayabusa2_liveabema.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:07dfc0440ba86f1a10091c2fc4a14176fbdec29d6cb1e7dccdc114a63c4b1354
size 23497422
data/newdump-hayabusa2_liveyonmoji.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:58202578c225c7a4504c1d5aed101af50aebedafa8c1a48cc5ce240ebb07e7e8
size 284376
data/newdump-hayabusa5_oonna.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7d00a3e92e2327520d8b4e9bb1d7bcfefe76c6c210251d1784925baf8f767a88
size 19914076
data/newdump-ikura_ice.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:696163b917bb1789d150c1b70cb70d23e1e92693e382e94aaa1de75a0d023c3b
size 8197662
data/newdump-ikura_out.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2bff59377e4dbb42a5a72a276d91cf11ab940c02730d82a567f0d0a586e54683
size 455293652
data/newdump-ikura_recipe.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:965c1d7cb9917266aef5f0282042809d09ee978025ef414135db0f9c2df4b9c6
size 8555949
data/newdump-ikura_wine.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:293887a5e4892b68454e6337c266aa291a037a388216790394b814a8572f55d0
size 12286522
data/newdump-maguro_dog.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:02eff1a66ba94bc20493f630bbfb1e819179bb7961b29c341f83fbed6f630f4c
size 173903086
data/newdump-maguro_jyudo.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c2377a8d97d5bbf13ebb09f02ee532b6f1558528a93fb61ebe0e092f6cbed109
size 8011964
data/newdump-maguro_kcar.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4ad1aa1dd1c2f8ae7e8b2782493e063c0298cdbaa6b14946d2c933fcd3e3e636
size 351704794
data/newdump-maguro_keirin.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e8ca4e19ca8197d1faa548f157e0c0a74e4694b02ec2486638ef5204ffe3c6f9
size 286208699
data/newdump-maguro_offreg.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cae02eb59cf44ffb12cb4c9c2cdc43b98c648ab8f5ffe3669730060ce6668c59
size 56529629
data/newdump-maguro_sports.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fe2f7697c92e4dd26717f579bc988c6e2a5417e7ed5393fdc710d718099b81ef
size 38691171
data/newdump-maguro_usedcar.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d9a47c7d518669f098921bcdb009654aa5c2ea66908057e303106d1854f1c914
size 46588886
data/newdump-nozomi_basket.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8cd896d7da0d5db3c5773dc26705a0d728fe4917717cefebf421381617f60341
size 522348621
data/newdump-nozomi_lovelive-1.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:830e4254b00f4296465b8aa3531cfb255e1c5e4fbd430725430f473fc6980abe
size 171397700
data/newdump-nozomi_os.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:691bdc083e210de4e3a78aa43e460742aac2394ab95371a1fbdca576c165d259
size 6611574
data/newdump-nozomi_sec.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6b633d12bbade4bc5bbf241b2e932477cf7ef69c5ce30317dc598b60078475d5
size 39102782
data/newdump-nozomi_ymag.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:930180ac0bfbbb09b14735e5316897c3546b252deb6bafacb03ffffe7c379010
size 374221907
data/newdump-tarte_starwars.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:05b78d7c0b75360d803b9dd468b8d3029d08b607066acea3a2cbecebd6f33e80
size 16045029
data/newdump-tarte_world48.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:556e47668add678ded2406d2a2a10bf796e0bf0ea6910d7c837dc5090df03c35
size 30055651
data/newdump-toro_antispam.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:063f7d8c69af1d6c579329c3e2a9427293ed5bf636a5224abc2ae2e101534d8b
size 12850763
data/newdump-toro_cg.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c5757a638a50c6aa3070d0fdfc374a8a1a1dc688ff3aae852e7153ab63688b27
size 134857483
data/newdump-toro_hack.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e4dd082f6c8937b97834c53df217e14c2c7a3f6966cf541efe109620a9fabe3f
size 16684938
data/newdump-toro_offevent.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:16cb5dbac1f3c6ad7a218a5818f51ca2b7f0eb889aef8a42957ac5875d9524b9
size 108307466
data/newdump-toro_tech.jsonl.zst
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5580ed8a4bba419a77ab4eb59c74d854cf0d179418c22432fe5222f266685890
size 107381183