Spaces:
Paused
Paused
Upload 321 files
Browse files. This view is limited to 50 files because it contains too many changes. See the raw diff.
- packages/clawdbot/index.js +1 -0
- packages/clawdbot/package.json +19 -0
- packages/clawdbot/scripts/postinstall.js +1 -0
- packages/moltbot/index.js +1 -0
- packages/moltbot/package.json +19 -0
- packages/moltbot/scripts/postinstall.js +1 -0
- patches/.gitkeep +0 -0
- scripts/auth-monitor.sh +89 -0
- scripts/bench-model.ts +145 -0
- scripts/build-and-run-mac.sh +18 -0
- scripts/build-docs-list.mjs +14 -0
- scripts/build_icon.sh +59 -0
- scripts/bundle-a2ui.sh +87 -0
- scripts/canvas-a2ui-copy.ts +40 -0
- scripts/changelog-to-html.sh +91 -0
- scripts/check-ts-max-loc.ts +80 -0
- scripts/claude-auth-status.sh +280 -0
- scripts/clawlog.sh +309 -0
- scripts/clawtributors-map.json +39 -0
- scripts/codesign-mac-app.sh +289 -0
- scripts/committer +117 -0
- scripts/copy-hook-metadata.ts +55 -0
- scripts/create-dmg.sh +176 -0
- scripts/debug-claude-usage.ts +391 -0
- scripts/docker/cleanup-smoke/Dockerfile +20 -0
- scripts/docker/cleanup-smoke/run.sh +32 -0
- scripts/docker/install-sh-e2e/Dockerfile +14 -0
- scripts/docker/install-sh-e2e/run.sh +531 -0
- scripts/docker/install-sh-nonroot/Dockerfile +29 -0
- scripts/docker/install-sh-nonroot/run.sh +51 -0
- scripts/docker/install-sh-smoke/Dockerfile +21 -0
- scripts/docker/install-sh-smoke/run.sh +73 -0
- scripts/docs-i18n/glossary.go +29 -0
- scripts/docs-i18n/go.mod +10 -0
- scripts/docs-i18n/go.sum +10 -0
- scripts/docs-i18n/html_translate.go +160 -0
- scripts/docs-i18n/main.go +58 -0
- scripts/docs-i18n/markdown_segments.go +131 -0
- scripts/docs-i18n/masking.go +89 -0
- scripts/docs-i18n/placeholders.go +30 -0
- scripts/docs-i18n/process.go +205 -0
- scripts/docs-i18n/segment.go +11 -0
- scripts/docs-i18n/tm.go +126 -0
- scripts/docs-i18n/translator.go +104 -0
- scripts/docs-i18n/util.go +81 -0
- scripts/docs-list.js +173 -0
- scripts/e2e/Dockerfile +23 -0
- scripts/e2e/Dockerfile.qr-import +9 -0
- scripts/e2e/doctor-install-switch-docker.sh +147 -0
- scripts/e2e/gateway-network-docker.sh +115 -0
packages/clawdbot/index.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
export * from "openclaw";
|
packages/clawdbot/package.json
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"name": "clawdbot",
|
| 3 |
+
"version": "2026.1.27-beta.1",
|
| 4 |
+
"description": "Compatibility shim that forwards to openclaw",
|
| 5 |
+
"bin": {
|
| 6 |
+
"clawdbot": "./bin/clawdbot.js"
|
| 7 |
+
},
|
| 8 |
+
"type": "module",
|
| 9 |
+
"exports": {
|
| 10 |
+
".": "./index.js",
|
| 11 |
+
"./cli-entry": "./bin/clawdbot.js"
|
| 12 |
+
},
|
| 13 |
+
"scripts": {
|
| 14 |
+
"postinstall": "node ./scripts/postinstall.js"
|
| 15 |
+
},
|
| 16 |
+
"dependencies": {
|
| 17 |
+
"openclaw": "workspace:*"
|
| 18 |
+
}
|
| 19 |
+
}
|
packages/clawdbot/scripts/postinstall.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
console.warn("clawdbot renamed -> openclaw");
|
packages/moltbot/index.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
export * from "openclaw";
|
packages/moltbot/package.json
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"name": "moltbot",
|
| 3 |
+
"version": "2026.1.27-beta.1",
|
| 4 |
+
"description": "Compatibility shim that forwards to openclaw",
|
| 5 |
+
"bin": {
|
| 6 |
+
"moltbot": "./bin/moltbot.js"
|
| 7 |
+
},
|
| 8 |
+
"type": "module",
|
| 9 |
+
"exports": {
|
| 10 |
+
".": "./index.js",
|
| 11 |
+
"./cli-entry": "./bin/moltbot.js"
|
| 12 |
+
},
|
| 13 |
+
"scripts": {
|
| 14 |
+
"postinstall": "node ./scripts/postinstall.js"
|
| 15 |
+
},
|
| 16 |
+
"dependencies": {
|
| 17 |
+
"openclaw": "workspace:*"
|
| 18 |
+
}
|
| 19 |
+
}
|
packages/moltbot/scripts/postinstall.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
console.warn("moltbot renamed -> openclaw");
|
patches/.gitkeep
ADDED
|
File without changes
|
scripts/auth-monitor.sh
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash
# Auth Expiry Monitor
# Run via cron or systemd timer to get proactive notifications
# before Claude Code auth expires.
#
# Suggested cron: */30 * * * * /home/admin/openclaw/scripts/auth-monitor.sh
#
# Environment variables:
#   NOTIFY_PHONE - Phone number to send OpenClaw notification (e.g., +1234567890)
#   NOTIFY_NTFY - ntfy.sh topic for push notifications (e.g., openclaw-alerts)
#   WARN_HOURS - Hours before expiry to warn (default: 2)

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CLAUDE_CREDS="$HOME/.claude/.credentials.json"
STATE_FILE="$HOME/.openclaw/auth-monitor-state"

# Configuration
WARN_HOURS="${WARN_HOURS:-2}"
NOTIFY_PHONE="${NOTIFY_PHONE:-}"
NOTIFY_NTFY="${NOTIFY_NTFY:-}"

# jq is required to parse the credentials file below; fail with a clear
# message up front instead of dying opaquely later under `set -e`.
if ! command -v jq >/dev/null 2>&1; then
  echo "Error: jq is required but not installed." >&2
  exit 1
fi

# State tracking to avoid spam
mkdir -p "$(dirname "$STATE_FILE")"
LAST_NOTIFIED=$(cat "$STATE_FILE" 2>/dev/null || echo "0")
# A corrupt or empty state file would make the arithmetic below abort the
# whole script under `set -e`; coerce anything non-numeric back to 0.
if ! [[ "$LAST_NOTIFIED" =~ ^[0-9]+$ ]]; then
  LAST_NOTIFIED=0
fi
NOW=$(date +%s)

# Only notify once per hour max
MIN_INTERVAL=3600

# send_notification <message> [priority]
# Logs the message, then fans it out to OpenClaw (if a phone is configured
# and auth still works) and/or ntfy.sh, rate-limited to one notification
# per MIN_INTERVAL via the state file.
send_notification() {
  local message="$1"
  local priority="${2:-default}"

  echo "$(date '+%Y-%m-%d %H:%M:%S') - $message"

  # Check if we notified recently
  if [ $((NOW - LAST_NOTIFIED)) -lt $MIN_INTERVAL ]; then
    echo "Skipping notification (sent recently)"
    return
  fi

  # Send via OpenClaw if phone configured and auth still valid
  if [ -n "$NOTIFY_PHONE" ]; then
    # Check if we can still use openclaw
    if "$SCRIPT_DIR/claude-auth-status.sh" simple 2>/dev/null | grep -q "OK\|EXPIRING"; then
      echo "Sending via OpenClaw to $NOTIFY_PHONE..."
      openclaw send --to "$NOTIFY_PHONE" --message "$message" 2>/dev/null || true
    fi
  fi

  # Send via ntfy.sh if configured
  if [ -n "$NOTIFY_NTFY" ]; then
    echo "Sending via ntfy.sh to $NOTIFY_NTFY..."
    curl -s -o /dev/null \
      -H "Title: OpenClaw Auth Alert" \
      -H "Priority: $priority" \
      -H "Tags: warning,key" \
      -d "$message" \
      "https://ntfy.sh/$NOTIFY_NTFY" || true
  fi

  # Update state
  echo "$NOW" > "$STATE_FILE"
}

# Check auth status
if [ ! -f "$CLAUDE_CREDS" ]; then
  send_notification "Claude Code credentials missing! Run: claude setup-token" "high"
  exit 1
fi

# expiresAt is stored in milliseconds since the epoch; `// 0` defaults it
# when the key is absent or null.
EXPIRES_AT=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS")
NOW_MS=$((NOW * 1000))
DIFF_MS=$((EXPIRES_AT - NOW_MS))
HOURS_LEFT=$((DIFF_MS / 3600000))
MINS_LEFT=$(((DIFF_MS % 3600000) / 60000))

if [ "$DIFF_MS" -lt 0 ]; then
  send_notification "Claude Code auth EXPIRED! OpenClaw is down. Run: ssh l36 '~/openclaw/scripts/mobile-reauth.sh'" "urgent"
  exit 1
elif [ "$HOURS_LEFT" -lt "$WARN_HOURS" ]; then
  send_notification "Claude Code auth expires in ${HOURS_LEFT}h ${MINS_LEFT}m. Consider re-auth soon." "high"
  exit 0
else
  echo "$(date '+%Y-%m-%d %H:%M:%S') - Auth OK: ${HOURS_LEFT}h ${MINS_LEFT}m remaining"
  exit 0
fi
scripts/bench-model.ts
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import { completeSimple, getModel, type Model } from "@mariozechner/pi-ai";
|
| 2 |
+
|
| 3 |
+
type Usage = {
|
| 4 |
+
input?: number;
|
| 5 |
+
output?: number;
|
| 6 |
+
cacheRead?: number;
|
| 7 |
+
cacheWrite?: number;
|
| 8 |
+
totalTokens?: number;
|
| 9 |
+
};
|
| 10 |
+
|
| 11 |
+
type RunResult = {
|
| 12 |
+
durationMs: number;
|
| 13 |
+
usage?: Usage;
|
| 14 |
+
};
|
| 15 |
+
|
| 16 |
+
const DEFAULT_PROMPT = "Reply with a single word: ok. No punctuation or extra text.";
|
| 17 |
+
const DEFAULT_RUNS = 10;
|
| 18 |
+
|
| 19 |
+
function parseArg(flag: string): string | undefined {
|
| 20 |
+
const idx = process.argv.indexOf(flag);
|
| 21 |
+
if (idx === -1) {
|
| 22 |
+
return undefined;
|
| 23 |
+
}
|
| 24 |
+
return process.argv[idx + 1];
|
| 25 |
+
}
|
| 26 |
+
|
| 27 |
+
function parseRuns(raw: string | undefined): number {
|
| 28 |
+
if (!raw) {
|
| 29 |
+
return DEFAULT_RUNS;
|
| 30 |
+
}
|
| 31 |
+
const parsed = Number(raw);
|
| 32 |
+
if (!Number.isFinite(parsed) || parsed <= 0) {
|
| 33 |
+
return DEFAULT_RUNS;
|
| 34 |
+
}
|
| 35 |
+
return Math.floor(parsed);
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
function median(values: number[]): number {
|
| 39 |
+
if (values.length === 0) {
|
| 40 |
+
return 0;
|
| 41 |
+
}
|
| 42 |
+
const sorted = [...values].toSorted((a, b) => a - b);
|
| 43 |
+
const mid = Math.floor(sorted.length / 2);
|
| 44 |
+
if (sorted.length % 2 === 0) {
|
| 45 |
+
return Math.round((sorted[mid - 1] + sorted[mid]) / 2);
|
| 46 |
+
}
|
| 47 |
+
return sorted[mid];
|
| 48 |
+
}
|
| 49 |
+
|
| 50 |
+
/**
 * Runs the same single-message prompt `opts.runs` times against one model,
 * sequentially (intentional — parallel calls would skew latency numbers),
 * logging per-run latency and collecting duration + token usage.
 *
 * @param opts.label  Short name used in console output (e.g. "minimax").
 * @param opts.model  Model descriptor passed straight to completeSimple.
 * @param opts.apiKey Provider API key for this model.
 * @param opts.runs   Number of sequential completions to time.
 * @param opts.prompt User-message text sent on every run.
 * @returns One RunResult (wall-clock ms + usage, if reported) per run.
 */
async function runModel(opts: {
  label: string;
  model: Model<any>;
  apiKey: string;
  runs: number;
  prompt: string;
}): Promise<RunResult[]> {
  const results: RunResult[] = [];
  for (let i = 0; i < opts.runs; i += 1) {
    const started = Date.now();
    const res = await completeSimple(
      opts.model,
      {
        messages: [
          {
            role: "user",
            content: opts.prompt,
            timestamp: Date.now(),
          },
        ],
      },
      // Small maxTokens keeps runs cheap; the prompt asks for one word.
      { apiKey: opts.apiKey, maxTokens: 64 },
    );
    // Wall-clock latency of the full round trip, in milliseconds.
    const durationMs = Date.now() - started;
    results.push({ durationMs, usage: res.usage });
    console.log(`${opts.label} run ${i + 1}/${opts.runs}: ${durationMs}ms`);
  }
  return results;
}
|
| 79 |
+
|
| 80 |
+
/**
 * Benchmarks MiniMax vs Claude Opus latency on a trivial prompt.
 *
 * CLI flags: --runs <n> (default 10), --prompt <text>.
 * Required env: ANTHROPIC_API_KEY, MINIMAX_API_KEY.
 * Optional env: MINIMAX_BASE_URL, MINIMAX_MODEL.
 *
 * Prints per-run timings, then a median/min/max summary per model.
 * @throws if either API key is missing from the environment.
 */
async function main(): Promise<void> {
  const runs = parseRuns(parseArg("--runs"));
  const prompt = parseArg("--prompt") ?? DEFAULT_PROMPT;

  const anthropicKey = process.env.ANTHROPIC_API_KEY?.trim();
  const minimaxKey = process.env.MINIMAX_API_KEY?.trim();
  if (!anthropicKey) {
    throw new Error("Missing ANTHROPIC_API_KEY in environment.");
  }
  if (!minimaxKey) {
    throw new Error("Missing MINIMAX_API_KEY in environment.");
  }

  // `||` (not `??`) is deliberate here: an env var set to "" also falls back.
  const minimaxBaseUrl = process.env.MINIMAX_BASE_URL?.trim() || "https://api.minimax.io/v1";
  const minimaxModelId = process.env.MINIMAX_MODEL?.trim() || "MiniMax-M2.1";

  // Hand-built descriptor: MiniMax is driven through the OpenAI-completions
  // adapter. Costs are zeroed — only latency is being measured.
  const minimaxModel: Model<"openai-completions"> = {
    id: minimaxModelId,
    name: `MiniMax ${minimaxModelId}`,
    api: "openai-completions",
    provider: "minimax",
    baseUrl: minimaxBaseUrl,
    reasoning: false,
    input: ["text"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 200000,
    maxTokens: 8192,
  };
  const opusModel = getModel("anthropic", "claude-opus-4-5");

  console.log(`Prompt: ${prompt}`);
  console.log(`Runs: ${runs}`);
  console.log("");

  // Models are benchmarked one after the other (not in parallel) so the
  // two runs don't compete for local bandwidth/CPU.
  const minimaxResults = await runModel({
    label: "minimax",
    model: minimaxModel,
    apiKey: minimaxKey,
    runs,
    prompt,
  });
  const opusResults = await runModel({
    label: "opus",
    model: opusModel,
    apiKey: anthropicKey,
    runs,
    prompt,
  });

  // Collapses one model's runs into a {label, median, min, max} row.
  const summarize = (label: string, results: RunResult[]) => {
    const durations = results.map((r) => r.durationMs);
    const med = median(durations);
    const min = Math.min(...durations);
    const max = Math.max(...durations);
    return { label, med, min, max };
  };

  const summary = [summarize("minimax", minimaxResults), summarize("opus", opusResults)];
  console.log("");
  console.log("Summary (ms):");
  for (const row of summary) {
    console.log(`${row.label.padEnd(7)} median=${row.med} min=${row.min} max=${row.max}`);
  }
}

await main();
|
scripts/build-and-run-mac.sh
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/usr/bin/env bash
# Build the macOS app (debug) and relaunch it in the background.
# Intended for quick local iteration; logs go to /tmp/openclaw.log.
set -euo pipefail
cd "$(dirname "$0")/../apps/macos"

# Dedicated build dir so local runs don't collide with other build outputs.
BUILD_PATH=".build-local"
PRODUCT="OpenClaw"
BIN="$BUILD_PATH/debug/$PRODUCT"

printf "\n▶️ Building $PRODUCT (debug, build path: $BUILD_PATH)\n"
swift build -c debug --product "$PRODUCT" --build-path "$BUILD_PATH"

# Stop any already-running instance; `|| true` tolerates "no such process".
printf "\n⏹ Stopping existing $PRODUCT...\n"
killall -q "$PRODUCT" 2>/dev/null || true

# Detach from this terminal so the app survives shell exit.
printf "\n🚀 Launching $BIN ...\n"
nohup "$BIN" >/tmp/openclaw.log 2>&1 &
PID=$!
printf "Started $PRODUCT (PID $PID). Logs: /tmp/openclaw.log\n"
scripts/build-docs-list.mjs
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/usr/bin/env node
// Generates bin/docs-list: a small executable wrapper that re-runs
// scripts/docs-list.js under the current Node binary and forwards its
// exit status.
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

// Repo root = parent directory of this script's directory.
const root = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const binDir = path.join(root, "bin");
const binPath = path.join(binDir, "docs-list");

fs.mkdirSync(binDir, { recursive: true });

// Wrapper source (written verbatim): spawns Node on the real script with
// inherited stdio, then exits with the child's status (1 if it had none).
const wrapper = `#!/usr/bin/env node\nimport { spawnSync } from "node:child_process";\nimport path from "node:path";\nimport { fileURLToPath } from "node:url";\n\nconst here = path.dirname(fileURLToPath(import.meta.url));\nconst script = path.join(here, "..", "scripts", "docs-list.js");\n\nconst result = spawnSync(process.execPath, [script], { stdio: "inherit" });\nprocess.exit(result.status ?? 1);\n`;

// 0o755 so the generated wrapper is directly executable.
fs.writeFileSync(binPath, wrapper, { mode: 0o755 });
scripts/build_icon.sh
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/usr/bin/env bash
set -euo pipefail

# Render the macOS .icon bundle to a padded .icns like Trimmy's pipeline.
# Defaults target the OpenClaw assets so you can just run the script from repo root.
# Usage: build_icon.sh [icon_file] [basename] [out_root]

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"

# Positional overrides: source .icon bundle, output basename, scratch dir.
ICON_FILE=${1:-"$ROOT_DIR/apps/macos/Icon.icon"}
BASENAME=${2:-OpenClaw}
OUT_ROOT=${3:-"$ROOT_DIR/apps/macos/build/icon"}
XCODE_APP=${XCODE_APP:-/Applications/Xcode.app}
# Where the final .icns should live; override DEST_ICNS to change.
DEST_ICNS=${DEST_ICNS:-"$ROOT_DIR/apps/macos/Sources/OpenClaw/Resources/OpenClaw.icns"}

# The Icon Composer CLI was renamed across Xcode versions; try both names.
ICTOOL="$XCODE_APP/Contents/Applications/Icon Composer.app/Contents/Executables/ictool"
if [[ ! -x "$ICTOOL" ]]; then
  ICTOOL="$XCODE_APP/Contents/Applications/Icon Composer.app/Contents/Executables/icontool"
fi
if [[ ! -x "$ICTOOL" ]]; then
  echo "ictool/icontool not found. Set XCODE_APP if Xcode is elsewhere." >&2
  exit 1
fi

ICONSET_DIR="$OUT_ROOT/${BASENAME}.iconset"
TMP_DIR="$OUT_ROOT/tmp"
mkdir -p "$ICONSET_DIR" "$TMP_DIR"

MASTER_ART="$TMP_DIR/icon_art_824.png"
MASTER_1024="$TMP_DIR/icon_1024.png"

# Render inner art (no margin) with macOS Default appearance
"$ICTOOL" "$ICON_FILE" \
  --export-preview macOS Default 824 824 1 -45 "$MASTER_ART"

# Pad to 1024x1024 with transparent border
sips --padToHeightWidth 1024 1024 "$MASTER_ART" --out "$MASTER_1024" >/dev/null

# Generate required sizes
sizes=(16 32 64 128 256 512 1024)
for sz in "${sizes[@]}"; do
  out="$ICONSET_DIR/icon_${sz}x${sz}.png"
  sips -z "$sz" "$sz" "$MASTER_1024" --out "$out" >/dev/null
  # Every size except 1024 also gets a @2x retina variant.
  if [[ "$sz" -ne 1024 ]]; then
    dbl=$((sz*2))
    out2="$ICONSET_DIR/icon_${sz}x${sz}@2x.png"
    sips -z "$dbl" "$dbl" "$MASTER_1024" --out "$out2" >/dev/null
  fi
done

# 512x512@2x already covered by 1024; ensure it exists
cp "$MASTER_1024" "$ICONSET_DIR/icon_512x512@2x.png"

iconutil -c icns "$ICONSET_DIR" -o "$OUT_ROOT/${BASENAME}.icns"

mkdir -p "$(dirname "$DEST_ICNS")"
cp "$OUT_ROOT/${BASENAME}.icns" "$DEST_ICNS"

echo "Icon.icns generated at $DEST_ICNS"
scripts/bundle-a2ui.sh
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/usr/bin/env bash
# Builds the A2UI canvas bundle, skipping the (slow) rebuild when none of
# the inputs changed since the last run (content-hash cache in HASH_FILE).
set -euo pipefail

# Print a recovery hint whenever any command below fails.
on_error() {
  echo "A2UI bundling failed. Re-run with: pnpm canvas:a2ui:bundle" >&2
  echo "If this persists, verify pnpm deps and try again." >&2
}
trap on_error ERR

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
HASH_FILE="$ROOT_DIR/src/canvas-host/a2ui/.bundle.hash"
OUTPUT_FILE="$ROOT_DIR/src/canvas-host/a2ui/a2ui.bundle.js"
A2UI_RENDERER_DIR="$ROOT_DIR/vendor/a2ui/renderers/lit"
A2UI_APP_DIR="$ROOT_DIR/apps/shared/OpenClawKit/Tools/CanvasA2UI"

# Docker builds exclude vendor/apps via .dockerignore.
# In that environment we must keep the prebuilt bundle.
if [[ ! -d "$A2UI_RENDERER_DIR" || ! -d "$A2UI_APP_DIR" ]]; then
  echo "A2UI sources missing; keeping prebuilt bundle."
  exit 0
fi

# Everything that should trigger a rebuild when it changes.
INPUT_PATHS=(
  "$ROOT_DIR/package.json"
  "$ROOT_DIR/pnpm-lock.yaml"
  "$A2UI_RENDERER_DIR"
  "$A2UI_APP_DIR"
)

# Hashes every file under INPUT_PATHS (relative path + content, in a
# stable sort order) into a single sha256 hex digest, via an inline Node
# program. Prints the digest on stdout.
compute_hash() {
  ROOT_DIR="$ROOT_DIR" node --input-type=module - "${INPUT_PATHS[@]}" <<'NODE'
import { createHash } from "node:crypto";
import { promises as fs } from "node:fs";
import path from "node:path";

const rootDir = process.env.ROOT_DIR ?? process.cwd();
const inputs = process.argv.slice(2);
const files = [];

async function walk(entryPath) {
  const st = await fs.stat(entryPath);
  if (st.isDirectory()) {
    const entries = await fs.readdir(entryPath);
    for (const entry of entries) {
      await walk(path.join(entryPath, entry));
    }
    return;
  }
  files.push(entryPath);
}

for (const input of inputs) {
  await walk(input);
}

function normalize(p) {
  return p.split(path.sep).join("/");
}

files.sort((a, b) => normalize(a).localeCompare(normalize(b)));

const hash = createHash("sha256");
for (const filePath of files) {
  const rel = normalize(path.relative(rootDir, filePath));
  hash.update(rel);
  hash.update("\0");
  hash.update(await fs.readFile(filePath));
  hash.update("\0");
}

process.stdout.write(hash.digest("hex"));
NODE
}

# Skip the build when inputs are unchanged AND the bundle still exists.
current_hash="$(compute_hash)"
if [[ -f "$HASH_FILE" ]]; then
  previous_hash="$(cat "$HASH_FILE")"
  if [[ "$previous_hash" == "$current_hash" && -f "$OUTPUT_FILE" ]]; then
    echo "A2UI bundle up to date; skipping."
    exit 0
  fi
fi

# Type-check the renderer, then produce the single-file bundle.
pnpm -s exec tsc -p "$A2UI_RENDERER_DIR/tsconfig.json"
rolldown -c "$A2UI_APP_DIR/rolldown.config.mjs"

# Record the inputs that produced this bundle for the next run's check.
echo "$current_hash" > "$HASH_FILE"
scripts/canvas-a2ui-copy.ts
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import fs from "node:fs/promises";
|
| 2 |
+
import path from "node:path";
|
| 3 |
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
| 4 |
+
|
| 5 |
+
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
|
| 6 |
+
|
| 7 |
+
export function getA2uiPaths(env = process.env) {
|
| 8 |
+
const srcDir = env.OPENCLAW_A2UI_SRC_DIR ?? path.join(repoRoot, "src", "canvas-host", "a2ui");
|
| 9 |
+
const outDir = env.OPENCLAW_A2UI_OUT_DIR ?? path.join(repoRoot, "dist", "canvas-host", "a2ui");
|
| 10 |
+
return { srcDir, outDir };
|
| 11 |
+
}
|
| 12 |
+
|
| 13 |
+
/**
 * Copies the prebuilt A2UI assets from srcDir to outDir (recursively).
 *
 * First verifies the two key artifacts (index.html, a2ui.bundle.js) exist
 * in srcDir; if they don't, either warns and returns (when
 * OPENCLAW_A2UI_SKIP_MISSING=1, e.g. partial checkouts) or throws with the
 * original stat failure attached as `cause`.
 */
export async function copyA2uiAssets({ srcDir, outDir }: { srcDir: string; outDir: string }) {
  const skipMissing = process.env.OPENCLAW_A2UI_SKIP_MISSING === "1";
  try {
    await fs.stat(path.join(srcDir, "index.html"));
    await fs.stat(path.join(srcDir, "a2ui.bundle.js"));
  } catch (err) {
    const message = 'Missing A2UI bundle assets. Run "pnpm canvas:a2ui:bundle" and retry.';
    if (skipMissing) {
      console.warn(`${message} Skipping copy (OPENCLAW_A2UI_SKIP_MISSING=1).`);
      return;
    }
    throw new Error(message, { cause: err });
  }
  // Ensure outDir's parent exists; fs.cp creates outDir itself.
  await fs.mkdir(path.dirname(outDir), { recursive: true });
  await fs.cp(srcDir, outDir, { recursive: true });
}
|
| 29 |
+
|
| 30 |
+
// CLI entry: resolve paths from env/defaults and perform the copy.
async function main() {
  const { srcDir, outDir } = getA2uiPaths();
  await copyA2uiAssets({ srcDir, outDir });
}

// Run main() only when this file is executed directly (not when imported).
if (import.meta.url === pathToFileURL(process.argv[1] ?? "").href) {
  main().catch((err) => {
    console.error(String(err));
    process.exit(1);
  });
}
|
scripts/changelog-to-html.sh
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/usr/bin/env bash
# Converts one version's section of CHANGELOG.md into minimal HTML
# (for release notes / appcast feeds).
# Usage: changelog-to-html.sh <version> [changelog_file]
set -euo pipefail

VERSION=${1:-}
CHANGELOG_FILE=${2:-}

if [[ -z "$VERSION" ]]; then
  echo "Usage: $0 <version> [changelog_file]" >&2
  exit 1
fi

# Locate CHANGELOG.md when not given: repo root (relative to this script),
# then the CWD, then one directory up.
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
if [[ -z "$CHANGELOG_FILE" ]]; then
  if [[ -f "$SCRIPT_DIR/../CHANGELOG.md" ]]; then
    CHANGELOG_FILE="$SCRIPT_DIR/../CHANGELOG.md"
  elif [[ -f "CHANGELOG.md" ]]; then
    CHANGELOG_FILE="CHANGELOG.md"
  elif [[ -f "../CHANGELOG.md" ]]; then
    CHANGELOG_FILE="../CHANGELOG.md"
  else
    echo "Error: Could not find CHANGELOG.md" >&2
    exit 1
  fi
fi

if [[ ! -f "$CHANGELOG_FILE" ]]; then
  echo "Error: Changelog file '$CHANGELOG_FILE' not found" >&2
  exit 1
fi

# Prints the body of the "## <version>" heading, stopping at the next "## ".
extract_version_section() {
  local version=$1
  local file=$2
  awk -v version="$version" '
    BEGIN { found=0 }
    /^## / {
      if ($0 ~ "^##[[:space:]]+" version "([[:space:]].*|$)") { found=1; next }
      if (found) { exit }
    }
    found { print }
  ' "$file"
}

# Best-effort single-line Markdown -> HTML: headings, bullets (bold-leading
# and plain), bold, inline code, links. Not a full parser — good enough for
# changelog prose.
markdown_to_html() {
  local text=$1
  text=$(echo "$text" | sed 's/^##### \(.*\)$/<h5>\1<\/h5>/')
  text=$(echo "$text" | sed 's/^#### \(.*\)$/<h4>\1<\/h4>/')
  text=$(echo "$text" | sed 's/^### \(.*\)$/<h3>\1<\/h3>/')
  text=$(echo "$text" | sed 's/^## \(.*\)$/<h2>\1<\/h2>/')
  text=$(echo "$text" | sed 's/^- \*\*\([^*]*\)\*\*\(.*\)$/<li><strong>\1<\/strong>\2<\/li>/')
  text=$(echo "$text" | sed 's/^- \([^*].*\)$/<li>\1<\/li>/')
  text=$(echo "$text" | sed 's/\*\*\([^*]*\)\*\*/<strong>\1<\/strong>/g')
  text=$(echo "$text" | sed 's/`\([^`]*\)`/<code>\1<\/code>/g')
  text=$(echo "$text" | sed 's/\[\([^]]*\)\](\([^)]*\))/<a href="\2">\1<\/a>/g')
  echo "$text"
}

# When the requested version has no section, emit a generic blurb and exit 0.
version_content=$(extract_version_section "$VERSION" "$CHANGELOG_FILE")
if [[ -z "$version_content" ]]; then
  echo "<h2>OpenClaw $VERSION</h2>"
  echo "<p>Latest OpenClaw update.</p>"
  echo "<p><a href=\"https://github.com/openclaw/openclaw/blob/main/CHANGELOG.md\">View full changelog</a></p>"
  exit 0
fi

echo "<h2>OpenClaw $VERSION</h2>"

# Stream the section line by line, wrapping consecutive "- " bullet lines
# in a single <ul>...</ul>; blank lines are dropped.
in_list=false
while IFS= read -r line; do
  if [[ "$line" =~ ^- ]]; then
    if [[ "$in_list" == false ]]; then
      echo "<ul>"
      in_list=true
    fi
    markdown_to_html "$line"
  else
    if [[ "$in_list" == true ]]; then
      echo "</ul>"
      in_list=false
    fi
    if [[ -n "$line" ]]; then
      markdown_to_html "$line"
    fi
  fi
done <<< "$version_content"

# Close a list left open at end of section.
if [[ "$in_list" == true ]]; then
  echo "</ul>"
fi

echo "<p><a href=\"https://github.com/openclaw/openclaw/blob/main/CHANGELOG.md\">View full changelog</a></p>"
scripts/check-ts-max-loc.ts
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import { execFileSync } from "node:child_process";
|
| 2 |
+
import { existsSync } from "node:fs";
|
| 3 |
+
import { readFile } from "node:fs/promises";
|
| 4 |
+
|
| 5 |
+
type ParsedArgs = {
|
| 6 |
+
maxLines: number;
|
| 7 |
+
};
|
| 8 |
+
|
| 9 |
+
function parseArgs(argv: string[]): ParsedArgs {
|
| 10 |
+
let maxLines = 500;
|
| 11 |
+
|
| 12 |
+
for (let index = 0; index < argv.length; index++) {
|
| 13 |
+
const arg = argv[index];
|
| 14 |
+
if (arg === "--max") {
|
| 15 |
+
const next = argv[index + 1];
|
| 16 |
+
if (!next || Number.isNaN(Number(next))) {
|
| 17 |
+
throw new Error("Missing/invalid --max value");
|
| 18 |
+
}
|
| 19 |
+
maxLines = Number(next);
|
| 20 |
+
index++;
|
| 21 |
+
continue;
|
| 22 |
+
}
|
| 23 |
+
}
|
| 24 |
+
|
| 25 |
+
return { maxLines };
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
/**
 * Lists all git-visible files in the working tree: tracked (`--cached`)
 * plus untracked-but-not-ignored (`--others --exclude-standard`).
 * @returns Repo-relative paths, one per entry, blanks removed.
 */
function gitLsFilesAll(): string[] {
  // Include untracked files too so local refactors don’t “pass” by accident.
  const stdout = execFileSync("git", ["ls-files", "--cached", "--others", "--exclude-standard"], {
    encoding: "utf8",
  });
  return stdout
    .split("\n")
    .map((line) => line.trim())
    .filter(Boolean);
}
|
| 38 |
+
|
| 39 |
+
async function countLines(filePath: string): Promise<number> {
|
| 40 |
+
const content = await readFile(filePath, "utf8");
|
| 41 |
+
// Count physical lines. Keeps the rule simple + predictable.
|
| 42 |
+
return content.split("\n").length;
|
| 43 |
+
}
|
| 44 |
+
|
| 45 |
+
/**
 * Lint-style check: fails (exit code 1) when any tracked/untracked .ts or
 * .tsx file exceeds the line limit (default 500; override with `--max <n>`).
 * Offenders are printed largest-first as "<lines>\t<path>".
 */
async function main() {
  // Makes `... | head` safe.
  process.stdout.on("error", (error: NodeJS.ErrnoException) => {
    if (error.code === "EPIPE") {
      process.exit(0);
    }
    throw error;
  });

  const { maxLines } = parseArgs(process.argv.slice(2));
  // existsSync guards against ls-files entries deleted but not yet staged.
  const files = gitLsFilesAll()
    .filter((filePath) => existsSync(filePath))
    .filter((filePath) => filePath.endsWith(".ts") || filePath.endsWith(".tsx"));

  // Count all files concurrently; each task is an independent read.
  const results = await Promise.all(
    files.map(async (filePath) => ({ filePath, lines: await countLines(filePath) })),
  );

  // Largest offenders first (toSorted keeps `results` untouched).
  const offenders = results
    .filter((result) => result.lines > maxLines)
    .toSorted((a, b) => b.lines - a.lines);

  if (!offenders.length) {
    return;
  }

  // Minimal, grep-friendly output.
  for (const offender of offenders) {
    // eslint-disable-next-line no-console
    console.log(`${offender.lines}\t${offender.filePath}`);
  }

  // Signal failure without aborting pending stdout writes.
  process.exitCode = 1;
}

await main();
|
scripts/claude-auth-status.sh
ADDED
|
@@ -0,0 +1,280 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# Claude Code Authentication Status Checker
# Checks both Claude Code and OpenClaw auth status

set -euo pipefail

# On-disk credential locations, used as fallbacks when the openclaw CLI
# snapshot is unavailable.
CLAUDE_CREDS="$HOME/.claude/.credentials.json"
OPENCLAW_AUTH="$HOME/.openclaw/agents/main/agent/auth-profiles.json"

# Colors for terminal output
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

# Output mode: "full" (default), "json", or "simple"
OUTPUT_MODE="${1:-full}"

# Ask the openclaw CLI for a machine-readable auth snapshot.
# Any failure (CLI missing, daemon down) degrades to empty output via `|| true`
# so `set -e` does not abort the script.
fetch_models_status_json() {
  openclaw models status --json 2>/dev/null || true
}

STATUS_JSON="$(fetch_models_status_json)"
# USE_JSON=1 means the CLI snapshot is available and preferred over raw files.
USE_JSON=0
if [ -n "$STATUS_JSON" ]; then
  USE_JSON=1
fi
|
| 28 |
+
|
| 29 |
+
# Classify an epoch-milliseconds expiry timestamp relative to "now".
# Prints exactly one of: MISSING / EXPIRED / EXPIRING:<m>m / OK:<h>h<m>m
# Return status: 0 = healthy, 1 = unusable (missing/expired), 2 = expiring
# within the hour.
calc_status_from_expires() {
  local ts="$1"
  # Coerce anything non-numeric to 0 so the arithmetic below cannot blow up
  # under `set -e`.
  [[ "$ts" =~ ^-?[0-9]+$ ]] || ts=0

  local now_ms remaining_ms hrs mns
  now_ms=$(( $(date +%s) * 1000 ))
  remaining_ms=$((ts - now_ms))
  hrs=$((remaining_ms / 3600000))
  mns=$(((remaining_ms % 3600000) / 60000))

  if [ "$ts" -le 0 ]; then
    echo "MISSING"
    return 1
  fi
  if [ "$remaining_ms" -lt 0 ]; then
    echo "EXPIRED"
    return 1
  fi
  if [ "$remaining_ms" -lt 3600000 ]; then
    echo "EXPIRING:${mns}m"
    return 2
  fi
  echo "OK:${hrs}h${mns}m"
  return 0
}
|
| 53 |
+
|
| 54 |
+
# Latest expiry (epoch ms) among anthropic oauth/token profiles in the CLI
# snapshot; prints 0 when none exist or jq fails.
json_expires_for_claude_cli() {
  jq -r '
    [.auth.oauth.profiles[]
      | select(.provider == "anthropic" and (.type == "oauth" or .type == "token"))
      | .expiresAt // 0]
    | max // 0
  ' <<<"$STATUS_JSON" 2>/dev/null || echo "0"
}
|
| 62 |
+
|
| 63 |
+
# Latest expiry (epoch ms) across all anthropic OAuth profiles in the CLI
# snapshot; prints 0 when none exist or jq fails.
json_expires_for_anthropic_any() {
  jq -r '
    [.auth.oauth.profiles[]
      | select(.provider == "anthropic" and .type == "oauth")
      | .expiresAt // 0]
    | max // 0
  ' <<<"$STATUS_JSON" 2>/dev/null || echo "0"
}
|
| 71 |
+
|
| 72 |
+
# Profile id of the anthropic OAuth profile with the latest expiry, or the
# literal string "none" when there is no such profile (or jq fails).
json_best_anthropic_profile() {
  jq -r '
    [.auth.oauth.profiles[]
      | select(.provider == "anthropic" and .type == "oauth")
      | {id: .profileId, exp: (.expiresAt // 0)}]
    | sort_by(.exp) | reverse | .[0].id // "none"
  ' <<<"$STATUS_JSON" 2>/dev/null || echo "none"
}
|
| 80 |
+
|
| 81 |
+
# Static anthropic API-key credential count from the CLI snapshot; prints 0 on
# any failure.
# NOTE(review): despite the name, this takes `max` over `.profiles.apiKey`
# values rather than counting entries — presumably `.profiles.apiKey` is
# already a per-provider count field; confirm against the `openclaw models
# status --json` schema.
json_anthropic_api_key_count() {
  echo "$STATUS_JSON" | jq -r '
    [.auth.providers[] | select(.provider == "anthropic") | .profiles.apiKey]
    | max // 0
  ' 2>/dev/null || echo "0"
}
|
| 87 |
+
|
| 88 |
+
# Report Claude Code auth health (prints a calc_status_from_expires status).
# Prefers the live CLI snapshot; falls back to reading the on-disk
# credentials file. Return status mirrors calc_status_from_expires.
check_claude_code_auth() {
  local expiry
  if [ "$USE_JSON" -eq 1 ]; then
    expiry=$(json_expires_for_claude_cli)
    calc_status_from_expires "$expiry"
    return $?
  fi

  # File fallback: no credentials file means no auth at all.
  if [ ! -f "$CLAUDE_CREDS" ]; then
    echo "MISSING"
    return 1
  fi

  expiry=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
  calc_status_from_expires "$expiry"
}
|
| 105 |
+
|
| 106 |
+
# Report OpenClaw's Anthropic auth health (prints a calc_status_from_expires
# status, or "OK:static" for API-key-only setups).
# Prefers the CLI snapshot; falls back to auth-profiles.json on disk.
check_openclaw_auth() {
  if [ "$USE_JSON" -eq 1 ]; then
    local api_keys
    api_keys=$(json_anthropic_api_key_count)
    # Defend against jq emitting "null"/garbage before the arithmetic tests
    # below (which would abort under `set -e`).
    if ! [[ "$api_keys" =~ ^[0-9]+$ ]]; then
      api_keys=0
    fi
    local expires_at
    expires_at=$(json_expires_for_anthropic_any)

    # No OAuth expiry but at least one API key: static credentials never
    # expire, so report healthy.
    if [ "$expires_at" -le 0 ] && [ "$api_keys" -gt 0 ]; then
      echo "OK:static"
      return 0
    fi

    calc_status_from_expires "$expires_at"
    return $?
  fi

  # File fallback: newest anthropic profile expiry in auth-profiles.json.
  if [ ! -f "$OPENCLAW_AUTH" ]; then
    echo "MISSING"
    return 1
  fi

  local expires
  expires=$(jq -r '
    [.profiles | to_entries[] | select(.value.provider == "anthropic") | .value.expires]
    | max // 0
  ' "$OPENCLAW_AUTH" 2>/dev/null || echo "0")

  calc_status_from_expires "$expires"
}
|
| 138 |
+
|
| 139 |
+
# JSON output mode
if [ "$OUTPUT_MODE" = "json" ]; then
  # `|| true` keeps a non-zero checker status from aborting under `set -e`.
  claude_status=$(check_claude_code_auth 2>/dev/null || true)
  openclaw_status=$(check_openclaw_auth 2>/dev/null || true)

  claude_expires=0
  openclaw_expires=0
  if [ "$USE_JSON" -eq 1 ]; then
    claude_expires=$(json_expires_for_claude_cli)
    openclaw_expires=$(json_expires_for_anthropic_any)
  else
    # Fallback: read expiry timestamps straight from the credential files.
    claude_expires=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
    openclaw_expires=$(jq -r '.profiles["anthropic:default"].expires // 0' "$OPENCLAW_AUTH" 2>/dev/null || echo "0")
  fi

  # Compose the machine-readable summary. needs_reauth is true when either
  # side is expired, expiring within the hour, or missing entirely.
  jq -n \
    --arg cs "$claude_status" \
    --arg ce "$claude_expires" \
    --arg bs "$openclaw_status" \
    --arg be "$openclaw_expires" \
    '{
      claude_code: {status: $cs, expires_at_ms: ($ce | tonumber)},
      openclaw: {status: $bs, expires_at_ms: ($be | tonumber)},
      needs_reauth: (($cs | startswith("EXPIRED") or startswith("EXPIRING") or startswith("MISSING")) or ($bs | startswith("EXPIRED") or startswith("EXPIRING") or startswith("MISSING")))
    }'
  exit 0
fi
|
| 166 |
+
|
| 167 |
+
# Simple output mode (for scripts/widgets)
# Prints a single status token; exit code: 0 = OK, 1 = expired/missing,
# 2 = expiring within the hour.
if [ "$OUTPUT_MODE" = "simple" ]; then
  claude_status=$(check_claude_code_auth 2>/dev/null || true)
  openclaw_status=$(check_openclaw_auth 2>/dev/null || true)

  # Claude Code problems take precedence over OpenClaw problems, and hard
  # failures (EXPIRED/MISSING) take precedence over soft ones (EXPIRING).
  if [[ "$claude_status" == EXPIRED* ]] || [[ "$claude_status" == MISSING* ]]; then
    echo "CLAUDE_EXPIRED"
    exit 1
  elif [[ "$openclaw_status" == EXPIRED* ]] || [[ "$openclaw_status" == MISSING* ]]; then
    echo "OPENCLAW_EXPIRED"
    exit 1
  elif [[ "$claude_status" == EXPIRING* ]]; then
    echo "CLAUDE_EXPIRING"
    exit 2
  elif [[ "$openclaw_status" == EXPIRING* ]]; then
    echo "OPENCLAW_EXPIRING"
    exit 2
  else
    echo "OK"
    exit 0
  fi
fi
|
| 189 |
+
|
| 190 |
+
# Full output mode (default)
echo "=== Claude Code Auth Status ==="
echo ""

# Claude Code credentials
echo "Claude Code (~/.claude/.credentials.json):"
if [ "$USE_JSON" -eq 1 ]; then
  expires_at=$(json_expires_for_claude_cli)
else
  expires_at=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
fi

if [ -f "$CLAUDE_CREDS" ]; then
  sub_type=$(jq -r '.claudeAiOauth.subscriptionType // "unknown"' "$CLAUDE_CREDS" 2>/dev/null || echo "unknown")
  rate_tier=$(jq -r '.claudeAiOauth.rateLimitTier // "unknown"' "$CLAUDE_CREDS" 2>/dev/null || echo "unknown")
  echo " Subscription: $sub_type"
  echo " Rate tier: $rate_tier"
fi

# NOTE(review): unlike calc_status_from_expires, this inline copy does not
# validate that $expires_at is an integer; a non-numeric value would abort
# the script here under `set -e`.
if [ "$expires_at" -le 0 ]; then
  echo -e " Status: ${RED}NOT FOUND${NC}"
  echo " Action needed: Run 'claude setup-token'"
else
  now_ms=$(( $(date +%s) * 1000 ))
  diff_ms=$((expires_at - now_ms))
  hours=$((diff_ms / 3600000))
  mins=$(((diff_ms % 3600000) / 60000))

  if [ "$diff_ms" -lt 0 ]; then
    echo -e " Status: ${RED}EXPIRED${NC}"
    echo " Action needed: Run 'claude setup-token' or re-authenticate"
  elif [ "$diff_ms" -lt 3600000 ]; then
    echo -e " Status: ${YELLOW}EXPIRING SOON (${mins}m remaining)${NC}"
    echo " Consider running: claude setup-token"
  else
    echo -e " Status: ${GREEN}OK${NC}"
    # NOTE(review): `date -d @...` is GNU-only; BSD/macOS date needs `date -r`.
    echo " Expires: $(date -d @$((expires_at/1000))) (${hours}h ${mins}m)"
  fi
fi

echo ""
echo "OpenClaw Auth (~/.openclaw/agents/main/agent/auth-profiles.json):"
if [ "$USE_JSON" -eq 1 ]; then
  best_profile=$(json_best_anthropic_profile)
  expires=$(json_expires_for_anthropic_any)
  api_keys=$(json_anthropic_api_key_count)
else
  # File fallback: pick the anthropic profile with the latest expiry.
  best_profile=$(jq -r '
    .profiles | to_entries
    | map(select(.value.provider == "anthropic"))
    | sort_by(.value.expires) | reverse
    | .[0].key // "none"
  ' "$OPENCLAW_AUTH" 2>/dev/null || echo "none")
  expires=$(jq -r '
    [.profiles | to_entries[] | select(.value.provider == "anthropic") | .value.expires]
    | max // 0
  ' "$OPENCLAW_AUTH" 2>/dev/null || echo "0")
  # The on-disk file cannot tell us about static API keys.
  api_keys=0
fi

echo " Profile: $best_profile"

if [ "$expires" -le 0 ] && [ "$api_keys" -gt 0 ]; then
  echo -e " Status: ${GREEN}OK${NC} (API key)"
elif [ "$expires" -le 0 ]; then
  echo -e " Status: ${RED}NOT FOUND${NC}"
  echo " Note: Run 'openclaw doctor --yes' to sync from Claude Code"
else
  now_ms=$(( $(date +%s) * 1000 ))
  diff_ms=$((expires - now_ms))
  hours=$((diff_ms / 3600000))
  mins=$(((diff_ms % 3600000) / 60000))

  if [ "$diff_ms" -lt 0 ]; then
    echo -e " Status: ${RED}EXPIRED${NC}"
    echo " Note: Run 'openclaw doctor --yes' to sync from Claude Code"
  elif [ "$diff_ms" -lt 3600000 ]; then
    echo -e " Status: ${YELLOW}EXPIRING SOON (${mins}m remaining)${NC}"
  else
    echo -e " Status: ${GREEN}OK${NC}"
    echo " Expires: $(date -d @$((expires/1000))) (${hours}h ${mins}m)"
  fi
fi

echo ""
echo "=== Service Status ==="
# NOTE(review): systemctl --user is Linux-only; on macOS this branch always
# reports "NOT running".
if systemctl --user is-active openclaw >/dev/null 2>&1; then
  echo -e "OpenClaw service: ${GREEN}running${NC}"
else
  echo -e "OpenClaw service: ${RED}NOT running${NC}"
fi
|
scripts/clawlog.sh
ADDED
|
@@ -0,0 +1,309 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

# VibeTunnel Logging Utility (now used for OpenClaw; see SUBSYSTEM below)
# Simplifies access to VibeTunnel logs using macOS unified logging system

set -euo pipefail

# Configuration
SUBSYSTEM="ai.openclaw"   # unified-log subsystem all components log under
DEFAULT_LEVEL="info"      # default `log` verbosity; -d switches to debug

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
|
| 18 |
+
|
| 19 |
+
# Function to handle sudo password errors
# Printed when `sudo -n /usr/bin/log` still prompts for a password; explains
# how to configure passwordless sudo for /usr/bin/log, then exits 1.
handle_sudo_error() {
  echo -e "\n${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
  echo -e "${YELLOW}⚠️ Password Required for Log Access${NC}"
  echo -e "${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}\n"
  echo -e "clawlog needs to use sudo to show complete log data (Apple hides sensitive info by default)."
  echo -e "\nTo avoid password prompts, configure passwordless sudo for the log command:"
  echo -e "See: ${BLUE}apple/docs/logging-private-fix.md${NC}\n"
  echo -e "Quick fix:"
  echo -e " 1. Run: ${GREEN}sudo visudo${NC}"
  echo -e " 2. Add: ${GREEN}$(whoami) ALL=(ALL) NOPASSWD: /usr/bin/log${NC}"
  echo -e " 3. Save and exit (:wq)\n"
  echo -e "${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
  exit 1
}
|
| 34 |
+
|
| 35 |
+
# Default values
STREAM_MODE=false       # -f/--follow: `log stream` instead of `log show`
TIME_RANGE="5m" # Default to last 5 minutes
CATEGORY=""             # -c/--category filter; also set by --server
LOG_LEVEL="$DEFAULT_LEVEL"
SEARCH_TEXT=""          # -s/--search substring (case-insensitive)
OUTPUT_FILE=""          # -o/--output export target
ERRORS_ONLY=false
SERVER_ONLY=false       # NOTE(review): set by --server but never read again in this script
TAIL_LINES=50 # Default number of lines to show
SHOW_TAIL=true
SHOW_HELP=false         # NOTE(review): appears unused — candidate for removal
|
| 47 |
+
|
| 48 |
+
# Function to show usage
# NOTE(review): the heredoc delimiter is unquoted, so $/backtick expansion
# applies; fine today because the help text contains neither.
show_usage() {
  cat << EOF
clawlog - OpenClaw Logging Utility

USAGE:
    clawlog [OPTIONS]

DESCRIPTION:
    View OpenClaw logs with full details (bypasses Apple's privacy redaction).
    Requires sudo access configured for /usr/bin/log command.

LOG FLOW ARCHITECTURE:
    OpenClaw logs flow through the macOS unified log (subsystem: ai.openclaw).

LOG CATEGORIES (examples):
    • voicewake - Voice wake detection/test harness
    • gateway - Gateway process manager
    • xpc - XPC service calls
    • notifications - Notification helper
    • screenshot - Screenshotter
    • shell - ShellExecutor

QUICK START:
    clawlog -n 100              Show last 100 lines from all components
    clawlog -f                  Follow logs in real-time
    clawlog -e                  Show only errors
    clawlog -c ServerManager    Show logs from ServerManager only

OPTIONS:
    -h, --help              Show this help message
    -f, --follow            Stream logs continuously (like tail -f)
    -n, --lines NUM         Number of lines to show (default: 50)
    -l, --last TIME         Time range to search (default: 5m)
                            Examples: 5m, 1h, 2d, 1w
    -c, --category CAT      Filter by category (e.g., ServerManager, SessionService)
    -e, --errors            Show only error messages
    -d, --debug             Show debug level logs (more verbose)
    -s, --search TEXT       Search for specific text in log messages
    -o, --output FILE       Export logs to file
    --server                Show only server output logs
    --all                   Show all logs without tail limit
    --list-categories       List all available log categories
    --json                  Output in JSON format

EXAMPLES:
    clawlog                     Show last 50 lines from past 5 minutes (default)
    clawlog -f                  Stream logs continuously
    clawlog -n 100              Show last 100 lines
    clawlog -e                  Show only recent errors
    clawlog -l 30m -n 200       Show last 200 lines from past 30 minutes
    clawlog -c ServerManager    Show recent ServerManager logs
    clawlog -s "fail"           Search for "fail" in recent logs
    clawlog --server -e         Show recent server errors
    clawlog -f -d               Stream debug logs continuously

CATEGORIES:
    Common categories include:
    - ServerManager - Server lifecycle and configuration
    - SessionService - Terminal session management
    - TerminalManager - Terminal spawning and control
    - GitRepository - Git integration features
    - ScreencapService - Screen capture functionality
    - WebRTCManager - WebRTC connections
    - UnixSocket - Unix socket communication
    - WindowTracker - Window tracking and focus
    - NgrokService - Ngrok tunnel management
    - ServerOutput - Node.js server output

TIME FORMATS:
    - 5m = 5 minutes    - 1h = 1 hour
    - 2d = 2 days       - 1w = 1 week

EOF
}
|
| 123 |
+
|
| 124 |
+
# Function to list categories
# Scans the unified log for the last hour and prints every category seen
# under $SUBSYSTEM, one bullet per category.
list_categories() {
  echo -e "${BLUE}Fetching OpenClaw log categories from the last hour...${NC}\n"

  # Get unique categories from recent logs.
  # The trailing `|| true` is required: with `set -o pipefail`, grep exits 1
  # when no category lines match (no recent activity), which would otherwise
  # abort the whole script before the note below is printed.
  log show --predicate "subsystem == \"$SUBSYSTEM\"" --last 1h 2>/dev/null | \
    grep -E "category: \"[^\"]+\"" | \
    sed -E 's/.*category: "([^"]+)".*/\1/' | \
    sort -u | \
    while read -r cat; do
      echo " • $cat"
    done || true

  echo -e "\n${YELLOW}Note: Only categories with recent activity are shown${NC}"
}
|
| 139 |
+
|
| 140 |
+
# Show help if no arguments provided
if [[ $# -eq 0 ]]; then
  show_usage
  exit 0
fi

# Parse command line arguments
# NOTE(review): options that take a value (-n/-l/-c/-s/-o) read "$2" without
# checking it exists; with `set -u` a trailing bare flag aborts with an
# "unbound variable" error instead of a friendly message.
while [[ $# -gt 0 ]]; do
  case $1 in
    -h|--help)
      show_usage
      exit 0
      ;;
    -f|--follow)
      STREAM_MODE=true
      SHOW_TAIL=false   # streaming output has no fixed line count to tail
      shift
      ;;
    -n|--lines)
      TAIL_LINES="$2"
      shift 2
      ;;
    -l|--last)
      TIME_RANGE="$2"
      shift 2
      ;;
    -c|--category)
      CATEGORY="$2"
      shift 2
      ;;
    -e|--errors)
      ERRORS_ONLY=true
      shift
      ;;
    -d|--debug)
      LOG_LEVEL="debug"
      shift
      ;;
    -s|--search)
      SEARCH_TEXT="$2"
      shift 2
      ;;
    -o|--output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    --server)
      SERVER_ONLY=true
      CATEGORY="ServerOutput"   # server logs are tagged with this category
      shift
      ;;
    --list-categories)
      list_categories
      exit 0
      ;;
    --json)
      STYLE_ARGS="--style json"
      shift
      ;;
    --all)
      SHOW_TAIL=false
      shift
      ;;
    *)
      echo -e "${RED}Unknown option: $1${NC}"
      echo "Use -h or --help for usage information"
      exit 1
      ;;
  esac
done
|
| 210 |
+
|
| 211 |
+
# Build the predicate
PREDICATE="subsystem == \"$SUBSYSTEM\""

# Add category filter if specified
if [[ -n "$CATEGORY" ]]; then
  PREDICATE="$PREDICATE AND category == \"$CATEGORY\""
fi

# Add error filter if specified
# Matches structured error events plus messages containing "ERROR" or the
# ANSI fragment "[31m" (red) that colored server output embeds.
if [[ "$ERRORS_ONLY" == true ]]; then
  PREDICATE="$PREDICATE AND (eventType == \"error\" OR messageType == \"error\" OR eventMessage CONTAINS \"ERROR\" OR eventMessage CONTAINS \"[31m\")"
fi

# Add search filter if specified
# NOTE(review): SEARCH_TEXT is interpolated into the predicate unescaped; a
# double quote in the search term will break the predicate syntax.
if [[ -n "$SEARCH_TEXT" ]]; then
  PREDICATE="$PREDICATE AND eventMessage CONTAINS[c] \"$SEARCH_TEXT\""
fi
|
| 228 |
+
|
| 229 |
+
# Build the command - always use sudo with --info to show private data
# NOTE(review): CMD is later run via `eval`, with $PREDICATE single-quoted
# inside it; a single quote in CATEGORY or SEARCH_TEXT would break the
# command (and is effectively shell injection for a local tool).
if [[ "$STREAM_MODE" == true ]]; then
  # Streaming mode
  CMD="sudo log stream --predicate '$PREDICATE' --level $LOG_LEVEL --info"

  # NOTE(review): stale "VibeTunnel" branding; rest of the script says OpenClaw.
  echo -e "${GREEN}Streaming VibeTunnel logs continuously...${NC}"
  echo -e "${YELLOW}Press Ctrl+C to stop${NC}\n"
else
  # Show mode
  CMD="sudo log show --predicate '$PREDICATE'"

  # Add log level for show command
  if [[ "$LOG_LEVEL" == "debug" ]]; then
    CMD="$CMD --debug"
  else
    CMD="$CMD --info"
  fi

  # Add time range
  CMD="$CMD --last $TIME_RANGE"

  if [[ "$SHOW_TAIL" == true ]]; then
    echo -e "${GREEN}Showing last $TAIL_LINES log lines from the past $TIME_RANGE${NC}"
  else
    echo -e "${GREEN}Showing all logs from the past $TIME_RANGE${NC}"
  fi

  # Show applied filters
  if [[ "$ERRORS_ONLY" == true ]]; then
    echo -e "${RED}Filter: Errors only${NC}"
  fi
  if [[ -n "$CATEGORY" ]]; then
    echo -e "${BLUE}Category: $CATEGORY${NC}"
  fi
  if [[ -n "$SEARCH_TEXT" ]]; then
    echo -e "${YELLOW}Search: \"$SEARCH_TEXT\"${NC}"
  fi
  echo "" # Empty line for readability
fi

# Add style arguments if specified
if [[ -n "${STYLE_ARGS:-}" ]]; then
  CMD="$CMD $STYLE_ARGS"
fi

# Execute the command
if [[ -n "$OUTPUT_FILE" ]]; then
  # First check if sudo works without password for the log command
  # NOTE(review): this probe greps the combined output for "password"; it can
  # false-positive if a log line itself contains that word.
  if sudo -n /usr/bin/log show --last 1s 2>&1 | grep -q "password"; then
    handle_sudo_error
  fi

  echo -e "${BLUE}Exporting logs to: $OUTPUT_FILE${NC}\n"
  if [[ "$SHOW_TAIL" == true ]] && [[ "$STREAM_MODE" == false ]]; then
    eval "$CMD" 2>&1 | tail -n "$TAIL_LINES" > "$OUTPUT_FILE"
  else
    eval "$CMD" > "$OUTPUT_FILE" 2>&1
  fi

  # Check if file was created and has content
  if [[ -s "$OUTPUT_FILE" ]]; then
    LINE_COUNT=$(wc -l < "$OUTPUT_FILE" | tr -d ' ')
    echo -e "${GREEN}✓ Exported $LINE_COUNT lines to $OUTPUT_FILE${NC}"
  else
    echo -e "${YELLOW}⚠ No logs found matching the criteria${NC}"
  fi
else
  # Run interactively
  # First check if sudo works without password for the log command
  if sudo -n /usr/bin/log show --last 1s 2>&1 | grep -q "password"; then
    handle_sudo_error
  fi

  if [[ "$SHOW_TAIL" == true ]] && [[ "$STREAM_MODE" == false ]]; then
    # Apply tail for non-streaming mode
    eval "$CMD" 2>&1 | tail -n "$TAIL_LINES"
    echo -e "\n${YELLOW}Showing last $TAIL_LINES lines. Use --all or -n to see more.${NC}"
  else
    eval "$CMD"
  fi
fi
|
scripts/clawtributors-map.json
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"ensureLogins": [
|
| 3 |
+
"odrobnik",
|
| 4 |
+
"alphonse-arianee",
|
| 5 |
+
"aaronn",
|
| 6 |
+
"ronak-guliani",
|
| 7 |
+
"cpojer",
|
| 8 |
+
"carlulsoe",
|
| 9 |
+
"jdrhyne",
|
| 10 |
+
"latitudeki5223",
|
| 11 |
+
"longmaba",
|
| 12 |
+
"manmal",
|
| 13 |
+
"thesash",
|
| 14 |
+
"rhjoh",
|
| 15 |
+
"ysqander",
|
| 16 |
+
"atalovesyou",
|
| 17 |
+
"0xJonHoldsCrypto",
|
| 18 |
+
"hougangdev"
|
| 19 |
+
],
|
| 20 |
+
"seedCommit": "d6863f87",
|
| 21 |
+
"placeholderAvatar": "assets/avatar-placeholder.svg",
|
| 22 |
+
"displayName": {
|
| 23 |
+
"jdrhyne": "Jonathan D. Rhyne (DJ-D)"
|
| 24 |
+
},
|
| 25 |
+
"nameToLogin": {
|
| 26 |
+
"peter steinberger": "steipete",
|
| 27 |
+
"eng. juan combetto": "omniwired",
|
| 28 |
+
"mariano belinky": "mbelinky",
|
| 29 |
+
"vasanth rao naik sabavat": "vsabavat",
|
| 30 |
+
"tu nombre real": "nachx639",
|
| 31 |
+
"django navarro": "djangonavarro220"
|
| 32 |
+
},
|
| 33 |
+
"emailToLogin": {
|
| 34 |
+
"steipete@gmail.com": "steipete",
|
| 35 |
+
"sbarrios93@gmail.com": "sebslight",
|
| 36 |
+
"rltorres26+github@gmail.com": "RandyVentures",
|
| 37 |
+
"hixvac@gmail.com": "VACInc"
|
| 38 |
+
}
|
| 39 |
+
}
|
scripts/codesign-mac-app.sh
ADDED
|
@@ -0,0 +1,289 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
set -euo pipefail

# Target app bundle (positional arg) and signing knobs (env vars).
APP_BUNDLE="${1:-dist/OpenClaw.app}"
IDENTITY="${SIGN_IDENTITY:-}"
TIMESTAMP_MODE="${CODESIGN_TIMESTAMP:-auto}"
DISABLE_LIBRARY_VALIDATION="${DISABLE_LIBRARY_VALIDATION:-0}"
SKIP_TEAM_ID_CHECK="${SKIP_TEAM_ID_CHECK:-0}"
# Scratch entitlement plists.
# NOTE(review): no `trap ... EXIT` cleanup is visible in this portion of the
# script — confirm these temp files are removed later.
ENT_TMP_BASE=$(mktemp -t openclaw-entitlements-base.XXXXXX)
ENT_TMP_APP_BASE=$(mktemp -t openclaw-entitlements-app-base.XXXXXX)
ENT_TMP_RUNTIME=$(mktemp -t openclaw-entitlements-runtime.XXXXXX)

if [[ "${APP_BUNDLE}" == "--help" || "${APP_BUNDLE}" == "-h" ]]; then
  cat <<'HELP'
Usage: scripts/codesign-mac-app.sh [app-bundle]

Env:
  SIGN_IDENTITY="Apple Development: Your Name (TEAMID)"
  ALLOW_ADHOC_SIGNING=1
  CODESIGN_TIMESTAMP=auto|on|off
  DISABLE_LIBRARY_VALIDATION=1 # dev-only Sparkle Team ID workaround
  SKIP_TEAM_ID_CHECK=1 # bypass Team ID audit
HELP
  exit 0
fi

if [ ! -d "$APP_BUNDLE" ]; then
  echo "App bundle not found: $APP_BUNDLE" >&2
  exit 1
fi
|
| 31 |
+
|
| 32 |
+
# Pick the best available codesigning identity from the keychain, printing
# its name. Preference order: Developer ID Application > Apple Distribution
# > Apple Development > first valid identity. Returns 1 if none exist.
select_identity() {
  local preferred available first

  # Prefer a Developer ID Application cert.
  preferred="$(security find-identity -p codesigning -v 2>/dev/null \
    | awk -F'\"' '/Developer ID Application/ { print $2; exit }')"

  if [ -n "$preferred" ]; then
    echo "$preferred"
    return
  fi

  # Next, try Apple Distribution.
  preferred="$(security find-identity -p codesigning -v 2>/dev/null \
    | awk -F'\"' '/Apple Distribution/ { print $2; exit }')"
  if [ -n "$preferred" ]; then
    echo "$preferred"
    return
  fi

  # Then, try Apple Development.
  preferred="$(security find-identity -p codesigning -v 2>/dev/null \
    | awk -F'\"' '/Apple Development/ { print $2; exit }')"
  if [ -n "$preferred" ]; then
    echo "$preferred"
    return
  fi

  # Fallback to the first valid signing identity.
  # NOTE(review): the escaping in this sed expression looks mangled in the
  # source dump (doubled backslashes inside single quotes); verify it really
  # captures the quoted identity name — expected form: sed -n 's/.*"\(.*\)"/\1/p'
  available="$(security find-identity -p codesigning -v 2>/dev/null \
    | sed -n 's/.*\"\\(.*\\)\"/\\1/p')"

  if [ -n "$available" ]; then
    first="$(printf '%s\n' "$available" | head -n1)"
    echo "$first"
    return
  fi

  return 1
}
|
| 72 |
+
|
| 73 |
+
if [ -z "$IDENTITY" ]; then
|
| 74 |
+
if ! IDENTITY="$(select_identity)"; then
|
| 75 |
+
if [[ "${ALLOW_ADHOC_SIGNING:-}" == "1" ]]; then
|
| 76 |
+
echo "WARN: No signing identity found. Falling back to ad-hoc signing (-)." >&2
|
| 77 |
+
echo " !!! WARNING: Ad-hoc signed apps do NOT persist TCC permissions (Accessibility, etc) !!!" >&2
|
| 78 |
+
echo " !!! You will need to re-grant permissions every time you restart the app. !!!" >&2
|
| 79 |
+
IDENTITY="-"
|
| 80 |
+
else
|
| 81 |
+
echo "ERROR: No signing identity found. Set SIGN_IDENTITY to a valid codesigning certificate." >&2
|
| 82 |
+
echo " Alternatively, set ALLOW_ADHOC_SIGNING=1 to fallback to ad-hoc signing (limitations apply)." >&2
|
| 83 |
+
exit 1
|
| 84 |
+
fi
|
| 85 |
+
fi
|
| 86 |
+
fi
|
| 87 |
+
|
| 88 |
+
echo "Using signing identity: $IDENTITY"
|
| 89 |
+
if [[ "$IDENTITY" == "-" ]]; then
|
| 90 |
+
cat <<'WARN' >&2
|
| 91 |
+
|
| 92 |
+
================================================================================
|
| 93 |
+
!!! AD-HOC SIGNING IN USE - PERMISSIONS WILL NOT STICK (macOS RESTRICTION) !!!
|
| 94 |
+
|
| 95 |
+
macOS ties permissions to the code signature, bundle ID, and app path.
|
| 96 |
+
Ad-hoc signing generates a new signature every build, so macOS treats the app
|
| 97 |
+
as a different binary and will forget permissions (prompts may vanish).
|
| 98 |
+
|
| 99 |
+
For correct permission behavior you MUST sign with a real Apple Development or
|
| 100 |
+
Developer ID certificate.
|
| 101 |
+
|
| 102 |
+
If prompts disappear: remove the app entry in System Settings -> Privacy & Security,
|
| 103 |
+
relaunch the app, and re-grant. Some permissions only reappear after a full
|
| 104 |
+
macOS restart.
|
| 105 |
+
================================================================================
|
| 106 |
+
|
| 107 |
+
WARN
|
| 108 |
+
fi
|
| 109 |
+
|
| 110 |
+
timestamp_arg="--timestamp=none"
|
| 111 |
+
case "$TIMESTAMP_MODE" in
|
| 112 |
+
1|on|yes|true)
|
| 113 |
+
timestamp_arg="--timestamp"
|
| 114 |
+
;;
|
| 115 |
+
0|off|no|false)
|
| 116 |
+
timestamp_arg="--timestamp=none"
|
| 117 |
+
;;
|
| 118 |
+
auto)
|
| 119 |
+
if [[ "$IDENTITY" == *"Developer ID Application"* ]]; then
|
| 120 |
+
timestamp_arg="--timestamp"
|
| 121 |
+
fi
|
| 122 |
+
;;
|
| 123 |
+
*)
|
| 124 |
+
echo "ERROR: Unknown CODESIGN_TIMESTAMP value: $TIMESTAMP_MODE (use auto|on|off)" >&2
|
| 125 |
+
exit 1
|
| 126 |
+
;;
|
| 127 |
+
esac
|
| 128 |
+
if [[ "$IDENTITY" == "-" ]]; then
|
| 129 |
+
timestamp_arg="--timestamp=none"
|
| 130 |
+
fi
|
| 131 |
+
|
| 132 |
+
options_args=()
|
| 133 |
+
if [[ "$IDENTITY" != "-" ]]; then
|
| 134 |
+
options_args=("--options" "runtime")
|
| 135 |
+
fi
|
| 136 |
+
timestamp_args=("$timestamp_arg")
|
| 137 |
+
|
| 138 |
+
cat > "$ENT_TMP_BASE" <<'PLIST'
|
| 139 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 140 |
+
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
| 141 |
+
<plist version="1.0">
|
| 142 |
+
<dict>
|
| 143 |
+
<key>com.apple.security.automation.apple-events</key>
|
| 144 |
+
<true/>
|
| 145 |
+
<key>com.apple.security.device.audio-input</key>
|
| 146 |
+
<true/>
|
| 147 |
+
<key>com.apple.security.device.camera</key>
|
| 148 |
+
<true/>
|
| 149 |
+
</dict>
|
| 150 |
+
</plist>
|
| 151 |
+
PLIST
|
| 152 |
+
|
| 153 |
+
cat > "$ENT_TMP_APP_BASE" <<'PLIST'
|
| 154 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 155 |
+
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
| 156 |
+
<plist version="1.0">
|
| 157 |
+
<dict>
|
| 158 |
+
<key>com.apple.security.automation.apple-events</key>
|
| 159 |
+
<true/>
|
| 160 |
+
<key>com.apple.security.device.audio-input</key>
|
| 161 |
+
<true/>
|
| 162 |
+
<key>com.apple.security.device.camera</key>
|
| 163 |
+
<true/>
|
| 164 |
+
<key>com.apple.security.personal-information.location</key>
|
| 165 |
+
<true/>
|
| 166 |
+
</dict>
|
| 167 |
+
</plist>
|
| 168 |
+
PLIST
|
| 169 |
+
|
| 170 |
+
cat > "$ENT_TMP_RUNTIME" <<'PLIST'
|
| 171 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 172 |
+
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
| 173 |
+
<plist version="1.0">
|
| 174 |
+
<dict>
|
| 175 |
+
<key>com.apple.security.cs.allow-jit</key>
|
| 176 |
+
<true/>
|
| 177 |
+
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
| 178 |
+
<true/>
|
| 179 |
+
</dict>
|
| 180 |
+
</plist>
|
| 181 |
+
PLIST
|
| 182 |
+
|
| 183 |
+
if [[ "$DISABLE_LIBRARY_VALIDATION" == "1" ]]; then
|
| 184 |
+
/usr/libexec/PlistBuddy -c "Add :com.apple.security.cs.disable-library-validation bool true" "$ENT_TMP_APP_BASE" >/dev/null 2>&1 || \
|
| 185 |
+
/usr/libexec/PlistBuddy -c "Set :com.apple.security.cs.disable-library-validation true" "$ENT_TMP_APP_BASE"
|
| 186 |
+
echo "Note: disable-library-validation entitlement enabled (DISABLE_LIBRARY_VALIDATION=1)."
|
| 187 |
+
fi
|
| 188 |
+
|
| 189 |
+
APP_ENTITLEMENTS="$ENT_TMP_APP_BASE"
|
| 190 |
+
|
| 191 |
+
# clear extended attributes to avoid stale signatures
|
| 192 |
+
xattr -cr "$APP_BUNDLE" 2>/dev/null || true
|
| 193 |
+
|
| 194 |
+
# Sign one target, attaching the given entitlements plist.
# $1 = path to sign, $2 = entitlements file.
sign_item() {
  local target entitlements
  target="$1"
  entitlements="$2"
  codesign --force ${options_args+"${options_args[@]}"} "${timestamp_args[@]}" --entitlements "$entitlements" --sign "$IDENTITY" "$target"
}

# Sign one target without entitlements (frameworks, helpers, dylibs).
# $1 = path to sign.
sign_plain_item() {
  local target="$1"
  codesign --force ${options_args+"${options_args[@]}"} "${timestamp_args[@]}" --sign "$IDENTITY" "$target"
}

# Print the TeamIdentifier recorded in a target's signature
# (empty output when the target is unsigned or has no team).
team_id_for() {
  codesign -dv --verbose=4 "$1" 2>&1 | awk -F= '/^TeamIdentifier=/{print $2; exit}'
}
|
| 208 |
+
|
| 209 |
+
# Audit every Mach-O file inside the app bundle and require its
# TeamIdentifier to match the one recorded on the bundle itself.
# Honors SKIP_TEAM_ID_CHECK=1; exits 1 on any mismatch.
verify_team_ids() {
  if [[ "$SKIP_TEAM_ID_CHECK" == "1" ]]; then
    echo "Note: skipping Team ID audit (SKIP_TEAM_ID_CHECK=1)."
    return 0
  fi

  local expected
  expected="$(team_id_for "$APP_BUNDLE" || true)"
  if [[ -z "$expected" ]]; then
    echo "WARN: TeamIdentifier missing on app bundle; skipping Team ID audit."
    return 0
  fi

  local mismatches=()
  local candidate team
  while IFS= read -r -d '' candidate; do
    # Only Mach-O files carry signatures worth auditing.
    /usr/bin/file "$candidate" | /usr/bin/grep -q "Mach-O" || continue
    team="$(team_id_for "$candidate" || true)"
    [[ -n "$team" ]] || team="not set"
    if [[ "$expected" == "not set" ]]; then
      if [[ "$team" != "not set" ]]; then
        mismatches+=("$candidate (TeamIdentifier=$team)")
      fi
    elif [[ "$team" != "$expected" ]]; then
      mismatches+=("$candidate (TeamIdentifier=$team)")
    fi
  done < <(find "$APP_BUNDLE" -type f -print0)

  if [[ "${#mismatches[@]}" -gt 0 ]]; then
    echo "ERROR: Team ID mismatch detected (expected: $expected)"
    local entry
    for entry in "${mismatches[@]}"; do
      echo " - $entry"
    done
    echo "Hint: re-sign embedded frameworks or set DISABLE_LIBRARY_VALIDATION=1 for dev builds."
    exit 1
  fi
}
|
| 249 |
+
|
| 250 |
+
# Sign main binary
|
| 251 |
+
if [ -f "$APP_BUNDLE/Contents/MacOS/OpenClaw" ]; then
|
| 252 |
+
echo "Signing main binary"; sign_item "$APP_BUNDLE/Contents/MacOS/OpenClaw" "$APP_ENTITLEMENTS"
|
| 253 |
+
fi
|
| 254 |
+
|
| 255 |
+
# Sign Sparkle deeply if present
|
| 256 |
+
SPARKLE="$APP_BUNDLE/Contents/Frameworks/Sparkle.framework"
|
| 257 |
+
if [ -d "$SPARKLE" ]; then
|
| 258 |
+
echo "Signing Sparkle framework and helpers"
|
| 259 |
+
find "$SPARKLE" -type f -print0 | while IFS= read -r -d '' f; do
|
| 260 |
+
if /usr/bin/file "$f" | /usr/bin/grep -q "Mach-O"; then
|
| 261 |
+
sign_plain_item "$f"
|
| 262 |
+
fi
|
| 263 |
+
done
|
| 264 |
+
sign_plain_item "$SPARKLE/Versions/B/Sparkle"
|
| 265 |
+
sign_plain_item "$SPARKLE/Versions/B/Autoupdate"
|
| 266 |
+
sign_plain_item "$SPARKLE/Versions/B/Updater.app/Contents/MacOS/Updater"
|
| 267 |
+
sign_plain_item "$SPARKLE/Versions/B/Updater.app"
|
| 268 |
+
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Downloader.xpc/Contents/MacOS/Downloader"
|
| 269 |
+
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Downloader.xpc"
|
| 270 |
+
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Installer.xpc/Contents/MacOS/Installer"
|
| 271 |
+
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Installer.xpc"
|
| 272 |
+
sign_plain_item "$SPARKLE/Versions/B"
|
| 273 |
+
sign_plain_item "$SPARKLE"
|
| 274 |
+
fi
|
| 275 |
+
|
| 276 |
+
# Sign any other embedded frameworks/dylibs
|
| 277 |
+
if [ -d "$APP_BUNDLE/Contents/Frameworks" ]; then
|
| 278 |
+
find "$APP_BUNDLE/Contents/Frameworks" \( -name "*.framework" -o -name "*.dylib" \) ! -path "*Sparkle.framework*" -print0 | while IFS= read -r -d '' f; do
|
| 279 |
+
echo "Signing framework: $f"; sign_plain_item "$f"
|
| 280 |
+
done
|
| 281 |
+
fi
|
| 282 |
+
|
| 283 |
+
# Finally sign the bundle
|
| 284 |
+
sign_item "$APP_BUNDLE" "$APP_ENTITLEMENTS"
|
| 285 |
+
|
| 286 |
+
verify_team_ids
|
| 287 |
+
|
| 288 |
+
rm -f "$ENT_TMP_BASE" "$ENT_TMP_APP_BASE" "$ENT_TMP_RUNTIME"
|
| 289 |
+
echo "Codesign complete for $APP_BUNDLE"
|
scripts/committer
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash

set -euo pipefail
# Disable glob expansion to handle brackets in file paths
set -f

# Print usage to stderr and abort with status 2.
usage() {
  printf 'Usage: %s [--force] "commit message" "file" ["file" ...]\n' "$(basename "$0")" >&2
  exit 2
}
|
| 10 |
+
|
| 11 |
+
# Need at least a message and one file before flag parsing.
[ "$#" -ge 2 ] || usage

force_delete_lock=false
if [ "${1:-}" = "--force" ]; then
  force_delete_lock=true
  shift
fi

# Re-check after consuming the optional --force flag.
[ "$#" -ge 2 ] || usage

commit_message=$1
shift

# Reject messages that are empty or whitespace-only.
if [[ "$commit_message" != *[![:space:]]* ]]; then
  printf 'Error: commit message must not be empty\n' >&2
  exit 1
fi

# Guard against swapped arguments: a path passed where the message belongs.
if [ -e "$commit_message" ]; then
  printf 'Error: first argument looks like a file path ("%s"); provide the commit message first\n' "$commit_message" >&2
  exit 1
fi

[ "$#" -gt 0 ] || usage

files=("$@")

# Disallow "." because it stages the entire repository and defeats the helper's safety guardrails.
for file in "${files[@]}"; do
  if [ "$file" = "." ]; then
    printf 'Error: "." is not allowed; list specific paths instead\n' >&2
    exit 1
  fi
done

# Prevent staging node_modules even if a path is forced.
for file in "${files[@]}"; do
  case "$file" in
    *node_modules* | */node_modules | */node_modules/* | node_modules)
      printf 'Error: node_modules paths are not allowed: %s\n' "$file" >&2
      exit 1
      ;;
  esac
done
|
| 61 |
+
|
| 62 |
+
# Captured stderr of the most recent `git commit` attempt.
last_commit_error=''

# Run `git commit`, showing git's stderr to the user while also capturing it
# in last_commit_error for the stale-lock recovery path. Returns git's status.
#
# NOTE(fix): the previous implementation streamed stderr through a process
# substitution (`2> >(tee "$log" >&2)`). `tee` runs asynchronously, so the
# parent could read and delete the log before tee finished writing, leaving
# last_commit_error empty and defeating the index.lock recovery. Capturing
# stderr to the file first and replaying it afterwards removes that race
# (the only visible difference: stderr appears after git exits, not streamed).
run_git_commit() {
  local stderr_log status
  stderr_log=$(mktemp)
  status=0
  git commit -m "$commit_message" -- "${files[@]}" 2>"$stderr_log" || status=$?

  # Replay captured stderr so the user still sees git's diagnostics.
  cat "$stderr_log" >&2

  if [ "$status" -eq 0 ]; then
    last_commit_error=''
  else
    last_commit_error=$(cat "$stderr_log")
  fi
  rm -f "$stderr_log"
  return "$status"
}
|
| 77 |
+
|
| 78 |
+
# Each requested path must exist on disk or at least be tracked by git
# (committing a tracked-but-deleted file is legitimate).
for file in "${files[@]}"; do
  [ -e "$file" ] && continue
  if ! git ls-files --error-unmatch -- "$file" >/dev/null 2>&1; then
    printf 'Error: file not found: %s\n' "$file" >&2
    exit 1
  fi
done

# Start from a clean index, then stage exactly the requested paths.
git restore --staged :/
git add --force -- "${files[@]}"

if git diff --staged --quiet; then
  printf 'Warning: no staged changes detected for: %s\n' "${files[*]}" >&2
  exit 1
fi

committed=false
if run_git_commit; then
  committed=true
elif [ "$force_delete_lock" = true ]; then
  # Extract the index.lock path from git's error output and retry once.
  lock_path=$(
    printf '%s\n' "$last_commit_error" |
      awk -F"'" '/Unable to create .*\.git\/index\.lock/ { print $2; exit }'
  )

  if [ -n "$lock_path" ] && [ -e "$lock_path" ]; then
    rm -f "$lock_path"
    printf 'Removed stale git lock: %s\n' "$lock_path" >&2
    if run_git_commit; then
      committed=true
    fi
  fi
fi

[ "$committed" = true ] || exit 1

printf 'Committed "%s" with %d files\n' "$commit_message" "${#files[@]}"
|
scripts/copy-hook-metadata.ts
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env tsx
|
| 2 |
+
/**
|
| 3 |
+
* Copy HOOK.md files from src/hooks/bundled to dist/hooks/bundled
|
| 4 |
+
*/
|
| 5 |
+
|
| 6 |
+
import fs from "node:fs";
|
| 7 |
+
import path from "node:path";
|
| 8 |
+
import { fileURLToPath } from "node:url";
|
| 9 |
+
|
| 10 |
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
| 11 |
+
const projectRoot = path.resolve(__dirname, "..");
|
| 12 |
+
|
| 13 |
+
const srcBundled = path.join(projectRoot, "src", "hooks", "bundled");
|
| 14 |
+
const distBundled = path.join(projectRoot, "dist", "hooks", "bundled");
|
| 15 |
+
|
| 16 |
+
function copyHookMetadata() {
|
| 17 |
+
if (!fs.existsSync(srcBundled)) {
|
| 18 |
+
console.warn("[copy-hook-metadata] Source directory not found:", srcBundled);
|
| 19 |
+
return;
|
| 20 |
+
}
|
| 21 |
+
|
| 22 |
+
if (!fs.existsSync(distBundled)) {
|
| 23 |
+
fs.mkdirSync(distBundled, { recursive: true });
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
const entries = fs.readdirSync(srcBundled, { withFileTypes: true });
|
| 27 |
+
|
| 28 |
+
for (const entry of entries) {
|
| 29 |
+
if (!entry.isDirectory()) {
|
| 30 |
+
continue;
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
const hookName = entry.name;
|
| 34 |
+
const srcHookDir = path.join(srcBundled, hookName);
|
| 35 |
+
const distHookDir = path.join(distBundled, hookName);
|
| 36 |
+
const srcHookMd = path.join(srcHookDir, "HOOK.md");
|
| 37 |
+
const distHookMd = path.join(distHookDir, "HOOK.md");
|
| 38 |
+
|
| 39 |
+
if (!fs.existsSync(srcHookMd)) {
|
| 40 |
+
console.warn(`[copy-hook-metadata] No HOOK.md found for ${hookName}`);
|
| 41 |
+
continue;
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
if (!fs.existsSync(distHookDir)) {
|
| 45 |
+
fs.mkdirSync(distHookDir, { recursive: true });
|
| 46 |
+
}
|
| 47 |
+
|
| 48 |
+
fs.copyFileSync(srcHookMd, distHookMd);
|
| 49 |
+
console.log(`[copy-hook-metadata] Copied ${hookName}/HOOK.md`);
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
console.log("[copy-hook-metadata] Done");
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
copyHookMetadata();
|
scripts/create-dmg.sh
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
|
| 2 |
+
set -euo pipefail
|
| 3 |
+
|
| 4 |
+
# Create a styled DMG containing the app bundle + /Applications symlink.
|
| 5 |
+
#
|
| 6 |
+
# Usage:
|
| 7 |
+
# scripts/create-dmg.sh <app_path> [output_dmg]
|
| 8 |
+
#
|
| 9 |
+
# Env:
|
| 10 |
+
# DMG_VOLUME_NAME default: CFBundleName (or "OpenClaw")
|
| 11 |
+
# DMG_BACKGROUND_PATH default: assets/dmg-background.png
|
| 12 |
+
# DMG_BACKGROUND_SMALL default: assets/dmg-background-small.png (recommended)
|
| 13 |
+
# DMG_WINDOW_BOUNDS default: "400 100 900 420" (500x320)
|
| 14 |
+
# DMG_ICON_SIZE default: 128
|
| 15 |
+
# DMG_APP_POS default: "125 160"
|
| 16 |
+
# DMG_APPS_POS default: "375 160"
|
| 17 |
+
# SKIP_DMG_STYLE=1 skip Finder styling
|
| 18 |
+
# DMG_EXTRA_SECTORS extra sectors to keep when shrinking RW image (default: 2048)
|
| 19 |
+
|
| 20 |
+
APP_PATH="${1:-}"
|
| 21 |
+
OUT_PATH="${2:-}"
|
| 22 |
+
|
| 23 |
+
if [[ -z "$APP_PATH" ]]; then
|
| 24 |
+
echo "Usage: $0 <app_path> [output_dmg]" >&2
|
| 25 |
+
exit 1
|
| 26 |
+
fi
|
| 27 |
+
if [[ ! -d "$APP_PATH" ]]; then
|
| 28 |
+
echo "Error: App not found: $APP_PATH" >&2
|
| 29 |
+
exit 1
|
| 30 |
+
fi
|
| 31 |
+
|
| 32 |
+
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
|
| 33 |
+
BUILD_DIR="$ROOT_DIR/dist"
|
| 34 |
+
mkdir -p "$BUILD_DIR"
|
| 35 |
+
|
| 36 |
+
APP_NAME=$(/usr/libexec/PlistBuddy -c "Print CFBundleName" "$APP_PATH/Contents/Info.plist" 2>/dev/null || echo "OpenClaw")
|
| 37 |
+
VERSION=$(/usr/libexec/PlistBuddy -c "Print CFBundleShortVersionString" "$APP_PATH/Contents/Info.plist" 2>/dev/null || echo "0.0.0")
|
| 38 |
+
|
| 39 |
+
DMG_NAME="${APP_NAME}-${VERSION}.dmg"
|
| 40 |
+
DMG_VOLUME_NAME="${DMG_VOLUME_NAME:-$APP_NAME}"
|
| 41 |
+
DMG_BACKGROUND_SMALL="${DMG_BACKGROUND_SMALL:-$ROOT_DIR/assets/dmg-background-small.png}"
|
| 42 |
+
DMG_BACKGROUND_PATH="${DMG_BACKGROUND_PATH:-$ROOT_DIR/assets/dmg-background.png}"
|
| 43 |
+
|
| 44 |
+
DMG_WINDOW_BOUNDS="${DMG_WINDOW_BOUNDS:-400 100 900 420}"
|
| 45 |
+
DMG_ICON_SIZE="${DMG_ICON_SIZE:-128}"
|
| 46 |
+
DMG_APP_POS="${DMG_APP_POS:-125 160}"
|
| 47 |
+
DMG_APPS_POS="${DMG_APPS_POS:-375 160}"
|
| 48 |
+
DMG_EXTRA_SECTORS="${DMG_EXTRA_SECTORS:-2048}"
|
| 49 |
+
|
| 50 |
+
# Turn a space-separated 4-tuple ("a b c d") into an AppleScript
# list body ("a, b, c, d").
to_applescript_list4() {
  local raw="$1"
  printf '%s\n' "$raw" | awk '{ printf "%s, %s, %s, %s", $1, $2, $3, $4 }'
}

# Turn a space-separated pair ("x y") into an AppleScript point
# body ("x, y").
to_applescript_pair() {
  local raw="$1"
  printf '%s\n' "$raw" | awk '{ printf "%s, %s", $1, $2 }'
}
|
| 59 |
+
|
| 60 |
+
if [[ -z "$OUT_PATH" ]]; then
|
| 61 |
+
OUT_PATH="$BUILD_DIR/$DMG_NAME"
|
| 62 |
+
fi
|
| 63 |
+
|
| 64 |
+
echo "Creating DMG: $OUT_PATH"
|
| 65 |
+
|
| 66 |
+
# Cleanup stuck volumes.
|
| 67 |
+
for vol in "/Volumes/$DMG_VOLUME_NAME"* "/Volumes/$APP_NAME"*; do
|
| 68 |
+
if [[ -d "$vol" ]]; then
|
| 69 |
+
hdiutil detach "$vol" -force 2>/dev/null || true
|
| 70 |
+
sleep 1
|
| 71 |
+
fi
|
| 72 |
+
done
|
| 73 |
+
|
| 74 |
+
DMG_TEMP="$(mktemp -d /tmp/openclaw-dmg.XXXXXX)"
|
| 75 |
+
trap 'hdiutil detach "/Volumes/'"$DMG_VOLUME_NAME"'" -force 2>/dev/null || true; rm -rf "$DMG_TEMP" 2>/dev/null || true' EXIT
|
| 76 |
+
|
| 77 |
+
cp -R "$APP_PATH" "$DMG_TEMP/"
|
| 78 |
+
ln -s /Applications "$DMG_TEMP/Applications"
|
| 79 |
+
|
| 80 |
+
APP_SIZE_MB=$(du -sm "$APP_PATH" | awk '{print $1}')
|
| 81 |
+
DMG_SIZE_MB=$((APP_SIZE_MB + 80))
|
| 82 |
+
|
| 83 |
+
DMG_RW_PATH="${OUT_PATH%.dmg}-rw.dmg"
|
| 84 |
+
rm -f "$DMG_RW_PATH" "$OUT_PATH"
|
| 85 |
+
|
| 86 |
+
hdiutil create \
|
| 87 |
+
-volname "$DMG_VOLUME_NAME" \
|
| 88 |
+
-srcfolder "$DMG_TEMP" \
|
| 89 |
+
-ov \
|
| 90 |
+
-format UDRW \
|
| 91 |
+
-size "${DMG_SIZE_MB}m" \
|
| 92 |
+
"$DMG_RW_PATH"
|
| 93 |
+
|
| 94 |
+
MOUNT_POINT="/Volumes/$DMG_VOLUME_NAME"
|
| 95 |
+
if [[ -d "$MOUNT_POINT" ]]; then
|
| 96 |
+
hdiutil detach "$MOUNT_POINT" -force 2>/dev/null || true
|
| 97 |
+
sleep 2
|
| 98 |
+
fi
|
| 99 |
+
hdiutil attach "$DMG_RW_PATH" -mountpoint "$MOUNT_POINT" -nobrowse
|
| 100 |
+
|
| 101 |
+
if [[ "${SKIP_DMG_STYLE:-0}" != "1" ]]; then
|
| 102 |
+
mkdir -p "$MOUNT_POINT/.background"
|
| 103 |
+
if [[ -f "$DMG_BACKGROUND_SMALL" ]]; then
|
| 104 |
+
cp "$DMG_BACKGROUND_SMALL" "$MOUNT_POINT/.background/background.png"
|
| 105 |
+
elif [[ -f "$DMG_BACKGROUND_PATH" ]]; then
|
| 106 |
+
cp "$DMG_BACKGROUND_PATH" "$MOUNT_POINT/.background/background.png"
|
| 107 |
+
else
|
| 108 |
+
echo "WARN: DMG background missing: $DMG_BACKGROUND_SMALL / $DMG_BACKGROUND_PATH" >&2
|
| 109 |
+
fi
|
| 110 |
+
|
| 111 |
+
# Volume icon: reuse the app icon if available.
|
| 112 |
+
ICON_SRC="$ROOT_DIR/apps/macos/Sources/OpenClaw/Resources/OpenClaw.icns"
|
| 113 |
+
if [[ -f "$ICON_SRC" ]]; then
|
| 114 |
+
cp "$ICON_SRC" "$MOUNT_POINT/.VolumeIcon.icns"
|
| 115 |
+
if command -v SetFile >/dev/null 2>&1; then
|
| 116 |
+
SetFile -a C "$MOUNT_POINT" 2>/dev/null || true
|
| 117 |
+
fi
|
| 118 |
+
fi
|
| 119 |
+
|
| 120 |
+
osascript <<EOF
|
| 121 |
+
tell application "Finder"
|
| 122 |
+
tell disk "$DMG_VOLUME_NAME"
|
| 123 |
+
open
|
| 124 |
+
set current view of container window to icon view
|
| 125 |
+
set toolbar visible of container window to false
|
| 126 |
+
set statusbar visible of container window to false
|
| 127 |
+
set the bounds of container window to {$(to_applescript_list4 "$DMG_WINDOW_BOUNDS")}
|
| 128 |
+
set viewOptions to the icon view options of container window
|
| 129 |
+
set arrangement of viewOptions to not arranged
|
| 130 |
+
set icon size of viewOptions to ${DMG_ICON_SIZE}
|
| 131 |
+
if exists file ".background:background.png" then
|
| 132 |
+
set background picture of viewOptions to file ".background:background.png"
|
| 133 |
+
end if
|
| 134 |
+
set text size of viewOptions to 12
|
| 135 |
+
set label position of viewOptions to bottom
|
| 136 |
+
set shows item info of viewOptions to false
|
| 137 |
+
set shows icon preview of viewOptions to true
|
| 138 |
+
set position of item "${APP_NAME}.app" of container window to {$(to_applescript_pair "$DMG_APP_POS")}
|
| 139 |
+
set position of item "Applications" of container window to {$(to_applescript_pair "$DMG_APPS_POS")}
|
| 140 |
+
update without registering applications
|
| 141 |
+
delay 2
|
| 142 |
+
close
|
| 143 |
+
open
|
| 144 |
+
delay 1
|
| 145 |
+
end tell
|
| 146 |
+
end tell
|
| 147 |
+
EOF
|
| 148 |
+
|
| 149 |
+
sleep 2
|
| 150 |
+
osascript -e 'tell application "Finder" to close every window' || true
|
| 151 |
+
fi
|
| 152 |
+
|
| 153 |
+
for i in {1..5}; do
|
| 154 |
+
if hdiutil detach "$MOUNT_POINT" -quiet 2>/dev/null; then
|
| 155 |
+
break
|
| 156 |
+
fi
|
| 157 |
+
if [[ "$i" == "3" ]]; then
|
| 158 |
+
hdiutil detach "$MOUNT_POINT" -force 2>/dev/null || true
|
| 159 |
+
fi
|
| 160 |
+
sleep 2
|
| 161 |
+
done
|
| 162 |
+
|
| 163 |
+
hdiutil resize -limits "$DMG_RW_PATH" >/tmp/openclaw-dmg-limits.txt 2>/dev/null || true
|
| 164 |
+
MIN_SECTORS="$(tail -n 1 /tmp/openclaw-dmg-limits.txt 2>/dev/null | awk '{print $1}')"
|
| 165 |
+
rm -f /tmp/openclaw-dmg-limits.txt
|
| 166 |
+
if [[ "$MIN_SECTORS" =~ ^[0-9]+$ ]] && [[ "$DMG_EXTRA_SECTORS" =~ ^[0-9]+$ ]]; then
|
| 167 |
+
TARGET_SECTORS=$((MIN_SECTORS + DMG_EXTRA_SECTORS))
|
| 168 |
+
echo "Shrinking RW image: min sectors=$MIN_SECTORS (+$DMG_EXTRA_SECTORS) -> $TARGET_SECTORS"
|
| 169 |
+
hdiutil resize -sectors "$TARGET_SECTORS" "$DMG_RW_PATH" >/dev/null 2>&1 || true
|
| 170 |
+
fi
|
| 171 |
+
|
| 172 |
+
hdiutil convert "$DMG_RW_PATH" -format ULMO -o "$OUT_PATH" -ov
|
| 173 |
+
rm -f "$DMG_RW_PATH"
|
| 174 |
+
|
| 175 |
+
hdiutil verify "$OUT_PATH" >/dev/null
|
| 176 |
+
echo "✅ DMG ready: $OUT_PATH"
|
scripts/debug-claude-usage.ts
ADDED
|
@@ -0,0 +1,391 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import { execFileSync } from "node:child_process";
|
| 2 |
+
import crypto from "node:crypto";
|
| 3 |
+
import fs from "node:fs";
|
| 4 |
+
import os from "node:os";
|
| 5 |
+
import path from "node:path";
|
| 6 |
+
|
| 7 |
+
type Args = {
|
| 8 |
+
agentId: string;
|
| 9 |
+
reveal: boolean;
|
| 10 |
+
sessionKey?: string;
|
| 11 |
+
};
|
| 12 |
+
|
| 13 |
+
const mask = (value: string) => {
|
| 14 |
+
const compact = value.trim();
|
| 15 |
+
if (!compact) {
|
| 16 |
+
return "missing";
|
| 17 |
+
}
|
| 18 |
+
const edge = compact.length >= 12 ? 6 : 4;
|
| 19 |
+
return `${compact.slice(0, edge)}…${compact.slice(-edge)}`;
|
| 20 |
+
};
|
| 21 |
+
|
| 22 |
+
const parseArgs = (): Args => {
|
| 23 |
+
const args = process.argv.slice(2);
|
| 24 |
+
let agentId = "main";
|
| 25 |
+
let reveal = false;
|
| 26 |
+
let sessionKey: string | undefined;
|
| 27 |
+
|
| 28 |
+
for (let i = 0; i < args.length; i++) {
|
| 29 |
+
const arg = args[i];
|
| 30 |
+
if (arg === "--agent" && args[i + 1]) {
|
| 31 |
+
agentId = String(args[++i]).trim() || "main";
|
| 32 |
+
continue;
|
| 33 |
+
}
|
| 34 |
+
if (arg === "--reveal") {
|
| 35 |
+
reveal = true;
|
| 36 |
+
continue;
|
| 37 |
+
}
|
| 38 |
+
if (arg === "--session-key" && args[i + 1]) {
|
| 39 |
+
sessionKey = String(args[++i]).trim() || undefined;
|
| 40 |
+
continue;
|
| 41 |
+
}
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
return { agentId, reveal, sessionKey };
|
| 45 |
+
};
|
| 46 |
+
|
| 47 |
+
const loadAuthProfiles = (agentId: string) => {
|
| 48 |
+
const stateRoot =
|
| 49 |
+
process.env.OPENCLAW_STATE_DIR?.trim() ||
|
| 50 |
+
process.env.CLAWDBOT_STATE_DIR?.trim() ||
|
| 51 |
+
path.join(os.homedir(), ".openclaw");
|
| 52 |
+
const authPath = path.join(stateRoot, "agents", agentId, "agent", "auth-profiles.json");
|
| 53 |
+
if (!fs.existsSync(authPath)) {
|
| 54 |
+
throw new Error(`Missing: ${authPath}`);
|
| 55 |
+
}
|
| 56 |
+
const store = JSON.parse(fs.readFileSync(authPath, "utf8")) as {
|
| 57 |
+
profiles?: Record<string, { provider?: string; type?: string; token?: string; key?: string }>;
|
| 58 |
+
};
|
| 59 |
+
return { authPath, store };
|
| 60 |
+
};
|
| 61 |
+
|
| 62 |
+
const pickAnthropicTokens = (store: {
|
| 63 |
+
profiles?: Record<string, { provider?: string; type?: string; token?: string; key?: string }>;
|
| 64 |
+
}): Array<{ profileId: string; token: string }> => {
|
| 65 |
+
const profiles = store.profiles ?? {};
|
| 66 |
+
const found: Array<{ profileId: string; token: string }> = [];
|
| 67 |
+
for (const [id, cred] of Object.entries(profiles)) {
|
| 68 |
+
if (cred?.provider !== "anthropic") {
|
| 69 |
+
continue;
|
| 70 |
+
}
|
| 71 |
+
const token = cred.type === "token" ? cred.token?.trim() : undefined;
|
| 72 |
+
if (token) {
|
| 73 |
+
found.push({ profileId: id, token });
|
| 74 |
+
}
|
| 75 |
+
}
|
| 76 |
+
return found;
|
| 77 |
+
};
|
| 78 |
+
|
| 79 |
+
const fetchAnthropicOAuthUsage = async (token: string) => {
|
| 80 |
+
const res = await fetch("https://api.anthropic.com/api/oauth/usage", {
|
| 81 |
+
headers: {
|
| 82 |
+
Authorization: `Bearer ${token}`,
|
| 83 |
+
Accept: "application/json",
|
| 84 |
+
"anthropic-version": "2023-06-01",
|
| 85 |
+
"anthropic-beta": "oauth-2025-04-20",
|
| 86 |
+
"User-Agent": "openclaw-debug",
|
| 87 |
+
},
|
| 88 |
+
});
|
| 89 |
+
const text = await res.text();
|
| 90 |
+
return { status: res.status, contentType: res.headers.get("content-type"), text };
|
| 91 |
+
};
|
| 92 |
+
|
| 93 |
+
const readClaudeCliKeychain = (): {
|
| 94 |
+
accessToken: string;
|
| 95 |
+
expiresAt?: number;
|
| 96 |
+
scopes?: string[];
|
| 97 |
+
} | null => {
|
| 98 |
+
if (process.platform !== "darwin") {
|
| 99 |
+
return null;
|
| 100 |
+
}
|
| 101 |
+
try {
|
| 102 |
+
const raw = execFileSync(
|
| 103 |
+
"security",
|
| 104 |
+
["find-generic-password", "-s", "Claude Code-credentials", "-w"],
|
| 105 |
+
{ encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
|
| 106 |
+
);
|
| 107 |
+
const parsed = JSON.parse(raw.trim()) as Record<string, unknown>;
|
| 108 |
+
const oauth = parsed?.claudeAiOauth as Record<string, unknown> | undefined;
|
| 109 |
+
if (!oauth || typeof oauth !== "object") {
|
| 110 |
+
return null;
|
| 111 |
+
}
|
| 112 |
+
const accessToken = oauth.accessToken;
|
| 113 |
+
if (typeof accessToken !== "string" || !accessToken.trim()) {
|
| 114 |
+
return null;
|
| 115 |
+
}
|
| 116 |
+
const expiresAt = typeof oauth.expiresAt === "number" ? oauth.expiresAt : undefined;
|
| 117 |
+
const scopes = Array.isArray(oauth.scopes)
|
| 118 |
+
? oauth.scopes.filter((v): v is string => typeof v === "string")
|
| 119 |
+
: undefined;
|
| 120 |
+
return { accessToken, expiresAt, scopes };
|
| 121 |
+
} catch {
|
| 122 |
+
return null;
|
| 123 |
+
}
|
| 124 |
+
};
|
| 125 |
+
|
| 126 |
+
const chromeServiceNameForPath = (cookiePath: string): string => {
|
| 127 |
+
if (cookiePath.includes("/Arc/")) {
|
| 128 |
+
return "Arc Safe Storage";
|
| 129 |
+
}
|
| 130 |
+
if (cookiePath.includes("/BraveSoftware/")) {
|
| 131 |
+
return "Brave Safe Storage";
|
| 132 |
+
}
|
| 133 |
+
if (cookiePath.includes("/Microsoft Edge/")) {
|
| 134 |
+
return "Microsoft Edge Safe Storage";
|
| 135 |
+
}
|
| 136 |
+
if (cookiePath.includes("/Chromium/")) {
|
| 137 |
+
return "Chromium Safe Storage";
|
| 138 |
+
}
|
| 139 |
+
return "Chrome Safe Storage";
|
| 140 |
+
};
|
| 141 |
+
|
| 142 |
+
const readKeychainPassword = (service: string): string | null => {
|
| 143 |
+
try {
|
| 144 |
+
const out = execFileSync("security", ["find-generic-password", "-w", "-s", service], {
|
| 145 |
+
encoding: "utf8",
|
| 146 |
+
stdio: ["ignore", "pipe", "ignore"],
|
| 147 |
+
timeout: 5000,
|
| 148 |
+
});
|
| 149 |
+
const pw = out.trim();
|
| 150 |
+
return pw ? pw : null;
|
| 151 |
+
} catch {
|
| 152 |
+
return null;
|
| 153 |
+
}
|
| 154 |
+
};
|
| 155 |
+
|
| 156 |
+
const decryptChromeCookieValue = (encrypted: Buffer, service: string): string | null => {
|
| 157 |
+
if (encrypted.length < 4) {
|
| 158 |
+
return null;
|
| 159 |
+
}
|
| 160 |
+
const prefix = encrypted.subarray(0, 3).toString("utf8");
|
| 161 |
+
if (prefix !== "v10" && prefix !== "v11") {
|
| 162 |
+
return null;
|
| 163 |
+
}
|
| 164 |
+
|
| 165 |
+
const password = readKeychainPassword(service);
|
| 166 |
+
if (!password) {
|
| 167 |
+
return null;
|
| 168 |
+
}
|
| 169 |
+
|
| 170 |
+
const key = crypto.pbkdf2Sync(password, "saltysalt", 1003, 16, "sha1");
|
| 171 |
+
const iv = Buffer.alloc(16, 0x20);
|
| 172 |
+
const data = encrypted.subarray(3);
|
| 173 |
+
|
| 174 |
+
try {
|
| 175 |
+
const decipher = crypto.createDecipheriv("aes-128-cbc", key, iv);
|
| 176 |
+
decipher.setAutoPadding(true);
|
| 177 |
+
const decrypted = Buffer.concat([decipher.update(data), decipher.final()]);
|
| 178 |
+
const text = decrypted.toString("utf8").trim();
|
| 179 |
+
return text ? text : null;
|
| 180 |
+
} catch {
|
| 181 |
+
return null;
|
| 182 |
+
}
|
| 183 |
+
};
|
| 184 |
+
|
| 185 |
+
// Look up the claude.ai `sessionKey` cookie in a Chromium-family cookie DB.
// The SQL returns the plaintext `value` when present, otherwise the hex of
// `encrypted_value`, which is then decrypted via the browser's keychain
// "Safe Storage" password. Returns the key only when it looks like a Claude
// session key (prefix "sk-ant-"); null on any failure (no sqlite3 binary,
// locked DB, no matching cookie, failed decryption).
const queryChromeCookieDb = (cookieDb: string): string | null => {
  try {
    const out = execFileSync(
      "sqlite3",
      [
        "-readonly",
        cookieDb,
        `
SELECT
  COALESCE(NULLIF(value,''), hex(encrypted_value))
FROM cookies
WHERE (host_key LIKE '%claude.ai%' OR host_key = '.claude.ai')
  AND name = 'sessionKey'
LIMIT 1;
`,
      ],
      { encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
    ).trim();
    if (!out) {
      return null;
    }
    // Plaintext column was populated — already the cookie value.
    if (out.startsWith("sk-ant-")) {
      return out;
    }
    // Otherwise the output is hex(encrypted_value); strip any stray non-hex
    // characters (e.g. trailing newline artifacts) before decoding.
    const hex = out.replace(/[^0-9A-Fa-f]/g, "");
    if (!hex) {
      return null;
    }
    const buf = Buffer.from(hex, "hex");
    const service = chromeServiceNameForPath(cookieDb);
    const decrypted = decryptChromeCookieValue(buf, service);
    return decrypted && decrypted.startsWith("sk-ant-") ? decrypted : null;
  } catch {
    return null;
  }
};
|
| 221 |
+
|
| 222 |
+
const queryFirefoxCookieDb = (cookieDb: string): string | null => {
|
| 223 |
+
try {
|
| 224 |
+
const out = execFileSync(
|
| 225 |
+
"sqlite3",
|
| 226 |
+
[
|
| 227 |
+
"-readonly",
|
| 228 |
+
cookieDb,
|
| 229 |
+
`
|
| 230 |
+
SELECT value
|
| 231 |
+
FROM moz_cookies
|
| 232 |
+
WHERE (host LIKE '%claude.ai%' OR host = '.claude.ai')
|
| 233 |
+
AND name = 'sessionKey'
|
| 234 |
+
LIMIT 1;
|
| 235 |
+
`,
|
| 236 |
+
],
|
| 237 |
+
{ encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
|
| 238 |
+
).trim();
|
| 239 |
+
return out && out.startsWith("sk-ant-") ? out : null;
|
| 240 |
+
} catch {
|
| 241 |
+
return null;
|
| 242 |
+
}
|
| 243 |
+
};
|
| 244 |
+
|
| 245 |
+
// Search local browsers (macOS only) for a claude.ai sessionKey cookie.
// Firefox profiles are probed first, then Chromium-family browsers in a fixed
// priority order; the first hit wins. `source` records which DB supplied it.
const findClaudeSessionKey = (): { sessionKey: string; source: string } | null => {
  if (process.platform !== "darwin") {
    return null;
  }

  const firefoxRoot = path.join(
    os.homedir(),
    "Library",
    "Application Support",
    "Firefox",
    "Profiles",
  );
  if (fs.existsSync(firefoxRoot)) {
    // Each Firefox profile directory carries its own cookies.sqlite.
    for (const entry of fs.readdirSync(firefoxRoot)) {
      const db = path.join(firefoxRoot, entry, "cookies.sqlite");
      if (!fs.existsSync(db)) {
        continue;
      }
      const value = queryFirefoxCookieDb(db);
      if (value) {
        return { sessionKey: value, source: `firefox:${db}` };
      }
    }
  }

  // Chromium-family data directories to probe, in priority order.
  const chromeCandidates = [
    path.join(os.homedir(), "Library", "Application Support", "Google", "Chrome"),
    path.join(os.homedir(), "Library", "Application Support", "Chromium"),
    path.join(os.homedir(), "Library", "Application Support", "Arc"),
    path.join(os.homedir(), "Library", "Application Support", "BraveSoftware", "Brave-Browser"),
    path.join(os.homedir(), "Library", "Application Support", "Microsoft Edge"),
  ];

  for (const root of chromeCandidates) {
    if (!fs.existsSync(root)) {
      continue;
    }
    // Chromium keeps one cookie DB per profile ("Default", "Profile 1", ...).
    const profiles = fs
      .readdirSync(root)
      .filter((name) => name === "Default" || name.startsWith("Profile "));
    for (const profile of profiles) {
      const db = path.join(root, profile, "Cookies");
      if (!fs.existsSync(db)) {
        continue;
      }
      const value = queryChromeCookieDb(db);
      if (value) {
        return { sessionKey: value, source: `chromium:${db}` };
      }
    }
  }

  return null;
};
|
| 299 |
+
|
| 300 |
+
const fetchClaudeWebUsage = async (sessionKey: string) => {
|
| 301 |
+
const headers = {
|
| 302 |
+
Cookie: `sessionKey=${sessionKey}`,
|
| 303 |
+
Accept: "application/json",
|
| 304 |
+
"User-Agent":
|
| 305 |
+
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15",
|
| 306 |
+
};
|
| 307 |
+
const orgRes = await fetch("https://claude.ai/api/organizations", { headers });
|
| 308 |
+
const orgText = await orgRes.text();
|
| 309 |
+
if (!orgRes.ok) {
|
| 310 |
+
return { ok: false as const, step: "organizations", status: orgRes.status, body: orgText };
|
| 311 |
+
}
|
| 312 |
+
const orgs = JSON.parse(orgText) as Array<{ uuid?: string }>;
|
| 313 |
+
const orgId = orgs?.[0]?.uuid;
|
| 314 |
+
if (!orgId) {
|
| 315 |
+
return { ok: false as const, step: "organizations", status: 200, body: orgText };
|
| 316 |
+
}
|
| 317 |
+
|
| 318 |
+
const usageRes = await fetch(`https://claude.ai/api/organizations/${orgId}/usage`, { headers });
|
| 319 |
+
const usageText = await usageRes.text();
|
| 320 |
+
return usageRes.ok
|
| 321 |
+
? { ok: true as const, orgId, body: usageText }
|
| 322 |
+
: { ok: false as const, step: "usage", status: usageRes.status, body: usageText };
|
| 323 |
+
};
|
| 324 |
+
|
| 325 |
+
const main = async () => {
|
| 326 |
+
const opts = parseArgs();
|
| 327 |
+
const { authPath, store } = loadAuthProfiles(opts.agentId);
|
| 328 |
+
console.log(`Auth file: ${authPath}`);
|
| 329 |
+
|
| 330 |
+
const keychain = readClaudeCliKeychain();
|
| 331 |
+
if (keychain) {
|
| 332 |
+
console.log(
|
| 333 |
+
`Claude Code CLI keychain: accessToken=${opts.reveal ? keychain.accessToken : mask(keychain.accessToken)} scopes=${keychain.scopes?.join(",") ?? "(unknown)"}`,
|
| 334 |
+
);
|
| 335 |
+
const oauth = await fetchAnthropicOAuthUsage(keychain.accessToken);
|
| 336 |
+
console.log(
|
| 337 |
+
`OAuth usage (keychain): HTTP ${oauth.status} (${oauth.contentType ?? "no content-type"})`,
|
| 338 |
+
);
|
| 339 |
+
console.log(oauth.text.slice(0, 200).replace(/\s+/g, " ").trim());
|
| 340 |
+
} else {
|
| 341 |
+
console.log("Claude Code CLI keychain: missing/unreadable");
|
| 342 |
+
}
|
| 343 |
+
|
| 344 |
+
const anthropic = pickAnthropicTokens(store);
|
| 345 |
+
if (anthropic.length === 0) {
|
| 346 |
+
console.log("Auth profiles: no Anthropic token profiles found");
|
| 347 |
+
} else {
|
| 348 |
+
for (const entry of anthropic) {
|
| 349 |
+
console.log(
|
| 350 |
+
`Auth profiles: ${entry.profileId} token=${opts.reveal ? entry.token : mask(entry.token)}`,
|
| 351 |
+
);
|
| 352 |
+
const oauth = await fetchAnthropicOAuthUsage(entry.token);
|
| 353 |
+
console.log(
|
| 354 |
+
`OAuth usage (${entry.profileId}): HTTP ${oauth.status} (${oauth.contentType ?? "no content-type"})`,
|
| 355 |
+
);
|
| 356 |
+
console.log(oauth.text.slice(0, 200).replace(/\s+/g, " ").trim());
|
| 357 |
+
}
|
| 358 |
+
}
|
| 359 |
+
|
| 360 |
+
const sessionKey =
|
| 361 |
+
opts.sessionKey?.trim() ||
|
| 362 |
+
process.env.CLAUDE_AI_SESSION_KEY?.trim() ||
|
| 363 |
+
process.env.CLAUDE_WEB_SESSION_KEY?.trim() ||
|
| 364 |
+
findClaudeSessionKey()?.sessionKey;
|
| 365 |
+
const source = opts.sessionKey
|
| 366 |
+
? "--session-key"
|
| 367 |
+
: process.env.CLAUDE_AI_SESSION_KEY || process.env.CLAUDE_WEB_SESSION_KEY
|
| 368 |
+
? "env"
|
| 369 |
+
: (findClaudeSessionKey()?.source ?? "auto");
|
| 370 |
+
|
| 371 |
+
if (!sessionKey) {
|
| 372 |
+
console.log(
|
| 373 |
+
"Claude web: no sessionKey found (try --session-key or export CLAUDE_AI_SESSION_KEY)",
|
| 374 |
+
);
|
| 375 |
+
return;
|
| 376 |
+
}
|
| 377 |
+
|
| 378 |
+
console.log(
|
| 379 |
+
`Claude web: sessionKey=${opts.reveal ? sessionKey : mask(sessionKey)} (source: ${source})`,
|
| 380 |
+
);
|
| 381 |
+
const web = await fetchClaudeWebUsage(sessionKey);
|
| 382 |
+
if (!web.ok) {
|
| 383 |
+
console.log(`Claude web: ${web.step} HTTP ${web.status}`);
|
| 384 |
+
console.log(String(web.body).slice(0, 400).replace(/\s+/g, " ").trim());
|
| 385 |
+
return;
|
| 386 |
+
}
|
| 387 |
+
console.log(`Claude web: org=${web.orgId} OK`);
|
| 388 |
+
console.log(web.body.slice(0, 400).replace(/\s+/g, " ").trim());
|
| 389 |
+
};
|
| 390 |
+
|
| 391 |
+
// Entry point: top-level await (this file executes as an ES module).
await main();
|
scripts/docker/cleanup-smoke/Dockerfile
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Smoke-test image for openclaw's cleanup commands (reset/uninstall),
# driven by scripts/docker/cleanup-smoke/run.sh.
FROM node:22-bookworm-slim

RUN apt-get update \
  && apt-get install -y --no-install-recommends \
  bash \
  ca-certificates \
  git \
  && rm -rf /var/lib/apt/lists/*

WORKDIR /repo
# Copy only the manifest/lockfile (and the postinstall hook they reference)
# first so dependency install is cached across source-only changes.
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./
COPY scripts/postinstall.js ./scripts/postinstall.js
RUN corepack enable \
  && pnpm install --frozen-lockfile

COPY . .
COPY scripts/docker/cleanup-smoke/run.sh /usr/local/bin/openclaw-cleanup-smoke
RUN chmod +x /usr/local/bin/openclaw-cleanup-smoke

ENTRYPOINT ["/usr/local/bin/openclaw-cleanup-smoke"]
|
scripts/docker/cleanup-smoke/run.sh
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
# Smoke test: verify `openclaw reset` and `openclaw uninstall` actually remove
# the state files/directories they claim to, using a throwaway state dir.
set -euo pipefail

cd /repo

# Point the CLI at an isolated state directory so nothing real is touched.
export OPENCLAW_STATE_DIR="/tmp/openclaw-test"
export OPENCLAW_CONFIG_PATH="${OPENCLAW_STATE_DIR}/openclaw.json"

echo "==> Seed state"
mkdir -p "${OPENCLAW_STATE_DIR}/credentials"
mkdir -p "${OPENCLAW_STATE_DIR}/agents/main/sessions"
echo '{}' >"${OPENCLAW_CONFIG_PATH}"
echo 'creds' >"${OPENCLAW_STATE_DIR}/credentials/marker.txt"
echo 'session' >"${OPENCLAW_STATE_DIR}/agents/main/sessions/sessions.json"

echo "==> Reset (config+creds+sessions)"
pnpm openclaw reset --scope config+creds+sessions --yes --non-interactive

# All three seeded locations must be gone after reset.
test ! -f "${OPENCLAW_CONFIG_PATH}"
test ! -d "${OPENCLAW_STATE_DIR}/credentials"
test ! -d "${OPENCLAW_STATE_DIR}/agents/main/sessions"

echo "==> Recreate minimal config"
mkdir -p "${OPENCLAW_STATE_DIR}/credentials"
echo '{}' >"${OPENCLAW_CONFIG_PATH}"

echo "==> Uninstall (state only)"
pnpm openclaw uninstall --state --yes --non-interactive

# Uninstall must remove the entire state directory.
test ! -d "${OPENCLAW_STATE_DIR}"

echo "OK"
|
scripts/docker/install-sh-e2e/Dockerfile
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Image for end-to-end testing of the public install.sh one-liner
# (scripts/docker/install-sh-e2e/run.sh drives the test).
FROM node:22-bookworm-slim

# curl is needed to fetch the installer; git for any repo-based install steps.
RUN apt-get update \
  && apt-get install -y --no-install-recommends \
  bash \
  ca-certificates \
  curl \
  git \
  && rm -rf /var/lib/apt/lists/*

COPY run.sh /usr/local/bin/openclaw-install-e2e
RUN chmod +x /usr/local/bin/openclaw-install-e2e

ENTRYPOINT ["/usr/local/bin/openclaw-install-e2e"]
|
scripts/docker/install-sh-e2e/run.sh
ADDED
|
@@ -0,0 +1,531 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
# End-to-end test of the public install.sh one-liner: validate credentials for
# the chosen model mode, resolve the expected npm version, optionally
# preinstall the previous release to exercise the upgrade path, run the
# installer, and verify the installed CLI version matches.
set -euo pipefail

# Configuration via env vars; OPENCLAW_* take precedence over legacy CLAWDBOT_*.
INSTALL_URL="${OPENCLAW_INSTALL_URL:-${CLAWDBOT_INSTALL_URL:-https://openclaw.bot/install.sh}}"
MODELS_MODE="${OPENCLAW_E2E_MODELS:-${CLAWDBOT_E2E_MODELS:-both}}" # both|openai|anthropic
INSTALL_TAG="${OPENCLAW_INSTALL_TAG:-${CLAWDBOT_INSTALL_TAG:-latest}}"
E2E_PREVIOUS_VERSION="${OPENCLAW_INSTALL_E2E_PREVIOUS:-${CLAWDBOT_INSTALL_E2E_PREVIOUS:-}}"
SKIP_PREVIOUS="${OPENCLAW_INSTALL_E2E_SKIP_PREVIOUS:-${CLAWDBOT_INSTALL_E2E_SKIP_PREVIOUS:-0}}"
OPENAI_API_KEY="${OPENAI_API_KEY:-}"
ANTHROPIC_API_KEY="${ANTHROPIC_API_KEY:-}"
ANTHROPIC_API_TOKEN="${ANTHROPIC_API_TOKEN:-}"

# Validate the mode and make sure the credentials each mode needs are present.
if [[ "$MODELS_MODE" != "both" && "$MODELS_MODE" != "openai" && "$MODELS_MODE" != "anthropic" ]]; then
  echo "ERROR: OPENCLAW_E2E_MODELS must be one of: both|openai|anthropic" >&2
  exit 2
fi

if [[ "$MODELS_MODE" == "both" ]]; then
  if [[ -z "$OPENAI_API_KEY" ]]; then
    echo "ERROR: OPENCLAW_E2E_MODELS=both requires OPENAI_API_KEY." >&2
    exit 2
  fi
  if [[ -z "$ANTHROPIC_API_TOKEN" && -z "$ANTHROPIC_API_KEY" ]]; then
    echo "ERROR: OPENCLAW_E2E_MODELS=both requires ANTHROPIC_API_TOKEN or ANTHROPIC_API_KEY." >&2
    exit 2
  fi
elif [[ "$MODELS_MODE" == "openai" && -z "$OPENAI_API_KEY" ]]; then
  echo "ERROR: OPENCLAW_E2E_MODELS=openai requires OPENAI_API_KEY." >&2
  exit 2
elif [[ "$MODELS_MODE" == "anthropic" && -z "$ANTHROPIC_API_TOKEN" && -z "$ANTHROPIC_API_KEY" ]]; then
  echo "ERROR: OPENCLAW_E2E_MODELS=anthropic requires ANTHROPIC_API_TOKEN or ANTHROPIC_API_KEY." >&2
  exit 2
fi

echo "==> Resolve npm versions"
EXPECTED_VERSION="$(npm view "openclaw@${INSTALL_TAG}" version)"
if [[ -z "$EXPECTED_VERSION" || "$EXPECTED_VERSION" == "undefined" || "$EXPECTED_VERSION" == "null" ]]; then
  echo "ERROR: unable to resolve openclaw@${INSTALL_TAG} version" >&2
  exit 2
fi
if [[ -n "$E2E_PREVIOUS_VERSION" ]]; then
  PREVIOUS_VERSION="$E2E_PREVIOUS_VERSION"
else
  # Default "previous" = second-to-last published version (or the only one).
  PREVIOUS_VERSION="$(node - <<'NODE'
const { execSync } = require("node:child_process");
const versions = JSON.parse(execSync("npm view openclaw versions --json", { encoding: "utf8" }));
if (!Array.isArray(versions) || versions.length === 0) process.exit(1);
process.stdout.write(versions.length >= 2 ? versions[versions.length - 2] : versions[0]);
NODE
)"
fi
echo "expected=$EXPECTED_VERSION previous=$PREVIOUS_VERSION"

if [[ "$SKIP_PREVIOUS" == "1" ]]; then
  echo "==> Skip preinstall previous (OPENCLAW_INSTALL_E2E_SKIP_PREVIOUS=1)"
else
  echo "==> Preinstall previous (forces installer upgrade path; avoids read() prompt)"
  npm install -g "openclaw@${PREVIOUS_VERSION}"
fi

echo "==> Run official installer one-liner"
# Pass the target tag/version to the installer via its env knobs (both the
# OPENCLAW_* and legacy CLAWDBOT_* names are set for compatibility).
if [[ "$INSTALL_TAG" == "beta" ]]; then
  OPENCLAW_BETA=1 CLAWDBOT_BETA=1 curl -fsSL "$INSTALL_URL" | bash
elif [[ "$INSTALL_TAG" != "latest" ]]; then
  OPENCLAW_VERSION="$INSTALL_TAG" CLAWDBOT_VERSION="$INSTALL_TAG" curl -fsSL "$INSTALL_URL" | bash
else
  curl -fsSL "$INSTALL_URL" | bash
fi

echo "==> Verify installed version"
INSTALLED_VERSION="$(openclaw --version 2>/dev/null | head -n 1 | tr -d '\r')"
echo "installed=$INSTALLED_VERSION expected=$EXPECTED_VERSION"
if [[ "$INSTALLED_VERSION" != "$EXPECTED_VERSION" ]]; then
  echo "ERROR: expected openclaw@$EXPECTED_VERSION, got openclaw@$INSTALLED_VERSION" >&2
  exit 1
fi
|
| 78 |
+
set_image_model() {
  # Try each candidate image model in order; echo the first one the CLI accepts.
  # $1 = profile, remaining args = candidate model names.
  local profile="$1"
  shift
  local m
  for m in "$@"; do
    if ! openclaw --profile "$profile" models set-image "$m" >/dev/null 2>&1; then
      continue
    fi
    echo "$m"
    return 0
  done
  echo "ERROR: could not set an image model (tried: $*)" >&2
  return 1
}
|
| 91 |
+
|
| 92 |
+
set_agent_model() {
  # Try each candidate agent model in order; echo the first one the CLI accepts.
  # $1 = profile, remaining args = candidate model names.
  local profile="$1"
  shift
  local m
  for m in "$@"; do
    if ! openclaw --profile "$profile" models set "$m" >/dev/null 2>&1; then
      continue
    fi
    echo "$m"
    return 0
  done
  echo "ERROR: could not set agent model (tried: $*)" >&2
  return 1
}
|
| 105 |
+
|
| 106 |
+
write_png_lr_rg() {
  # Generate a 96x64 test PNG (left half red, right half green) with no image
  # library: the inline node script assembles the PNG chunks by hand.
  local out="$1"
  node - <<'NODE' "$out"
const fs = require("node:fs");
const zlib = require("node:zlib");

const out = process.argv[2];
const width = 96;
const height = 64;

// CRC-32 lookup table using the standard PNG polynomial (0xEDB88320).
const crcTable = (() => {
  const table = new Uint32Array(256);
  for (let i = 0; i < 256; i++) {
    let c = i;
    for (let k = 0; k < 8; k++) c = (c & 1) ? (0xedb88320 ^ (c >>> 1)) : (c >>> 1);
    table[i] = c >>> 0;
  }
  return table;
})();
function crc32(buf) {
  let c = 0xffffffff;
  for (let i = 0; i < buf.length; i++) c = crcTable[(c ^ buf[i]) & 0xff] ^ (c >>> 8);
  return (c ^ 0xffffffff) >>> 0;
}
// Wrap chunk data as length + type + data + CRC (the PNG chunk layout).
function chunk(type, data) {
  const typeBuf = Buffer.from(type, "ascii");
  const len = Buffer.alloc(4);
  len.writeUInt32BE(data.length, 0);
  const crcBuf = Buffer.alloc(4);
  crcBuf.writeUInt32BE(crc32(Buffer.concat([typeBuf, data])), 0);
  return Buffer.concat([len, typeBuf, data, crcBuf]);
}

// PNG signature followed by the 13-byte IHDR payload.
const sig = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
const ihdr = Buffer.alloc(13);
ihdr.writeUInt32BE(width, 0);
ihdr.writeUInt32BE(height, 4);
ihdr[8] = 8; // bit depth
ihdr[9] = 2; // color type: truecolor
ihdr[10] = 0; // compression
ihdr[11] = 0; // filter
ihdr[12] = 0; // interlace

// One scanline per row: filter byte then RGB triples (left red, right green).
const rows = [];
for (let y = 0; y < height; y++) {
  const row = Buffer.alloc(1 + width * 3);
  row[0] = 0; // filter: none
  for (let x = 0; x < width; x++) {
    const i = 1 + x * 3;
    const left = x < width / 2;
    row[i + 0] = left ? 255 : 0;
    row[i + 1] = left ? 0 : 255;
    row[i + 2] = 0;
  }
  rows.push(row);
}
const raw = Buffer.concat(rows);
const idat = zlib.deflateSync(raw, { level: 9 });

const png = Buffer.concat([
  sig,
  chunk("IHDR", ihdr),
  chunk("IDAT", idat),
  chunk("IEND", Buffer.alloc(0)),
]);
fs.writeFileSync(out, png);
NODE
}
|
| 174 |
+
|
| 175 |
+
run_agent_turn() {
  # Send one message to the agent in a fixed session and capture the JSON reply.
  # $1 = profile, $2 = session id, $3 = prompt, $4 = output JSON path.
  local profile="$1"
  local session_id="$2"
  local prompt="$3"
  local out_json="$4"
  openclaw --profile "$profile" agent \
    --session-id "$session_id" \
    --message "$prompt" \
    --thinking off \
    --json >"$out_json"
}
|
| 186 |
+
|
| 187 |
+
assert_agent_json_has_text() {
  # Fail (node exits 1, aborting under `set -e`) unless the agent JSON file
  # contains at least one non-empty text payload.
  local path="$1"
  node - <<'NODE' "$path"
const fs = require("node:fs");
const doc = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
const fromResult = doc?.result?.payloads;
const fromRoot = doc?.payloads;
const payloads = Array.isArray(fromResult) ? fromResult : Array.isArray(fromRoot) ? fromRoot : [];
const hasText = payloads.some((entry) => String(entry?.text ?? "").trim() !== "");
if (!hasText) process.exit(1);
NODE
}
|
| 200 |
+
|
| 201 |
+
assert_agent_json_ok() {
  # Validate one agent-turn JSON file: gateway status ok/accepted, no error
  # payloads, not rate-limited, and (when reported) the expected model provider.
  local json_path="$1"
  local expect_provider="$2"
  node - <<'NODE' "$json_path" "$expect_provider"
const fs = require("node:fs");
const jsonPath = process.argv[2];
const expectProvider = process.argv[3];
const p = JSON.parse(fs.readFileSync(jsonPath, "utf8"));

if (typeof p?.status === "string" && p.status !== "ok" && p.status !== "accepted") {
  console.error(`ERROR: gateway status=${p.status}`);
  process.exit(1);
}

const result = p?.result ?? p;
const payloads = Array.isArray(result?.payloads) ? result.payloads : [];
const anyError = payloads.some((pl) => pl && pl.isError === true);
const combinedText = payloads.map((pl) => String(pl?.text ?? "")).filter(Boolean).join("\n").trim();
if (anyError) {
  console.error(`ERROR: agent returned error payload: ${combinedText}`);
  process.exit(1);
}
// FIX: was /^429\\b/ — inside the quoted heredoc nothing unescapes the
// backslashes, so node received a regex matching a literal backslash + "b"
// after "429" instead of a word boundary, and the check could never fire.
if (/rate_limit_error/i.test(combinedText) || /^429\b/.test(combinedText)) {
  console.error(`ERROR: agent rate limited: ${combinedText}`);
  process.exit(1);
}

const meta = result?.meta;
const provider =
  (typeof meta?.agentMeta?.provider === "string" && meta.agentMeta.provider.trim()) ||
  (typeof meta?.provider === "string" && meta.provider.trim()) ||
  "";
if (expectProvider && provider && provider !== expectProvider) {
  console.error(`ERROR: expected provider=${expectProvider}, got provider=${provider}`);
  process.exit(1);
}
NODE
}
|
| 239 |
+
|
| 240 |
+
extract_matching_text() {
  # Print the payload text equal to $2 when present; otherwise the first
  # non-empty payload text; otherwise nothing.
  local path="$1"
  local expected="$2"
  node - <<'NODE' "$path" "$expected"
const fs = require("node:fs");
const doc = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
const wanted = String(process.argv[3] ?? "");
const fromResult = doc?.result?.payloads;
const fromRoot = doc?.payloads;
const payloads = Array.isArray(fromResult) ? fromResult : Array.isArray(fromRoot) ? fromRoot : [];
const texts = payloads.map((entry) => String(entry?.text ?? "").trim()).filter(Boolean);
const exact = texts.find((text) => text === wanted);
process.stdout.write(exact ?? texts[0] ?? "");
NODE
}
|
| 256 |
+
|
| 257 |
+
assert_session_used_tools() {
  # Assert that a session transcript (JSONL file, $1) shows every tool named
  # in $2..$N being invoked. The walker is deliberately permissive because
  # different providers record tool usage under different shapes.
  local jsonl="$1"
  shift
  node - <<'NODE' "$jsonl" "$@"
const fs = require("node:fs");
const jsonl = process.argv[2];
const required = new Set(process.argv.slice(3));

const raw = fs.readFileSync(jsonl, "utf8");
const lines = raw.split("\n").map((l) => l.trim()).filter(Boolean);
const seen = new Set();

// Known "type" values that mark a tool-use or tool-result record.
const toolTypes = new Set([
  "tool_use",
  "tool_result",
  "tool",
  "tool-call",
  "tool_call",
  "tooluse",
  "tool-use",
  "toolresult",
  "tool-result",
]);
// Recursively collect tool names from any nested object/array structure.
function walk(node, parent) {
  if (!node) return;
  if (Array.isArray(node)) {
    for (const item of node) walk(item, node);
    return;
  }
  if (typeof node !== "object") return;
  const obj = node;
  const t = typeof obj.type === "string" ? obj.type : null;
  // Anything whose type mentions "tool": pull a name from the usual fields.
  if (t && (toolTypes.has(t) || /tool/i.test(t))) {
    const name =
      typeof obj.name === "string" ? obj.name :
      typeof obj.toolName === "string" ? obj.toolName :
      typeof obj.tool_name === "string" ? obj.tool_name :
      (obj.tool && typeof obj.tool.name === "string") ? obj.tool.name :
      null;
    if (name) seen.add(name);
  }
  if (typeof obj.name === "string" && typeof obj.input === "object" && obj.input) {
    // Many tool-use blocks look like { type: "...", name: "exec", input: {...} }
    // but some transcripts omit/rename type.
    seen.add(obj.name);
  }
  // OpenAI-ish tool call shapes.
  if (Array.isArray(obj.tool_calls)) {
    for (const c of obj.tool_calls) {
      const fn = c?.function;
      if (fn && typeof fn.name === "string") seen.add(fn.name);
    }
  }
  if (obj.function && typeof obj.function.name === "string") seen.add(obj.function.name);
  for (const v of Object.values(obj)) walk(v, obj);
}

for (const line of lines) {
  try {
    const entry = JSON.parse(line);
    walk(entry, null);
  } catch {
    // ignore unparsable lines
  }
}

// Report everything still missing, plus context to debug false negatives.
const missing = [...required].filter((t) => !seen.has(t));
if (missing.length > 0) {
  console.error(`Missing tools in transcript: ${missing.join(", ")}`);
  console.error(`Seen tools: ${[...seen].sort().join(", ")}`);
  console.error("Transcript head:");
  console.error(lines.slice(0, 5).join("\n"));
  process.exit(1);
}
NODE
}
|
| 333 |
+
|
| 334 |
+
run_profile() {
|
| 335 |
+
local profile="$1"
|
| 336 |
+
local port="$2"
|
| 337 |
+
local workspace="$3"
|
| 338 |
+
local agent_model_provider="$4" # "openai"|"anthropic"
|
| 339 |
+
|
| 340 |
+
echo "==> Onboard ($profile)"
|
| 341 |
+
if [[ "$agent_model_provider" == "openai" ]]; then
|
| 342 |
+
openclaw --profile "$profile" onboard \
|
| 343 |
+
--non-interactive \
|
| 344 |
+
--accept-risk \
|
| 345 |
+
--flow quickstart \
|
| 346 |
+
--auth-choice openai-api-key \
|
| 347 |
+
--openai-api-key "$OPENAI_API_KEY" \
|
| 348 |
+
--gateway-port "$port" \
|
| 349 |
+
--gateway-bind loopback \
|
| 350 |
+
--gateway-auth token \
|
| 351 |
+
--workspace "$workspace" \
|
| 352 |
+
--skip-health
|
| 353 |
+
elif [[ -n "$ANTHROPIC_API_TOKEN" ]]; then
|
| 354 |
+
openclaw --profile "$profile" onboard \
|
| 355 |
+
--non-interactive \
|
| 356 |
+
--accept-risk \
|
| 357 |
+
--flow quickstart \
|
| 358 |
+
--auth-choice token \
|
| 359 |
+
--token-provider anthropic \
|
| 360 |
+
--token "$ANTHROPIC_API_TOKEN" \
|
| 361 |
+
--gateway-port "$port" \
|
| 362 |
+
--gateway-bind loopback \
|
| 363 |
+
--gateway-auth token \
|
| 364 |
+
--workspace "$workspace" \
|
| 365 |
+
--skip-health
|
| 366 |
+
else
|
| 367 |
+
openclaw --profile "$profile" onboard \
|
| 368 |
+
--non-interactive \
|
| 369 |
+
--accept-risk \
|
| 370 |
+
--flow quickstart \
|
| 371 |
+
--auth-choice apiKey \
|
| 372 |
+
--anthropic-api-key "$ANTHROPIC_API_KEY" \
|
| 373 |
+
--gateway-port "$port" \
|
| 374 |
+
--gateway-bind loopback \
|
| 375 |
+
--gateway-auth token \
|
| 376 |
+
--workspace "$workspace" \
|
| 377 |
+
--skip-health
|
| 378 |
+
fi
|
| 379 |
+
|
| 380 |
+
echo "==> Verify workspace identity files ($profile)"
|
| 381 |
+
test -f "$workspace/AGENTS.md"
|
| 382 |
+
test -f "$workspace/IDENTITY.md"
|
| 383 |
+
test -f "$workspace/USER.md"
|
| 384 |
+
test -f "$workspace/SOUL.md"
|
| 385 |
+
test -f "$workspace/TOOLS.md"
|
| 386 |
+
|
| 387 |
+
echo "==> Configure models ($profile)"
|
| 388 |
+
local agent_model
|
| 389 |
+
local image_model
|
| 390 |
+
if [[ "$agent_model_provider" == "openai" ]]; then
|
| 391 |
+
agent_model="$(set_agent_model "$profile" \
|
| 392 |
+
"openai/gpt-4.1-mini" \
|
| 393 |
+
"openai/gpt-4.1" \
|
| 394 |
+
"openai/gpt-4o-mini" \
|
| 395 |
+
"openai/gpt-4o")"
|
| 396 |
+
image_model="$(set_image_model "$profile" \
|
| 397 |
+
"openai/gpt-4.1" \
|
| 398 |
+
"openai/gpt-4o-mini" \
|
| 399 |
+
"openai/gpt-4o" \
|
| 400 |
+
"openai/gpt-4.1-mini")"
|
| 401 |
+
else
|
| 402 |
+
agent_model="$(set_agent_model "$profile" \
|
| 403 |
+
"anthropic/claude-opus-4-5" \
|
| 404 |
+
"claude-opus-4-5")"
|
| 405 |
+
image_model="$(set_image_model "$profile" \
|
| 406 |
+
"anthropic/claude-opus-4-5" \
|
| 407 |
+
"claude-opus-4-5")"
|
| 408 |
+
fi
|
| 409 |
+
echo "model=$agent_model"
|
| 410 |
+
echo "imageModel=$image_model"
|
| 411 |
+
|
| 412 |
+
echo "==> Prepare tool fixtures ($profile)"
|
| 413 |
+
PROOF_TXT="$workspace/proof.txt"
|
| 414 |
+
PROOF_COPY="$workspace/copy.txt"
|
| 415 |
+
HOSTNAME_TXT="$workspace/hostname.txt"
|
| 416 |
+
IMAGE_PNG="$workspace/proof.png"
|
| 417 |
+
IMAGE_TXT="$workspace/image.txt"
|
| 418 |
+
SESSION_ID="e2e-tools-${profile}"
|
| 419 |
+
SESSION_JSONL="/root/.openclaw-${profile}/agents/main/sessions/${SESSION_ID}.jsonl"
|
| 420 |
+
|
| 421 |
+
PROOF_VALUE="$(node -e 'console.log(require("node:crypto").randomBytes(16).toString("hex"))')"
|
| 422 |
+
echo -n "$PROOF_VALUE" >"$PROOF_TXT"
|
| 423 |
+
write_png_lr_rg "$IMAGE_PNG"
|
| 424 |
+
EXPECTED_HOSTNAME="$(cat /etc/hostname | tr -d '\r\n')"
|
| 425 |
+
|
| 426 |
+
echo "==> Start gateway ($profile)"
|
| 427 |
+
GATEWAY_LOG="$workspace/gateway.log"
|
| 428 |
+
openclaw --profile "$profile" gateway --port "$port" --bind loopback >"$GATEWAY_LOG" 2>&1 &
|
| 429 |
+
GATEWAY_PID="$!"
|
| 430 |
+
cleanup_profile() {
|
| 431 |
+
if kill -0 "$GATEWAY_PID" 2>/dev/null; then
|
| 432 |
+
kill "$GATEWAY_PID" 2>/dev/null || true
|
| 433 |
+
wait "$GATEWAY_PID" 2>/dev/null || true
|
| 434 |
+
fi
|
| 435 |
+
}
|
| 436 |
+
trap cleanup_profile EXIT
|
| 437 |
+
|
| 438 |
+
echo "==> Wait for health ($profile)"
|
| 439 |
+
for _ in $(seq 1 60); do
|
| 440 |
+
if openclaw --profile "$profile" health --timeout 2000 --json >/dev/null 2>&1; then
|
| 441 |
+
break
|
| 442 |
+
fi
|
| 443 |
+
sleep 0.25
|
| 444 |
+
done
|
| 445 |
+
openclaw --profile "$profile" health --timeout 10000 --json >/dev/null
|
| 446 |
+
|
| 447 |
+
echo "==> Agent turns ($profile)"
|
| 448 |
+
TURN1_JSON="/tmp/agent-${profile}-1.json"
|
| 449 |
+
TURN2_JSON="/tmp/agent-${profile}-2.json"
|
| 450 |
+
TURN3_JSON="/tmp/agent-${profile}-3.json"
|
| 451 |
+
TURN4_JSON="/tmp/agent-${profile}-4.json"
|
| 452 |
+
|
| 453 |
+
run_agent_turn "$profile" "$SESSION_ID" \
|
| 454 |
+
"Use the read tool (not exec) to read proof.txt. Reply with the exact contents only (no extra whitespace)." \
|
| 455 |
+
"$TURN1_JSON"
|
| 456 |
+
assert_agent_json_has_text "$TURN1_JSON"
|
| 457 |
+
assert_agent_json_ok "$TURN1_JSON" "$agent_model_provider"
|
| 458 |
+
local reply1
|
| 459 |
+
reply1="$(extract_matching_text "$TURN1_JSON" "$PROOF_VALUE" | tr -d '\r\n')"
|
| 460 |
+
if [[ "$reply1" != "$PROOF_VALUE" ]]; then
|
| 461 |
+
echo "ERROR: agent did not read proof.txt correctly ($profile): $reply1" >&2
|
| 462 |
+
exit 1
|
| 463 |
+
fi
|
| 464 |
+
|
| 465 |
+
local prompt2
|
| 466 |
+
prompt2=$'Use the write tool (not exec) to write exactly this string into copy.txt:\n'"${reply1}"$'\nThen use the read tool (not exec) to read copy.txt and reply with the exact contents only (no extra whitespace).'
|
| 467 |
+
run_agent_turn "$profile" "$SESSION_ID" "$prompt2" "$TURN2_JSON"
|
| 468 |
+
assert_agent_json_has_text "$TURN2_JSON"
|
| 469 |
+
assert_agent_json_ok "$TURN2_JSON" "$agent_model_provider"
|
| 470 |
+
local copy_value
|
| 471 |
+
copy_value="$(cat "$PROOF_COPY" 2>/dev/null | tr -d '\r\n' || true)"
|
| 472 |
+
if [[ "$copy_value" != "$PROOF_VALUE" ]]; then
|
| 473 |
+
echo "ERROR: copy.txt did not match proof.txt ($profile)" >&2
|
| 474 |
+
exit 1
|
| 475 |
+
fi
|
| 476 |
+
local reply2
|
| 477 |
+
reply2="$(extract_matching_text "$TURN2_JSON" "$PROOF_VALUE" | tr -d '\r\n')"
|
| 478 |
+
if [[ "$reply2" != "$PROOF_VALUE" ]]; then
|
| 479 |
+
echo "ERROR: agent did not read copy.txt correctly ($profile): $reply2" >&2
|
| 480 |
+
exit 1
|
| 481 |
+
fi
|
| 482 |
+
|
| 483 |
+
local prompt3
|
| 484 |
+
prompt3=$'Use the exec tool to run: cat /etc/hostname\nThen use the write tool to write the exact stdout (trim trailing newline) into hostname.txt. Reply with the hostname only.'
|
| 485 |
+
run_agent_turn "$profile" "$SESSION_ID" "$prompt3" "$TURN3_JSON"
|
| 486 |
+
assert_agent_json_has_text "$TURN3_JSON"
|
| 487 |
+
assert_agent_json_ok "$TURN3_JSON" "$agent_model_provider"
|
| 488 |
+
if [[ "$(cat "$HOSTNAME_TXT" 2>/dev/null | tr -d '\r\n' || true)" != "$EXPECTED_HOSTNAME" ]]; then
|
| 489 |
+
echo "ERROR: hostname.txt did not match /etc/hostname ($profile)" >&2
|
| 490 |
+
exit 1
|
| 491 |
+
fi
|
| 492 |
+
|
| 493 |
+
run_agent_turn "$profile" "$SESSION_ID" \
|
| 494 |
+
"Use the image tool on proof.png. Determine which color is on the left half and which is on the right half. Then use the write tool to write exactly: LEFT=RED RIGHT=GREEN into image.txt. Reply with exactly: LEFT=RED RIGHT=GREEN" \
|
| 495 |
+
"$TURN4_JSON"
|
| 496 |
+
assert_agent_json_has_text "$TURN4_JSON"
|
| 497 |
+
assert_agent_json_ok "$TURN4_JSON" "$agent_model_provider"
|
| 498 |
+
if [[ "$(cat "$IMAGE_TXT" 2>/dev/null | tr -d '\r\n' || true)" != "LEFT=RED RIGHT=GREEN" ]]; then
|
| 499 |
+
echo "ERROR: image.txt did not contain expected marker ($profile)" >&2
|
| 500 |
+
exit 1
|
| 501 |
+
fi
|
| 502 |
+
local reply4
|
| 503 |
+
reply4="$(extract_matching_text "$TURN4_JSON" "LEFT=RED RIGHT=GREEN")"
|
| 504 |
+
if [[ "$reply4" != "LEFT=RED RIGHT=GREEN" ]]; then
|
| 505 |
+
echo "ERROR: agent reply did not contain expected marker ($profile): $reply4" >&2
|
| 506 |
+
exit 1
|
| 507 |
+
fi
|
| 508 |
+
|
| 509 |
+
echo "==> Verify tool usage via session transcript ($profile)"
|
| 510 |
+
# Give the gateway a moment to flush transcripts.
|
| 511 |
+
sleep 1
|
| 512 |
+
if [[ ! -f "$SESSION_JSONL" ]]; then
|
| 513 |
+
echo "ERROR: missing session transcript ($profile): $SESSION_JSONL" >&2
|
| 514 |
+
ls -la "/root/.openclaw-${profile}/agents/main/sessions" >&2 || true
|
| 515 |
+
exit 1
|
| 516 |
+
fi
|
| 517 |
+
assert_session_used_tools "$SESSION_JSONL" read write exec image
|
| 518 |
+
|
| 519 |
+
cleanup_profile
|
| 520 |
+
trap - EXIT
|
| 521 |
+
}
|
| 522 |
+
|
| 523 |
+
if [[ "$MODELS_MODE" == "openai" || "$MODELS_MODE" == "both" ]]; then
|
| 524 |
+
run_profile "e2e-openai" "18789" "/tmp/openclaw-e2e-openai" "openai"
|
| 525 |
+
fi
|
| 526 |
+
|
| 527 |
+
if [[ "$MODELS_MODE" == "anthropic" || "$MODELS_MODE" == "both" ]]; then
|
| 528 |
+
run_profile "e2e-anthropic" "18799" "/tmp/openclaw-e2e-anthropic" "anthropic"
|
| 529 |
+
fi
|
| 530 |
+
|
| 531 |
+
echo "OK"
|
scripts/docker/install-sh-nonroot/Dockerfile
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM ubuntu:24.04
|
| 2 |
+
|
| 3 |
+
RUN set -eux; \
|
| 4 |
+
for attempt in 1 2 3; do \
|
| 5 |
+
if apt-get update -o Acquire::Retries=3; then break; fi; \
|
| 6 |
+
echo "apt-get update failed (attempt ${attempt})" >&2; \
|
| 7 |
+
if [ "${attempt}" -eq 3 ]; then exit 1; fi; \
|
| 8 |
+
sleep 3; \
|
| 9 |
+
done; \
|
| 10 |
+
apt-get -o Acquire::Retries=3 install -y --no-install-recommends \
|
| 11 |
+
bash \
|
| 12 |
+
ca-certificates \
|
| 13 |
+
curl \
|
| 14 |
+
sudo \
|
| 15 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 16 |
+
|
| 17 |
+
RUN useradd -m -s /bin/bash app \
|
| 18 |
+
&& echo "app ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/app
|
| 19 |
+
|
| 20 |
+
USER app
|
| 21 |
+
WORKDIR /home/app
|
| 22 |
+
|
| 23 |
+
ENV NPM_CONFIG_FUND=false
|
| 24 |
+
ENV NPM_CONFIG_AUDIT=false
|
| 25 |
+
|
| 26 |
+
COPY run.sh /usr/local/bin/openclaw-install-nonroot
|
| 27 |
+
RUN sudo chmod +x /usr/local/bin/openclaw-install-nonroot
|
| 28 |
+
|
| 29 |
+
ENTRYPOINT ["/usr/local/bin/openclaw-install-nonroot"]
|
scripts/docker/install-sh-nonroot/run.sh
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
|
| 2 |
+
set -euo pipefail
|
| 3 |
+
|
| 4 |
+
INSTALL_URL="${OPENCLAW_INSTALL_URL:-https://openclaw.bot/install.sh}"
|
| 5 |
+
DEFAULT_PACKAGE="openclaw"
|
| 6 |
+
PACKAGE_NAME="${OPENCLAW_INSTALL_PACKAGE:-$DEFAULT_PACKAGE}"
|
| 7 |
+
|
| 8 |
+
echo "==> Pre-flight: ensure git absent"
|
| 9 |
+
if command -v git >/dev/null; then
|
| 10 |
+
echo "git is present unexpectedly" >&2
|
| 11 |
+
exit 1
|
| 12 |
+
fi
|
| 13 |
+
|
| 14 |
+
echo "==> Run installer (non-root user)"
|
| 15 |
+
curl -fsSL "$INSTALL_URL" | bash
|
| 16 |
+
|
| 17 |
+
# Ensure PATH picks up user npm prefix
|
| 18 |
+
export PATH="$HOME/.npm-global/bin:$PATH"
|
| 19 |
+
|
| 20 |
+
echo "==> Verify git installed"
|
| 21 |
+
command -v git >/dev/null
|
| 22 |
+
|
| 23 |
+
EXPECTED_VERSION="${OPENCLAW_INSTALL_EXPECT_VERSION:-}"
|
| 24 |
+
if [[ -n "$EXPECTED_VERSION" ]]; then
|
| 25 |
+
LATEST_VERSION="$EXPECTED_VERSION"
|
| 26 |
+
else
|
| 27 |
+
LATEST_VERSION="$(npm view "$PACKAGE_NAME" version)"
|
| 28 |
+
fi
|
| 29 |
+
CLI_NAME="$PACKAGE_NAME"
|
| 30 |
+
CMD_PATH="$(command -v "$CLI_NAME" || true)"
|
| 31 |
+
if [[ -z "$CMD_PATH" && -x "$HOME/.npm-global/bin/$PACKAGE_NAME" ]]; then
|
| 32 |
+
CLI_NAME="$PACKAGE_NAME"
|
| 33 |
+
CMD_PATH="$HOME/.npm-global/bin/$PACKAGE_NAME"
|
| 34 |
+
fi
|
| 35 |
+
if [[ -z "$CMD_PATH" ]]; then
|
| 36 |
+
echo "$PACKAGE_NAME is not on PATH" >&2
|
| 37 |
+
exit 1
|
| 38 |
+
fi
|
| 39 |
+
echo "==> Verify CLI installed: $CLI_NAME"
|
| 40 |
+
INSTALLED_VERSION="$("$CMD_PATH" --version 2>/dev/null | head -n 1 | tr -d '\r')"
|
| 41 |
+
|
| 42 |
+
echo "cli=$CLI_NAME installed=$INSTALLED_VERSION expected=$LATEST_VERSION"
|
| 43 |
+
if [[ "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
|
| 44 |
+
echo "ERROR: expected ${CLI_NAME}@${LATEST_VERSION}, got ${CLI_NAME}@${INSTALLED_VERSION}" >&2
|
| 45 |
+
exit 1
|
| 46 |
+
fi
|
| 47 |
+
|
| 48 |
+
echo "==> Sanity: CLI runs"
|
| 49 |
+
"$CMD_PATH" --help >/dev/null
|
| 50 |
+
|
| 51 |
+
echo "OK"
|
scripts/docker/install-sh-smoke/Dockerfile
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM node:22-bookworm-slim
|
| 2 |
+
|
| 3 |
+
RUN set -eux; \
|
| 4 |
+
for attempt in 1 2 3; do \
|
| 5 |
+
if apt-get update -o Acquire::Retries=3; then break; fi; \
|
| 6 |
+
echo "apt-get update failed (attempt ${attempt})" >&2; \
|
| 7 |
+
if [ "${attempt}" -eq 3 ]; then exit 1; fi; \
|
| 8 |
+
sleep 3; \
|
| 9 |
+
done; \
|
| 10 |
+
apt-get -o Acquire::Retries=3 install -y --no-install-recommends \
|
| 11 |
+
bash \
|
| 12 |
+
ca-certificates \
|
| 13 |
+
curl \
|
| 14 |
+
git \
|
| 15 |
+
sudo \
|
| 16 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 17 |
+
|
| 18 |
+
COPY run.sh /usr/local/bin/openclaw-install-smoke
|
| 19 |
+
RUN chmod +x /usr/local/bin/openclaw-install-smoke
|
| 20 |
+
|
| 21 |
+
ENTRYPOINT ["/usr/local/bin/openclaw-install-smoke"]
|
scripts/docker/install-sh-smoke/run.sh
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
|
| 2 |
+
set -euo pipefail
|
| 3 |
+
|
| 4 |
+
INSTALL_URL="${OPENCLAW_INSTALL_URL:-https://openclaw.bot/install.sh}"
|
| 5 |
+
SMOKE_PREVIOUS_VERSION="${OPENCLAW_INSTALL_SMOKE_PREVIOUS:-}"
|
| 6 |
+
SKIP_PREVIOUS="${OPENCLAW_INSTALL_SMOKE_SKIP_PREVIOUS:-0}"
|
| 7 |
+
DEFAULT_PACKAGE="openclaw"
|
| 8 |
+
PACKAGE_NAME="${OPENCLAW_INSTALL_PACKAGE:-$DEFAULT_PACKAGE}"
|
| 9 |
+
|
| 10 |
+
echo "==> Resolve npm versions"
|
| 11 |
+
LATEST_VERSION="$(npm view "$PACKAGE_NAME" version)"
|
| 12 |
+
if [[ -n "$SMOKE_PREVIOUS_VERSION" ]]; then
|
| 13 |
+
PREVIOUS_VERSION="$SMOKE_PREVIOUS_VERSION"
|
| 14 |
+
else
|
| 15 |
+
VERSIONS_JSON="$(npm view "$PACKAGE_NAME" versions --json)"
|
| 16 |
+
PREVIOUS_VERSION="$(VERSIONS_JSON="$VERSIONS_JSON" LATEST_VERSION="$LATEST_VERSION" node - <<'NODE'
|
| 17 |
+
const raw = process.env.VERSIONS_JSON || "[]";
|
| 18 |
+
const latest = process.env.LATEST_VERSION || "";
|
| 19 |
+
let versions;
|
| 20 |
+
try {
|
| 21 |
+
versions = JSON.parse(raw);
|
| 22 |
+
} catch {
|
| 23 |
+
versions = raw ? [raw] : [];
|
| 24 |
+
}
|
| 25 |
+
if (!Array.isArray(versions)) {
|
| 26 |
+
versions = [versions];
|
| 27 |
+
}
|
| 28 |
+
if (versions.length === 0) {
|
| 29 |
+
process.exit(1);
|
| 30 |
+
}
|
| 31 |
+
const latestIndex = latest ? versions.lastIndexOf(latest) : -1;
|
| 32 |
+
if (latestIndex > 0) {
|
| 33 |
+
process.stdout.write(String(versions[latestIndex - 1]));
|
| 34 |
+
process.exit(0);
|
| 35 |
+
}
|
| 36 |
+
process.stdout.write(String(latest || versions[versions.length - 1]));
|
| 37 |
+
NODE
|
| 38 |
+
)"
|
| 39 |
+
fi
|
| 40 |
+
|
| 41 |
+
echo "package=$PACKAGE_NAME latest=$LATEST_VERSION previous=$PREVIOUS_VERSION"
|
| 42 |
+
|
| 43 |
+
if [[ "$SKIP_PREVIOUS" == "1" ]]; then
|
| 44 |
+
echo "==> Skip preinstall previous (OPENCLAW_INSTALL_SMOKE_SKIP_PREVIOUS=1)"
|
| 45 |
+
else
|
| 46 |
+
echo "==> Preinstall previous (forces installer upgrade path)"
|
| 47 |
+
npm install -g "${PACKAGE_NAME}@${PREVIOUS_VERSION}"
|
| 48 |
+
fi
|
| 49 |
+
|
| 50 |
+
echo "==> Run official installer one-liner"
|
| 51 |
+
curl -fsSL "$INSTALL_URL" | bash
|
| 52 |
+
|
| 53 |
+
echo "==> Verify installed version"
|
| 54 |
+
CLI_NAME="$PACKAGE_NAME"
|
| 55 |
+
if ! command -v "$CLI_NAME" >/dev/null 2>&1; then
|
| 56 |
+
echo "ERROR: $PACKAGE_NAME is not on PATH" >&2
|
| 57 |
+
exit 1
|
| 58 |
+
fi
|
| 59 |
+
if [[ -n "${OPENCLAW_INSTALL_LATEST_OUT:-}" ]]; then
|
| 60 |
+
printf "%s" "$LATEST_VERSION" > "${OPENCLAW_INSTALL_LATEST_OUT:-}"
|
| 61 |
+
fi
|
| 62 |
+
INSTALLED_VERSION="$("$CLI_NAME" --version 2>/dev/null | head -n 1 | tr -d '\r')"
|
| 63 |
+
echo "cli=$CLI_NAME installed=$INSTALLED_VERSION expected=$LATEST_VERSION"
|
| 64 |
+
|
| 65 |
+
if [[ "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
|
| 66 |
+
echo "ERROR: expected ${CLI_NAME}@${LATEST_VERSION}, got ${CLI_NAME}@${INSTALLED_VERSION}" >&2
|
| 67 |
+
exit 1
|
| 68 |
+
fi
|
| 69 |
+
|
| 70 |
+
echo "==> Sanity: CLI runs"
|
| 71 |
+
"$CLI_NAME" --help >/dev/null
|
| 72 |
+
|
| 73 |
+
echo "OK"
|
scripts/docs-i18n/glossary.go
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"encoding/json"
|
| 5 |
+
"errors"
|
| 6 |
+
"fmt"
|
| 7 |
+
"os"
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
type GlossaryEntry struct {
|
| 11 |
+
Source string `json:"source"`
|
| 12 |
+
Target string `json:"target"`
|
| 13 |
+
}
|
| 14 |
+
|
| 15 |
+
func LoadGlossary(path string) ([]GlossaryEntry, error) {
|
| 16 |
+
data, err := os.ReadFile(path)
|
| 17 |
+
if err != nil {
|
| 18 |
+
if errors.Is(err, os.ErrNotExist) {
|
| 19 |
+
return nil, nil
|
| 20 |
+
}
|
| 21 |
+
return nil, err
|
| 22 |
+
}
|
| 23 |
+
var entries []GlossaryEntry
|
| 24 |
+
if err := json.Unmarshal(data, &entries); err != nil {
|
| 25 |
+
return nil, fmt.Errorf("glossary parse failed: %w", err)
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
return entries, nil
|
| 29 |
+
}
|
scripts/docs-i18n/go.mod
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
module github.com/openclaw/openclaw/scripts/docs-i18n
|
| 2 |
+
|
| 3 |
+
go 1.22
|
| 4 |
+
|
| 5 |
+
require (
|
| 6 |
+
github.com/joshp123/pi-golang v0.0.4
|
| 7 |
+
github.com/yuin/goldmark v1.7.8
|
| 8 |
+
golang.org/x/net v0.24.0
|
| 9 |
+
gopkg.in/yaml.v3 v3.0.1
|
| 10 |
+
)
|
scripts/docs-i18n/go.sum
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
github.com/joshp123/pi-golang v0.0.4 h1:82HISyKNN8bIl2lvAd65462LVCQIsjhaUFQxyQgg5Xk=
|
| 2 |
+
github.com/joshp123/pi-golang v0.0.4/go.mod h1:9mHEQkeJELYzubXU3b86/T8yedI/iAOKx0Tz0c41qes=
|
| 3 |
+
github.com/yuin/goldmark v1.7.8 h1:iERMLn0/QJeHFhxSt3p6PeN9mGnvIKSpG9YYorDMnic=
|
| 4 |
+
github.com/yuin/goldmark v1.7.8/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
|
| 5 |
+
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
|
| 6 |
+
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
|
| 7 |
+
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
| 8 |
+
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
| 9 |
+
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
| 10 |
+
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
scripts/docs-i18n/html_translate.go
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"io"
|
| 6 |
+
"strings"
|
| 7 |
+
|
| 8 |
+
"github.com/yuin/goldmark"
|
| 9 |
+
"github.com/yuin/goldmark/ast"
|
| 10 |
+
"github.com/yuin/goldmark/extension"
|
| 11 |
+
"github.com/yuin/goldmark/text"
|
| 12 |
+
"golang.org/x/net/html"
|
| 13 |
+
"sort"
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
type htmlReplacement struct {
|
| 17 |
+
Start int
|
| 18 |
+
Stop int
|
| 19 |
+
Value string
|
| 20 |
+
}
|
| 21 |
+
|
| 22 |
+
func translateHTMLBlocks(ctx context.Context, translator *PiTranslator, body, srcLang, tgtLang string) (string, error) {
|
| 23 |
+
source := []byte(body)
|
| 24 |
+
r := text.NewReader(source)
|
| 25 |
+
md := goldmark.New(
|
| 26 |
+
goldmark.WithExtensions(extension.GFM),
|
| 27 |
+
)
|
| 28 |
+
doc := md.Parser().Parse(r)
|
| 29 |
+
|
| 30 |
+
replacements := make([]htmlReplacement, 0, 8)
|
| 31 |
+
|
| 32 |
+
_ = ast.Walk(doc, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
|
| 33 |
+
if !entering {
|
| 34 |
+
return ast.WalkContinue, nil
|
| 35 |
+
}
|
| 36 |
+
block, ok := n.(*ast.HTMLBlock)
|
| 37 |
+
if !ok {
|
| 38 |
+
return ast.WalkContinue, nil
|
| 39 |
+
}
|
| 40 |
+
start, stop, ok := htmlBlockSpan(block, source)
|
| 41 |
+
if !ok {
|
| 42 |
+
return ast.WalkSkipChildren, nil
|
| 43 |
+
}
|
| 44 |
+
htmlText := string(source[start:stop])
|
| 45 |
+
translated, err := translateHTMLBlock(ctx, translator, htmlText, srcLang, tgtLang)
|
| 46 |
+
if err != nil {
|
| 47 |
+
return ast.WalkStop, err
|
| 48 |
+
}
|
| 49 |
+
replacements = append(replacements, htmlReplacement{Start: start, Stop: stop, Value: translated})
|
| 50 |
+
return ast.WalkSkipChildren, nil
|
| 51 |
+
})
|
| 52 |
+
|
| 53 |
+
if len(replacements) == 0 {
|
| 54 |
+
return body, nil
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
+
return applyHTMLReplacements(body, replacements), nil
|
| 58 |
+
}
|
| 59 |
+
|
| 60 |
+
func htmlBlockSpan(block *ast.HTMLBlock, source []byte) (int, int, bool) {
|
| 61 |
+
lines := block.Lines()
|
| 62 |
+
if lines.Len() == 0 {
|
| 63 |
+
return 0, 0, false
|
| 64 |
+
}
|
| 65 |
+
start := lines.At(0).Start
|
| 66 |
+
stop := lines.At(lines.Len() - 1).Stop
|
| 67 |
+
if start >= stop {
|
| 68 |
+
return 0, 0, false
|
| 69 |
+
}
|
| 70 |
+
return start, stop, true
|
| 71 |
+
}
|
| 72 |
+
|
| 73 |
+
func applyHTMLReplacements(body string, replacements []htmlReplacement) string {
|
| 74 |
+
if len(replacements) == 0 {
|
| 75 |
+
return body
|
| 76 |
+
}
|
| 77 |
+
sortHTMLReplacements(replacements)
|
| 78 |
+
var out strings.Builder
|
| 79 |
+
last := 0
|
| 80 |
+
for _, rep := range replacements {
|
| 81 |
+
if rep.Start < last {
|
| 82 |
+
continue
|
| 83 |
+
}
|
| 84 |
+
out.WriteString(body[last:rep.Start])
|
| 85 |
+
out.WriteString(rep.Value)
|
| 86 |
+
last = rep.Stop
|
| 87 |
+
}
|
| 88 |
+
out.WriteString(body[last:])
|
| 89 |
+
return out.String()
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
func sortHTMLReplacements(replacements []htmlReplacement) {
|
| 93 |
+
sort.Slice(replacements, func(i, j int) bool {
|
| 94 |
+
return replacements[i].Start < replacements[j].Start
|
| 95 |
+
})
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
func translateHTMLBlock(ctx context.Context, translator *PiTranslator, htmlText, srcLang, tgtLang string) (string, error) {
|
| 99 |
+
tokenizer := html.NewTokenizer(strings.NewReader(htmlText))
|
| 100 |
+
var out strings.Builder
|
| 101 |
+
skipDepth := 0
|
| 102 |
+
|
| 103 |
+
for {
|
| 104 |
+
tt := tokenizer.Next()
|
| 105 |
+
if tt == html.ErrorToken {
|
| 106 |
+
if err := tokenizer.Err(); err != nil && err != io.EOF {
|
| 107 |
+
return "", err
|
| 108 |
+
}
|
| 109 |
+
break
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
raw := string(tokenizer.Raw())
|
| 113 |
+
tok := tokenizer.Token()
|
| 114 |
+
|
| 115 |
+
switch tt {
|
| 116 |
+
case html.StartTagToken:
|
| 117 |
+
out.WriteString(raw)
|
| 118 |
+
if isSkipTag(strings.ToLower(tok.Data)) {
|
| 119 |
+
skipDepth++
|
| 120 |
+
}
|
| 121 |
+
case html.EndTagToken:
|
| 122 |
+
out.WriteString(raw)
|
| 123 |
+
if isSkipTag(strings.ToLower(tok.Data)) && skipDepth > 0 {
|
| 124 |
+
skipDepth--
|
| 125 |
+
}
|
| 126 |
+
case html.SelfClosingTagToken:
|
| 127 |
+
out.WriteString(raw)
|
| 128 |
+
case html.TextToken:
|
| 129 |
+
if shouldTranslateHTMLText(skipDepth, raw) {
|
| 130 |
+
translated, err := translator.Translate(ctx, raw, srcLang, tgtLang)
|
| 131 |
+
if err != nil {
|
| 132 |
+
return "", err
|
| 133 |
+
}
|
| 134 |
+
out.WriteString(translated)
|
| 135 |
+
} else {
|
| 136 |
+
out.WriteString(raw)
|
| 137 |
+
}
|
| 138 |
+
default:
|
| 139 |
+
out.WriteString(raw)
|
| 140 |
+
}
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
return out.String(), nil
|
| 144 |
+
}
|
| 145 |
+
|
| 146 |
+
func shouldTranslateHTMLText(skipDepth int, text string) bool {
|
| 147 |
+
if strings.TrimSpace(text) == "" {
|
| 148 |
+
return false
|
| 149 |
+
}
|
| 150 |
+
return skipDepth == 0
|
| 151 |
+
}
|
| 152 |
+
|
| 153 |
+
func isSkipTag(tag string) bool {
|
| 154 |
+
switch tag {
|
| 155 |
+
case "code", "pre", "script", "style":
|
| 156 |
+
return true
|
| 157 |
+
default:
|
| 158 |
+
return false
|
| 159 |
+
}
|
| 160 |
+
}
|
scripts/docs-i18n/main.go
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"flag"
|
| 6 |
+
"fmt"
|
| 7 |
+
"path/filepath"
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
func main() {
|
| 11 |
+
var (
|
| 12 |
+
targetLang = flag.String("lang", "zh-CN", "target language (e.g., zh-CN)")
|
| 13 |
+
sourceLang = flag.String("src", "en", "source language")
|
| 14 |
+
docsRoot = flag.String("docs", "docs", "docs root")
|
| 15 |
+
tmPath = flag.String("tm", "", "translation memory path")
|
| 16 |
+
)
|
| 17 |
+
flag.Parse()
|
| 18 |
+
files := flag.Args()
|
| 19 |
+
if len(files) == 0 {
|
| 20 |
+
fatal(fmt.Errorf("no doc files provided"))
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
resolvedDocsRoot, err := filepath.Abs(*docsRoot)
|
| 24 |
+
if err != nil {
|
| 25 |
+
fatal(err)
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
if *tmPath == "" {
|
| 29 |
+
*tmPath = filepath.Join(resolvedDocsRoot, ".i18n", fmt.Sprintf("%s.tm.jsonl", *targetLang))
|
| 30 |
+
}
|
| 31 |
+
|
| 32 |
+
glossaryPath := filepath.Join(resolvedDocsRoot, ".i18n", fmt.Sprintf("glossary.%s.json", *targetLang))
|
| 33 |
+
glossary, err := LoadGlossary(glossaryPath)
|
| 34 |
+
if err != nil {
|
| 35 |
+
fatal(err)
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
translator, err := NewPiTranslator(*sourceLang, *targetLang, glossary)
|
| 39 |
+
if err != nil {
|
| 40 |
+
fatal(err)
|
| 41 |
+
}
|
| 42 |
+
defer translator.Close()
|
| 43 |
+
|
| 44 |
+
tm, err := LoadTranslationMemory(*tmPath)
|
| 45 |
+
if err != nil {
|
| 46 |
+
fatal(err)
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
for _, file := range files {
|
| 50 |
+
if err := processFile(context.Background(), translator, tm, resolvedDocsRoot, file, *sourceLang, *targetLang); err != nil {
|
| 51 |
+
fatal(err)
|
| 52 |
+
}
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
if err := tm.Save(); err != nil {
|
| 56 |
+
fatal(err)
|
| 57 |
+
}
|
| 58 |
+
}
|
scripts/docs-i18n/markdown_segments.go
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"sort"
|
| 5 |
+
"strings"
|
| 6 |
+
|
| 7 |
+
"github.com/yuin/goldmark"
|
| 8 |
+
"github.com/yuin/goldmark/ast"
|
| 9 |
+
"github.com/yuin/goldmark/extension"
|
| 10 |
+
"github.com/yuin/goldmark/text"
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
func extractSegments(body, relPath string) ([]Segment, error) {
|
| 14 |
+
source := []byte(body)
|
| 15 |
+
r := text.NewReader(source)
|
| 16 |
+
md := goldmark.New(
|
| 17 |
+
goldmark.WithExtensions(extension.GFM),
|
| 18 |
+
)
|
| 19 |
+
doc := md.Parser().Parse(r)
|
| 20 |
+
|
| 21 |
+
segments := make([]Segment, 0, 128)
|
| 22 |
+
skipDepth := 0
|
| 23 |
+
var lastBlock ast.Node
|
| 24 |
+
|
| 25 |
+
err := ast.Walk(doc, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
|
| 26 |
+
switch n.(type) {
|
| 27 |
+
case *ast.CodeBlock, *ast.FencedCodeBlock, *ast.CodeSpan, *ast.HTMLBlock, *ast.RawHTML:
|
| 28 |
+
if entering {
|
| 29 |
+
skipDepth++
|
| 30 |
+
} else {
|
| 31 |
+
skipDepth--
|
| 32 |
+
}
|
| 33 |
+
return ast.WalkContinue, nil
|
| 34 |
+
}
|
| 35 |
+
|
| 36 |
+
if !entering || skipDepth > 0 {
|
| 37 |
+
return ast.WalkContinue, nil
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
textNode, ok := n.(*ast.Text)
|
| 41 |
+
if !ok {
|
| 42 |
+
return ast.WalkContinue, nil
|
| 43 |
+
}
|
| 44 |
+
block := blockParent(textNode)
|
| 45 |
+
if block == nil {
|
| 46 |
+
return ast.WalkContinue, nil
|
| 47 |
+
}
|
| 48 |
+
textValue := string(textNode.Segment.Value(source))
|
| 49 |
+
if strings.TrimSpace(textValue) == "" {
|
| 50 |
+
return ast.WalkContinue, nil
|
| 51 |
+
}
|
| 52 |
+
|
| 53 |
+
start := textNode.Segment.Start
|
| 54 |
+
stop := textNode.Segment.Stop
|
| 55 |
+
if len(segments) > 0 && lastBlock == block {
|
| 56 |
+
last := &segments[len(segments)-1]
|
| 57 |
+
gap := string(source[last.Stop:start])
|
| 58 |
+
if strings.TrimSpace(gap) == "" {
|
| 59 |
+
last.Stop = stop
|
| 60 |
+
return ast.WalkContinue, nil
|
| 61 |
+
}
|
| 62 |
+
}
|
| 63 |
+
|
| 64 |
+
segments = append(segments, Segment{Start: start, Stop: stop})
|
| 65 |
+
lastBlock = block
|
| 66 |
+
return ast.WalkContinue, nil
|
| 67 |
+
})
|
| 68 |
+
if err != nil {
|
| 69 |
+
return nil, err
|
| 70 |
+
}
|
| 71 |
+
|
| 72 |
+
filtered := make([]Segment, 0, len(segments))
|
| 73 |
+
for _, seg := range segments {
|
| 74 |
+
textValue := string(source[seg.Start:seg.Stop])
|
| 75 |
+
trimmed := strings.TrimSpace(textValue)
|
| 76 |
+
if trimmed == "" {
|
| 77 |
+
continue
|
| 78 |
+
}
|
| 79 |
+
textHash := hashText(textValue)
|
| 80 |
+
segmentID := segmentID(relPath, textHash)
|
| 81 |
+
filtered = append(filtered, Segment{
|
| 82 |
+
Start: seg.Start,
|
| 83 |
+
Stop: seg.Stop,
|
| 84 |
+
Text: textValue,
|
| 85 |
+
TextHash: textHash,
|
| 86 |
+
SegmentID: segmentID,
|
| 87 |
+
})
|
| 88 |
+
}
|
| 89 |
+
|
| 90 |
+
sort.Slice(filtered, func(i, j int) bool {
|
| 91 |
+
return filtered[i].Start < filtered[j].Start
|
| 92 |
+
})
|
| 93 |
+
|
| 94 |
+
return filtered, nil
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
func blockParent(n ast.Node) ast.Node {
|
| 98 |
+
for node := n.Parent(); node != nil; node = node.Parent() {
|
| 99 |
+
if isTranslatableBlock(node) {
|
| 100 |
+
return node
|
| 101 |
+
}
|
| 102 |
+
}
|
| 103 |
+
return nil
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
func isTranslatableBlock(n ast.Node) bool {
|
| 107 |
+
switch n.(type) {
|
| 108 |
+
case *ast.Paragraph, *ast.Heading, *ast.ListItem:
|
| 109 |
+
return true
|
| 110 |
+
default:
|
| 111 |
+
return false
|
| 112 |
+
}
|
| 113 |
+
}
|
| 114 |
+
|
| 115 |
+
func applyTranslations(body string, segments []Segment) string {
|
| 116 |
+
if len(segments) == 0 {
|
| 117 |
+
return body
|
| 118 |
+
}
|
| 119 |
+
var out strings.Builder
|
| 120 |
+
last := 0
|
| 121 |
+
for _, seg := range segments {
|
| 122 |
+
if seg.Start < last {
|
| 123 |
+
continue
|
| 124 |
+
}
|
| 125 |
+
out.WriteString(body[last:seg.Start])
|
| 126 |
+
out.WriteString(seg.Translated)
|
| 127 |
+
last = seg.Stop
|
| 128 |
+
}
|
| 129 |
+
out.WriteString(body[last:])
|
| 130 |
+
return out.String()
|
| 131 |
+
}
|
scripts/docs-i18n/masking.go
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"fmt"
|
| 5 |
+
"regexp"
|
| 6 |
+
"strings"
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
var (
|
| 10 |
+
inlineCodeRe = regexp.MustCompile("`[^`]+`")
|
| 11 |
+
angleLinkRe = regexp.MustCompile(`<https?://[^>]+>`)
|
| 12 |
+
linkURLRe = regexp.MustCompile(`\[[^\]]*\]\(([^)]+)\)`)
|
| 13 |
+
placeholderRe = regexp.MustCompile(`__OC_I18N_\d+__`)
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
func maskMarkdown(text string, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
|
| 17 |
+
masked := maskMatches(text, inlineCodeRe, nextPlaceholder, placeholders, mapping)
|
| 18 |
+
masked = maskMatches(masked, angleLinkRe, nextPlaceholder, placeholders, mapping)
|
| 19 |
+
masked = maskLinkURLs(masked, nextPlaceholder, placeholders, mapping)
|
| 20 |
+
return masked
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
func maskMatches(text string, re *regexp.Regexp, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
|
| 24 |
+
matches := re.FindAllStringIndex(text, -1)
|
| 25 |
+
if len(matches) == 0 {
|
| 26 |
+
return text
|
| 27 |
+
}
|
| 28 |
+
var out strings.Builder
|
| 29 |
+
pos := 0
|
| 30 |
+
for _, span := range matches {
|
| 31 |
+
start, end := span[0], span[1]
|
| 32 |
+
if start < pos {
|
| 33 |
+
continue
|
| 34 |
+
}
|
| 35 |
+
out.WriteString(text[pos:start])
|
| 36 |
+
placeholder := nextPlaceholder()
|
| 37 |
+
mapping[placeholder] = text[start:end]
|
| 38 |
+
*placeholders = append(*placeholders, placeholder)
|
| 39 |
+
out.WriteString(placeholder)
|
| 40 |
+
pos = end
|
| 41 |
+
}
|
| 42 |
+
out.WriteString(text[pos:])
|
| 43 |
+
return out.String()
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
func maskLinkURLs(text string, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
|
| 47 |
+
matches := linkURLRe.FindAllStringSubmatchIndex(text, -1)
|
| 48 |
+
if len(matches) == 0 {
|
| 49 |
+
return text
|
| 50 |
+
}
|
| 51 |
+
var out strings.Builder
|
| 52 |
+
pos := 0
|
| 53 |
+
for _, span := range matches {
|
| 54 |
+
fullStart := span[0]
|
| 55 |
+
urlStart, urlEnd := span[2], span[3]
|
| 56 |
+
if urlStart < 0 || urlEnd < 0 {
|
| 57 |
+
continue
|
| 58 |
+
}
|
| 59 |
+
if fullStart < pos {
|
| 60 |
+
continue
|
| 61 |
+
}
|
| 62 |
+
out.WriteString(text[pos:urlStart])
|
| 63 |
+
placeholder := nextPlaceholder()
|
| 64 |
+
mapping[placeholder] = text[urlStart:urlEnd]
|
| 65 |
+
*placeholders = append(*placeholders, placeholder)
|
| 66 |
+
out.WriteString(placeholder)
|
| 67 |
+
pos = urlEnd
|
| 68 |
+
}
|
| 69 |
+
out.WriteString(text[pos:])
|
| 70 |
+
return out.String()
|
| 71 |
+
}
|
| 72 |
+
|
| 73 |
+
func unmaskMarkdown(text string, placeholders []string, mapping map[string]string) string {
|
| 74 |
+
out := text
|
| 75 |
+
for _, placeholder := range placeholders {
|
| 76 |
+
original := mapping[placeholder]
|
| 77 |
+
out = strings.ReplaceAll(out, placeholder, original)
|
| 78 |
+
}
|
| 79 |
+
return out
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
func validatePlaceholders(text string, placeholders []string) error {
|
| 83 |
+
for _, placeholder := range placeholders {
|
| 84 |
+
if !strings.Contains(text, placeholder) {
|
| 85 |
+
return fmt.Errorf("placeholder missing: %s", placeholder)
|
| 86 |
+
}
|
| 87 |
+
}
|
| 88 |
+
return nil
|
| 89 |
+
}
|
scripts/docs-i18n/placeholders.go
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"fmt"
|
| 5 |
+
)
|
| 6 |
+
|
| 7 |
+
type PlaceholderState struct {
|
| 8 |
+
counter int
|
| 9 |
+
used map[string]struct{}
|
| 10 |
+
}
|
| 11 |
+
|
| 12 |
+
func NewPlaceholderState(text string) *PlaceholderState {
|
| 13 |
+
used := map[string]struct{}{}
|
| 14 |
+
for _, hit := range placeholderRe.FindAllString(text, -1) {
|
| 15 |
+
used[hit] = struct{}{}
|
| 16 |
+
}
|
| 17 |
+
return &PlaceholderState{counter: 900000, used: used}
|
| 18 |
+
}
|
| 19 |
+
|
| 20 |
+
func (s *PlaceholderState) Next() string {
|
| 21 |
+
for {
|
| 22 |
+
candidate := fmt.Sprintf("__OC_I18N_%d__", s.counter)
|
| 23 |
+
s.counter++
|
| 24 |
+
if _, ok := s.used[candidate]; ok {
|
| 25 |
+
continue
|
| 26 |
+
}
|
| 27 |
+
s.used[candidate] = struct{}{}
|
| 28 |
+
return candidate
|
| 29 |
+
}
|
| 30 |
+
}
|
scripts/docs-i18n/process.go
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"fmt"
|
| 6 |
+
"os"
|
| 7 |
+
"path/filepath"
|
| 8 |
+
"strings"
|
| 9 |
+
"time"
|
| 10 |
+
|
| 11 |
+
"gopkg.in/yaml.v3"
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
func processFile(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, docsRoot, filePath, srcLang, tgtLang string) error {
|
| 15 |
+
absPath, err := filepath.Abs(filePath)
|
| 16 |
+
if err != nil {
|
| 17 |
+
return err
|
| 18 |
+
}
|
| 19 |
+
relPath, err := filepath.Rel(docsRoot, absPath)
|
| 20 |
+
if err != nil {
|
| 21 |
+
return err
|
| 22 |
+
}
|
| 23 |
+
if relPath == "." || relPath == "" {
|
| 24 |
+
return fmt.Errorf("file %s resolves to docs root %s", absPath, docsRoot)
|
| 25 |
+
}
|
| 26 |
+
if filepath.IsAbs(relPath) || relPath == ".." || strings.HasPrefix(relPath, ".."+string(filepath.Separator)) {
|
| 27 |
+
return fmt.Errorf("file %s not under docs root %s", absPath, docsRoot)
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
content, err := os.ReadFile(absPath)
|
| 31 |
+
if err != nil {
|
| 32 |
+
return err
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
frontMatter, body := splitFrontMatter(string(content))
|
| 36 |
+
frontData := map[string]any{}
|
| 37 |
+
if frontMatter != "" {
|
| 38 |
+
if err := yaml.Unmarshal([]byte(frontMatter), &frontData); err != nil {
|
| 39 |
+
return fmt.Errorf("frontmatter parse failed for %s: %w", relPath, err)
|
| 40 |
+
}
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
if err := translateFrontMatter(ctx, translator, tm, frontData, relPath, srcLang, tgtLang); err != nil {
|
| 44 |
+
return err
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
body, err = translateHTMLBlocks(ctx, translator, body, srcLang, tgtLang)
|
| 48 |
+
if err != nil {
|
| 49 |
+
return err
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
segments, err := extractSegments(body, relPath)
|
| 53 |
+
if err != nil {
|
| 54 |
+
return err
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
+
namespace := cacheNamespace()
|
| 58 |
+
for i := range segments {
|
| 59 |
+
seg := &segments[i]
|
| 60 |
+
seg.CacheKey = cacheKey(namespace, srcLang, tgtLang, seg.SegmentID, seg.TextHash)
|
| 61 |
+
if entry, ok := tm.Get(seg.CacheKey); ok {
|
| 62 |
+
seg.Translated = entry.Translated
|
| 63 |
+
continue
|
| 64 |
+
}
|
| 65 |
+
translated, err := translator.Translate(ctx, seg.Text, srcLang, tgtLang)
|
| 66 |
+
if err != nil {
|
| 67 |
+
return fmt.Errorf("translate failed (%s): %w", relPath, err)
|
| 68 |
+
}
|
| 69 |
+
seg.Translated = translated
|
| 70 |
+
entry := TMEntry{
|
| 71 |
+
CacheKey: seg.CacheKey,
|
| 72 |
+
SegmentID: seg.SegmentID,
|
| 73 |
+
SourcePath: relPath,
|
| 74 |
+
TextHash: seg.TextHash,
|
| 75 |
+
Text: seg.Text,
|
| 76 |
+
Translated: translated,
|
| 77 |
+
Provider: providerName,
|
| 78 |
+
Model: modelVersion,
|
| 79 |
+
SrcLang: srcLang,
|
| 80 |
+
TgtLang: tgtLang,
|
| 81 |
+
UpdatedAt: time.Now().UTC().Format(time.RFC3339),
|
| 82 |
+
}
|
| 83 |
+
tm.Put(entry)
|
| 84 |
+
}
|
| 85 |
+
|
| 86 |
+
translatedBody := applyTranslations(body, segments)
|
| 87 |
+
updatedFront, err := encodeFrontMatter(frontData, relPath, content)
|
| 88 |
+
if err != nil {
|
| 89 |
+
return err
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
outputPath := filepath.Join(docsRoot, tgtLang, relPath)
|
| 93 |
+
if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil {
|
| 94 |
+
return err
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
output := updatedFront + translatedBody
|
| 98 |
+
return os.WriteFile(outputPath, []byte(output), 0o644)
|
| 99 |
+
}
|
| 100 |
+
|
| 101 |
+
func splitFrontMatter(content string) (string, string) {
|
| 102 |
+
if !strings.HasPrefix(content, "---") {
|
| 103 |
+
return "", content
|
| 104 |
+
}
|
| 105 |
+
lines := strings.Split(content, "\n")
|
| 106 |
+
if len(lines) < 2 {
|
| 107 |
+
return "", content
|
| 108 |
+
}
|
| 109 |
+
endIndex := -1
|
| 110 |
+
for i := 1; i < len(lines); i++ {
|
| 111 |
+
if strings.TrimSpace(lines[i]) == "---" {
|
| 112 |
+
endIndex = i
|
| 113 |
+
break
|
| 114 |
+
}
|
| 115 |
+
}
|
| 116 |
+
if endIndex == -1 {
|
| 117 |
+
return "", content
|
| 118 |
+
}
|
| 119 |
+
front := strings.Join(lines[1:endIndex], "\n")
|
| 120 |
+
body := strings.Join(lines[endIndex+1:], "\n")
|
| 121 |
+
if strings.HasPrefix(body, "\n") {
|
| 122 |
+
body = body[1:]
|
| 123 |
+
}
|
| 124 |
+
return front, body
|
| 125 |
+
}
|
| 126 |
+
|
| 127 |
+
func encodeFrontMatter(frontData map[string]any, relPath string, source []byte) (string, error) {
|
| 128 |
+
if len(frontData) == 0 {
|
| 129 |
+
return "", nil
|
| 130 |
+
}
|
| 131 |
+
frontData["x-i18n"] = map[string]any{
|
| 132 |
+
"source_path": relPath,
|
| 133 |
+
"source_hash": hashBytes(source),
|
| 134 |
+
"provider": providerName,
|
| 135 |
+
"model": modelVersion,
|
| 136 |
+
"workflow": workflowVersion,
|
| 137 |
+
"generated_at": time.Now().UTC().Format(time.RFC3339),
|
| 138 |
+
}
|
| 139 |
+
encoded, err := yaml.Marshal(frontData)
|
| 140 |
+
if err != nil {
|
| 141 |
+
return "", err
|
| 142 |
+
}
|
| 143 |
+
return fmt.Sprintf("---\n%s---\n\n", string(encoded)), nil
|
| 144 |
+
}
|
| 145 |
+
|
| 146 |
+
func translateFrontMatter(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, data map[string]any, relPath, srcLang, tgtLang string) error {
|
| 147 |
+
if len(data) == 0 {
|
| 148 |
+
return nil
|
| 149 |
+
}
|
| 150 |
+
if summary, ok := data["summary"].(string); ok {
|
| 151 |
+
translated, err := translateSnippet(ctx, translator, tm, relPath+":frontmatter:summary", summary, srcLang, tgtLang)
|
| 152 |
+
if err != nil {
|
| 153 |
+
return err
|
| 154 |
+
}
|
| 155 |
+
data["summary"] = translated
|
| 156 |
+
}
|
| 157 |
+
if readWhen, ok := data["read_when"].([]any); ok {
|
| 158 |
+
translated := make([]any, 0, len(readWhen))
|
| 159 |
+
for idx, item := range readWhen {
|
| 160 |
+
textValue, ok := item.(string)
|
| 161 |
+
if !ok {
|
| 162 |
+
translated = append(translated, item)
|
| 163 |
+
continue
|
| 164 |
+
}
|
| 165 |
+
value, err := translateSnippet(ctx, translator, tm, fmt.Sprintf("%s:frontmatter:read_when:%d", relPath, idx), textValue, srcLang, tgtLang)
|
| 166 |
+
if err != nil {
|
| 167 |
+
return err
|
| 168 |
+
}
|
| 169 |
+
translated = append(translated, value)
|
| 170 |
+
}
|
| 171 |
+
data["read_when"] = translated
|
| 172 |
+
}
|
| 173 |
+
return nil
|
| 174 |
+
}
|
| 175 |
+
|
| 176 |
+
func translateSnippet(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, segmentID, textValue, srcLang, tgtLang string) (string, error) {
|
| 177 |
+
if strings.TrimSpace(textValue) == "" {
|
| 178 |
+
return textValue, nil
|
| 179 |
+
}
|
| 180 |
+
namespace := cacheNamespace()
|
| 181 |
+
textHash := hashText(textValue)
|
| 182 |
+
ck := cacheKey(namespace, srcLang, tgtLang, segmentID, textHash)
|
| 183 |
+
if entry, ok := tm.Get(ck); ok {
|
| 184 |
+
return entry.Translated, nil
|
| 185 |
+
}
|
| 186 |
+
translated, err := translator.Translate(ctx, textValue, srcLang, tgtLang)
|
| 187 |
+
if err != nil {
|
| 188 |
+
return "", err
|
| 189 |
+
}
|
| 190 |
+
entry := TMEntry{
|
| 191 |
+
CacheKey: ck,
|
| 192 |
+
SegmentID: segmentID,
|
| 193 |
+
SourcePath: segmentID,
|
| 194 |
+
TextHash: textHash,
|
| 195 |
+
Text: textValue,
|
| 196 |
+
Translated: translated,
|
| 197 |
+
Provider: providerName,
|
| 198 |
+
Model: modelVersion,
|
| 199 |
+
SrcLang: srcLang,
|
| 200 |
+
TgtLang: tgtLang,
|
| 201 |
+
UpdatedAt: time.Now().UTC().Format(time.RFC3339),
|
| 202 |
+
}
|
| 203 |
+
tm.Put(entry)
|
| 204 |
+
return translated, nil
|
| 205 |
+
}
|
scripts/docs-i18n/segment.go
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
type Segment struct {
|
| 4 |
+
Start int
|
| 5 |
+
Stop int
|
| 6 |
+
Text string
|
| 7 |
+
TextHash string
|
| 8 |
+
SegmentID string
|
| 9 |
+
Translated string
|
| 10 |
+
CacheKey string
|
| 11 |
+
}
|
scripts/docs-i18n/tm.go
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"bufio"
|
| 5 |
+
"encoding/json"
|
| 6 |
+
"errors"
|
| 7 |
+
"fmt"
|
| 8 |
+
"io"
|
| 9 |
+
"os"
|
| 10 |
+
"path/filepath"
|
| 11 |
+
"sort"
|
| 12 |
+
"strings"
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
type TMEntry struct {
|
| 16 |
+
CacheKey string `json:"cache_key"`
|
| 17 |
+
SegmentID string `json:"segment_id"`
|
| 18 |
+
SourcePath string `json:"source_path"`
|
| 19 |
+
TextHash string `json:"text_hash"`
|
| 20 |
+
Text string `json:"text"`
|
| 21 |
+
Translated string `json:"translated"`
|
| 22 |
+
Provider string `json:"provider"`
|
| 23 |
+
Model string `json:"model"`
|
| 24 |
+
SrcLang string `json:"src_lang"`
|
| 25 |
+
TgtLang string `json:"tgt_lang"`
|
| 26 |
+
UpdatedAt string `json:"updated_at"`
|
| 27 |
+
}
|
| 28 |
+
|
| 29 |
+
type TranslationMemory struct {
|
| 30 |
+
path string
|
| 31 |
+
entries map[string]TMEntry
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
func LoadTranslationMemory(path string) (*TranslationMemory, error) {
|
| 35 |
+
tm := &TranslationMemory{path: path, entries: map[string]TMEntry{}}
|
| 36 |
+
file, err := os.Open(path)
|
| 37 |
+
if err != nil {
|
| 38 |
+
if errors.Is(err, os.ErrNotExist) {
|
| 39 |
+
return tm, nil
|
| 40 |
+
}
|
| 41 |
+
return nil, err
|
| 42 |
+
}
|
| 43 |
+
defer file.Close()
|
| 44 |
+
|
| 45 |
+
reader := bufio.NewReader(file)
|
| 46 |
+
for {
|
| 47 |
+
line, err := reader.ReadBytes('\n')
|
| 48 |
+
if len(line) > 0 {
|
| 49 |
+
trimmed := strings.TrimSpace(string(line))
|
| 50 |
+
if trimmed != "" {
|
| 51 |
+
var entry TMEntry
|
| 52 |
+
if err := json.Unmarshal([]byte(trimmed), &entry); err != nil {
|
| 53 |
+
return nil, fmt.Errorf("translation memory decode failed: %w", err)
|
| 54 |
+
}
|
| 55 |
+
if entry.CacheKey != "" {
|
| 56 |
+
tm.entries[entry.CacheKey] = entry
|
| 57 |
+
}
|
| 58 |
+
}
|
| 59 |
+
}
|
| 60 |
+
if err != nil {
|
| 61 |
+
if errors.Is(err, io.EOF) {
|
| 62 |
+
break
|
| 63 |
+
}
|
| 64 |
+
return nil, err
|
| 65 |
+
}
|
| 66 |
+
}
|
| 67 |
+
return tm, nil
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
func (tm *TranslationMemory) Get(cacheKey string) (TMEntry, bool) {
|
| 71 |
+
entry, ok := tm.entries[cacheKey]
|
| 72 |
+
return entry, ok
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
func (tm *TranslationMemory) Put(entry TMEntry) {
|
| 76 |
+
if entry.CacheKey == "" {
|
| 77 |
+
return
|
| 78 |
+
}
|
| 79 |
+
tm.entries[entry.CacheKey] = entry
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
func (tm *TranslationMemory) Save() error {
|
| 83 |
+
if tm.path == "" {
|
| 84 |
+
return nil
|
| 85 |
+
}
|
| 86 |
+
if err := os.MkdirAll(filepath.Dir(tm.path), 0o755); err != nil {
|
| 87 |
+
return err
|
| 88 |
+
}
|
| 89 |
+
tmpPath := tm.path + ".tmp"
|
| 90 |
+
file, err := os.Create(tmpPath)
|
| 91 |
+
if err != nil {
|
| 92 |
+
return err
|
| 93 |
+
}
|
| 94 |
+
|
| 95 |
+
keys := make([]string, 0, len(tm.entries))
|
| 96 |
+
for key := range tm.entries {
|
| 97 |
+
keys = append(keys, key)
|
| 98 |
+
}
|
| 99 |
+
sort.Strings(keys)
|
| 100 |
+
|
| 101 |
+
writer := bufio.NewWriter(file)
|
| 102 |
+
for _, key := range keys {
|
| 103 |
+
entry := tm.entries[key]
|
| 104 |
+
payload, err := json.Marshal(entry)
|
| 105 |
+
if err != nil {
|
| 106 |
+
_ = file.Close()
|
| 107 |
+
return err
|
| 108 |
+
}
|
| 109 |
+
if _, err := writer.Write(payload); err != nil {
|
| 110 |
+
_ = file.Close()
|
| 111 |
+
return err
|
| 112 |
+
}
|
| 113 |
+
if _, err := writer.WriteString("\n"); err != nil {
|
| 114 |
+
_ = file.Close()
|
| 115 |
+
return err
|
| 116 |
+
}
|
| 117 |
+
}
|
| 118 |
+
if err := writer.Flush(); err != nil {
|
| 119 |
+
_ = file.Close()
|
| 120 |
+
return err
|
| 121 |
+
}
|
| 122 |
+
if err := file.Close(); err != nil {
|
| 123 |
+
return err
|
| 124 |
+
}
|
| 125 |
+
return os.Rename(tmpPath, tm.path)
|
| 126 |
+
}
|
scripts/docs-i18n/translator.go
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"errors"
|
| 6 |
+
"fmt"
|
| 7 |
+
"strings"
|
| 8 |
+
|
| 9 |
+
pi "github.com/joshp123/pi-golang"
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
type PiTranslator struct {
|
| 13 |
+
client *pi.OneShotClient
|
| 14 |
+
}
|
| 15 |
+
|
| 16 |
+
func NewPiTranslator(srcLang, tgtLang string, glossary []GlossaryEntry) (*PiTranslator, error) {
|
| 17 |
+
options := pi.DefaultOneShotOptions()
|
| 18 |
+
options.AppName = "openclaw-docs-i18n"
|
| 19 |
+
options.Mode = pi.ModeDragons
|
| 20 |
+
options.Dragons = pi.DragonsOptions{
|
| 21 |
+
Provider: "anthropic",
|
| 22 |
+
Model: modelVersion,
|
| 23 |
+
Thinking: "high",
|
| 24 |
+
}
|
| 25 |
+
options.SystemPrompt = translationPrompt(srcLang, tgtLang, glossary)
|
| 26 |
+
client, err := pi.StartOneShot(options)
|
| 27 |
+
if err != nil {
|
| 28 |
+
return nil, err
|
| 29 |
+
}
|
| 30 |
+
return &PiTranslator{client: client}, nil
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
func (t *PiTranslator) Translate(ctx context.Context, text, srcLang, tgtLang string) (string, error) {
|
| 34 |
+
if t.client == nil {
|
| 35 |
+
return "", errors.New("pi client unavailable")
|
| 36 |
+
}
|
| 37 |
+
prefix, core, suffix := splitWhitespace(text)
|
| 38 |
+
if core == "" {
|
| 39 |
+
return text, nil
|
| 40 |
+
}
|
| 41 |
+
state := NewPlaceholderState(core)
|
| 42 |
+
placeholders := make([]string, 0, 8)
|
| 43 |
+
mapping := map[string]string{}
|
| 44 |
+
masked := maskMarkdown(core, state.Next, &placeholders, mapping)
|
| 45 |
+
res, err := t.client.Run(ctx, masked)
|
| 46 |
+
if err != nil {
|
| 47 |
+
return "", err
|
| 48 |
+
}
|
| 49 |
+
translated := strings.TrimSpace(res.Text)
|
| 50 |
+
if err := validatePlaceholders(translated, placeholders); err != nil {
|
| 51 |
+
return "", err
|
| 52 |
+
}
|
| 53 |
+
translated = unmaskMarkdown(translated, placeholders, mapping)
|
| 54 |
+
return prefix + translated + suffix, nil
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
+
func (t *PiTranslator) Close() {
|
| 58 |
+
if t.client != nil {
|
| 59 |
+
_ = t.client.Close()
|
| 60 |
+
}
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
func translationPrompt(srcLang, tgtLang string, glossary []GlossaryEntry) string {
|
| 64 |
+
srcLabel := srcLang
|
| 65 |
+
tgtLabel := tgtLang
|
| 66 |
+
if strings.EqualFold(srcLang, "en") {
|
| 67 |
+
srcLabel = "English"
|
| 68 |
+
}
|
| 69 |
+
if strings.EqualFold(tgtLang, "zh-CN") {
|
| 70 |
+
tgtLabel = "Simplified Chinese"
|
| 71 |
+
}
|
| 72 |
+
glossaryBlock := buildGlossaryPrompt(glossary)
|
| 73 |
+
return strings.TrimSpace(fmt.Sprintf(`You are a translation function, not a chat assistant.
|
| 74 |
+
Translate from %s to %s.
|
| 75 |
+
|
| 76 |
+
Rules:
|
| 77 |
+
- Output ONLY the translated text. No preamble, no questions, no commentary.
|
| 78 |
+
- Preserve Markdown syntax exactly (headings, lists, tables, emphasis).
|
| 79 |
+
- Do not translate code spans/blocks, config keys, CLI flags, or env vars.
|
| 80 |
+
- Do not alter URLs or anchors.
|
| 81 |
+
- Preserve placeholders exactly: __OC_I18N_####__.
|
| 82 |
+
- Use neutral technical Chinese; avoid slang or jokes.
|
| 83 |
+
- Keep product names in English: OpenClaw, Gateway, Pi, WhatsApp, Telegram, Discord, iMessage, Slack, Microsoft Teams, Google Chat, Signal.
|
| 84 |
+
|
| 85 |
+
%s
|
| 86 |
+
|
| 87 |
+
If the input is empty, output empty.
|
| 88 |
+
If the input contains only placeholders, output it unchanged.`, srcLabel, tgtLabel, glossaryBlock))
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
func buildGlossaryPrompt(glossary []GlossaryEntry) string {
|
| 92 |
+
if len(glossary) == 0 {
|
| 93 |
+
return ""
|
| 94 |
+
}
|
| 95 |
+
var lines []string
|
| 96 |
+
lines = append(lines, "Preferred translations (use when natural):")
|
| 97 |
+
for _, entry := range glossary {
|
| 98 |
+
if entry.Source == "" || entry.Target == "" {
|
| 99 |
+
continue
|
| 100 |
+
}
|
| 101 |
+
lines = append(lines, fmt.Sprintf("- %s -> %s", entry.Source, entry.Target))
|
| 102 |
+
}
|
| 103 |
+
return strings.Join(lines, "\n")
|
| 104 |
+
}
|
scripts/docs-i18n/util.go
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"crypto/sha256"
|
| 5 |
+
"encoding/hex"
|
| 6 |
+
"fmt"
|
| 7 |
+
"io"
|
| 8 |
+
"os"
|
| 9 |
+
"strings"
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
const (
|
| 13 |
+
workflowVersion = 9
|
| 14 |
+
providerName = "pi"
|
| 15 |
+
modelVersion = "claude-opus-4-5"
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
func cacheNamespace() string {
|
| 19 |
+
return fmt.Sprintf("wf=%d|provider=%s|model=%s", workflowVersion, providerName, modelVersion)
|
| 20 |
+
}
|
| 21 |
+
|
| 22 |
+
func cacheKey(namespace, srcLang, tgtLang, segmentID, textHash string) string {
|
| 23 |
+
raw := fmt.Sprintf("%s|%s|%s|%s|%s", namespace, srcLang, tgtLang, segmentID, textHash)
|
| 24 |
+
hash := sha256.Sum256([]byte(raw))
|
| 25 |
+
return hex.EncodeToString(hash[:])
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
func hashText(text string) string {
|
| 29 |
+
normalized := normalizeText(text)
|
| 30 |
+
hash := sha256.Sum256([]byte(normalized))
|
| 31 |
+
return hex.EncodeToString(hash[:])
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
func hashBytes(data []byte) string {
|
| 35 |
+
hash := sha256.Sum256(data)
|
| 36 |
+
return hex.EncodeToString(hash[:])
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
func normalizeText(text string) string {
|
| 40 |
+
return strings.Join(strings.Fields(strings.TrimSpace(text)), " ")
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
func segmentID(relPath, textHash string) string {
|
| 44 |
+
shortHash := textHash
|
| 45 |
+
if len(shortHash) > 16 {
|
| 46 |
+
shortHash = shortHash[:16]
|
| 47 |
+
}
|
| 48 |
+
return fmt.Sprintf("%s:%s", relPath, shortHash)
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
func splitWhitespace(text string) (string, string, string) {
|
| 52 |
+
if text == "" {
|
| 53 |
+
return "", "", ""
|
| 54 |
+
}
|
| 55 |
+
start := 0
|
| 56 |
+
for start < len(text) && isWhitespace(text[start]) {
|
| 57 |
+
start++
|
| 58 |
+
}
|
| 59 |
+
end := len(text)
|
| 60 |
+
for end > start && isWhitespace(text[end-1]) {
|
| 61 |
+
end--
|
| 62 |
+
}
|
| 63 |
+
return text[:start], text[start:end], text[end:]
|
| 64 |
+
}
|
| 65 |
+
|
| 66 |
+
func isWhitespace(b byte) bool {
|
| 67 |
+
switch b {
|
| 68 |
+
case ' ', '\t', '\n', '\r':
|
| 69 |
+
return true
|
| 70 |
+
default:
|
| 71 |
+
return false
|
| 72 |
+
}
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
func fatal(err error) {
|
| 76 |
+
if err == nil {
|
| 77 |
+
return
|
| 78 |
+
}
|
| 79 |
+
_, _ = io.WriteString(os.Stderr, err.Error()+"\n")
|
| 80 |
+
os.Exit(1)
|
| 81 |
+
}
|
scripts/docs-list.js
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env node
|
| 2 |
+
|
| 3 |
+
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
|
| 4 |
+
import { join, relative } from "node:path";
|
| 5 |
+
|
| 6 |
+
process.stdout.on("error", (error) => {
|
| 7 |
+
if (error?.code === "EPIPE") {
|
| 8 |
+
process.exit(0);
|
| 9 |
+
}
|
| 10 |
+
throw error;
|
| 11 |
+
});
|
| 12 |
+
|
| 13 |
+
const DOCS_DIR = join(process.cwd(), "docs");
|
| 14 |
+
if (!existsSync(DOCS_DIR)) {
|
| 15 |
+
console.error("docs:list: missing docs directory. Run from repo root.");
|
| 16 |
+
process.exit(1);
|
| 17 |
+
}
|
| 18 |
+
if (!statSync(DOCS_DIR).isDirectory()) {
|
| 19 |
+
console.error("docs:list: docs path is not a directory.");
|
| 20 |
+
process.exit(1);
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
const EXCLUDED_DIRS = new Set(["archive", "research"]);
|
| 24 |
+
|
| 25 |
+
/**
|
| 26 |
+
* @param {unknown[]} values
|
| 27 |
+
* @returns {string[]}
|
| 28 |
+
*/
|
| 29 |
+
function compactStrings(values) {
|
| 30 |
+
const result = [];
|
| 31 |
+
for (const value of values) {
|
| 32 |
+
if (value === null || value === undefined) {
|
| 33 |
+
continue;
|
| 34 |
+
}
|
| 35 |
+
const normalized =
|
| 36 |
+
typeof value === "string"
|
| 37 |
+
? value.trim()
|
| 38 |
+
: typeof value === "number" || typeof value === "boolean"
|
| 39 |
+
? String(value).trim()
|
| 40 |
+
: null;
|
| 41 |
+
|
| 42 |
+
if (normalized?.length > 0) {
|
| 43 |
+
result.push(normalized);
|
| 44 |
+
}
|
| 45 |
+
}
|
| 46 |
+
return result;
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
/**
|
| 50 |
+
* @param {string} dir
|
| 51 |
+
* @param {string} base
|
| 52 |
+
* @returns {string[]}
|
| 53 |
+
*/
|
| 54 |
+
function walkMarkdownFiles(dir, base = dir) {
|
| 55 |
+
const entries = readdirSync(dir, { withFileTypes: true });
|
| 56 |
+
const files = [];
|
| 57 |
+
for (const entry of entries) {
|
| 58 |
+
if (entry.name.startsWith(".")) {
|
| 59 |
+
continue;
|
| 60 |
+
}
|
| 61 |
+
const fullPath = join(dir, entry.name);
|
| 62 |
+
if (entry.isDirectory()) {
|
| 63 |
+
if (EXCLUDED_DIRS.has(entry.name)) {
|
| 64 |
+
continue;
|
| 65 |
+
}
|
| 66 |
+
files.push(...walkMarkdownFiles(fullPath, base));
|
| 67 |
+
} else if (entry.isFile() && entry.name.endsWith(".md")) {
|
| 68 |
+
files.push(relative(base, fullPath));
|
| 69 |
+
}
|
| 70 |
+
}
|
| 71 |
+
return files.toSorted((a, b) => a.localeCompare(b));
|
| 72 |
+
}
|
| 73 |
+
|
| 74 |
+
/**
 * Reads a markdown file and extracts `summary` and `read_when` hints from its
 * front matter block (the text between the leading `---` and the next `\n---`).
 *
 * Parsing is line-based and deliberately forgiving: the last `summary:` key
 * wins, `read_when` accepts either an inline JSON-style array (single quotes
 * tolerated) or an indented `- item` list, and malformed inline arrays are
 * ignored rather than reported.
 *
 * @param {string} fullPath - Path of the markdown file to inspect.
 * @returns {{ summary: string | null; readWhen: string[]; error?: string }}
 */
function extractMetadata(fullPath) {
  const text = readFileSync(fullPath, "utf8");

  if (!text.startsWith("---")) {
    return { summary: null, readWhen: [], error: "missing front matter" };
  }

  const closing = text.indexOf("\n---", 3);
  if (closing === -1) {
    return { summary: null, readWhen: [], error: "unterminated front matter" };
  }

  const readWhen = [];
  let summaryLine = null;
  let activeList = null;

  for (const raw of text.slice(3, closing).trim().split("\n")) {
    const line = raw.trim();

    if (line.startsWith("summary:")) {
      // Last occurrence wins; a summary key also terminates any open list.
      summaryLine = line;
      activeList = null;
    } else if (line.startsWith("read_when:")) {
      activeList = "read_when";
      const inline = line.slice("read_when:".length).trim();
      if (inline.startsWith("[") && inline.endsWith("]")) {
        try {
          // Tolerate single-quoted YAML-style arrays by coercing to JSON.
          const parsed = JSON.parse(inline.replace(/'/g, '"'));
          if (Array.isArray(parsed)) {
            readWhen.push(...compactStrings(parsed));
          }
        } catch {
          // Malformed inline arrays are silently skipped.
        }
      }
    } else if (activeList === "read_when") {
      if (line.startsWith("- ")) {
        const hint = line.slice(2).trim();
        if (hint) {
          readWhen.push(hint);
        }
      } else if (line !== "") {
        // Any other non-blank line ends the list; blank lines are tolerated.
        activeList = null;
      }
    }
  }

  if (!summaryLine) {
    return { summary: null, readWhen, error: "summary key missing" };
  }

  // Strip one pair of surrounding quotes, collapse whitespace, and trim.
  const normalized = summaryLine
    .slice("summary:".length)
    .trim()
    .replace(/^['"]|['"]$/g, "")
    .replace(/\s+/g, " ")
    .trim();

  return normalized
    ? { summary: normalized, readWhen }
    : { summary: null, readWhen, error: "summary is empty" };
}
|
| 152 |
+
|
| 153 |
+
// Emit one line per markdown doc: path plus its front-matter summary (or the
// extraction error when the summary is unavailable), with any "Read when"
// hints on an indented follow-up line.
console.log("Listing all markdown files in docs folder:");

for (const docPath of walkMarkdownFiles(DOCS_DIR)) {
  const meta = extractMetadata(join(DOCS_DIR, docPath));
  if (!meta.summary) {
    console.log(`${docPath}${meta.error ? ` - [${meta.error}]` : ""}`);
    continue;
  }
  console.log(`${docPath} - ${meta.summary}`);
  if (meta.readWhen.length > 0) {
    console.log(`  Read when: ${meta.readWhen.join("; ")}`);
  }
}

console.log(
  '\nReminder: keep docs up to date as behavior changes. When your task matches any "Read when" hint above (React hooks, cache directives, database work, tests, etc.), read that doc before coding, and suggest new coverage when it is missing.',
);
|
scripts/e2e/Dockerfile
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# E2E image: builds the full openclaw workspace from local sources.
FROM node:22-bookworm

# Use pnpm via corepack.
RUN corepack enable

WORKDIR /app

ENV NODE_OPTIONS="--disable-warning=ExperimentalWarning"

# Copy manifests and the directories the build needs; narrower COPY layers
# keep Docker's layer cache effective when only one area changes.
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml tsconfig.json vitest.config.ts vitest.e2e.config.ts ./
COPY src ./src
COPY test ./test
COPY scripts ./scripts
COPY docs ./docs
COPY skills ./skills
COPY patches ./patches
COPY ui ./ui
COPY extensions/memory-core ./extensions/memory-core

RUN pnpm install --frozen-lockfile
RUN pnpm build
RUN pnpm ui:build

# E2E scripts drive the container via `docker run ... bash -lc "..."`.
CMD ["bash"]
|
scripts/e2e/Dockerfile.qr-import
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Minimal image for the QR-import E2E: copies the whole build context and
# installs dependencies from the lockfile.
FROM node:22-bookworm

# Use pnpm via corepack.
RUN corepack enable

WORKDIR /app

# Copy the full repository context (filtered by .dockerignore, if present).
COPY . .

RUN pnpm install --frozen-lockfile
|
scripts/e2e/doctor-install-switch-docker.sh
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
# E2E: verify that `daemon install` and `doctor --repair` can switch the
# systemd unit's ExecStart entrypoint between the npm-global install and the
# git checkout, in both directions, inside a disposable Docker container.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="openclaw-doctor-install-switch-e2e"

echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"

echo "Running doctor install switch E2E..."
# Everything below runs inside the container; the single-quoted string keeps
# $-expansion for the inner bash.
docker run --rm -t "$IMAGE_NAME" bash -lc '
set -euo pipefail

# Keep logs focused; the npm global install step can emit noisy deprecation warnings.
export npm_config_loglevel=error
export npm_config_fund=false
export npm_config_audit=false

# Stub systemd/loginctl so doctor + daemon flows work in Docker.
export PATH="/tmp/openclaw-bin:$PATH"
mkdir -p /tmp/openclaw-bin

# systemctl stub: "is-enabled" succeeds only if the unit file exists; "show"
# reports an inactive unit; everything else succeeds silently.
cat > /tmp/openclaw-bin/systemctl <<"SYSTEMCTL"
#!/usr/bin/env bash
set -euo pipefail

args=("$@")
if [[ "${args[0]:-}" == "--user" ]]; then
args=("${args[@]:1}")
fi
cmd="${args[0]:-}"
case "$cmd" in
status)
exit 0
;;
is-enabled)
unit="${args[1]:-}"
unit_path="$HOME/.config/systemd/user/${unit}"
if [ -f "$unit_path" ]; then
exit 0
fi
exit 1
;;
show)
echo "ActiveState=inactive"
echo "SubState=dead"
echo "MainPID=0"
echo "ExecMainStatus=0"
echo "ExecMainCode=0"
exit 0
;;
*)
exit 0
;;
esac
SYSTEMCTL
chmod +x /tmp/openclaw-bin/systemctl

# loginctl stub: linger is always reported as enabled.
cat > /tmp/openclaw-bin/loginctl <<"LOGINCTL"
#!/usr/bin/env bash
set -euo pipefail

if [[ "$*" == *"show-user"* ]]; then
echo "Linger=yes"
exit 0
fi
if [[ "$*" == *"enable-linger"* ]]; then
exit 0
fi
exit 0
LOGINCTL
chmod +x /tmp/openclaw-bin/loginctl

# Install the npm-global variant from the local /app source.
# `npm pack` can emit script output; keep only the tarball name.
pkg_tgz="$(npm pack --silent /app | tail -n 1 | tr -d '\r')"
if [ ! -f "/app/$pkg_tgz" ]; then
echo "npm pack failed (expected /app/$pkg_tgz)"
exit 1
fi
npm install -g --prefix /tmp/npm-prefix "/app/$pkg_tgz"

# Entrypoint paths for the two install variants the flows switch between.
npm_bin="/tmp/npm-prefix/bin/openclaw"
npm_entry="/tmp/npm-prefix/lib/node_modules/openclaw/openclaw.mjs"
git_entry="/app/openclaw.mjs"

# assert_entrypoint <unit_path> <expected>: fail unless the unit ExecStart
# line exists and its second token (surrounding quotes stripped) == expected.
assert_entrypoint() {
local unit_path="$1"
local expected="$2"
local exec_line=""
exec_line=$(grep -m1 "^ExecStart=" "$unit_path" || true)
if [ -z "$exec_line" ]; then
echo "Missing ExecStart in $unit_path"
exit 1
fi
exec_line="${exec_line#ExecStart=}"
entrypoint=$(echo "$exec_line" | awk "{print \$2}")
entrypoint="${entrypoint%\"}"
entrypoint="${entrypoint#\"}"
if [ "$entrypoint" != "$expected" ]; then
echo "Expected entrypoint $expected, got $entrypoint"
exit 1
fi
}

# Each flow: install service with one variant, run doctor from the other,
# and verify ExecStart entrypoint switches accordingly.
run_flow() {
local name="$1"
local install_cmd="$2"
local install_expected="$3"
local doctor_cmd="$4"
local doctor_expected="$5"

echo "== Flow: $name =="
# Fresh HOME per flow so each starts with a clean systemd user directory.
home_dir=$(mktemp -d "/tmp/openclaw-switch-${name}.XXXXXX")
export HOME="$home_dir"
export USER="testuser"

eval "$install_cmd"

unit_path="$HOME/.config/systemd/user/openclaw-gateway.service"
if [ ! -f "$unit_path" ]; then
echo "Missing unit file: $unit_path"
exit 1
fi
assert_entrypoint "$unit_path" "$install_expected"

eval "$doctor_cmd"

assert_entrypoint "$unit_path" "$doctor_expected"
}

run_flow \
"npm-to-git" \
"$npm_bin daemon install --force" \
"$npm_entry" \
"node $git_entry doctor --repair --force" \
"$git_entry"

run_flow \
"git-to-npm" \
"node $git_entry daemon install --force" \
"$git_entry" \
"$npm_bin doctor --repair --force" \
"$npm_entry"
'
|
scripts/e2e/gateway-network-docker.sh
ADDED
|
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
# E2E: start a gateway container on a private Docker network, then run a
# client container on the same network that connects over WebSocket and
# exercises the `connect` and `health` RPCs.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="openclaw-gateway-network-e2e"

# Per-run token and names so concurrent runs cannot collide.
PORT="18789"
TOKEN="e2e-$(date +%s)-$$"
NET_NAME="openclaw-net-e2e-$$"
GW_NAME="openclaw-gateway-e2e-$$"

# Best-effort teardown of the container and network on any exit path.
cleanup() {
docker rm -f "$GW_NAME" >/dev/null 2>&1 || true
docker network rm "$NET_NAME" >/dev/null 2>&1 || true
}
trap cleanup EXIT

echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"

echo "Creating Docker network..."
docker network create "$NET_NAME" >/dev/null

echo "Starting gateway container..."
# OPENCLAW_SKIP_* disable sidecar subsystems that are irrelevant to this test.
docker run --rm -d \
--name "$GW_NAME" \
--network "$NET_NAME" \
-e "OPENCLAW_GATEWAY_TOKEN=$TOKEN" \
-e "OPENCLAW_SKIP_CHANNELS=1" \
-e "OPENCLAW_SKIP_GMAIL_WATCHER=1" \
-e "OPENCLAW_SKIP_CRON=1" \
-e "OPENCLAW_SKIP_CANVAS_HOST=1" \
"$IMAGE_NAME" \
bash -lc "node dist/index.js gateway --port $PORT --bind lan --allow-unconfigured > /tmp/gateway-net-e2e.log 2>&1"

echo "Waiting for gateway to come up..."
# Poll the gateway log (20 x 0.5s = ~10s) for the ws:// listening banner.
for _ in $(seq 1 20); do
if docker exec "$GW_NAME" bash -lc "grep -q \"listening on ws://\" /tmp/gateway-net-e2e.log"; then
break
fi
sleep 0.5
done

docker exec "$GW_NAME" bash -lc "tail -n 50 /tmp/gateway-net-e2e.log"

echo "Running client container (connect + health)..."
# NOTE(review): the heredoc below feeds ESM `import` / top-level `await` to
# `node -` (stdin), which normally requires `--input-type=module` — confirm
# this runs in the image, or that something in /app covers it.
docker run --rm \
--network "$NET_NAME" \
-e "GW_URL=ws://$GW_NAME:$PORT" \
-e "GW_TOKEN=$TOKEN" \
"$IMAGE_NAME" \
bash -lc "node - <<'NODE'
import { WebSocket } from \"ws\";
import { PROTOCOL_VERSION } from \"./dist/gateway/protocol/index.js\";

const url = process.env.GW_URL;
const token = process.env.GW_TOKEN;
if (!url || !token) throw new Error(\"missing GW_URL/GW_TOKEN\");

const ws = new WebSocket(url);
await new Promise((resolve, reject) => {
const t = setTimeout(() => reject(new Error(\"ws open timeout\")), 5000);
ws.once(\"open\", () => {
clearTimeout(t);
resolve();
});
});

function onceFrame(filter, timeoutMs = 5000) {
return new Promise((resolve, reject) => {
const t = setTimeout(() => reject(new Error(\"timeout\")), timeoutMs);
const handler = (data) => {
const obj = JSON.parse(String(data));
if (!filter(obj)) return;
clearTimeout(t);
ws.off(\"message\", handler);
resolve(obj);
};
ws.on(\"message\", handler);
});
}

ws.send(
JSON.stringify({
type: \"req\",
id: \"c1\",
method: \"connect\",
params: {
minProtocol: PROTOCOL_VERSION,
maxProtocol: PROTOCOL_VERSION,
client: {
id: \"test\",
displayName: \"docker-net-e2e\",
version: \"dev\",
platform: process.platform,
mode: \"test\",
},
caps: [],
auth: { token },
},
}),
);
const connectRes = await onceFrame((o) => o?.type === \"res\" && o?.id === \"c1\");
if (!connectRes.ok) throw new Error(\"connect failed: \" + (connectRes.error?.message ?? \"unknown\"));

ws.send(JSON.stringify({ type: \"req\", id: \"h1\", method: \"health\" }));
const healthRes = await onceFrame((o) => o?.type === \"res\" && o?.id === \"h1\", 10000);
if (!healthRes.ok) throw new Error(\"health failed: \" + (healthRes.error?.message ?? \"unknown\"));
if (healthRes.payload?.ok !== true) throw new Error(\"unexpected health payload\");

ws.close();
console.log(\"ok\");
NODE"

echo "OK"
|