Include full contents of all nested repositories

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-27 16:25:02 +01:00
parent 14ff8fd54c
commit 2401ed446f
7271 changed files with 1310112 additions and 6 deletions

View File

@@ -0,0 +1,89 @@
#!/bin/bash
# Auth Expiry Monitor
# Run via cron or systemd timer to get proactive notifications
# before Claude Code auth expires.
#
# Suggested cron: */30 * * * * /home/admin/openclaw/scripts/auth-monitor.sh
#
# Environment variables:
# NOTIFY_PHONE - Phone number to send OpenClaw notification (e.g., +1234567890)
# NOTIFY_NTFY - ntfy.sh topic for push notifications (e.g., openclaw-alerts)
# WARN_HOURS - Hours before expiry to warn (default: 2)
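#
# Example: NOTIFY_NTFY=openclaw-alerts WARN_HOURS=4 ./auth-monitor.sh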
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CLAUDE_CREDS="$HOME/.claude/.credentials.json"
STATE_FILE="$HOME/.openclaw/auth-monitor-state"
# Configuration
WARN_HOURS="${WARN_HOURS:-2}"
NOTIFY_PHONE="${NOTIFY_PHONE:-}"
NOTIFY_NTFY="${NOTIFY_NTFY:-}"
# State tracking to avoid spam
mkdir -p "$(dirname "$STATE_FILE")"
LAST_NOTIFIED=$(cat "$STATE_FILE" 2>/dev/null || echo "0")
NOW=$(date +%s)
# Only notify once per hour max
MIN_INTERVAL=3600
send_notification() {
local message="$1"
local priority="${2:-default}"
echo "$(date '+%Y-%m-%d %H:%M:%S') - $message"
# Check if we notified recently
if [ $((NOW - LAST_NOTIFIED)) -lt $MIN_INTERVAL ]; then
echo "Skipping notification (sent recently)"
return
fi
# Send via OpenClaw if phone configured and auth still valid
if [ -n "$NOTIFY_PHONE" ]; then
# Check if we can still use openclaw
if "$SCRIPT_DIR/claude-auth-status.sh" simple 2>/dev/null | grep -q "OK\|EXPIRING"; then
echo "Sending via OpenClaw to $NOTIFY_PHONE..."
openclaw send --to "$NOTIFY_PHONE" --message "$message" 2>/dev/null || true
fi
fi
# Send via ntfy.sh if configured
if [ -n "$NOTIFY_NTFY" ]; then
echo "Sending via ntfy.sh to $NOTIFY_NTFY..."
curl -s -o /dev/null \
-H "Title: OpenClaw Auth Alert" \
-H "Priority: $priority" \
-H "Tags: warning,key" \
-d "$message" \
"https://ntfy.sh/$NOTIFY_NTFY" || true
fi
# Update state
echo "$NOW" > "$STATE_FILE"
}
# Check auth status
if [ ! -f "$CLAUDE_CREDS" ]; then
send_notification "Claude Code credentials missing! Run: claude setup-token" "high"
exit 1
fi
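# expiresAt is epoch milliseconds; keep all comparisons in ms.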
EXPIRES_AT=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS")
NOW_MS=$((NOW * 1000))
DIFF_MS=$((EXPIRES_AT - NOW_MS))
HOURS_LEFT=$((DIFF_MS / 3600000))
MINS_LEFT=$(((DIFF_MS % 3600000) / 60000))
if [ "$DIFF_MS" -lt 0 ]; then
send_notification "Claude Code auth EXPIRED! OpenClaw is down. Run: ssh l36 '~/openclaw/scripts/mobile-reauth.sh'" "urgent"
exit 1
elif [ "$HOURS_LEFT" -lt "$WARN_HOURS" ]; then
send_notification "Claude Code auth expires in ${HOURS_LEFT}h ${MINS_LEFT}m. Consider re-auth soon." "high"
exit 0
else
echo "$(date '+%Y-%m-%d %H:%M:%S') - Auth OK: ${HOURS_LEFT}h ${MINS_LEFT}m remaining"
exit 0
fi

View File

@@ -0,0 +1,146 @@
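// Latency micro-benchmark: sends the same short prompt N times to a MiniMax
// model and to Claude Opus via pi-ai, then prints per-run and summary timings.
// Flags: --runs <n> (default 10), --prompt <text>.
// Requires ANTHROPIC_API_KEY and MINIMAX_API_KEY; MINIMAX_BASE_URL and
// MINIMAX_MODEL are optional overrides.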
import { completeSimple, getModel, type Model } from "@mariozechner/pi-ai";
type Usage = {
input?: number;
output?: number;
cacheRead?: number;
cacheWrite?: number;
totalTokens?: number;
};
type RunResult = {
durationMs: number;
usage?: Usage;
};
const DEFAULT_PROMPT = "Reply with a single word: ok. No punctuation or extra text.";
const DEFAULT_RUNS = 10;
function parseArg(flag: string): string | undefined {
const idx = process.argv.indexOf(flag);
if (idx === -1) {
return undefined;
}
return process.argv[idx + 1];
}
function parseRuns(raw: string | undefined): number {
if (!raw) {
return DEFAULT_RUNS;
}
const parsed = Number(raw);
if (!Number.isFinite(parsed) || parsed <= 0) {
return DEFAULT_RUNS;
}
return Math.floor(parsed);
}
function median(values: number[]): number {
if (values.length === 0) {
return 0;
}
const sorted = [...values].toSorted((a, b) => a - b);
const mid = Math.floor(sorted.length / 2);
if (sorted.length % 2 === 0) {
return Math.round((sorted[mid - 1] + sorted[mid]) / 2);
}
return sorted[mid];
}
async function runModel(opts: {
label: string;
// oxlint-disable-next-line typescript/no-explicit-any
model: Model<any>;
apiKey: string;
runs: number;
prompt: string;
}): Promise<RunResult[]> {
const results: RunResult[] = [];
for (let i = 0; i < opts.runs; i += 1) {
const started = Date.now();
const res = await completeSimple(
opts.model,
{
messages: [
{
role: "user",
content: opts.prompt,
timestamp: Date.now(),
},
],
},
{ apiKey: opts.apiKey, maxTokens: 64 },
);
const durationMs = Date.now() - started;
results.push({ durationMs, usage: res.usage });
console.log(`${opts.label} run ${i + 1}/${opts.runs}: ${durationMs}ms`);
}
return results;
}
async function main(): Promise<void> {
const runs = parseRuns(parseArg("--runs"));
const prompt = parseArg("--prompt") ?? DEFAULT_PROMPT;
const anthropicKey = process.env.ANTHROPIC_API_KEY?.trim();
const minimaxKey = process.env.MINIMAX_API_KEY?.trim();
if (!anthropicKey) {
throw new Error("Missing ANTHROPIC_API_KEY in environment.");
}
if (!minimaxKey) {
throw new Error("Missing MINIMAX_API_KEY in environment.");
}
const minimaxBaseUrl = process.env.MINIMAX_BASE_URL?.trim() || "https://api.minimax.io/v1";
const minimaxModelId = process.env.MINIMAX_MODEL?.trim() || "MiniMax-M2.1";
const minimaxModel: Model<"openai-completions"> = {
id: minimaxModelId,
name: `MiniMax ${minimaxModelId}`,
api: "openai-completions",
provider: "minimax",
baseUrl: minimaxBaseUrl,
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
};
const opusModel = getModel("anthropic", "claude-opus-4-6");
console.log(`Prompt: ${prompt}`);
console.log(`Runs: ${runs}`);
console.log("");
const minimaxResults = await runModel({
label: "minimax",
model: minimaxModel,
apiKey: minimaxKey,
runs,
prompt,
});
const opusResults = await runModel({
label: "opus",
model: opusModel,
apiKey: anthropicKey,
runs,
prompt,
});
const summarize = (label: string, results: RunResult[]) => {
const durations = results.map((r) => r.durationMs);
const med = median(durations);
const min = Math.min(...durations);
const max = Math.max(...durations);
return { label, med, min, max };
};
const summary = [summarize("minimax", minimaxResults), summarize("opus", opusResults)];
console.log("");
console.log("Summary (ms):");
for (const row of summary) {
console.log(`${row.label.padEnd(7)} median=${row.med} min=${row.min} max=${row.max}`);
}
}
await main();

View File

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -euo pipefail
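# Build and relaunch the OpenClaw macOS app in debug mode, logging to /tmp/openclaw.log.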
cd "$(dirname "$0")/../apps/macos"
BUILD_PATH=".build-local"
PRODUCT="OpenClaw"
BIN="$BUILD_PATH/debug/$PRODUCT"
printf "\n▶ Building $PRODUCT (debug, build path: $BUILD_PATH)\n"
swift build -c debug --product "$PRODUCT" --build-path "$BUILD_PATH"
printf "\n⏹ Stopping existing $PRODUCT...\n"
killall -q "$PRODUCT" 2>/dev/null || true
printf "\n🚀 Launching $BIN ...\n"
nohup "$BIN" >/tmp/openclaw.log 2>&1 &
PID=$!
printf "Started $PRODUCT (PID $PID). Logs: /tmp/openclaw.log\n"

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env node
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
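// Generates bin/docs-list: an executable wrapper that re-runs scripts/docs-list.js
// under the current Node binary and propagates its exit status.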
const root = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const binDir = path.join(root, "bin");
const binPath = path.join(binDir, "docs-list");
fs.mkdirSync(binDir, { recursive: true });
const wrapper = `#!/usr/bin/env node\nimport { spawnSync } from "node:child_process";\nimport path from "node:path";\nimport { fileURLToPath } from "node:url";\n\nconst here = path.dirname(fileURLToPath(import.meta.url));\nconst script = path.join(here, "..", "scripts", "docs-list.js");\n\nconst result = spawnSync(process.execPath, [script], { stdio: "inherit" });\nprocess.exit(result.status ?? 1);\n`;
fs.writeFileSync(binPath, wrapper, { mode: 0o755 });

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
set -euo pipefail
# Render the macOS .icon bundle to a padded .icns like Trimmy's pipeline.
# Defaults target the OpenClaw assets so you can just run the script from repo root.
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
ICON_FILE=${1:-"$ROOT_DIR/apps/macos/Icon.icon"}
BASENAME=${2:-OpenClaw}
OUT_ROOT=${3:-"$ROOT_DIR/apps/macos/build/icon"}
XCODE_APP=${XCODE_APP:-/Applications/Xcode.app}
# Where the final .icns should live; override DEST_ICNS to change.
DEST_ICNS=${DEST_ICNS:-"$ROOT_DIR/apps/macos/Sources/OpenClaw/Resources/OpenClaw.icns"}
ICTOOL="$XCODE_APP/Contents/Applications/Icon Composer.app/Contents/Executables/ictool"
if [[ ! -x "$ICTOOL" ]]; then
ICTOOL="$XCODE_APP/Contents/Applications/Icon Composer.app/Contents/Executables/icontool"
fi
if [[ ! -x "$ICTOOL" ]]; then
echo "ictool/icontool not found. Set XCODE_APP if Xcode is elsewhere." >&2
exit 1
fi
ICONSET_DIR="$OUT_ROOT/${BASENAME}.iconset"
TMP_DIR="$OUT_ROOT/tmp"
mkdir -p "$ICONSET_DIR" "$TMP_DIR"
MASTER_ART="$TMP_DIR/icon_art_824.png"
MASTER_1024="$TMP_DIR/icon_1024.png"
# Render inner art (no margin) with macOS Default appearance
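# (824 px of art inside a 1024 px canvas roughly matches Apple's macOS icon-grid margin.)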
"$ICTOOL" "$ICON_FILE" \
--export-preview macOS Default 824 824 1 -45 "$MASTER_ART"
# Pad to 1024x1024 with transparent border
sips --padToHeightWidth 1024 1024 "$MASTER_ART" --out "$MASTER_1024" >/dev/null
# Generate required sizes
sizes=(16 32 64 128 256 512 1024)
for sz in "${sizes[@]}"; do
out="$ICONSET_DIR/icon_${sz}x${sz}.png"
sips -z "$sz" "$sz" "$MASTER_1024" --out "$out" >/dev/null
if [[ "$sz" -ne 1024 ]]; then
dbl=$((sz*2))
out2="$ICONSET_DIR/icon_${sz}x${sz}@2x.png"
sips -z "$dbl" "$dbl" "$MASTER_1024" --out "$out2" >/dev/null
fi
done
# 512x512@2x already covered by 1024; ensure it exists
cp "$MASTER_1024" "$ICONSET_DIR/icon_512x512@2x.png"
iconutil -c icns "$ICONSET_DIR" -o "$OUT_ROOT/${BASENAME}.icns"
mkdir -p "$(dirname "$DEST_ICNS")"
cp "$OUT_ROOT/${BASENAME}.icns" "$DEST_ICNS"
echo "Icon.icns generated at $DEST_ICNS"

View File

@@ -0,0 +1,95 @@
#!/usr/bin/env bash
set -euo pipefail
on_error() {
echo "A2UI bundling failed. Re-run with: pnpm canvas:a2ui:bundle" >&2
echo "If this persists, verify pnpm deps and try again." >&2
}
trap on_error ERR
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
HASH_FILE="$ROOT_DIR/src/canvas-host/a2ui/.bundle.hash"
OUTPUT_FILE="$ROOT_DIR/src/canvas-host/a2ui/a2ui.bundle.js"
A2UI_RENDERER_DIR="$ROOT_DIR/vendor/a2ui/renderers/lit"
A2UI_APP_DIR="$ROOT_DIR/apps/shared/OpenClawKit/Tools/CanvasA2UI"
# Docker builds exclude vendor/apps via .dockerignore.
# In that environment, fall back to the prebuilt bundle if one exists.
if [[ ! -d "$A2UI_RENDERER_DIR" || ! -d "$A2UI_APP_DIR" ]]; then
if [[ -f "$OUTPUT_FILE" ]]; then
echo "A2UI sources missing; keeping prebuilt bundle."
exit 0
fi
echo "A2UI sources missing and no prebuilt bundle found at: $OUTPUT_FILE" >&2
exit 1
fi
INPUT_PATHS=(
"$ROOT_DIR/package.json"
"$ROOT_DIR/pnpm-lock.yaml"
"$A2UI_RENDERER_DIR"
"$A2UI_APP_DIR"
)
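# Hash the lockfile plus the A2UI renderer/app sources; if nothing has changed
# since the last successful bundle, the rebuild below is skipped.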
compute_hash() {
ROOT_DIR="$ROOT_DIR" node --input-type=module - "${INPUT_PATHS[@]}" <<'NODE'
import { createHash } from "node:crypto";
import { promises as fs } from "node:fs";
import path from "node:path";
const rootDir = process.env.ROOT_DIR ?? process.cwd();
const inputs = process.argv.slice(2);
const files = [];
async function walk(entryPath) {
const st = await fs.stat(entryPath);
if (st.isDirectory()) {
const entries = await fs.readdir(entryPath);
for (const entry of entries) {
await walk(path.join(entryPath, entry));
}
return;
}
files.push(entryPath);
}
for (const input of inputs) {
await walk(input);
}
function normalize(p) {
return p.split(path.sep).join("/");
}
files.sort((a, b) => normalize(a).localeCompare(normalize(b)));
const hash = createHash("sha256");
for (const filePath of files) {
const rel = normalize(path.relative(rootDir, filePath));
hash.update(rel);
hash.update("\0");
hash.update(await fs.readFile(filePath));
hash.update("\0");
}
process.stdout.write(hash.digest("hex"));
NODE
}
current_hash="$(compute_hash)"
if [[ -f "$HASH_FILE" ]]; then
previous_hash="$(cat "$HASH_FILE")"
if [[ "$previous_hash" == "$current_hash" && -f "$OUTPUT_FILE" ]]; then
echo "A2UI bundle up to date; skipping."
exit 0
fi
fi
pnpm -s exec tsc -p "$A2UI_RENDERER_DIR/tsconfig.json"
if command -v rolldown >/dev/null 2>&1; then
rolldown -c "$A2UI_APP_DIR/rolldown.config.mjs"
else
pnpm -s dlx rolldown -c "$A2UI_APP_DIR/rolldown.config.mjs"
fi
echo "$current_hash" > "$HASH_FILE"

View File

@@ -0,0 +1,40 @@
import fs from "node:fs/promises";
import path from "node:path";
import { fileURLToPath, pathToFileURL } from "node:url";
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
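// Copies the prebuilt A2UI canvas assets into dist/; source and output directories
// can be overridden via OPENCLAW_A2UI_SRC_DIR and OPENCLAW_A2UI_OUT_DIR.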
export function getA2uiPaths(env = process.env) {
const srcDir = env.OPENCLAW_A2UI_SRC_DIR ?? path.join(repoRoot, "src", "canvas-host", "a2ui");
const outDir = env.OPENCLAW_A2UI_OUT_DIR ?? path.join(repoRoot, "dist", "canvas-host", "a2ui");
return { srcDir, outDir };
}
export async function copyA2uiAssets({ srcDir, outDir }: { srcDir: string; outDir: string }) {
const skipMissing = process.env.OPENCLAW_A2UI_SKIP_MISSING === "1";
try {
await fs.stat(path.join(srcDir, "index.html"));
await fs.stat(path.join(srcDir, "a2ui.bundle.js"));
} catch (err) {
const message = 'Missing A2UI bundle assets. Run "pnpm canvas:a2ui:bundle" and retry.';
if (skipMissing) {
console.warn(`${message} Skipping copy (OPENCLAW_A2UI_SKIP_MISSING=1).`);
return;
}
throw new Error(message, { cause: err });
}
await fs.mkdir(path.dirname(outDir), { recursive: true });
await fs.cp(srcDir, outDir, { recursive: true });
}
async function main() {
const { srcDir, outDir } = getA2uiPaths();
await copyA2uiAssets({ srcDir, outDir });
}
if (import.meta.url === pathToFileURL(process.argv[1] ?? "").href) {
main().catch((err) => {
console.error(String(err));
process.exit(1);
});
}

View File

@@ -0,0 +1,91 @@
#!/usr/bin/env bash
set -euo pipefail
VERSION=${1:-}
CHANGELOG_FILE=${2:-}
if [[ -z "$VERSION" ]]; then
echo "Usage: $0 <version> [changelog_file]" >&2
exit 1
fi
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
if [[ -z "$CHANGELOG_FILE" ]]; then
if [[ -f "$SCRIPT_DIR/../CHANGELOG.md" ]]; then
CHANGELOG_FILE="$SCRIPT_DIR/../CHANGELOG.md"
elif [[ -f "CHANGELOG.md" ]]; then
CHANGELOG_FILE="CHANGELOG.md"
elif [[ -f "../CHANGELOG.md" ]]; then
CHANGELOG_FILE="../CHANGELOG.md"
else
echo "Error: Could not find CHANGELOG.md" >&2
exit 1
fi
fi
if [[ ! -f "$CHANGELOG_FILE" ]]; then
echo "Error: Changelog file '$CHANGELOG_FILE' not found" >&2
exit 1
fi
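# Print the body of the "## <version>" changelog section, stopping at the next "## " heading.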
extract_version_section() {
local version=$1
local file=$2
awk -v version="$version" '
BEGIN { found=0 }
/^## / {
if ($0 ~ "^##[[:space:]]+" version "([[:space:]].*|$)") { found=1; next }
if (found) { exit }
}
found { print }
' "$file"
}
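# Line-oriented Markdown-to-HTML conversion covering headings, list items,
# bold, inline code, and links.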
markdown_to_html() {
local text=$1
text=$(echo "$text" | sed 's/^##### \(.*\)$/<h5>\1<\/h5>/')
text=$(echo "$text" | sed 's/^#### \(.*\)$/<h4>\1<\/h4>/')
text=$(echo "$text" | sed 's/^### \(.*\)$/<h3>\1<\/h3>/')
text=$(echo "$text" | sed 's/^## \(.*\)$/<h2>\1<\/h2>/')
text=$(echo "$text" | sed 's/^- \*\*\([^*]*\)\*\*\(.*\)$/<li><strong>\1<\/strong>\2<\/li>/')
text=$(echo "$text" | sed 's/^- \([^*].*\)$/<li>\1<\/li>/')
text=$(echo "$text" | sed 's/\*\*\([^*]*\)\*\*/<strong>\1<\/strong>/g')
text=$(echo "$text" | sed 's/`\([^`]*\)`/<code>\1<\/code>/g')
text=$(echo "$text" | sed 's/\[\([^]]*\)\](\([^)]*\))/<a href="\2">\1<\/a>/g')
echo "$text"
}
version_content=$(extract_version_section "$VERSION" "$CHANGELOG_FILE")
if [[ -z "$version_content" ]]; then
echo "<h2>OpenClaw $VERSION</h2>"
echo "<p>Latest OpenClaw update.</p>"
echo "<p><a href=\"https://github.com/openclaw/openclaw/blob/main/CHANGELOG.md\">View full changelog</a></p>"
exit 0
fi
echo "<h2>OpenClaw $VERSION</h2>"
in_list=false
while IFS= read -r line; do
if [[ "$line" =~ ^- ]]; then
if [[ "$in_list" == false ]]; then
echo "<ul>"
in_list=true
fi
markdown_to_html "$line"
else
if [[ "$in_list" == true ]]; then
echo "</ul>"
in_list=false
fi
if [[ -n "$line" ]]; then
markdown_to_html "$line"
fi
fi
done <<< "$version_content"
if [[ "$in_list" == true ]]; then
echo "</ul>"
fi
echo "<p><a href=\"https://github.com/openclaw/openclaw/blob/main/CHANGELOG.md\">View full changelog</a></p>"

View File

@@ -0,0 +1,405 @@
#!/usr/bin/env node
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import ts from "typescript";
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
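// Boundary guard: keeps the channel-agnostic sources listed below free of
// channel-specific imports/re-exports, channels.<id> config paths, channel-id
// comparisons and assignments, user-facing channel names, and hardcoded
// system-mark literals.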
const acpCoreProtectedSources = [
path.join(repoRoot, "src", "acp"),
path.join(repoRoot, "src", "agents", "acp-spawn.ts"),
path.join(repoRoot, "src", "auto-reply", "reply", "commands-acp"),
path.join(repoRoot, "src", "infra", "outbound", "conversation-id.ts"),
];
const channelCoreProtectedSources = [
path.join(repoRoot, "src", "channels", "thread-bindings-policy.ts"),
path.join(repoRoot, "src", "channels", "thread-bindings-messages.ts"),
];
const acpUserFacingTextSources = [
path.join(repoRoot, "src", "auto-reply", "reply", "commands-acp"),
];
const systemMarkLiteralGuardSources = [
path.join(repoRoot, "src", "auto-reply", "reply", "commands-acp"),
path.join(repoRoot, "src", "auto-reply", "reply", "dispatch-acp.ts"),
path.join(repoRoot, "src", "auto-reply", "reply", "directive-handling.shared.ts"),
path.join(repoRoot, "src", "channels", "thread-bindings-messages.ts"),
];
const channelIds = [
"bluebubbles",
"discord",
"googlechat",
"imessage",
"irc",
"line",
"matrix",
"msteams",
"signal",
"slack",
"telegram",
"web",
"whatsapp",
"zalo",
"zalouser",
];
const channelIdSet = new Set(channelIds);
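// Matches a channel id appearing as a standalone path/module segment
// (delimited by ".", "_", "/", or "-").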
const channelSegmentRe = new RegExp(`(^|[._/-])(?:${channelIds.join("|")})([._/-]|$)`);
const comparisonOperators = new Set([
ts.SyntaxKind.EqualsEqualsEqualsToken,
ts.SyntaxKind.ExclamationEqualsEqualsToken,
ts.SyntaxKind.EqualsEqualsToken,
ts.SyntaxKind.ExclamationEqualsToken,
]);
const allowedViolations = new Set([]);
function isTestLikeFile(filePath) {
return (
filePath.endsWith(".test.ts") ||
filePath.endsWith(".test-utils.ts") ||
filePath.endsWith(".test-harness.ts") ||
filePath.endsWith(".e2e-harness.ts")
);
}
async function collectTypeScriptFiles(targetPath) {
const stat = await fs.stat(targetPath);
if (stat.isFile()) {
if (!targetPath.endsWith(".ts") || isTestLikeFile(targetPath)) {
return [];
}
return [targetPath];
}
const entries = await fs.readdir(targetPath, { withFileTypes: true });
const files = [];
for (const entry of entries) {
const entryPath = path.join(targetPath, entry.name);
if (entry.isDirectory()) {
files.push(...(await collectTypeScriptFiles(entryPath)));
continue;
}
if (!entry.isFile()) {
continue;
}
if (!entryPath.endsWith(".ts")) {
continue;
}
if (isTestLikeFile(entryPath)) {
continue;
}
files.push(entryPath);
}
return files;
}
function toLine(sourceFile, node) {
return sourceFile.getLineAndCharacterOfPosition(node.getStart(sourceFile)).line + 1;
}
function isChannelsPropertyAccess(node) {
if (ts.isPropertyAccessExpression(node)) {
return node.name.text === "channels";
}
if (ts.isElementAccessExpression(node) && ts.isStringLiteral(node.argumentExpression)) {
return node.argumentExpression.text === "channels";
}
return false;
}
function readStringLiteral(node) {
if (ts.isStringLiteral(node)) {
return node.text;
}
if (ts.isNoSubstitutionTemplateLiteral(node)) {
return node.text;
}
return null;
}
function isChannelLiteralNode(node) {
const text = readStringLiteral(node);
return text ? channelIdSet.has(text) : false;
}
function matchesChannelModuleSpecifier(specifier) {
return channelSegmentRe.test(specifier.replaceAll("\\", "/"));
}
function getPropertyNameText(name) {
if (ts.isIdentifier(name) || ts.isStringLiteral(name) || ts.isNumericLiteral(name)) {
return name.text;
}
return null;
}
const userFacingChannelNameRe =
/\b(?:discord|telegram|slack|signal|imessage|whatsapp|google\s*chat|irc|line|zalo|matrix|msteams|bluebubbles)\b/i;
const systemMarkLiteral = "⚙️";
function isModuleSpecifierStringNode(node) {
const parent = node.parent;
if (ts.isImportDeclaration(parent) || ts.isExportDeclaration(parent)) {
return true;
}
return (
ts.isCallExpression(parent) &&
parent.expression.kind === ts.SyntaxKind.ImportKeyword &&
parent.arguments[0] === node
);
}
export function findChannelAgnosticBoundaryViolations(
content,
fileName = "source.ts",
options = {},
) {
const checkModuleSpecifiers = options.checkModuleSpecifiers ?? true;
const checkConfigPaths = options.checkConfigPaths ?? true;
const checkChannelComparisons = options.checkChannelComparisons ?? true;
const checkChannelAssignments = options.checkChannelAssignments ?? true;
const moduleSpecifierMatcher = options.moduleSpecifierMatcher ?? matchesChannelModuleSpecifier;
const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
const violations = [];
const visit = (node) => {
if (
checkModuleSpecifiers &&
ts.isImportDeclaration(node) &&
ts.isStringLiteral(node.moduleSpecifier)
) {
const specifier = node.moduleSpecifier.text;
if (moduleSpecifierMatcher(specifier)) {
violations.push({
line: toLine(sourceFile, node.moduleSpecifier),
reason: `imports channel module "${specifier}"`,
});
}
}
if (
checkModuleSpecifiers &&
ts.isExportDeclaration(node) &&
node.moduleSpecifier &&
ts.isStringLiteral(node.moduleSpecifier)
) {
const specifier = node.moduleSpecifier.text;
if (moduleSpecifierMatcher(specifier)) {
violations.push({
line: toLine(sourceFile, node.moduleSpecifier),
reason: `re-exports channel module "${specifier}"`,
});
}
}
if (
checkModuleSpecifiers &&
ts.isCallExpression(node) &&
node.expression.kind === ts.SyntaxKind.ImportKeyword &&
node.arguments.length > 0 &&
ts.isStringLiteral(node.arguments[0])
) {
const specifier = node.arguments[0].text;
if (moduleSpecifierMatcher(specifier)) {
violations.push({
line: toLine(sourceFile, node.arguments[0]),
reason: `dynamically imports channel module "${specifier}"`,
});
}
}
if (
checkConfigPaths &&
ts.isPropertyAccessExpression(node) &&
channelIdSet.has(node.name.text)
) {
if (isChannelsPropertyAccess(node.expression)) {
violations.push({
line: toLine(sourceFile, node.name),
reason: `references config path "channels.${node.name.text}"`,
});
}
}
if (
checkConfigPaths &&
ts.isElementAccessExpression(node) &&
ts.isStringLiteral(node.argumentExpression) &&
channelIdSet.has(node.argumentExpression.text)
) {
if (isChannelsPropertyAccess(node.expression)) {
violations.push({
line: toLine(sourceFile, node.argumentExpression),
reason: `references config path "channels[${JSON.stringify(node.argumentExpression.text)}]"`,
});
}
}
if (
checkChannelComparisons &&
ts.isBinaryExpression(node) &&
comparisonOperators.has(node.operatorToken.kind)
) {
if (isChannelLiteralNode(node.left) || isChannelLiteralNode(node.right)) {
const leftText = node.left.getText(sourceFile);
const rightText = node.right.getText(sourceFile);
violations.push({
line: toLine(sourceFile, node.operatorToken),
reason: `compares with channel id literal (${leftText} ${node.operatorToken.getText(sourceFile)} ${rightText})`,
});
}
}
if (checkChannelAssignments && ts.isPropertyAssignment(node)) {
const propName = getPropertyNameText(node.name);
if (propName === "channel" && isChannelLiteralNode(node.initializer)) {
violations.push({
line: toLine(sourceFile, node.initializer),
reason: `assigns channel id literal to "channel" (${node.initializer.getText(sourceFile)})`,
});
}
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return violations;
}
export function findChannelCoreReverseDependencyViolations(content, fileName = "source.ts") {
return findChannelAgnosticBoundaryViolations(content, fileName, {
checkModuleSpecifiers: true,
checkConfigPaths: false,
checkChannelComparisons: false,
checkChannelAssignments: false,
moduleSpecifierMatcher: matchesChannelModuleSpecifier,
});
}
export function findAcpUserFacingChannelNameViolations(content, fileName = "source.ts") {
const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
const violations = [];
const visit = (node) => {
const text = readStringLiteral(node);
if (text && userFacingChannelNameRe.test(text) && !isModuleSpecifierStringNode(node)) {
violations.push({
line: toLine(sourceFile, node),
reason: `user-facing text references channel name (${JSON.stringify(text)})`,
});
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return violations;
}
export function findSystemMarkLiteralViolations(content, fileName = "source.ts") {
const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
const violations = [];
const visit = (node) => {
const text = readStringLiteral(node);
if (text && text.includes(systemMarkLiteral) && !isModuleSpecifierStringNode(node)) {
violations.push({
line: toLine(sourceFile, node),
reason: `hardcoded system mark literal (${JSON.stringify(text)})`,
});
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return violations;
}
const boundaryRuleSets = [
{
id: "acp-core",
sources: acpCoreProtectedSources,
scan: (content, fileName) => findChannelAgnosticBoundaryViolations(content, fileName),
},
{
id: "channel-core-reverse-deps",
sources: channelCoreProtectedSources,
scan: (content, fileName) => findChannelCoreReverseDependencyViolations(content, fileName),
},
{
id: "acp-user-facing-text",
sources: acpUserFacingTextSources,
scan: (content, fileName) => findAcpUserFacingChannelNameViolations(content, fileName),
},
{
id: "system-mark-literal-usage",
sources: systemMarkLiteralGuardSources,
scan: (content, fileName) => findSystemMarkLiteralViolations(content, fileName),
},
];
export async function main() {
const violations = [];
for (const ruleSet of boundaryRuleSets) {
const files = (
await Promise.all(
ruleSet.sources.map(async (sourcePath) => {
try {
return await collectTypeScriptFiles(sourcePath);
} catch (error) {
if (error && typeof error === "object" && "code" in error && error.code === "ENOENT") {
return [];
}
throw error;
}
}),
)
).flat();
for (const filePath of files) {
const relativeFile = path.relative(repoRoot, filePath);
if (
allowedViolations.has(`${ruleSet.id}:${relativeFile}`) ||
allowedViolations.has(relativeFile)
) {
continue;
}
const content = await fs.readFile(filePath, "utf8");
for (const violation of ruleSet.scan(content, relativeFile)) {
violations.push(`${ruleSet.id} ${relativeFile}:${violation.line}: ${violation.reason}`);
}
}
}
if (violations.length === 0) {
return;
}
console.error("Found channel-specific references in channel-agnostic sources:");
for (const violation of violations) {
console.error(`- ${violation}`);
}
console.error(
"Move channel-specific logic to channel adapters or add a justified allowlist entry.",
);
process.exit(1);
}
const isDirectExecution = (() => {
const entry = process.argv[1];
if (!entry) {
return false;
}
return path.resolve(entry) === fileURLToPath(import.meta.url);
})();
if (isDirectExecution) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}

View File

@@ -0,0 +1,81 @@
#!/usr/bin/env python3
from __future__ import annotations
import pathlib
import re
import sys
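# Flag direct ${{ inputs.* }} interpolation inside the run: blocks of composite
# GitHub Actions; inputs should be passed via env: and referenced as shell
# variables so untrusted values are never templated into the script itself.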
INPUT_INTERPOLATION_RE = re.compile(r"\$\{\{\s*inputs\.")
RUN_LINE_RE = re.compile(r"^(\s*)run:\s*(.*)$")
USING_COMPOSITE_RE = re.compile(r"^\s*using:\s*composite\s*$", re.MULTILINE)
def indentation(line: str) -> int:
return len(line) - len(line.lstrip(" "))
def scan_file(path: pathlib.Path) -> list[tuple[int, str]]:
text = path.read_text(encoding="utf-8")
if not USING_COMPOSITE_RE.search(text):
return []
lines = text.splitlines()
violations: list[tuple[int, str]] = []
line_count = len(lines)
index = 0
while index < line_count:
line = lines[index]
match = RUN_LINE_RE.match(line)
if not match:
index += 1
continue
run_indent = len(match.group(1))
run_value = match.group(2).strip()
line_no = index + 1
if run_value and run_value[0] not in ("|", ">"):
if INPUT_INTERPOLATION_RE.search(run_value):
violations.append((line_no, line.strip()))
index += 1
continue
index += 1
while index < line_count:
script_line = lines[index]
if script_line.strip() == "":
index += 1
continue
if indentation(script_line) <= run_indent:
break
if INPUT_INTERPOLATION_RE.search(script_line):
violations.append((index + 1, script_line.strip()))
index += 1
return violations
def main() -> int:
root = pathlib.Path(".github/actions")
files = sorted(root.rglob("action.y*ml"))
all_violations: list[tuple[pathlib.Path, int, str]] = []
for file_path in files:
for line_no, line in scan_file(file_path):
all_violations.append((file_path, line_no, line))
if all_violations:
print("Disallowed direct inputs interpolation in composite run blocks:")
for file_path, line_no, line in all_violations:
print(f"- {file_path}:{line_no}: {line}")
print("Use env: and reference shell variables instead.")
return 1
print("No direct inputs interpolation found in composite run blocks.")
return 0
if __name__ == "__main__":
sys.exit(main())

View File

@@ -0,0 +1,239 @@
#!/usr/bin/env node
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import ts from "typescript";
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const sourceRoots = [path.join(repoRoot, "src"), path.join(repoRoot, "extensions")];
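// Guard: group-scoped auth must not be composed directly from pairing-store
// allowFrom entries; compose via the shared resolvers in allowedResolverCallNames.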
const allowedFiles = new Set([
path.join(repoRoot, "src", "security", "dm-policy-shared.ts"),
path.join(repoRoot, "src", "channels", "allow-from.ts"),
// Config migration/audit logic may intentionally reference store + group fields.
path.join(repoRoot, "src", "security", "fix.ts"),
path.join(repoRoot, "src", "security", "audit-channel.ts"),
]);
const storeIdentifierRe = /^(?:storeAllowFrom|storedAllowFrom|storeAllowList)$/i;
const groupNameRe =
/(?:groupAllowFrom|effectiveGroupAllowFrom|groupAllowed|groupAllow|groupAuth|groupSender)/i;
const storeSourceCallNames = new Set([
"readChannelAllowFromStore",
"readChannelAllowFromStoreSync",
"readStoreAllowFromForDmPolicy",
]);
const allowedResolverCallNames = new Set([
"resolveEffectiveAllowFromLists",
"resolveDmGroupAccessWithLists",
"resolveMattermostEffectiveAllowFromLists",
"resolveIrcEffectiveAllowlists",
]);
function isTestLikeFile(filePath) {
return (
filePath.endsWith(".test.ts") ||
filePath.endsWith(".test-utils.ts") ||
filePath.endsWith(".test-harness.ts") ||
filePath.endsWith(".e2e-harness.ts")
);
}
async function collectTypeScriptFiles(dir) {
const entries = await fs.readdir(dir, { withFileTypes: true });
const out = [];
for (const entry of entries) {
const entryPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
out.push(...(await collectTypeScriptFiles(entryPath)));
continue;
}
if (!entry.isFile() || !entryPath.endsWith(".ts") || isTestLikeFile(entryPath)) {
continue;
}
out.push(entryPath);
}
return out;
}
function toLine(sourceFile, node) {
return sourceFile.getLineAndCharacterOfPosition(node.getStart(sourceFile)).line + 1;
}
function getPropertyNameText(name) {
if (ts.isIdentifier(name) || ts.isStringLiteral(name) || ts.isNumericLiteral(name)) {
return name.text;
}
return null;
}
function getDeclarationNameText(name) {
if (ts.isIdentifier(name)) {
return name.text;
}
if (ts.isObjectBindingPattern(name) || ts.isArrayBindingPattern(name)) {
return name.getText();
}
return null;
}
function containsPairingStoreSource(node) {
let found = false;
const visit = (current) => {
if (found) {
return;
}
if (ts.isIdentifier(current) && storeIdentifierRe.test(current.text)) {
found = true;
return;
}
if (ts.isCallExpression(current)) {
const callName = getCallName(current);
if (callName && storeSourceCallNames.has(callName)) {
found = true;
return;
}
}
ts.forEachChild(current, visit);
};
visit(node);
return found;
}
function getCallName(node) {
if (!ts.isCallExpression(node)) {
return null;
}
if (ts.isIdentifier(node.expression)) {
return node.expression.text;
}
if (ts.isPropertyAccessExpression(node.expression)) {
return node.expression.name.text;
}
return null;
}
function isSuspiciousNormalizeWithStoreCall(node) {
if (!ts.isCallExpression(node)) {
return false;
}
if (!ts.isIdentifier(node.expression) || node.expression.text !== "normalizeAllowFromWithStore") {
return false;
}
const firstArg = node.arguments[0];
if (!firstArg || !ts.isObjectLiteralExpression(firstArg)) {
return false;
}
let hasStoreProp = false;
let hasGroupAllowProp = false;
for (const property of firstArg.properties) {
if (!ts.isPropertyAssignment(property)) {
continue;
}
const name = getPropertyNameText(property.name);
if (!name) {
continue;
}
if (name === "storeAllowFrom" && containsPairingStoreSource(property.initializer)) {
hasStoreProp = true;
}
if (name === "allowFrom" && groupNameRe.test(property.initializer.getText())) {
hasGroupAllowProp = true;
}
}
return hasStoreProp && hasGroupAllowProp;
}
function findViolations(content, filePath) {
const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true);
const violations = [];
const visit = (node) => {
if (ts.isVariableDeclaration(node) && node.initializer) {
const name = getDeclarationNameText(node.name);
if (name && groupNameRe.test(name) && containsPairingStoreSource(node.initializer)) {
const callName = getCallName(node.initializer);
if (callName && allowedResolverCallNames.has(callName)) {
ts.forEachChild(node, visit);
return;
}
violations.push({
line: toLine(sourceFile, node),
reason: `group-scoped variable "${name}" references pairing-store identifiers`,
});
}
}
if (ts.isPropertyAssignment(node)) {
const propName = getPropertyNameText(node.name);
if (propName && groupNameRe.test(propName) && containsPairingStoreSource(node.initializer)) {
violations.push({
line: toLine(sourceFile, node),
reason: `group-scoped property "${propName}" references pairing-store identifiers`,
});
}
}
if (isSuspiciousNormalizeWithStoreCall(node)) {
violations.push({
line: toLine(sourceFile, node),
reason: "group allowlist uses normalizeAllowFromWithStore(...) with pairing-store entries",
});
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return violations;
}
async function main() {
const files = (
await Promise.all(sourceRoots.map(async (root) => await collectTypeScriptFiles(root)))
).flat();
const violations = [];
for (const filePath of files) {
if (allowedFiles.has(filePath)) {
continue;
}
const content = await fs.readFile(filePath, "utf8");
const fileViolations = findViolations(content, filePath);
for (const violation of fileViolations) {
violations.push({
path: path.relative(repoRoot, filePath),
...violation,
});
}
}
if (violations.length === 0) {
return;
}
console.error("Found pairing-store identifiers referenced in group auth composition:");
for (const violation of violations) {
console.error(`- ${violation.path}:${violation.line} (${violation.reason})`);
}
console.error(
"Group auth must be composed via shared resolvers (resolveDmGroupAccessWithLists / resolveEffectiveAllowFromLists).",
);
process.exit(1);
}
const isDirectExecution = (() => {
const entry = process.argv[1];
if (!entry) {
return false;
}
return path.resolve(entry) === fileURLToPath(import.meta.url);
})();
if (isDirectExecution) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}

View File

@@ -0,0 +1,174 @@
#!/usr/bin/env node
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import ts from "typescript";
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const sourceRoots = [
path.join(repoRoot, "src", "channels"),
path.join(repoRoot, "src", "infra", "outbound"),
path.join(repoRoot, "src", "line"),
path.join(repoRoot, "src", "media-understanding"),
path.join(repoRoot, "extensions"),
];
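// Guard: messaging/channel runtime code must not use the host default tmpdir;
// use resolvePreferredOpenClawTmpDir() or plugin-sdk temp helpers instead.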
const allowedCallsites = new Set([path.join(repoRoot, "extensions", "feishu", "src", "dedup.ts")]);
function isTestLikeFile(filePath) {
return (
filePath.endsWith(".test.ts") ||
filePath.endsWith(".test-utils.ts") ||
filePath.endsWith(".test-harness.ts") ||
filePath.endsWith(".e2e-harness.ts")
);
}
async function collectTypeScriptFiles(dir) {
const entries = await fs.readdir(dir, { withFileTypes: true });
const out = [];
for (const entry of entries) {
const entryPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
out.push(...(await collectTypeScriptFiles(entryPath)));
continue;
}
if (!entry.isFile()) {
continue;
}
if (!entryPath.endsWith(".ts")) {
continue;
}
if (isTestLikeFile(entryPath)) {
continue;
}
out.push(entryPath);
}
return out;
}
function collectOsTmpdirImports(sourceFile) {
const osModuleSpecifiers = new Set(["node:os", "os"]);
const osNamespaceOrDefault = new Set();
const namedTmpdir = new Set();
for (const statement of sourceFile.statements) {
if (!ts.isImportDeclaration(statement)) {
continue;
}
if (!statement.importClause || !ts.isStringLiteral(statement.moduleSpecifier)) {
continue;
}
if (!osModuleSpecifiers.has(statement.moduleSpecifier.text)) {
continue;
}
const clause = statement.importClause;
if (clause.name) {
osNamespaceOrDefault.add(clause.name.text);
}
if (!clause.namedBindings) {
continue;
}
if (ts.isNamespaceImport(clause.namedBindings)) {
osNamespaceOrDefault.add(clause.namedBindings.name.text);
continue;
}
for (const element of clause.namedBindings.elements) {
if ((element.propertyName?.text ?? element.name.text) === "tmpdir") {
namedTmpdir.add(element.name.text);
}
}
}
return { osNamespaceOrDefault, namedTmpdir };
}
function unwrapExpression(expression) {
let current = expression;
while (true) {
if (ts.isParenthesizedExpression(current)) {
current = current.expression;
continue;
}
if (ts.isAsExpression(current) || ts.isTypeAssertionExpression(current)) {
current = current.expression;
continue;
}
if (ts.isNonNullExpression(current)) {
current = current.expression;
continue;
}
return current;
}
}
export function findMessagingTmpdirCallLines(content, fileName = "source.ts") {
const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
const { osNamespaceOrDefault, namedTmpdir } = collectOsTmpdirImports(sourceFile);
const lines = [];
const visit = (node) => {
if (ts.isCallExpression(node)) {
const callee = unwrapExpression(node.expression);
if (
ts.isPropertyAccessExpression(callee) &&
callee.name.text === "tmpdir" &&
ts.isIdentifier(callee.expression) &&
osNamespaceOrDefault.has(callee.expression.text)
) {
const line = sourceFile.getLineAndCharacterOfPosition(callee.getStart(sourceFile)).line + 1;
lines.push(line);
} else if (ts.isIdentifier(callee) && namedTmpdir.has(callee.text)) {
const line = sourceFile.getLineAndCharacterOfPosition(callee.getStart(sourceFile)).line + 1;
lines.push(line);
}
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return lines;
}
export async function main() {
const files = (
await Promise.all(sourceRoots.map(async (dir) => await collectTypeScriptFiles(dir)))
).flat();
const violations = [];
for (const filePath of files) {
if (allowedCallsites.has(filePath)) {
continue;
}
const content = await fs.readFile(filePath, "utf8");
for (const line of findMessagingTmpdirCallLines(content, filePath)) {
violations.push(`${path.relative(repoRoot, filePath)}:${line}`);
}
}
if (violations.length === 0) {
return;
}
console.error("Found os.tmpdir()/tmpdir() usage in messaging/channel runtime sources:");
for (const violation of violations) {
console.error(`- ${violation}`);
}
console.error(
"Use resolvePreferredOpenClawTmpDir() or plugin-sdk temp helpers instead of host tmp defaults.",
);
process.exit(1);
}
const isDirectExecution = (() => {
const entry = process.argv[1];
if (!entry) {
return false;
}
return path.resolve(entry) === fileURLToPath(import.meta.url);
})();
if (isDirectExecution) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}

View File

@@ -0,0 +1,214 @@
#!/usr/bin/env node
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import ts from "typescript";
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const sourceRoots = [
path.join(repoRoot, "src", "telegram"),
path.join(repoRoot, "src", "discord"),
path.join(repoRoot, "src", "slack"),
path.join(repoRoot, "src", "signal"),
path.join(repoRoot, "src", "imessage"),
path.join(repoRoot, "src", "web"),
path.join(repoRoot, "src", "channels"),
path.join(repoRoot, "src", "routing"),
path.join(repoRoot, "src", "line"),
path.join(repoRoot, "extensions"),
];
// Temporary allowlist for legacy callsites. New raw fetch callsites in channel/plugin runtime
// code should be rejected and migrated to fetchWithSsrFGuard/shared channel helpers.
const allowedRawFetchCallsites = new Set([
"extensions/bluebubbles/src/types.ts:131",
"extensions/feishu/src/streaming-card.ts:31",
"extensions/feishu/src/streaming-card.ts:100",
"extensions/feishu/src/streaming-card.ts:141",
"extensions/feishu/src/streaming-card.ts:197",
"extensions/google-gemini-cli-auth/oauth.ts:372",
"extensions/google-gemini-cli-auth/oauth.ts:408",
"extensions/google-gemini-cli-auth/oauth.ts:447",
"extensions/google-gemini-cli-auth/oauth.ts:507",
"extensions/google-gemini-cli-auth/oauth.ts:575",
"extensions/googlechat/src/api.ts:22",
"extensions/googlechat/src/api.ts:43",
"extensions/googlechat/src/api.ts:63",
"extensions/googlechat/src/api.ts:184",
"extensions/googlechat/src/auth.ts:82",
"extensions/matrix/src/directory-live.ts:41",
"extensions/matrix/src/matrix/client/config.ts:171",
"extensions/mattermost/src/mattermost/client.ts:211",
"extensions/mattermost/src/mattermost/monitor.ts:230",
"extensions/mattermost/src/mattermost/probe.ts:27",
"extensions/minimax-portal-auth/oauth.ts:71",
"extensions/minimax-portal-auth/oauth.ts:112",
"extensions/msteams/src/graph.ts:39",
"extensions/nextcloud-talk/src/room-info.ts:92",
"extensions/nextcloud-talk/src/send.ts:107",
"extensions/nextcloud-talk/src/send.ts:198",
"extensions/qwen-portal-auth/oauth.ts:46",
"extensions/qwen-portal-auth/oauth.ts:80",
"extensions/talk-voice/index.ts:27",
"extensions/thread-ownership/index.ts:105",
"extensions/voice-call/src/providers/plivo.ts:95",
"extensions/voice-call/src/providers/telnyx.ts:61",
"extensions/voice-call/src/providers/tts-openai.ts:111",
"extensions/voice-call/src/providers/twilio/api.ts:23",
"src/channels/telegram/api.ts:8",
"src/discord/send.outbound.ts:347",
"src/discord/voice-message.ts:267",
"src/slack/monitor/media.ts:64",
"src/slack/monitor/media.ts:68",
"src/slack/monitor/media.ts:82",
"src/slack/monitor/media.ts:108",
]);
function isTestLikeFile(filePath) {
return (
filePath.endsWith(".test.ts") ||
filePath.endsWith(".test-utils.ts") ||
filePath.endsWith(".test-harness.ts") ||
filePath.endsWith(".e2e-harness.ts") ||
filePath.endsWith(".browser.test.ts") ||
filePath.endsWith(".node.test.ts")
);
}
async function collectTypeScriptFiles(targetPath) {
const stat = await fs.stat(targetPath);
if (stat.isFile()) {
if (!targetPath.endsWith(".ts") || isTestLikeFile(targetPath)) {
return [];
}
return [targetPath];
}
const entries = await fs.readdir(targetPath, { withFileTypes: true });
const files = [];
for (const entry of entries) {
const entryPath = path.join(targetPath, entry.name);
if (entry.isDirectory()) {
if (entry.name === "node_modules") {
continue;
}
files.push(...(await collectTypeScriptFiles(entryPath)));
continue;
}
if (!entry.isFile()) {
continue;
}
if (!entryPath.endsWith(".ts")) {
continue;
}
if (isTestLikeFile(entryPath)) {
continue;
}
files.push(entryPath);
}
return files;
}
function unwrapExpression(expression) {
let current = expression;
while (true) {
if (ts.isParenthesizedExpression(current)) {
current = current.expression;
continue;
}
if (ts.isAsExpression(current) || ts.isTypeAssertionExpression(current)) {
current = current.expression;
continue;
}
if (ts.isNonNullExpression(current)) {
current = current.expression;
continue;
}
return current;
}
}
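// A "raw" fetch is a bare fetch(...) or globalThis.fetch(...) call, after
// unwrapping parentheses, casts, and non-null assertions.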
function isRawFetchCall(expression) {
const callee = unwrapExpression(expression);
if (ts.isIdentifier(callee)) {
return callee.text === "fetch";
}
if (ts.isPropertyAccessExpression(callee)) {
return (
ts.isIdentifier(callee.expression) &&
callee.expression.text === "globalThis" &&
callee.name.text === "fetch"
);
}
return false;
}
export function findRawFetchCallLines(content, fileName = "source.ts") {
const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
const lines = [];
const visit = (node) => {
if (ts.isCallExpression(node) && isRawFetchCall(node.expression)) {
const line =
sourceFile.getLineAndCharacterOfPosition(node.expression.getStart(sourceFile)).line + 1;
lines.push(line);
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return lines;
}
export async function main() {
const files = (
await Promise.all(
sourceRoots.map(async (sourceRoot) => {
try {
return await collectTypeScriptFiles(sourceRoot);
} catch {
return [];
}
}),
)
).flat();
const violations = [];
for (const filePath of files) {
const content = await fs.readFile(filePath, "utf8");
const relPath = path.relative(repoRoot, filePath).replaceAll(path.sep, "/");
for (const line of findRawFetchCallLines(content, filePath)) {
const callsite = `${relPath}:${line}`;
if (allowedRawFetchCallsites.has(callsite)) {
continue;
}
violations.push(callsite);
}
}
if (violations.length === 0) {
return;
}
console.error("Found raw fetch() usage in channel/plugin runtime sources outside allowlist:");
for (const violation of violations.toSorted()) {
console.error(`- ${violation}`);
}
console.error(
"Use fetchWithSsrFGuard() or existing channel/plugin SDK wrappers for network calls.",
);
process.exit(1);
}
const isDirectExecution = (() => {
const entry = process.argv[1];
if (!entry) {
return false;
}
return path.resolve(entry) === fileURLToPath(import.meta.url);
})();
if (isDirectExecution) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}

View File

@@ -0,0 +1,142 @@
#!/usr/bin/env node
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import ts from "typescript";
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const uiSourceDir = path.join(repoRoot, "ui", "src", "ui");
const allowedCallsites = new Set([path.join(uiSourceDir, "open-external-url.ts")]);
function isTestFile(filePath) {
return (
filePath.endsWith(".test.ts") ||
filePath.endsWith(".browser.test.ts") ||
filePath.endsWith(".node.test.ts")
);
}
async function collectTypeScriptFiles(dir) {
const entries = await fs.readdir(dir, { withFileTypes: true });
const out = [];
for (const entry of entries) {
const entryPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
out.push(...(await collectTypeScriptFiles(entryPath)));
continue;
}
if (!entry.isFile()) {
continue;
}
if (!entryPath.endsWith(".ts")) {
continue;
}
if (isTestFile(entryPath)) {
continue;
}
out.push(entryPath);
}
return out;
}
function unwrapExpression(expression) {
let current = expression;
while (true) {
if (ts.isParenthesizedExpression(current)) {
current = current.expression;
continue;
}
if (ts.isAsExpression(current) || ts.isTypeAssertionExpression(current)) {
current = current.expression;
continue;
}
if (ts.isNonNullExpression(current)) {
current = current.expression;
continue;
}
return current;
}
}
function asPropertyAccess(expression) {
if (ts.isPropertyAccessExpression(expression)) {
return expression;
}
if (typeof ts.isPropertyAccessChain === "function" && ts.isPropertyAccessChain(expression)) {
return expression;
}
return null;
}
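// Matches window.open(...) and globalThis.open(...), including optional-chained
// receivers such as window?.open.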
function isRawWindowOpenCall(expression) {
const propertyAccess = asPropertyAccess(unwrapExpression(expression));
if (!propertyAccess || propertyAccess.name.text !== "open") {
return false;
}
const receiver = unwrapExpression(propertyAccess.expression);
return (
ts.isIdentifier(receiver) && (receiver.text === "window" || receiver.text === "globalThis")
);
}
export function findRawWindowOpenLines(content, fileName = "source.ts") {
const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
const lines = [];
const visit = (node) => {
if (ts.isCallExpression(node) && isRawWindowOpenCall(node.expression)) {
const line =
sourceFile.getLineAndCharacterOfPosition(node.expression.getStart(sourceFile)).line + 1;
lines.push(line);
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return lines;
}
export async function main() {
const files = await collectTypeScriptFiles(uiSourceDir);
const violations = [];
for (const filePath of files) {
if (allowedCallsites.has(filePath)) {
continue;
}
const content = await fs.readFile(filePath, "utf8");
for (const line of findRawWindowOpenLines(content, filePath)) {
const relPath = path.relative(repoRoot, filePath);
violations.push(`${relPath}:${line}`);
}
}
if (violations.length === 0) {
return;
}
console.error("Found raw window.open usage outside safe helper:");
for (const violation of violations) {
console.error(`- ${violation}`);
}
console.error("Use openExternalUrlSafe(...) from ui/src/ui/open-external-url.ts instead.");
process.exit(1);
}
const isDirectExecution = (() => {
const entry = process.argv[1];
if (!entry) {
return false;
}
return path.resolve(entry) === fileURLToPath(import.meta.url);
})();
if (isDirectExecution) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}

View File

@@ -0,0 +1,157 @@
#!/usr/bin/env node
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import ts from "typescript";
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const sourceRoots = [path.join(repoRoot, "src"), path.join(repoRoot, "extensions")];
function isTestLikeFile(filePath) {
return (
filePath.endsWith(".test.ts") ||
filePath.endsWith(".test-utils.ts") ||
filePath.endsWith(".test-harness.ts") ||
filePath.endsWith(".e2e-harness.ts")
);
}
async function collectTypeScriptFiles(dir) {
const entries = await fs.readdir(dir, { withFileTypes: true });
const out = [];
for (const entry of entries) {
const entryPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
out.push(...(await collectTypeScriptFiles(entryPath)));
continue;
}
if (!entry.isFile() || !entryPath.endsWith(".ts") || isTestLikeFile(entryPath)) {
continue;
}
out.push(entryPath);
}
return out;
}
function toLine(sourceFile, node) {
return sourceFile.getLineAndCharacterOfPosition(node.getStart(sourceFile)).line + 1;
}
function getPropertyNameText(name) {
if (ts.isIdentifier(name) || ts.isStringLiteral(name) || ts.isNumericLiteral(name)) {
return name.text;
}
return null;
}
function isUndefinedLikeExpression(node) {
if (ts.isIdentifier(node) && node.text === "undefined") {
return true;
}
return node.kind === ts.SyntaxKind.NullKeyword;
}
function hasRequiredAccountIdProperty(node) {
if (!ts.isObjectLiteralExpression(node)) {
return false;
}
for (const property of node.properties) {
if (ts.isShorthandPropertyAssignment(property) && property.name.text === "accountId") {
return true;
}
if (!ts.isPropertyAssignment(property)) {
continue;
}
if (getPropertyNameText(property.name) !== "accountId") {
continue;
}
if (isUndefinedLikeExpression(property.initializer)) {
return false;
}
return true;
}
return false;
}
function findViolations(content, filePath) {
const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true);
const violations = [];
const visit = (node) => {
if (ts.isCallExpression(node) && ts.isIdentifier(node.expression)) {
const callName = node.expression.text;
if (callName === "readChannelAllowFromStore") {
if (node.arguments.length < 3 || isUndefinedLikeExpression(node.arguments[2])) {
violations.push({
line: toLine(sourceFile, node),
reason: "readChannelAllowFromStore call must pass explicit accountId as 3rd arg",
});
}
} else if (
callName === "readLegacyChannelAllowFromStore" ||
callName === "readLegacyChannelAllowFromStoreSync"
) {
violations.push({
line: toLine(sourceFile, node),
reason: `${callName} is legacy-only; use account-scoped readChannelAllowFromStore* APIs`,
});
} else if (callName === "upsertChannelPairingRequest") {
const firstArg = node.arguments[0];
if (!firstArg || !hasRequiredAccountIdProperty(firstArg)) {
violations.push({
line: toLine(sourceFile, node),
reason: "upsertChannelPairingRequest call must include accountId in params",
});
}
}
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
return violations;
}
async function main() {
const files = (
await Promise.all(sourceRoots.map(async (root) => await collectTypeScriptFiles(root)))
).flat();
const violations = [];
for (const filePath of files) {
const content = await fs.readFile(filePath, "utf8");
const fileViolations = findViolations(content, filePath);
for (const violation of fileViolations) {
violations.push({
path: path.relative(repoRoot, filePath),
...violation,
});
}
}
if (violations.length === 0) {
return;
}
console.error("Found unscoped pairing-store calls:");
for (const violation of violations) {
console.error(`- ${violation.path}:${violation.line} (${violation.reason})`);
}
process.exit(1);
}
const isDirectExecution = (() => {
const entry = process.argv[1];
if (!entry) {
return false;
}
return path.resolve(entry) === fileURLToPath(import.meta.url);
})();
if (isDirectExecution) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}

View File

@@ -0,0 +1,80 @@
import { execFileSync } from "node:child_process";
import { existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
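// Fails (exit code 1) when any tracked or untracked .ts/.tsx file exceeds
// --max lines (default 500), printing "<lines>\t<path>" per offender, largest first.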
type ParsedArgs = {
maxLines: number;
};
function parseArgs(argv: string[]): ParsedArgs {
let maxLines = 500;
for (let index = 0; index < argv.length; index++) {
const arg = argv[index];
if (arg === "--max") {
const next = argv[index + 1];
if (!next || Number.isNaN(Number(next))) {
throw new Error("Missing/invalid --max value");
}
maxLines = Number(next);
index++;
continue;
}
}
return { maxLines };
}
function gitLsFilesAll(): string[] {
// Include untracked files too so local refactors don't "pass" by accident.
const stdout = execFileSync("git", ["ls-files", "--cached", "--others", "--exclude-standard"], {
encoding: "utf8",
});
return stdout
.split("\n")
.map((line) => line.trim())
.filter(Boolean);
}
async function countLines(filePath: string): Promise<number> {
const content = await readFile(filePath, "utf8");
// Count physical lines. Keeps the rule simple + predictable.
return content.split("\n").length;
}
async function main() {
// Makes `... | head` safe.
process.stdout.on("error", (error: NodeJS.ErrnoException) => {
if (error.code === "EPIPE") {
process.exit(0);
}
throw error;
});
const { maxLines } = parseArgs(process.argv.slice(2));
const files = gitLsFilesAll()
.filter((filePath) => existsSync(filePath))
.filter((filePath) => filePath.endsWith(".ts") || filePath.endsWith(".tsx"));
const results = await Promise.all(
files.map(async (filePath) => ({ filePath, lines: await countLines(filePath) })),
);
const offenders = results
.filter((result) => result.lines > maxLines)
.toSorted((a, b) => b.lines - a.lines);
if (!offenders.length) {
return;
}
// Minimal, grep-friendly output.
for (const offender of offenders) {
// eslint-disable-next-line no-console
console.log(`${offender.lines}\t${offender.filePath}`);
}
process.exitCode = 1;
}
await main();

View File

@@ -0,0 +1,280 @@
#!/bin/bash
# Claude Code Authentication Status Checker
# Checks both Claude Code and OpenClaw auth status
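# Usage: claude-auth-status.sh [full|json|simple]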
set -euo pipefail
CLAUDE_CREDS="$HOME/.claude/.credentials.json"
OPENCLAW_AUTH="$HOME/.openclaw/agents/main/agent/auth-profiles.json"
# Colors for terminal output
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color
# Output mode: "full" (default), "json", or "simple"
OUTPUT_MODE="${1:-full}"
fetch_models_status_json() {
openclaw models status --json 2>/dev/null || true
}
STATUS_JSON="$(fetch_models_status_json)"
USE_JSON=0
if [ -n "$STATUS_JSON" ]; then
USE_JSON=1
fi
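# Status strings: MISSING, EXPIRED, EXPIRING:<m>m, OK:<h>h<m>m
# Return codes: 0 = OK, 1 = missing/expired, 2 = expiring soon.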
calc_status_from_expires() {
local expires_at="$1"
if ! [[ "$expires_at" =~ ^-?[0-9]+$ ]]; then
expires_at=0
fi
local now_ms=$(( $(date +%s) * 1000 ))
local diff_ms=$((expires_at - now_ms))
local hours=$((diff_ms / 3600000))
local mins=$(((diff_ms % 3600000) / 60000))
if [ "$expires_at" -le 0 ]; then
echo "MISSING"
return 1
elif [ "$diff_ms" -lt 0 ]; then
echo "EXPIRED"
return 1
elif [ "$diff_ms" -lt 3600000 ]; then
echo "EXPIRING:${mins}m"
return 2
else
echo "OK:${hours}h${mins}m"
return 0
fi
}
json_expires_for_claude_cli() {
echo "$STATUS_JSON" | jq -r '
[.auth.oauth.profiles[]
| select(.provider == "anthropic" and (.type == "oauth" or .type == "token"))
| .expiresAt // 0]
| max // 0
' 2>/dev/null || echo "0"
}
json_expires_for_anthropic_any() {
echo "$STATUS_JSON" | jq -r '
[.auth.oauth.profiles[]
| select(.provider == "anthropic" and .type == "oauth")
| .expiresAt // 0]
| max // 0
' 2>/dev/null || echo "0"
}
json_best_anthropic_profile() {
echo "$STATUS_JSON" | jq -r '
[.auth.oauth.profiles[]
| select(.provider == "anthropic" and .type == "oauth")
| {id: .profileId, exp: (.expiresAt // 0)}]
| sort_by(.exp) | reverse | .[0].id // "none"
' 2>/dev/null || echo "none"
}
json_anthropic_api_key_count() {
echo "$STATUS_JSON" | jq -r '
[.auth.providers[] | select(.provider == "anthropic") | .profiles.apiKey]
| max // 0
' 2>/dev/null || echo "0"
}
check_claude_code_auth() {
if [ "$USE_JSON" -eq 1 ]; then
local expires_at
expires_at=$(json_expires_for_claude_cli)
calc_status_from_expires "$expires_at"
return $?
fi
if [ ! -f "$CLAUDE_CREDS" ]; then
echo "MISSING"
return 1
fi
local expires_at
expires_at=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
calc_status_from_expires "$expires_at"
}
check_openclaw_auth() {
if [ "$USE_JSON" -eq 1 ]; then
local api_keys
api_keys=$(json_anthropic_api_key_count)
if ! [[ "$api_keys" =~ ^[0-9]+$ ]]; then
api_keys=0
fi
local expires_at
expires_at=$(json_expires_for_anthropic_any)
if [ "$expires_at" -le 0 ] && [ "$api_keys" -gt 0 ]; then
echo "OK:static"
return 0
fi
calc_status_from_expires "$expires_at"
return $?
fi
if [ ! -f "$OPENCLAW_AUTH" ]; then
echo "MISSING"
return 1
fi
local expires
expires=$(jq -r '
[.profiles | to_entries[] | select(.value.provider == "anthropic") | .value.expires]
| max // 0
' "$OPENCLAW_AUTH" 2>/dev/null || echo "0")
calc_status_from_expires "$expires"
}
# JSON output mode
if [ "$OUTPUT_MODE" = "json" ]; then
claude_status=$(check_claude_code_auth 2>/dev/null || true)
openclaw_status=$(check_openclaw_auth 2>/dev/null || true)
claude_expires=0
openclaw_expires=0
if [ "$USE_JSON" -eq 1 ]; then
claude_expires=$(json_expires_for_claude_cli)
openclaw_expires=$(json_expires_for_anthropic_any)
else
claude_expires=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
openclaw_expires=$(jq -r '.profiles["anthropic:default"].expires // 0' "$OPENCLAW_AUTH" 2>/dev/null || echo "0")
fi
jq -n \
--arg cs "$claude_status" \
--arg ce "$claude_expires" \
--arg bs "$openclaw_status" \
--arg be "$openclaw_expires" \
'{
claude_code: {status: $cs, expires_at_ms: ($ce | tonumber)},
openclaw: {status: $bs, expires_at_ms: ($be | tonumber)},
needs_reauth: (($cs | startswith("EXPIRED") or startswith("EXPIRING") or startswith("MISSING")) or ($bs | startswith("EXPIRED") or startswith("EXPIRING") or startswith("MISSING")))
}'
exit 0
fi
# Simple output mode (for scripts/widgets)
if [ "$OUTPUT_MODE" = "simple" ]; then
claude_status=$(check_claude_code_auth 2>/dev/null || true)
openclaw_status=$(check_openclaw_auth 2>/dev/null || true)
if [[ "$claude_status" == EXPIRED* ]] || [[ "$claude_status" == MISSING* ]]; then
echo "CLAUDE_EXPIRED"
exit 1
elif [[ "$openclaw_status" == EXPIRED* ]] || [[ "$openclaw_status" == MISSING* ]]; then
echo "OPENCLAW_EXPIRED"
exit 1
elif [[ "$claude_status" == EXPIRING* ]]; then
echo "CLAUDE_EXPIRING"
exit 2
elif [[ "$openclaw_status" == EXPIRING* ]]; then
echo "OPENCLAW_EXPIRING"
exit 2
else
echo "OK"
exit 0
fi
fi
# Full output mode (default)
echo "=== Claude Code Auth Status ==="
echo ""
# Claude Code credentials
echo "Claude Code (~/.claude/.credentials.json):"
if [ "$USE_JSON" -eq 1 ]; then
expires_at=$(json_expires_for_claude_cli)
else
expires_at=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
fi
if [ -f "$CLAUDE_CREDS" ]; then
sub_type=$(jq -r '.claudeAiOauth.subscriptionType // "unknown"' "$CLAUDE_CREDS" 2>/dev/null || echo "unknown")
rate_tier=$(jq -r '.claudeAiOauth.rateLimitTier // "unknown"' "$CLAUDE_CREDS" 2>/dev/null || echo "unknown")
echo " Subscription: $sub_type"
echo " Rate tier: $rate_tier"
fi
if [ "$expires_at" -le 0 ]; then
echo -e " Status: ${RED}NOT FOUND${NC}"
echo " Action needed: Run 'claude setup-token'"
else
now_ms=$(( $(date +%s) * 1000 ))
diff_ms=$((expires_at - now_ms))
hours=$((diff_ms / 3600000))
mins=$(((diff_ms % 3600000) / 60000))
if [ "$diff_ms" -lt 0 ]; then
echo -e " Status: ${RED}EXPIRED${NC}"
echo " Action needed: Run 'claude setup-token' or re-authenticate"
elif [ "$diff_ms" -lt 3600000 ]; then
echo -e " Status: ${YELLOW}EXPIRING SOON (${mins}m remaining)${NC}"
echo " Consider running: claude setup-token"
else
echo -e " Status: ${GREEN}OK${NC}"
echo " Expires: $(date -d @$((expires_at/1000))) (${hours}h ${mins}m)"
fi
fi
echo ""
echo "OpenClaw Auth (~/.openclaw/agents/main/agent/auth-profiles.json):"
if [ "$USE_JSON" -eq 1 ]; then
best_profile=$(json_best_anthropic_profile)
expires=$(json_expires_for_anthropic_any)
api_keys=$(json_anthropic_api_key_count)
else
best_profile=$(jq -r '
.profiles | to_entries
| map(select(.value.provider == "anthropic"))
| sort_by(.value.expires) | reverse
| .[0].key // "none"
' "$OPENCLAW_AUTH" 2>/dev/null || echo "none")
expires=$(jq -r '
[.profiles | to_entries[] | select(.value.provider == "anthropic") | .value.expires]
| max // 0
' "$OPENCLAW_AUTH" 2>/dev/null || echo "0")
api_keys=0
fi
echo " Profile: $best_profile"
if [ "$expires" -le 0 ] && [ "$api_keys" -gt 0 ]; then
echo -e " Status: ${GREEN}OK${NC} (API key)"
elif [ "$expires" -le 0 ]; then
echo -e " Status: ${RED}NOT FOUND${NC}"
echo " Note: Run 'openclaw doctor --yes' to sync from Claude Code"
else
now_ms=$(( $(date +%s) * 1000 ))
diff_ms=$((expires - now_ms))
hours=$((diff_ms / 3600000))
mins=$(((diff_ms % 3600000) / 60000))
if [ "$diff_ms" -lt 0 ]; then
echo -e " Status: ${RED}EXPIRED${NC}"
echo " Note: Run 'openclaw doctor --yes' to sync from Claude Code"
elif [ "$diff_ms" -lt 3600000 ]; then
echo -e " Status: ${YELLOW}EXPIRING SOON (${mins}m remaining)${NC}"
else
echo -e " Status: ${GREEN}OK${NC}"
echo " Expires: $(date -d @$((expires/1000))) (${hours}h ${mins}m)"
fi
fi
echo ""
echo "=== Service Status ==="
if systemctl --user is-active openclaw >/dev/null 2>&1; then
echo -e "OpenClaw service: ${GREEN}running${NC}"
else
echo -e "OpenClaw service: ${RED}NOT running${NC}"
fi

309
openclaw/scripts/clawlog.sh Normal file
View File

@@ -0,0 +1,309 @@
#!/bin/bash
# OpenClaw Logging Utility (clawlog)
# Simplifies access to OpenClaw logs via the macOS unified logging system
set -euo pipefail
# Configuration
SUBSYSTEM="ai.openclaw"
DEFAULT_LEVEL="info"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Function to handle sudo password errors
handle_sudo_error() {
echo -e "\n${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${YELLOW}⚠️ Password Required for Log Access${NC}"
echo -e "${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}\n"
echo -e "clawlog needs to use sudo to show complete log data (Apple hides sensitive info by default)."
echo -e "\nTo avoid password prompts, configure passwordless sudo for the log command:"
echo -e "See: ${BLUE}apple/docs/logging-private-fix.md${NC}\n"
echo -e "Quick fix:"
echo -e " 1. Run: ${GREEN}sudo visudo${NC}"
echo -e " 2. Add: ${GREEN}$(whoami) ALL=(ALL) NOPASSWD: /usr/bin/log${NC}"
echo -e " 3. Save and exit (:wq)\n"
echo -e "${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}\n"
exit 1
}
# Default values
STREAM_MODE=false
TIME_RANGE="5m" # Default to last 5 minutes
CATEGORY=""
LOG_LEVEL="$DEFAULT_LEVEL"
SEARCH_TEXT=""
OUTPUT_FILE=""
ERRORS_ONLY=false
SERVER_ONLY=false
TAIL_LINES=50 # Default number of lines to show
SHOW_TAIL=true
SHOW_HELP=false
# Function to show usage
show_usage() {
cat << EOF
clawlog - OpenClaw Logging Utility
USAGE:
clawlog [OPTIONS]
DESCRIPTION:
View OpenClaw logs with full details (bypasses Apple's privacy redaction).
Requires sudo access configured for /usr/bin/log command.
LOG FLOW ARCHITECTURE:
OpenClaw logs flow through the macOS unified log (subsystem: ai.openclaw).
LOG CATEGORIES (examples):
• voicewake - Voice wake detection/test harness
• gateway - Gateway process manager
• xpc - XPC service calls
• notifications - Notification helper
• screenshot - Screenshotter
• shell - ShellExecutor
QUICK START:
clawlog -n 100 Show last 100 lines from all components
clawlog -f Follow logs in real-time
clawlog -e Show only errors
clawlog -c ServerManager Show logs from ServerManager only
OPTIONS:
-h, --help Show this help message
-f, --follow Stream logs continuously (like tail -f)
-n, --lines NUM Number of lines to show (default: 50)
-l, --last TIME Time range to search (default: 5m)
Examples: 5m, 1h, 2d, 1w
-c, --category CAT Filter by category (e.g., ServerManager, SessionService)
-e, --errors Show only error messages
-d, --debug Show debug level logs (more verbose)
-s, --search TEXT Search for specific text in log messages
-o, --output FILE Export logs to file
--server Show only server output logs
--all Show all logs without tail limit
--list-categories List all available log categories
--json Output in JSON format
EXAMPLES:
clawlog Show last 50 lines from past 5 minutes (default)
clawlog -f Stream logs continuously
clawlog -n 100 Show last 100 lines
clawlog -e Show only recent errors
clawlog -l 30m -n 200 Show last 200 lines from past 30 minutes
clawlog -c ServerManager Show recent ServerManager logs
clawlog -s "fail" Search for "fail" in recent logs
clawlog --server -e Show recent server errors
clawlog -f -d Stream debug logs continuously
CATEGORIES:
Common categories include:
- ServerManager - Server lifecycle and configuration
- SessionService - Terminal session management
- TerminalManager - Terminal spawning and control
- GitRepository - Git integration features
- ScreencapService - Screen capture functionality
- WebRTCManager - WebRTC connections
- UnixSocket - Unix socket communication
- WindowTracker - Window tracking and focus
- NgrokService - Ngrok tunnel management
- ServerOutput - Node.js server output
TIME FORMATS:
- 5m = 5 minutes
- 1h = 1 hour
- 2d = 2 days
- 1w = 1 week
EOF
}
# Function to list categories
list_categories() {
echo -e "${BLUE}Fetching VibeTunnel log categories from the last hour...${NC}\n"
# Get unique categories from recent logs
log show --predicate "subsystem == \"$SUBSYSTEM\"" --last 1h 2>/dev/null | \
grep -E "category: \"[^\"]+\"" | \
sed -E 's/.*category: "([^"]+)".*/\1/' | \
sort | uniq | \
while read -r cat; do
echo "$cat"
done
echo -e "\n${YELLOW}Note: Only categories with recent activity are shown${NC}"
}
# Show help if no arguments provided
if [[ $# -eq 0 ]]; then
show_usage
exit 0
fi
# Parse command line arguments
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_usage
exit 0
;;
-f|--follow)
STREAM_MODE=true
SHOW_TAIL=false
shift
;;
-n|--lines)
TAIL_LINES="$2"
shift 2
;;
-l|--last)
TIME_RANGE="$2"
shift 2
;;
-c|--category)
CATEGORY="$2"
shift 2
;;
-e|--errors)
ERRORS_ONLY=true
shift
;;
-d|--debug)
LOG_LEVEL="debug"
shift
;;
-s|--search)
SEARCH_TEXT="$2"
shift 2
;;
-o|--output)
OUTPUT_FILE="$2"
shift 2
;;
--server)
SERVER_ONLY=true
CATEGORY="ServerOutput"
shift
;;
--list-categories)
list_categories
exit 0
;;
--json)
STYLE_ARGS="--style json"
shift
;;
--all)
SHOW_TAIL=false
shift
;;
*)
echo -e "${RED}Unknown option: $1${NC}"
echo "Use -h or --help for usage information"
exit 1
;;
esac
done
# Build the predicate
PREDICATE="subsystem == \"$SUBSYSTEM\""
# Add category filter if specified
if [[ -n "$CATEGORY" ]]; then
PREDICATE="$PREDICATE AND category == \"$CATEGORY\""
fi
# Add error filter if specified
if [[ "$ERRORS_ONLY" == true ]]; then
PREDICATE="$PREDICATE AND (eventType == \"error\" OR messageType == \"error\" OR eventMessage CONTAINS \"ERROR\" OR eventMessage CONTAINS \"[31m\")"
fi
# Add search filter if specified
if [[ -n "$SEARCH_TEXT" ]]; then
PREDICATE="$PREDICATE AND eventMessage CONTAINS[c] \"$SEARCH_TEXT\""
fi
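# Example of a composed predicate (with -c gateway -s "fail"):
#   subsystem == "ai.openclaw" AND category == "gateway" AND eventMessage CONTAINS[c] "fail"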
# Build the command - always use sudo with --info to show private data
if [[ "$STREAM_MODE" == true ]]; then
# Streaming mode
CMD="sudo log stream --predicate '$PREDICATE' --level $LOG_LEVEL --info"
echo -e "${GREEN}Streaming VibeTunnel logs continuously...${NC}"
echo -e "${YELLOW}Press Ctrl+C to stop${NC}\n"
else
# Show mode
CMD="sudo log show --predicate '$PREDICATE'"
# Add log level for show command
if [[ "$LOG_LEVEL" == "debug" ]]; then
CMD="$CMD --debug"
else
CMD="$CMD --info"
fi
# Add time range
CMD="$CMD --last $TIME_RANGE"
if [[ "$SHOW_TAIL" == true ]]; then
echo -e "${GREEN}Showing last $TAIL_LINES log lines from the past $TIME_RANGE${NC}"
else
echo -e "${GREEN}Showing all logs from the past $TIME_RANGE${NC}"
fi
# Show applied filters
if [[ "$ERRORS_ONLY" == true ]]; then
echo -e "${RED}Filter: Errors only${NC}"
fi
if [[ -n "$CATEGORY" ]]; then
echo -e "${BLUE}Category: $CATEGORY${NC}"
fi
if [[ -n "$SEARCH_TEXT" ]]; then
echo -e "${YELLOW}Search: \"$SEARCH_TEXT\"${NC}"
fi
echo "" # Empty line for readability
fi
# Add style arguments if specified
if [[ -n "${STYLE_ARGS:-}" ]]; then
CMD="$CMD $STYLE_ARGS"
fi
# Execute the command
if [[ -n "$OUTPUT_FILE" ]]; then
# First check if sudo works without password for the log command
if sudo -n /usr/bin/log show --last 1s 2>&1 | grep -q "password"; then
handle_sudo_error
fi
echo -e "${BLUE}Exporting logs to: $OUTPUT_FILE${NC}\n"
if [[ "$SHOW_TAIL" == true ]] && [[ "$STREAM_MODE" == false ]]; then
eval "$CMD" 2>&1 | tail -n "$TAIL_LINES" > "$OUTPUT_FILE"
else
eval "$CMD" > "$OUTPUT_FILE" 2>&1
fi
# Check if file was created and has content
if [[ -s "$OUTPUT_FILE" ]]; then
LINE_COUNT=$(wc -l < "$OUTPUT_FILE" | tr -d ' ')
echo -e "${GREEN}✓ Exported $LINE_COUNT lines to $OUTPUT_FILE${NC}"
else
echo -e "${YELLOW}⚠ No logs found matching the criteria${NC}"
fi
else
# Run interactively
# First check if sudo works without password for the log command
if sudo -n /usr/bin/log show --last 1s 2>&1 | grep -q "password"; then
handle_sudo_error
fi
if [[ "$SHOW_TAIL" == true ]] && [[ "$STREAM_MODE" == false ]]; then
# Apply tail for non-streaming mode
eval "$CMD" 2>&1 | tail -n "$TAIL_LINES"
echo -e "\n${YELLOW}Showing last $TAIL_LINES lines. Use --all or -n to see more.${NC}"
else
eval "$CMD"
fi
fi

View File

@@ -0,0 +1,40 @@
{
"ensureLogins": [
"odrobnik",
"alphonse-arianee",
"aaronn",
"ronak-guliani",
"cpojer",
"carlulsoe",
"jdrhyne",
"latitudeki5223",
"longmaba",
"manmal",
"thesash",
"rhjoh",
"ysqander",
"atalovesyou",
"0xJonHoldsCrypto",
"hougangdev",
"jiulingyun"
],
"seedCommit": "d6863f87",
"placeholderAvatar": "assets/avatar-placeholder.svg",
"displayName": {
"jdrhyne": "Jonathan D. Rhyne (DJ-D)"
},
"nameToLogin": {
"peter steinberger": "steipete",
"eng. juan combetto": "omniwired",
"mariano belinky": "mbelinky",
"vasanth rao naik sabavat": "vsabavat",
"tu nombre real": "nachx639",
"django navarro": "djangonavarro220"
},
"emailToLogin": {
"steipete@gmail.com": "steipete",
"sbarrios93@gmail.com": "sebslight",
"rltorres26+github@gmail.com": "RandyVentures",
"hixvac@gmail.com": "VACInc"
}
}

View File

@@ -0,0 +1,289 @@
#!/usr/bin/env bash
set -euo pipefail
APP_BUNDLE="${1:-dist/OpenClaw.app}"
IDENTITY="${SIGN_IDENTITY:-}"
TIMESTAMP_MODE="${CODESIGN_TIMESTAMP:-auto}"
DISABLE_LIBRARY_VALIDATION="${DISABLE_LIBRARY_VALIDATION:-0}"
SKIP_TEAM_ID_CHECK="${SKIP_TEAM_ID_CHECK:-0}"
ENT_TMP_BASE=$(mktemp -t openclaw-entitlements-base.XXXXXX)
ENT_TMP_APP_BASE=$(mktemp -t openclaw-entitlements-app-base.XXXXXX)
ENT_TMP_RUNTIME=$(mktemp -t openclaw-entitlements-runtime.XXXXXX)
if [[ "${APP_BUNDLE}" == "--help" || "${APP_BUNDLE}" == "-h" ]]; then
cat <<'HELP'
Usage: scripts/codesign-mac-app.sh [app-bundle]
Env:
SIGN_IDENTITY="Apple Development: Your Name (TEAMID)"
ALLOW_ADHOC_SIGNING=1
CODESIGN_TIMESTAMP=auto|on|off
DISABLE_LIBRARY_VALIDATION=1 # dev-only Sparkle Team ID workaround
SKIP_TEAM_ID_CHECK=1 # bypass Team ID audit
HELP
exit 0
fi
if [ ! -d "$APP_BUNDLE" ]; then
echo "App bundle not found: $APP_BUNDLE" >&2
exit 1
fi
select_identity() {
local preferred available first
# Prefer a Developer ID Application cert.
preferred="$(security find-identity -p codesigning -v 2>/dev/null \
| awk -F'\"' '/Developer ID Application/ { print $2; exit }')"
if [ -n "$preferred" ]; then
echo "$preferred"
return
fi
# Next, try Apple Distribution.
preferred="$(security find-identity -p codesigning -v 2>/dev/null \
| awk -F'\"' '/Apple Distribution/ { print $2; exit }')"
if [ -n "$preferred" ]; then
echo "$preferred"
return
fi
# Then, try Apple Development.
preferred="$(security find-identity -p codesigning -v 2>/dev/null \
| awk -F'\"' '/Apple Development/ { print $2; exit }')"
if [ -n "$preferred" ]; then
echo "$preferred"
return
fi
# Fallback to the first valid signing identity.
available="$(security find-identity -p codesigning -v 2>/dev/null \
| sed -n 's/.*"\(.*\)"/\1/p')"
if [ -n "$available" ]; then
first="$(printf '%s\n' "$available" | head -n1)"
echo "$first"
return
fi
return 1
}
if [ -z "$IDENTITY" ]; then
if ! IDENTITY="$(select_identity)"; then
if [[ "${ALLOW_ADHOC_SIGNING:-}" == "1" ]]; then
echo "WARN: No signing identity found. Falling back to ad-hoc signing (-)." >&2
echo " !!! WARNING: Ad-hoc signed apps do NOT persist TCC permissions (Accessibility, etc) !!!" >&2
echo " !!! You will need to re-grant permissions every time you restart the app. !!!" >&2
IDENTITY="-"
else
echo "ERROR: No signing identity found. Set SIGN_IDENTITY to a valid codesigning certificate." >&2
echo " Alternatively, set ALLOW_ADHOC_SIGNING=1 to fallback to ad-hoc signing (limitations apply)." >&2
exit 1
fi
fi
fi
echo "Using signing identity: $IDENTITY"
if [[ "$IDENTITY" == "-" ]]; then
cat <<'WARN' >&2
================================================================================
!!! AD-HOC SIGNING IN USE - PERMISSIONS WILL NOT STICK (macOS RESTRICTION) !!!
macOS ties permissions to the code signature, bundle ID, and app path.
Ad-hoc signing generates a new signature every build, so macOS treats the app
as a different binary and will forget permissions (prompts may vanish).
For correct permission behavior you MUST sign with a real Apple Development or
Developer ID certificate.
If prompts disappear: remove the app entry in System Settings -> Privacy & Security,
relaunch the app, and re-grant. Some permissions only reappear after a full
macOS restart.
================================================================================
WARN
fi
timestamp_arg="--timestamp=none"
case "$TIMESTAMP_MODE" in
1|on|yes|true)
timestamp_arg="--timestamp"
;;
0|off|no|false)
timestamp_arg="--timestamp=none"
;;
auto)
if [[ "$IDENTITY" == *"Developer ID Application"* ]]; then
timestamp_arg="--timestamp"
fi
;;
*)
echo "ERROR: Unknown CODESIGN_TIMESTAMP value: $TIMESTAMP_MODE (use auto|on|off)" >&2
exit 1
;;
esac
if [[ "$IDENTITY" == "-" ]]; then
timestamp_arg="--timestamp=none"
fi
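# Hardened runtime is only meaningful with a real identity; ad-hoc builds skip it.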
options_args=()
if [[ "$IDENTITY" != "-" ]]; then
options_args=("--options" "runtime")
fi
timestamp_args=("$timestamp_arg")
cat > "$ENT_TMP_BASE" <<'PLIST'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.automation.apple-events</key>
<true/>
<key>com.apple.security.device.audio-input</key>
<true/>
<key>com.apple.security.device.camera</key>
<true/>
</dict>
</plist>
PLIST
cat > "$ENT_TMP_APP_BASE" <<'PLIST'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.automation.apple-events</key>
<true/>
<key>com.apple.security.device.audio-input</key>
<true/>
<key>com.apple.security.device.camera</key>
<true/>
<key>com.apple.security.personal-information.location</key>
<true/>
</dict>
</plist>
PLIST
cat > "$ENT_TMP_RUNTIME" <<'PLIST'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
</dict>
</plist>
PLIST
if [[ "$DISABLE_LIBRARY_VALIDATION" == "1" ]]; then
/usr/libexec/PlistBuddy -c "Add :com.apple.security.cs.disable-library-validation bool true" "$ENT_TMP_APP_BASE" >/dev/null 2>&1 || \
/usr/libexec/PlistBuddy -c "Set :com.apple.security.cs.disable-library-validation true" "$ENT_TMP_APP_BASE"
echo "Note: disable-library-validation entitlement enabled (DISABLE_LIBRARY_VALIDATION=1)."
fi
APP_ENTITLEMENTS="$ENT_TMP_APP_BASE"
# clear extended attributes to avoid stale signatures
xattr -cr "$APP_BUNDLE" 2>/dev/null || true
sign_item() {
local target="$1"
local entitlements="$2"
codesign --force ${options_args+"${options_args[@]}"} "${timestamp_args[@]}" --entitlements "$entitlements" --sign "$IDENTITY" "$target"
}
sign_plain_item() {
local target="$1"
codesign --force ${options_args+"${options_args[@]}"} "${timestamp_args[@]}" --sign "$IDENTITY" "$target"
}
team_id_for() {
codesign -dv --verbose=4 "$1" 2>&1 | awk -F= '/^TeamIdentifier=/{print $2; exit}'
}
verify_team_ids() {
if [[ "$SKIP_TEAM_ID_CHECK" == "1" ]]; then
echo "Note: skipping Team ID audit (SKIP_TEAM_ID_CHECK=1)."
return 0
fi
local expected
expected="$(team_id_for "$APP_BUNDLE" || true)"
if [[ -z "$expected" ]]; then
echo "WARN: TeamIdentifier missing on app bundle; skipping Team ID audit."
return 0
fi
local mismatches=()
while IFS= read -r -d '' f; do
if /usr/bin/file "$f" | /usr/bin/grep -q "Mach-O"; then
local team
team="$(team_id_for "$f" || true)"
if [[ -z "$team" ]]; then
team="not set"
fi
if [[ "$expected" == "not set" ]]; then
if [[ "$team" != "not set" ]]; then
mismatches+=("$f (TeamIdentifier=$team)")
fi
elif [[ "$team" != "$expected" ]]; then
mismatches+=("$f (TeamIdentifier=$team)")
fi
fi
done < <(find "$APP_BUNDLE" -type f -print0)
if [[ "${#mismatches[@]}" -gt 0 ]]; then
echo "ERROR: Team ID mismatch detected (expected: $expected)"
for entry in "${mismatches[@]}"; do
echo " - $entry"
done
echo "Hint: re-sign embedded frameworks or set DISABLE_LIBRARY_VALIDATION=1 for dev builds."
exit 1
fi
}
# Sign main binary
if [ -f "$APP_BUNDLE/Contents/MacOS/OpenClaw" ]; then
echo "Signing main binary"; sign_item "$APP_BUNDLE/Contents/MacOS/OpenClaw" "$APP_ENTITLEMENTS"
fi
# Sign Sparkle deeply if present
SPARKLE="$APP_BUNDLE/Contents/Frameworks/Sparkle.framework"
if [ -d "$SPARKLE" ]; then
echo "Signing Sparkle framework and helpers"
find "$SPARKLE" -type f -print0 | while IFS= read -r -d '' f; do
if /usr/bin/file "$f" | /usr/bin/grep -q "Mach-O"; then
sign_plain_item "$f"
fi
done
sign_plain_item "$SPARKLE/Versions/B/Sparkle"
sign_plain_item "$SPARKLE/Versions/B/Autoupdate"
sign_plain_item "$SPARKLE/Versions/B/Updater.app/Contents/MacOS/Updater"
sign_plain_item "$SPARKLE/Versions/B/Updater.app"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Downloader.xpc/Contents/MacOS/Downloader"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Downloader.xpc"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Installer.xpc/Contents/MacOS/Installer"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Installer.xpc"
sign_plain_item "$SPARKLE/Versions/B"
sign_plain_item "$SPARKLE"
fi
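# Nested code must be signed inside-out (helpers and XPC services first, the framework
# bundle last); signing the bundle before its contents would invalidate the outer seal.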
# Sign any other embedded frameworks/dylibs
if [ -d "$APP_BUNDLE/Contents/Frameworks" ]; then
find "$APP_BUNDLE/Contents/Frameworks" \( -name "*.framework" -o -name "*.dylib" \) ! -path "*Sparkle.framework*" -print0 | while IFS= read -r -d '' f; do
echo "Signing framework: $f"; sign_plain_item "$f"
done
fi
# Finally sign the bundle
sign_item "$APP_BUNDLE" "$APP_ENTITLEMENTS"
verify_team_ids
rm -f "$ENT_TMP_BASE" "$ENT_TMP_APP_BASE" "$ENT_TMP_RUNTIME"
echo "Codesign complete for $APP_BUNDLE"

View File

@@ -0,0 +1,3 @@
messagesNcontentXtooluseinput->messages.content.tool_use.input
groupsthreads->groups/threads
startstoprestart->start/stop/restart

View File

@@ -0,0 +1,9 @@
iTerm
FO
Nam
Lins
Vai
OptionA
CAF
overlayed
re-use

117
openclaw/scripts/committer Normal file
View File

@@ -0,0 +1,117 @@
#!/usr/bin/env bash
set -euo pipefail
# Disable glob expansion to handle brackets in file paths
set -f
usage() {
printf 'Usage: %s [--force] "commit message" "file" ["file" ...]\n' "$(basename "$0")" >&2
exit 2
}
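# Example (paths illustrative): committer --force "chore: update docs" README.md docs/guide.md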
if [ "$#" -lt 2 ]; then
usage
fi
force_delete_lock=false
if [ "${1:-}" = "--force" ]; then
force_delete_lock=true
shift
fi
if [ "$#" -lt 2 ]; then
usage
fi
commit_message=$1
shift
if [[ "$commit_message" != *[![:space:]]* ]]; then
printf 'Error: commit message must not be empty\n' >&2
exit 1
fi
if [ -e "$commit_message" ]; then
printf 'Error: first argument looks like a file path ("%s"); provide the commit message first\n' "$commit_message" >&2
exit 1
fi
if [ "$#" -eq 0 ]; then
usage
fi
files=("$@")
# Disallow "." because it stages the entire repository and defeats the helper's safety guardrails.
for file in "${files[@]}"; do
if [ "$file" = "." ]; then
printf 'Error: "." is not allowed; list specific paths instead\n' >&2
exit 1
fi
done
# Prevent staging node_modules even if a path is forced.
for file in "${files[@]}"; do
case "$file" in
*node_modules* | */node_modules | */node_modules/* | node_modules)
printf 'Error: node_modules paths are not allowed: %s\n' "$file" >&2
exit 1
;;
esac
done
last_commit_error=''
run_git_commit() {
local stderr_log
stderr_log=$(mktemp)
if git commit -m "$commit_message" -- "${files[@]}" 2> >(tee "$stderr_log" >&2); then
rm -f "$stderr_log"
last_commit_error=''
return 0
fi
last_commit_error=$(cat "$stderr_log")
rm -f "$stderr_log"
return 1
}
for file in "${files[@]}"; do
if [ ! -e "$file" ]; then
if ! git ls-files --error-unmatch -- "$file" >/dev/null 2>&1; then
printf 'Error: file not found: %s\n' "$file" >&2
exit 1
fi
fi
done
git restore --staged :/
git add --force -- "${files[@]}"
if git diff --staged --quiet; then
printf 'Warning: no staged changes detected for: %s\n' "${files[*]}" >&2
exit 1
fi
committed=false
if run_git_commit; then
committed=true
elif [ "$force_delete_lock" = true ]; then
lock_path=$(
printf '%s\n' "$last_commit_error" |
awk -F"'" '/Unable to create .*\.git\/index\.lock/ { print $2; exit }'
)
if [ -n "$lock_path" ] && [ -e "$lock_path" ]; then
rm -f "$lock_path"
printf 'Removed stale git lock: %s\n' "$lock_path" >&2
if run_git_commit; then
committed=true
fi
fi
fi
if [ "$committed" = false ]; then
exit 1
fi
printf 'Committed "%s" with %d files\n' "$commit_message" "${#files[@]}"

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env tsx
/**
* Copy export-html templates from src to dist
*/
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const projectRoot = path.resolve(__dirname, "..");
const srcDir = path.join(projectRoot, "src", "auto-reply", "reply", "export-html");
const distDir = path.join(projectRoot, "dist", "export-html");
function copyExportHtmlTemplates() {
if (!fs.existsSync(srcDir)) {
console.warn("[copy-export-html-templates] Source directory not found:", srcDir);
return;
}
// Create dist directory
if (!fs.existsSync(distDir)) {
fs.mkdirSync(distDir, { recursive: true });
}
// Copy main template files
const templateFiles = ["template.html", "template.css", "template.js"];
for (const file of templateFiles) {
const srcFile = path.join(srcDir, file);
const distFile = path.join(distDir, file);
if (fs.existsSync(srcFile)) {
fs.copyFileSync(srcFile, distFile);
console.log(`[copy-export-html-templates] Copied ${file}`);
}
}
// Copy vendor files
const srcVendor = path.join(srcDir, "vendor");
const distVendor = path.join(distDir, "vendor");
if (fs.existsSync(srcVendor)) {
if (!fs.existsSync(distVendor)) {
fs.mkdirSync(distVendor, { recursive: true });
}
const vendorFiles = fs.readdirSync(srcVendor);
for (const file of vendorFiles) {
const srcFile = path.join(srcVendor, file);
const distFile = path.join(distVendor, file);
if (fs.statSync(srcFile).isFile()) {
fs.copyFileSync(srcFile, distFile);
console.log(`[copy-export-html-templates] Copied vendor/${file}`);
}
}
}
console.log("[copy-export-html-templates] Done");
}
copyExportHtmlTemplates();

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env tsx
/**
* Copy HOOK.md files from src/hooks/bundled to dist/bundled
*/
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const projectRoot = path.resolve(__dirname, "..");
const srcBundled = path.join(projectRoot, "src", "hooks", "bundled");
const distBundled = path.join(projectRoot, "dist", "bundled");
function copyHookMetadata() {
if (!fs.existsSync(srcBundled)) {
console.warn("[copy-hook-metadata] Source directory not found:", srcBundled);
return;
}
if (!fs.existsSync(distBundled)) {
fs.mkdirSync(distBundled, { recursive: true });
}
const entries = fs.readdirSync(srcBundled, { withFileTypes: true });
for (const entry of entries) {
if (!entry.isDirectory()) {
continue;
}
const hookName = entry.name;
const srcHookDir = path.join(srcBundled, hookName);
const distHookDir = path.join(distBundled, hookName);
const srcHookMd = path.join(srcHookDir, "HOOK.md");
const distHookMd = path.join(distHookDir, "HOOK.md");
if (!fs.existsSync(srcHookMd)) {
console.warn(`[copy-hook-metadata] No HOOK.md found for ${hookName}`);
continue;
}
if (!fs.existsSync(distHookDir)) {
fs.mkdirSync(distHookDir, { recursive: true });
}
fs.copyFileSync(srcHookMd, distHookMd);
console.log(`[copy-hook-metadata] Copied ${hookName}/HOOK.md`);
}
console.log("[copy-hook-metadata] Done");
}
copyHookMetadata();

View File

@@ -0,0 +1,176 @@
#!/usr/bin/env bash
set -euo pipefail
# Create a styled DMG containing the app bundle + /Applications symlink.
#
# Usage:
# scripts/create-dmg.sh <app_path> [output_dmg]
#
# Env:
# DMG_VOLUME_NAME default: CFBundleName (or "OpenClaw")
# DMG_BACKGROUND_PATH default: assets/dmg-background.png
# DMG_BACKGROUND_SMALL default: assets/dmg-background-small.png (recommended)
# DMG_WINDOW_BOUNDS default: "400 100 900 420" (500x320)
# DMG_ICON_SIZE default: 128
# DMG_APP_POS default: "125 160"
# DMG_APPS_POS default: "375 160"
# SKIP_DMG_STYLE=1 skip Finder styling
# DMG_EXTRA_SECTORS extra sectors to keep when shrinking RW image (default: 2048)
APP_PATH="${1:-}"
OUT_PATH="${2:-}"
if [[ -z "$APP_PATH" ]]; then
echo "Usage: $0 <app_path> [output_dmg]" >&2
exit 1
fi
if [[ ! -d "$APP_PATH" ]]; then
echo "Error: App not found: $APP_PATH" >&2
exit 1
fi
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
BUILD_DIR="$ROOT_DIR/dist"
mkdir -p "$BUILD_DIR"
APP_NAME=$(/usr/libexec/PlistBuddy -c "Print CFBundleName" "$APP_PATH/Contents/Info.plist" 2>/dev/null || echo "OpenClaw")
VERSION=$(/usr/libexec/PlistBuddy -c "Print CFBundleShortVersionString" "$APP_PATH/Contents/Info.plist" 2>/dev/null || echo "0.0.0")
DMG_NAME="${APP_NAME}-${VERSION}.dmg"
DMG_VOLUME_NAME="${DMG_VOLUME_NAME:-$APP_NAME}"
DMG_BACKGROUND_SMALL="${DMG_BACKGROUND_SMALL:-$ROOT_DIR/assets/dmg-background-small.png}"
DMG_BACKGROUND_PATH="${DMG_BACKGROUND_PATH:-$ROOT_DIR/assets/dmg-background.png}"
DMG_WINDOW_BOUNDS="${DMG_WINDOW_BOUNDS:-400 100 900 420}"
DMG_ICON_SIZE="${DMG_ICON_SIZE:-128}"
DMG_APP_POS="${DMG_APP_POS:-125 160}"
DMG_APPS_POS="${DMG_APPS_POS:-375 160}"
DMG_EXTRA_SECTORS="${DMG_EXTRA_SECTORS:-2048}"
to_applescript_list4() {
local raw="$1"
echo "$raw" | awk '{ printf "%s, %s, %s, %s", $1, $2, $3, $4 }'
}
to_applescript_pair() {
local raw="$1"
echo "$raw" | awk '{ printf "%s, %s", $1, $2 }'
}
if [[ -z "$OUT_PATH" ]]; then
OUT_PATH="$BUILD_DIR/$DMG_NAME"
fi
echo "Creating DMG: $OUT_PATH"
# Cleanup stuck volumes.
for vol in "/Volumes/$DMG_VOLUME_NAME"* "/Volumes/$APP_NAME"*; do
if [[ -d "$vol" ]]; then
hdiutil detach "$vol" -force 2>/dev/null || true
sleep 1
fi
done
DMG_TEMP="$(mktemp -d /tmp/openclaw-dmg.XXXXXX)"
trap 'hdiutil detach "/Volumes/'"$DMG_VOLUME_NAME"'" -force 2>/dev/null || true; rm -rf "$DMG_TEMP" 2>/dev/null || true' EXIT
cp -R "$APP_PATH" "$DMG_TEMP/"
ln -s /Applications "$DMG_TEMP/Applications"
APP_SIZE_MB=$(du -sm "$APP_PATH" | awk '{print $1}')
DMG_SIZE_MB=$((APP_SIZE_MB + 80))
DMG_RW_PATH="${OUT_PATH%.dmg}-rw.dmg"
rm -f "$DMG_RW_PATH" "$OUT_PATH"
hdiutil create \
-volname "$DMG_VOLUME_NAME" \
-srcfolder "$DMG_TEMP" \
-ov \
-format UDRW \
-size "${DMG_SIZE_MB}m" \
"$DMG_RW_PATH"
MOUNT_POINT="/Volumes/$DMG_VOLUME_NAME"
if [[ -d "$MOUNT_POINT" ]]; then
hdiutil detach "$MOUNT_POINT" -force 2>/dev/null || true
sleep 2
fi
hdiutil attach "$DMG_RW_PATH" -mountpoint "$MOUNT_POINT" -nobrowse
if [[ "${SKIP_DMG_STYLE:-0}" != "1" ]]; then
mkdir -p "$MOUNT_POINT/.background"
if [[ -f "$DMG_BACKGROUND_SMALL" ]]; then
cp "$DMG_BACKGROUND_SMALL" "$MOUNT_POINT/.background/background.png"
elif [[ -f "$DMG_BACKGROUND_PATH" ]]; then
cp "$DMG_BACKGROUND_PATH" "$MOUNT_POINT/.background/background.png"
else
echo "WARN: DMG background missing: $DMG_BACKGROUND_SMALL / $DMG_BACKGROUND_PATH" >&2
fi
# Volume icon: reuse the app icon if available.
ICON_SRC="$ROOT_DIR/apps/macos/Sources/OpenClaw/Resources/OpenClaw.icns"
if [[ -f "$ICON_SRC" ]]; then
cp "$ICON_SRC" "$MOUNT_POINT/.VolumeIcon.icns"
if command -v SetFile >/dev/null 2>&1; then
SetFile -a C "$MOUNT_POINT" 2>/dev/null || true
fi
fi
osascript <<EOF
tell application "Finder"
tell disk "$DMG_VOLUME_NAME"
open
set current view of container window to icon view
set toolbar visible of container window to false
set statusbar visible of container window to false
set the bounds of container window to {$(to_applescript_list4 "$DMG_WINDOW_BOUNDS")}
set viewOptions to the icon view options of container window
set arrangement of viewOptions to not arranged
set icon size of viewOptions to ${DMG_ICON_SIZE}
if exists file ".background:background.png" then
set background picture of viewOptions to file ".background:background.png"
end if
set text size of viewOptions to 12
set label position of viewOptions to bottom
set shows item info of viewOptions to false
set shows icon preview of viewOptions to true
set position of item "${APP_NAME}.app" of container window to {$(to_applescript_pair "$DMG_APP_POS")}
set position of item "Applications" of container window to {$(to_applescript_pair "$DMG_APPS_POS")}
update without registering applications
delay 2
close
open
delay 1
end tell
end tell
EOF
sleep 2
osascript -e 'tell application "Finder" to close every window' || true
fi
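# Finder persists the icon layout in the volume's .DS_Store, so the styling above
# survives the conversion to the final read-only image.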
for i in {1..5}; do
if hdiutil detach "$MOUNT_POINT" -quiet 2>/dev/null; then
break
fi
if [[ "$i" == "3" ]]; then
hdiutil detach "$MOUNT_POINT" -force 2>/dev/null || true
fi
sleep 2
done
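# Shrink the RW image to its minimum sector count (plus slack) so the converted DMG stays small.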
hdiutil resize -limits "$DMG_RW_PATH" >/tmp/openclaw-dmg-limits.txt 2>/dev/null || true
MIN_SECTORS="$(tail -n 1 /tmp/openclaw-dmg-limits.txt 2>/dev/null | awk '{print $1}')"
rm -f /tmp/openclaw-dmg-limits.txt
if [[ "$MIN_SECTORS" =~ ^[0-9]+$ ]] && [[ "$DMG_EXTRA_SECTORS" =~ ^[0-9]+$ ]]; then
TARGET_SECTORS=$((MIN_SECTORS + DMG_EXTRA_SECTORS))
echo "Shrinking RW image: min sectors=$MIN_SECTORS (+$DMG_EXTRA_SECTORS) -> $TARGET_SECTORS"
hdiutil resize -sectors "$TARGET_SECTORS" "$DMG_RW_PATH" >/dev/null 2>&1 || true
fi
hdiutil convert "$DMG_RW_PATH" -format ULMO -o "$OUT_PATH" -ov
rm -f "$DMG_RW_PATH"
hdiutil verify "$OUT_PATH" >/dev/null
echo "✅ DMG ready: $OUT_PATH"

View File

@@ -0,0 +1,273 @@
import fs from "node:fs/promises";
import path from "node:path";
type Usage = {
input_tokens?: number;
output_tokens?: number;
total_tokens?: number;
cache_read_tokens?: number;
cache_write_tokens?: number;
};
type CronRunLogEntry = {
ts: number;
jobId: string;
action: "finished";
status?: "ok" | "error" | "skipped";
model?: string;
provider?: string;
usage?: Usage;
};
function parseArgs(argv: string[]) {
const args: Record<string, string | boolean> = {};
for (let i = 2; i < argv.length; i++) {
const a = argv[i] ?? "";
if (!a.startsWith("--")) {
continue;
}
const key = a.slice(2);
const next = argv[i + 1];
if (next && !next.startsWith("--")) {
args[key] = next;
i++;
} else {
args[key] = true;
}
}
return args;
}
function usageAndExit(code: number): never {
console.error(
[
"cron_usage_report.ts",
"",
"Required (choose one):",
" --store <path-to-cron-store-json> (derive runs dir as dirname(store)/runs)",
" --runsDir <path-to-runs-dir>",
"",
"Time window:",
" --hours <n> (default 24)",
" --from <iso> (overrides --hours)",
" --to <iso> (default now)",
"",
"Filters:",
" --jobId <id>",
" --model <name>",
"",
"Output:",
" --json (emit JSON)",
].join("\n"),
);
process.exit(code);
}
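// Example (paths illustrative):
//   tsx cron_usage_report.ts --runsDir ~/.openclaw/cron/runs --hours 48 --json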
async function listJsonlFiles(dir: string): Promise<string[]> {
const entries = await fs.readdir(dir, { withFileTypes: true }).catch(() => []);
return entries
.filter((e) => e.isFile() && e.name.endsWith(".jsonl"))
.map((e) => path.join(dir, e.name));
}
function safeParseLine(line: string): CronRunLogEntry | null {
try {
const obj = JSON.parse(line) as Partial<CronRunLogEntry> | null;
if (!obj || typeof obj !== "object") {
return null;
}
if (obj.action !== "finished") {
return null;
}
if (typeof obj.ts !== "number" || !Number.isFinite(obj.ts)) {
return null;
}
if (typeof obj.jobId !== "string" || !obj.jobId.trim()) {
return null;
}
return obj as CronRunLogEntry;
} catch {
return null;
}
}
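// Malformed or non-"finished" lines are skipped silently so a partially written
// JSONL file never aborts the whole report.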
function fmtInt(n: number) {
return new Intl.NumberFormat("en-US", { maximumFractionDigits: 0 }).format(n);
}
export async function main() {
const args = parseArgs(process.argv);
const store = typeof args.store === "string" ? args.store : undefined;
const runsDirArg = typeof args.runsDir === "string" ? args.runsDir : undefined;
const runsDir =
runsDirArg ?? (store ? path.join(path.dirname(path.resolve(store)), "runs") : null);
if (!runsDir) {
usageAndExit(2);
}
const hours = typeof args.hours === "string" ? Number(args.hours) : 24;
const toMs = typeof args.to === "string" ? Date.parse(args.to) : Date.now();
const fromMs =
typeof args.from === "string"
? Date.parse(args.from)
: toMs - Math.max(1, Number.isFinite(hours) ? hours : 24) * 60 * 60 * 1000;
if (!Number.isFinite(fromMs) || !Number.isFinite(toMs)) {
console.error("Invalid --from/--to timestamp");
process.exit(2);
}
const filterJobId = typeof args.jobId === "string" ? args.jobId.trim() : "";
const filterModel = typeof args.model === "string" ? args.model.trim() : "";
const asJson = args.json === true;
const files = await listJsonlFiles(runsDir);
const totalsByJob: Record<
string,
{
jobId: string;
runs: number;
models: Record<
string,
{
model: string;
runs: number;
input_tokens: number;
output_tokens: number;
total_tokens: number;
missingUsageRuns: number;
}
>;
input_tokens: number;
output_tokens: number;
total_tokens: number;
missingUsageRuns: number;
}
> = {};
for (const file of files) {
const raw = await fs.readFile(file, "utf-8").catch(() => "");
if (!raw.trim()) {
continue;
}
const lines = raw.split("\n");
for (const line of lines) {
const entry = safeParseLine(line.trim());
if (!entry) {
continue;
}
if (entry.ts < fromMs || entry.ts > toMs) {
continue;
}
if (filterJobId && entry.jobId !== filterJobId) {
continue;
}
const model = (entry.model ?? "<unknown>").trim() || "<unknown>";
if (filterModel && model !== filterModel) {
continue;
}
const jobId = entry.jobId;
const usage = entry.usage;
const hasUsage = Boolean(
usage && (usage.total_tokens ?? usage.input_tokens ?? usage.output_tokens) !== undefined,
);
const jobAgg = (totalsByJob[jobId] ??= {
jobId,
runs: 0,
models: {},
input_tokens: 0,
output_tokens: 0,
total_tokens: 0,
missingUsageRuns: 0,
});
jobAgg.runs++;
const modelAgg = (jobAgg.models[model] ??= {
model,
runs: 0,
input_tokens: 0,
output_tokens: 0,
total_tokens: 0,
missingUsageRuns: 0,
});
modelAgg.runs++;
if (!hasUsage) {
jobAgg.missingUsageRuns++;
modelAgg.missingUsageRuns++;
continue;
}
const input = Math.max(0, Math.trunc(usage?.input_tokens ?? 0));
const output = Math.max(0, Math.trunc(usage?.output_tokens ?? 0));
const total = Math.max(0, Math.trunc(usage?.total_tokens ?? input + output));
jobAgg.input_tokens += input;
jobAgg.output_tokens += output;
jobAgg.total_tokens += total;
modelAgg.input_tokens += input;
modelAgg.output_tokens += output;
modelAgg.total_tokens += total;
}
}
const rows = Object.values(totalsByJob)
.map((r) => ({
...r,
models: Object.values(r.models).toSorted((a, b) => b.total_tokens - a.total_tokens),
}))
.toSorted((a, b) => b.total_tokens - a.total_tokens);
if (asJson) {
process.stdout.write(
JSON.stringify(
{
from: new Date(fromMs).toISOString(),
to: new Date(toMs).toISOString(),
runsDir,
jobs: rows,
},
null,
2,
) + "\n",
);
return;
}
console.log(`Cron usage report`);
console.log(` runsDir: ${runsDir}`);
console.log(` window: ${new Date(fromMs).toISOString()} -> ${new Date(toMs).toISOString()}`);
if (filterJobId) {
console.log(` filter jobId: ${filterJobId}`);
}
if (filterModel) {
console.log(` filter model: ${filterModel}`);
}
console.log("");
if (rows.length === 0) {
console.log("No matching cron run entries found.");
return;
}
for (const job of rows) {
console.log(`jobId: ${job.jobId}`);
console.log(` runs: ${fmtInt(job.runs)} (missing usage: ${fmtInt(job.missingUsageRuns)})`);
console.log(
` tokens: total ${fmtInt(job.total_tokens)} (in ${fmtInt(job.input_tokens)} / out ${fmtInt(job.output_tokens)})`,
);
for (const m of job.models) {
console.log(
` model ${m.model}: runs ${fmtInt(m.runs)} (missing usage: ${fmtInt(m.missingUsageRuns)}), total ${fmtInt(m.total_tokens)} (in ${fmtInt(m.input_tokens)} / out ${fmtInt(m.output_tokens)})`,
);
}
console.log("");
}
}
if (import.meta.url === `file://${process.argv[1]}`) {
void main();
}

View File

@@ -0,0 +1,391 @@
import { execFileSync } from "node:child_process";
import crypto from "node:crypto";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
type Args = {
agentId: string;
reveal: boolean;
sessionKey?: string;
};
const mask = (value: string) => {
const compact = value.trim();
if (!compact) {
return "missing";
}
const edge = compact.length >= 12 ? 6 : 4;
return `${compact.slice(0, edge)}…${compact.slice(-edge)}`;
};
const parseArgs = (): Args => {
const args = process.argv.slice(2);
let agentId = "main";
let reveal = false;
let sessionKey: string | undefined;
for (let i = 0; i < args.length; i++) {
const arg = args[i];
if (arg === "--agent" && args[i + 1]) {
agentId = String(args[++i]).trim() || "main";
continue;
}
if (arg === "--reveal") {
reveal = true;
continue;
}
if (arg === "--session-key" && args[i + 1]) {
sessionKey = String(args[++i]).trim() || undefined;
continue;
}
}
return { agentId, reveal, sessionKey };
};
const loadAuthProfiles = (agentId: string) => {
const stateRoot =
process.env.OPENCLAW_STATE_DIR?.trim() ||
process.env.CLAWDBOT_STATE_DIR?.trim() ||
path.join(os.homedir(), ".openclaw");
const authPath = path.join(stateRoot, "agents", agentId, "agent", "auth-profiles.json");
if (!fs.existsSync(authPath)) {
throw new Error(`Missing: ${authPath}`);
}
const store = JSON.parse(fs.readFileSync(authPath, "utf8")) as {
profiles?: Record<string, { provider?: string; type?: string; token?: string; key?: string }>;
};
return { authPath, store };
};
const pickAnthropicTokens = (store: {
profiles?: Record<string, { provider?: string; type?: string; token?: string; key?: string }>;
}): Array<{ profileId: string; token: string }> => {
const profiles = store.profiles ?? {};
const found: Array<{ profileId: string; token: string }> = [];
for (const [id, cred] of Object.entries(profiles)) {
if (cred?.provider !== "anthropic") {
continue;
}
const token = cred.type === "token" ? cred.token?.trim() : undefined;
if (token) {
found.push({ profileId: id, token });
}
}
return found;
};
const fetchAnthropicOAuthUsage = async (token: string) => {
const res = await fetch("https://api.anthropic.com/api/oauth/usage", {
headers: {
Authorization: `Bearer ${token}`,
Accept: "application/json",
"anthropic-version": "2023-06-01",
"anthropic-beta": "oauth-2025-04-20",
"User-Agent": "openclaw-debug",
},
});
const text = await res.text();
return { status: res.status, contentType: res.headers.get("content-type"), text };
};
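// Note: this usage endpoint and the oauth beta header mirror what the Claude Code CLI
// sends; treat them as unofficial and subject to change.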
const readClaudeCliKeychain = (): {
accessToken: string;
expiresAt?: number;
scopes?: string[];
} | null => {
if (process.platform !== "darwin") {
return null;
}
try {
const raw = execFileSync(
"security",
["find-generic-password", "-s", "Claude Code-credentials", "-w"],
{ encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
);
const parsed = JSON.parse(raw.trim()) as Record<string, unknown>;
const oauth = parsed?.claudeAiOauth as Record<string, unknown> | undefined;
if (!oauth || typeof oauth !== "object") {
return null;
}
const accessToken = oauth.accessToken;
if (typeof accessToken !== "string" || !accessToken.trim()) {
return null;
}
const expiresAt = typeof oauth.expiresAt === "number" ? oauth.expiresAt : undefined;
const scopes = Array.isArray(oauth.scopes)
? oauth.scopes.filter((v): v is string => typeof v === "string")
: undefined;
return { accessToken, expiresAt, scopes };
} catch {
return null;
}
};
const chromeServiceNameForPath = (cookiePath: string): string => {
if (cookiePath.includes("/Arc/")) {
return "Arc Safe Storage";
}
if (cookiePath.includes("/BraveSoftware/")) {
return "Brave Safe Storage";
}
if (cookiePath.includes("/Microsoft Edge/")) {
return "Microsoft Edge Safe Storage";
}
if (cookiePath.includes("/Chromium/")) {
return "Chromium Safe Storage";
}
return "Chrome Safe Storage";
};
const readKeychainPassword = (service: string): string | null => {
try {
const out = execFileSync("security", ["find-generic-password", "-w", "-s", service], {
encoding: "utf8",
stdio: ["ignore", "pipe", "ignore"],
timeout: 5000,
});
const pw = out.trim();
return pw ? pw : null;
} catch {
return null;
}
};
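// Chromium's "v10"/"v11" cookie scheme on macOS: AES-128-CBC with a key derived via
// PBKDF2(safe-storage password, "saltysalt", 1003 iterations, 16 bytes, SHA-1) and an
// IV of sixteen 0x20 bytes; the 3-byte version prefix is stripped before decrypting.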
const decryptChromeCookieValue = (encrypted: Buffer, service: string): string | null => {
if (encrypted.length < 4) {
return null;
}
const prefix = encrypted.subarray(0, 3).toString("utf8");
if (prefix !== "v10" && prefix !== "v11") {
return null;
}
const password = readKeychainPassword(service);
if (!password) {
return null;
}
const key = crypto.pbkdf2Sync(password, "saltysalt", 1003, 16, "sha1");
const iv = Buffer.alloc(16, 0x20);
const data = encrypted.subarray(3);
try {
const decipher = crypto.createDecipheriv("aes-128-cbc", key, iv);
decipher.setAutoPadding(true);
const decrypted = Buffer.concat([decipher.update(data), decipher.final()]);
const text = decrypted.toString("utf8").trim();
return text ? text : null;
} catch {
return null;
}
};
const queryChromeCookieDb = (cookieDb: string): string | null => {
try {
const out = execFileSync(
"sqlite3",
[
"-readonly",
cookieDb,
`
SELECT
COALESCE(NULLIF(value,''), hex(encrypted_value))
FROM cookies
WHERE (host_key LIKE '%claude.ai%' OR host_key = '.claude.ai')
AND name = 'sessionKey'
LIMIT 1;
`,
],
{ encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
).trim();
if (!out) {
return null;
}
if (out.startsWith("sk-ant-")) {
return out;
}
const hex = out.replace(/[^0-9A-Fa-f]/g, "");
if (!hex) {
return null;
}
const buf = Buffer.from(hex, "hex");
const service = chromeServiceNameForPath(cookieDb);
const decrypted = decryptChromeCookieValue(buf, service);
return decrypted && decrypted.startsWith("sk-ant-") ? decrypted : null;
} catch {
return null;
}
};
const queryFirefoxCookieDb = (cookieDb: string): string | null => {
try {
const out = execFileSync(
"sqlite3",
[
"-readonly",
cookieDb,
`
SELECT value
FROM moz_cookies
WHERE (host LIKE '%claude.ai%' OR host = '.claude.ai')
AND name = 'sessionKey'
LIMIT 1;
`,
],
{ encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
).trim();
return out && out.startsWith("sk-ant-") ? out : null;
} catch {
return null;
}
};
const findClaudeSessionKey = (): { sessionKey: string; source: string } | null => {
if (process.platform !== "darwin") {
return null;
}
const firefoxRoot = path.join(
os.homedir(),
"Library",
"Application Support",
"Firefox",
"Profiles",
);
if (fs.existsSync(firefoxRoot)) {
for (const entry of fs.readdirSync(firefoxRoot)) {
const db = path.join(firefoxRoot, entry, "cookies.sqlite");
if (!fs.existsSync(db)) {
continue;
}
const value = queryFirefoxCookieDb(db);
if (value) {
return { sessionKey: value, source: `firefox:${db}` };
}
}
}
const chromeCandidates = [
path.join(os.homedir(), "Library", "Application Support", "Google", "Chrome"),
path.join(os.homedir(), "Library", "Application Support", "Chromium"),
path.join(os.homedir(), "Library", "Application Support", "Arc"),
path.join(os.homedir(), "Library", "Application Support", "BraveSoftware", "Brave-Browser"),
path.join(os.homedir(), "Library", "Application Support", "Microsoft Edge"),
];
for (const root of chromeCandidates) {
if (!fs.existsSync(root)) {
continue;
}
const profiles = fs
.readdirSync(root)
.filter((name) => name === "Default" || name.startsWith("Profile "));
for (const profile of profiles) {
const db = path.join(root, profile, "Cookies");
if (!fs.existsSync(db)) {
continue;
}
const value = queryChromeCookieDb(db);
if (value) {
return { sessionKey: value, source: `chromium:${db}` };
}
}
}
return null;
};
const fetchClaudeWebUsage = async (sessionKey: string) => {
const headers = {
Cookie: `sessionKey=${sessionKey}`,
Accept: "application/json",
"User-Agent":
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15",
};
const orgRes = await fetch("https://claude.ai/api/organizations", { headers });
const orgText = await orgRes.text();
if (!orgRes.ok) {
return { ok: false as const, step: "organizations", status: orgRes.status, body: orgText };
}
const orgs = JSON.parse(orgText) as Array<{ uuid?: string }>;
const orgId = orgs?.[0]?.uuid;
if (!orgId) {
return { ok: false as const, step: "organizations", status: 200, body: orgText };
}
const usageRes = await fetch(`https://claude.ai/api/organizations/${orgId}/usage`, { headers });
const usageText = await usageRes.text();
return usageRes.ok
? { ok: true as const, orgId, body: usageText }
: { ok: false as const, step: "usage", status: usageRes.status, body: usageText };
};
const main = async () => {
const opts = parseArgs();
const { authPath, store } = loadAuthProfiles(opts.agentId);
console.log(`Auth file: ${authPath}`);
const keychain = readClaudeCliKeychain();
if (keychain) {
console.log(
`Claude Code CLI keychain: accessToken=${opts.reveal ? keychain.accessToken : mask(keychain.accessToken)} scopes=${keychain.scopes?.join(",") ?? "(unknown)"}`,
);
const oauth = await fetchAnthropicOAuthUsage(keychain.accessToken);
console.log(
`OAuth usage (keychain): HTTP ${oauth.status} (${oauth.contentType ?? "no content-type"})`,
);
console.log(oauth.text.slice(0, 200).replace(/\s+/g, " ").trim());
} else {
console.log("Claude Code CLI keychain: missing/unreadable");
}
const anthropic = pickAnthropicTokens(store);
if (anthropic.length === 0) {
console.log("Auth profiles: no Anthropic token profiles found");
} else {
for (const entry of anthropic) {
console.log(
`Auth profiles: ${entry.profileId} token=${opts.reveal ? entry.token : mask(entry.token)}`,
);
const oauth = await fetchAnthropicOAuthUsage(entry.token);
console.log(
`OAuth usage (${entry.profileId}): HTTP ${oauth.status} (${oauth.contentType ?? "no content-type"})`,
);
console.log(oauth.text.slice(0, 200).replace(/\s+/g, " ").trim());
}
}
const sessionKey =
opts.sessionKey?.trim() ||
process.env.CLAUDE_AI_SESSION_KEY?.trim() ||
process.env.CLAUDE_WEB_SESSION_KEY?.trim() ||
findClaudeSessionKey()?.sessionKey;
const source = opts.sessionKey
? "--session-key"
: process.env.CLAUDE_AI_SESSION_KEY || process.env.CLAUDE_WEB_SESSION_KEY
? "env"
: (findClaudeSessionKey()?.source ?? "auto");
if (!sessionKey) {
console.log(
"Claude web: no sessionKey found (try --session-key or export CLAUDE_AI_SESSION_KEY)",
);
return;
}
console.log(
`Claude web: sessionKey=${opts.reveal ? sessionKey : mask(sessionKey)} (source: ${source})`,
);
const web = await fetchClaudeWebUsage(sessionKey);
if (!web.ok) {
console.log(`Claude web: ${web.step} HTTP ${web.status}`);
console.log(String(web.body).slice(0, 400).replace(/\s+/g, " ").trim());
return;
}
console.log(`Claude web: org=${web.orgId} OK`);
console.log(web.body.slice(0, 400).replace(/\s+/g, " ").trim());
};
await main();

View File

@@ -0,0 +1,779 @@
#!/usr/bin/env bun
// Manual ACP thread smoke for plain-language routing.
// Keep this script available for regression/debug validation. Do not delete.
import { randomUUID } from "node:crypto";
import fs from "node:fs/promises";
import path from "node:path";
type ThreadBindingRecord = {
accountId?: string;
channelId?: string;
threadId?: string;
targetKind?: string;
targetSessionKey?: string;
agentId?: string;
boundBy?: string;
boundAt?: number;
};
type ThreadBindingsPayload = {
version?: number;
bindings?: Record<string, ThreadBindingRecord>;
};
type DiscordMessage = {
id: string;
content?: string;
timestamp?: string;
author?: {
id?: string;
username?: string;
bot?: boolean;
};
};
type DiscordUser = {
id: string;
username: string;
bot?: boolean;
};
type DriverMode = "token" | "webhook";
type Args = {
channelId: string;
driverMode: DriverMode;
driverToken: string;
driverTokenPrefix: string;
botToken: string;
botTokenPrefix: string;
targetAgent: string;
timeoutMs: number;
pollMs: number;
mentionUserId?: string;
instruction?: string;
threadBindingsPath: string;
json: boolean;
};
type SuccessResult = {
ok: true;
smokeId: string;
ackToken: string;
sentMessageId: string;
binding: {
threadId: string;
targetSessionKey: string;
targetKind: string;
agentId: string;
boundAt: number;
accountId?: string;
channelId?: string;
};
ackMessage: {
id: string;
authorId?: string;
authorUsername?: string;
timestamp?: string;
content?: string;
};
};
type FailureResult = {
ok: false;
smokeId: string;
stage: "validation" | "send-message" | "wait-binding" | "wait-ack" | "discord-api" | "unexpected";
error: string;
diagnostics?: {
parentChannelRecent?: Array<{
id: string;
author?: string;
bot?: boolean;
content?: string;
}>;
bindingCandidates?: Array<{
threadId: string;
targetSessionKey: string;
targetKind?: string;
agentId?: string;
boundAt?: number;
}>;
};
};
const DISCORD_API_BASE = "https://discord.com/api/v10";
function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
function parseNumber(value: string | undefined, fallback: number): number {
if (!value) {
return fallback;
}
const parsed = Number.parseInt(value, 10);
return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
}
function resolveStateDir(): string {
const override = process.env.OPENCLAW_STATE_DIR?.trim() || process.env.CLAWDBOT_STATE_DIR?.trim();
if (override) {
return override.startsWith("~")
? path.resolve(process.env.HOME || "", override.slice(1))
: path.resolve(override);
}
const home = process.env.OPENCLAW_HOME?.trim() || process.env.HOME || "";
return path.join(home, ".openclaw");
}
function resolveArg(flag: string): string | undefined {
const argv = process.argv.slice(2);
const eq = argv.find((entry) => entry.startsWith(`${flag}=`));
if (eq) {
return eq.slice(flag.length + 1);
}
const idx = argv.indexOf(flag);
if (idx >= 0 && idx + 1 < argv.length) {
return argv[idx + 1];
}
return undefined;
}
function hasFlag(flag: string): boolean {
return process.argv.slice(2).includes(flag);
}
function usage(): string {
return (
"Usage: bun scripts/dev/discord-acp-plain-language-smoke.ts " +
"--channel <discord-channel-id> [--token <driver-token> | --driver webhook --bot-token <bot-token>] [options]\n\n" +
"Manual live smoke only (not CI). Sends a plain-language instruction in Discord and verifies:\n" +
"1) OpenClaw spawned an ACP thread binding\n" +
"2) agent replied in that bound thread with the expected ACK token\n\n" +
"Options:\n" +
" --channel <id> Parent Discord channel id (required)\n" +
" --driver <token|webhook> Driver transport mode (default: token)\n" +
" --token <token> Driver Discord token (required for driver=token)\n" +
" --token-prefix <prefix> Auth prefix for --token (default: Bot)\n" +
" --bot-token <token> Bot token for webhook driver mode\n" +
" --bot-token-prefix <prefix> Auth prefix for --bot-token (default: Bot)\n" +
" --agent <id> Expected ACP agent id (default: codex)\n" +
" --mention <user-id> Mention this user in the instruction (optional)\n" +
" --instruction <text> Custom instruction template (optional)\n" +
" --timeout-ms <n> Total timeout in ms (default: 240000)\n" +
" --poll-ms <n> Poll interval in ms (default: 1500)\n" +
" --thread-bindings-path <p> Override thread-bindings json path\n" +
" --json Emit JSON output\n" +
"\n" +
"Environment fallbacks:\n" +
" OPENCLAW_DISCORD_SMOKE_CHANNEL_ID\n" +
" OPENCLAW_DISCORD_SMOKE_DRIVER\n" +
" OPENCLAW_DISCORD_SMOKE_DRIVER_TOKEN\n" +
" OPENCLAW_DISCORD_SMOKE_DRIVER_TOKEN_PREFIX\n" +
" OPENCLAW_DISCORD_SMOKE_BOT_TOKEN\n" +
" OPENCLAW_DISCORD_SMOKE_BOT_TOKEN_PREFIX\n" +
" OPENCLAW_DISCORD_SMOKE_AGENT\n" +
" OPENCLAW_DISCORD_SMOKE_MENTION_USER_ID\n" +
" OPENCLAW_DISCORD_SMOKE_TIMEOUT_MS\n" +
" OPENCLAW_DISCORD_SMOKE_POLL_MS\n" +
" OPENCLAW_DISCORD_SMOKE_THREAD_BINDINGS_PATH"
);
}
function parseArgs(): Args {
const channelId =
resolveArg("--channel") ||
process.env.OPENCLAW_DISCORD_SMOKE_CHANNEL_ID ||
process.env.CLAWDBOT_DISCORD_SMOKE_CHANNEL_ID ||
"";
const driverModeRaw =
resolveArg("--driver") ||
process.env.OPENCLAW_DISCORD_SMOKE_DRIVER ||
process.env.CLAWDBOT_DISCORD_SMOKE_DRIVER ||
"token";
const normalizedDriverMode = driverModeRaw.trim().toLowerCase();
const driverMode: DriverMode = normalizedDriverMode === "webhook" ? "webhook" : "token";
const driverToken =
resolveArg("--token") ||
process.env.OPENCLAW_DISCORD_SMOKE_DRIVER_TOKEN ||
process.env.CLAWDBOT_DISCORD_SMOKE_DRIVER_TOKEN ||
"";
const driverTokenPrefix =
resolveArg("--token-prefix") || process.env.OPENCLAW_DISCORD_SMOKE_DRIVER_TOKEN_PREFIX || "Bot";
const botToken =
resolveArg("--bot-token") ||
process.env.OPENCLAW_DISCORD_SMOKE_BOT_TOKEN ||
process.env.CLAWDBOT_DISCORD_SMOKE_BOT_TOKEN ||
process.env.DISCORD_BOT_TOKEN ||
"";
const botTokenPrefix =
resolveArg("--bot-token-prefix") ||
process.env.OPENCLAW_DISCORD_SMOKE_BOT_TOKEN_PREFIX ||
"Bot";
const targetAgent =
resolveArg("--agent") ||
process.env.OPENCLAW_DISCORD_SMOKE_AGENT ||
process.env.CLAWDBOT_DISCORD_SMOKE_AGENT ||
"codex";
const mentionUserId =
resolveArg("--mention") ||
process.env.OPENCLAW_DISCORD_SMOKE_MENTION_USER_ID ||
process.env.CLAWDBOT_DISCORD_SMOKE_MENTION_USER_ID ||
undefined;
const instruction =
resolveArg("--instruction") ||
process.env.OPENCLAW_DISCORD_SMOKE_INSTRUCTION ||
process.env.CLAWDBOT_DISCORD_SMOKE_INSTRUCTION ||
undefined;
const timeoutMs = parseNumber(
resolveArg("--timeout-ms") || process.env.OPENCLAW_DISCORD_SMOKE_TIMEOUT_MS,
240_000,
);
const pollMs = parseNumber(
resolveArg("--poll-ms") || process.env.OPENCLAW_DISCORD_SMOKE_POLL_MS,
1_500,
);
const defaultBindingsPath = path.join(resolveStateDir(), "discord", "thread-bindings.json");
const threadBindingsPath =
resolveArg("--thread-bindings-path") ||
process.env.OPENCLAW_DISCORD_SMOKE_THREAD_BINDINGS_PATH ||
defaultBindingsPath;
const json = hasFlag("--json");
if (!channelId) {
throw new Error(usage());
}
if (driverMode === "token" && !driverToken) {
throw new Error(usage());
}
if (driverMode === "webhook" && !botToken) {
throw new Error(usage());
}
return {
channelId,
driverMode,
driverToken,
driverTokenPrefix,
botToken,
botTokenPrefix,
targetAgent,
timeoutMs,
pollMs,
mentionUserId,
instruction,
threadBindingsPath,
json,
};
}
function resolveAuthorizationHeader(params: { token: string; tokenPrefix: string }): string {
const token = params.token.trim();
if (!token) {
throw new Error("Missing Discord driver token.");
}
if (token.includes(" ")) {
return token;
}
return `${params.tokenPrefix.trim() || "Bot"} ${token}`;
}
async function discordApi<T>(params: {
method: "GET" | "POST";
path: string;
authHeader: string;
body?: unknown;
retries?: number;
}): Promise<T> {
const retries = params.retries ?? 6;
for (let attempt = 0; attempt <= retries; attempt += 1) {
const response = await fetch(`${DISCORD_API_BASE}${params.path}`, {
method: params.method,
headers: {
Authorization: params.authHeader,
"Content-Type": "application/json",
},
body: params.body === undefined ? undefined : JSON.stringify(params.body),
});
if (response.status === 429) {
const body = (await response.json().catch(() => ({}))) as { retry_after?: number };
const waitSeconds = typeof body.retry_after === "number" ? body.retry_after : 1;
await sleep(Math.ceil(waitSeconds * 1000));
continue;
}
if (!response.ok) {
const text = await response.text().catch(() => "");
throw new Error(
`Discord API ${params.method} ${params.path} failed: ${response.status} ${response.statusText}${text ? ` :: ${text}` : ""}`,
);
}
if (response.status === 204) {
return undefined as T;
}
return (await response.json()) as T;
}
throw new Error(`Discord API ${params.method} ${params.path} exceeded retry budget.`);
}
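// Usage sketch (hypothetical ids): the helper transparently retries 429s using
// Discord's retry_after hint, so callers only see a hard failure once the
// retry budget is spent.
// const messages = await discordApi<DiscordMessage[]>({
//   method: "GET",
//   path: "/channels/123/messages?limit=20",
//   authHeader: resolveAuthorizationHeader({ token: driverToken, tokenPrefix: "Bot" }),
// });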
async function discordWebhookApi<T>(params: {
method: "POST" | "DELETE";
webhookId: string;
webhookToken: string;
body?: unknown;
query?: string;
retries?: number;
}): Promise<T> {
const retries = params.retries ?? 6;
const suffix = params.query ? `?${params.query}` : "";
const path = `/webhooks/${encodeURIComponent(params.webhookId)}/${encodeURIComponent(params.webhookToken)}${suffix}`;
for (let attempt = 0; attempt <= retries; attempt += 1) {
const response = await fetch(`${DISCORD_API_BASE}${path}`, {
method: params.method,
headers: {
"Content-Type": "application/json",
},
body: params.body === undefined ? undefined : JSON.stringify(params.body),
});
if (response.status === 429) {
const body = (await response.json().catch(() => ({}))) as { retry_after?: number };
const waitSeconds = typeof body.retry_after === "number" ? body.retry_after : 1;
await sleep(Math.ceil(waitSeconds * 1000));
continue;
}
if (!response.ok) {
const text = await response.text().catch(() => "");
throw new Error(
`Discord webhook API ${params.method} ${path} failed: ${response.status} ${response.statusText}${text ? ` :: ${text}` : ""}`,
);
}
if (response.status === 204) {
return undefined as T;
}
return (await response.json()) as T;
}
throw new Error(`Discord webhook API ${params.method} ${path} exceeded retry budget.`);
}
async function readThreadBindings(filePath: string): Promise<ThreadBindingRecord[]> {
const raw = await fs.readFile(filePath, "utf8");
const payload = JSON.parse(raw) as ThreadBindingsPayload;
const entries = Object.values(payload.bindings ?? {});
return entries.filter((entry) => Boolean(entry?.threadId && entry?.targetSessionKey));
}
function normalizeBoundAt(record: ThreadBindingRecord): number {
if (typeof record.boundAt === "number" && Number.isFinite(record.boundAt)) {
return record.boundAt;
}
return 0;
}
function resolveCandidateBindings(params: {
entries: ThreadBindingRecord[];
minBoundAt: number;
targetAgent: string;
}): ThreadBindingRecord[] {
const normalizedTargetAgent = params.targetAgent.trim().toLowerCase();
return params.entries
.filter((entry) => {
const targetKind = String(entry.targetKind || "")
.trim()
.toLowerCase();
if (targetKind !== "acp") {
return false;
}
if (normalizeBoundAt(entry) < params.minBoundAt) {
return false;
}
const agentId = String(entry.agentId || "")
.trim()
.toLowerCase();
if (normalizedTargetAgent && agentId && agentId !== normalizedTargetAgent) {
return false;
}
return true;
})
.toSorted((a, b) => normalizeBoundAt(b) - normalizeBoundAt(a));
}
function buildInstruction(params: {
smokeId: string;
ackToken: string;
targetAgent: string;
mentionUserId?: string;
template?: string;
}): string {
const mentionPrefix = params.mentionUserId?.trim() ? `<@${params.mentionUserId.trim()}> ` : "";
if (params.template?.trim()) {
return mentionPrefix + params.template.trim();
}
return (
mentionPrefix +
`Manual smoke ${params.smokeId}: Please spawn a ${params.targetAgent} ACP coding agent in a thread for this request, keep it persistent, and in that thread reply with exactly "${params.ackToken}" and nothing else.`
);
}
function toRecentMessageRow(message: DiscordMessage) {
return {
id: message.id,
author: message.author?.username || message.author?.id || "unknown",
bot: Boolean(message.author?.bot),
content: (message.content || "").slice(0, 500),
};
}
function printOutput(params: { json: boolean; payload: SuccessResult | FailureResult }) {
if (params.json) {
// eslint-disable-next-line no-console
console.log(JSON.stringify(params.payload, null, 2));
return;
}
if (params.payload.ok) {
const success = params.payload;
// eslint-disable-next-line no-console
console.log("PASS");
// eslint-disable-next-line no-console
console.log(`smokeId: ${success.smokeId}`);
// eslint-disable-next-line no-console
console.log(`sentMessageId: ${success.sentMessageId}`);
// eslint-disable-next-line no-console
console.log(`threadId: ${success.binding.threadId}`);
// eslint-disable-next-line no-console
console.log(`sessionKey: ${success.binding.targetSessionKey}`);
// eslint-disable-next-line no-console
console.log(`ackMessageId: ${success.ackMessage.id}`);
// eslint-disable-next-line no-console
console.log(
`ackAuthor: ${success.ackMessage.authorUsername || success.ackMessage.authorId || "unknown"}`,
);
return;
}
const failure = params.payload;
// eslint-disable-next-line no-console
console.error("FAIL");
// eslint-disable-next-line no-console
console.error(`stage: ${failure.stage}`);
// eslint-disable-next-line no-console
console.error(`smokeId: ${failure.smokeId}`);
// eslint-disable-next-line no-console
console.error(`error: ${failure.error}`);
if (failure.diagnostics?.bindingCandidates?.length) {
// eslint-disable-next-line no-console
console.error("binding candidates:");
for (const candidate of failure.diagnostics.bindingCandidates) {
// eslint-disable-next-line no-console
console.error(
` thread=${candidate.threadId} kind=${candidate.targetKind || "?"} agent=${candidate.agentId || "?"} boundAt=${candidate.boundAt || 0} session=${candidate.targetSessionKey}`,
);
}
}
if (failure.diagnostics?.parentChannelRecent?.length) {
// eslint-disable-next-line no-console
console.error("recent parent channel messages:");
for (const row of failure.diagnostics.parentChannelRecent) {
// eslint-disable-next-line no-console
console.error(` ${row.id} ${row.author}${row.bot ? " [bot]" : ""}: ${row.content || ""}`);
}
}
}
async function run(): Promise<SuccessResult | FailureResult> {
let args: Args;
try {
args = parseArgs();
} catch (err) {
return {
ok: false,
stage: "validation",
smokeId: "n/a",
error: err instanceof Error ? err.message : String(err),
};
}
const smokeId = `acp-smoke-${Date.now()}-${randomUUID().slice(0, 8)}`;
const ackToken = `ACP_SMOKE_ACK_${smokeId}`;
const instruction = buildInstruction({
smokeId,
ackToken,
targetAgent: args.targetAgent,
mentionUserId: args.mentionUserId,
template: args.instruction,
});
let readAuthHeader = "";
let sentMessageId = "";
let setupStage: "discord-api" | "send-message" = "discord-api";
let senderAuthorId: string | undefined;
let webhookForCleanup:
| {
id: string;
token: string;
}
| undefined;
try {
if (args.driverMode === "token") {
const authHeader = resolveAuthorizationHeader({
token: args.driverToken,
tokenPrefix: args.driverTokenPrefix,
});
readAuthHeader = authHeader;
const driverUser = await discordApi<DiscordUser>({
method: "GET",
path: "/users/@me",
authHeader,
});
senderAuthorId = driverUser.id;
setupStage = "send-message";
const sent = await discordApi<DiscordMessage>({
method: "POST",
path: `/channels/${encodeURIComponent(args.channelId)}/messages`,
authHeader,
body: {
content: instruction,
allowed_mentions: args.mentionUserId
? { parse: [], users: [args.mentionUserId] }
: { parse: [] },
},
});
sentMessageId = sent.id;
} else {
const botAuthHeader = resolveAuthorizationHeader({
token: args.botToken,
tokenPrefix: args.botTokenPrefix,
});
readAuthHeader = botAuthHeader;
await discordApi<DiscordUser>({
method: "GET",
path: "/users/@me",
authHeader: botAuthHeader,
});
setupStage = "send-message";
const webhook = await discordApi<{ id: string; token?: string | null }>({
method: "POST",
path: `/channels/${encodeURIComponent(args.channelId)}/webhooks`,
authHeader: botAuthHeader,
body: {
name: `openclaw-acp-smoke-${smokeId.slice(-8)}`,
},
});
if (!webhook.id || !webhook.token) {
return {
ok: false,
stage: "send-message",
smokeId,
error:
"Discord webhook creation succeeded but no webhook token was returned; cannot post smoke message.",
};
}
webhookForCleanup = { id: webhook.id, token: webhook.token };
const sent = await discordWebhookApi<DiscordMessage>({
method: "POST",
webhookId: webhook.id,
webhookToken: webhook.token,
query: "wait=true",
body: {
content: instruction,
allowed_mentions: args.mentionUserId
? { parse: [], users: [args.mentionUserId] }
: { parse: [] },
},
});
sentMessageId = sent.id;
senderAuthorId = sent.author?.id;
}
} catch (err) {
return {
ok: false,
stage: setupStage,
smokeId,
error: err instanceof Error ? err.message : String(err),
};
}
const startedAt = Date.now();
const deadline = startedAt + args.timeoutMs;
let winningBinding: ThreadBindingRecord | undefined;
let latestCandidates: ThreadBindingRecord[] = [];
try {
while (Date.now() < deadline && !winningBinding) {
try {
const entries = await readThreadBindings(args.threadBindingsPath);
latestCandidates = resolveCandidateBindings({
entries,
minBoundAt: startedAt - 3_000,
targetAgent: args.targetAgent,
});
winningBinding = latestCandidates[0];
} catch {
// Keep polling; file may not exist yet or may be mid-write.
}
if (!winningBinding) {
await sleep(args.pollMs);
}
}
if (!winningBinding?.threadId || !winningBinding?.targetSessionKey) {
let parentRecent: DiscordMessage[] = [];
try {
parentRecent = await discordApi<DiscordMessage[]>({
method: "GET",
path: `/channels/${encodeURIComponent(args.channelId)}/messages?limit=20`,
authHeader: readAuthHeader,
});
} catch {
// Best effort diagnostics only.
}
return {
ok: false,
stage: "wait-binding",
smokeId,
error: `Timed out waiting for new ACP thread binding (path: ${args.threadBindingsPath}).`,
diagnostics: {
bindingCandidates: latestCandidates.slice(0, 6).map((entry) => ({
threadId: entry.threadId || "",
targetSessionKey: entry.targetSessionKey || "",
targetKind: entry.targetKind,
agentId: entry.agentId,
boundAt: entry.boundAt,
})),
parentChannelRecent: parentRecent.map(toRecentMessageRow),
},
};
}
const threadId = winningBinding.threadId;
let ackMessage: DiscordMessage | undefined;
while (Date.now() < deadline && !ackMessage) {
try {
const threadMessages = await discordApi<DiscordMessage[]>({
method: "GET",
path: `/channels/${encodeURIComponent(threadId)}/messages?limit=50`,
authHeader: readAuthHeader,
});
ackMessage = threadMessages.find((message) => {
const content = message.content || "";
if (!content.includes(ackToken)) {
return false;
}
const authorId = message.author?.id || "";
return !senderAuthorId || authorId !== senderAuthorId;
});
} catch {
// Keep polling; thread can appear before read permissions settle.
}
if (!ackMessage) {
await sleep(args.pollMs);
}
}
if (!ackMessage) {
let parentRecent: DiscordMessage[] = [];
try {
parentRecent = await discordApi<DiscordMessage[]>({
method: "GET",
path: `/channels/${encodeURIComponent(args.channelId)}/messages?limit=20`,
authHeader: readAuthHeader,
});
} catch {
// Best effort diagnostics only.
}
return {
ok: false,
stage: "wait-ack",
smokeId,
error: `Thread bound (${threadId}) but timed out waiting for ACK token "${ackToken}" from OpenClaw.`,
diagnostics: {
bindingCandidates: [
{
threadId: winningBinding.threadId || "",
targetSessionKey: winningBinding.targetSessionKey || "",
targetKind: winningBinding.targetKind,
agentId: winningBinding.agentId,
boundAt: winningBinding.boundAt,
},
],
parentChannelRecent: parentRecent.map(toRecentMessageRow),
},
};
}
return {
ok: true,
smokeId,
ackToken,
sentMessageId,
binding: {
threadId,
targetSessionKey: winningBinding.targetSessionKey,
targetKind: String(winningBinding.targetKind || "acp"),
agentId: String(winningBinding.agentId || args.targetAgent),
boundAt: normalizeBoundAt(winningBinding),
accountId: winningBinding.accountId,
channelId: winningBinding.channelId,
},
ackMessage: {
id: ackMessage.id,
authorId: ackMessage.author?.id,
authorUsername: ackMessage.author?.username,
timestamp: ackMessage.timestamp,
content: ackMessage.content,
},
};
} finally {
if (webhookForCleanup) {
await discordWebhookApi<void>({
method: "DELETE",
webhookId: webhookForCleanup.id,
webhookToken: webhookForCleanup.token,
}).catch(() => {
// Best-effort cleanup only.
});
}
}
}
if (hasFlag("--help") || hasFlag("-h")) {
// eslint-disable-next-line no-console
console.log(usage());
process.exit(0);
}
const result = await run().catch(
(err): FailureResult => ({
ok: false,
stage: "unexpected",
smokeId: "n/a",
error: err instanceof Error ? err.message : String(err),
}),
);
printOutput({
json: hasFlag("--json"),
payload: result,
});
process.exit(result.ok ? 0 : 1);

View File

@@ -0,0 +1,75 @@
import { createArgReader, createGatewayWsClient, resolveGatewayUrl } from "./gateway-ws-client.ts";
const { get: getArg } = createArgReader();
const urlRaw = getArg("--url") ?? process.env.OPENCLAW_GATEWAY_URL;
const token = getArg("--token") ?? process.env.OPENCLAW_GATEWAY_TOKEN;
if (!urlRaw || !token) {
// eslint-disable-next-line no-console
console.error(
"Usage: bun scripts/dev/gateway-smoke.ts --url <wss://host[:port]> --token <gateway.auth.token>\n" +
"Or set env: OPENCLAW_GATEWAY_URL / OPENCLAW_GATEWAY_TOKEN",
);
process.exit(1);
}
async function main() {
const url = resolveGatewayUrl(urlRaw);
const { request, waitOpen, close } = createGatewayWsClient({
url: url.toString(),
onEvent: (evt) => {
// Ignore noisy connect handshakes.
if (evt.event === "connect.challenge") {
return;
}
},
});
await waitOpen();
// Match iOS "operator" session defaults: token auth, no device identity.
const connectRes = await request("connect", {
minProtocol: 3,
maxProtocol: 3,
client: {
id: "openclaw-ios",
displayName: "openclaw gateway smoke test",
version: "dev",
platform: "dev",
mode: "ui",
instanceId: "openclaw-dev-smoke",
},
locale: "en-US",
userAgent: "gateway-smoke",
role: "operator",
scopes: ["operator.read", "operator.write", "operator.admin"],
caps: [],
auth: { token },
});
if (!connectRes.ok) {
// eslint-disable-next-line no-console
console.error("connect failed:", connectRes.error);
process.exit(2);
}
const healthRes = await request("health");
if (!healthRes.ok) {
// eslint-disable-next-line no-console
console.error("health failed:", healthRes.error);
process.exit(3);
}
const historyRes = await request("chat.history", { sessionKey: "main" }, 15000);
if (!historyRes.ok) {
// eslint-disable-next-line no-console
console.error("chat.history failed:", historyRes.error);
process.exit(4);
}
// eslint-disable-next-line no-console
console.log("ok: connected + health + chat.history");
close();
}
await main();

View File

@@ -0,0 +1,132 @@
import { randomUUID } from "node:crypto";
import WebSocket from "ws";
export type GatewayReqFrame = { type: "req"; id: string; method: string; params?: unknown };
export type GatewayResFrame = {
type: "res";
id: string;
ok: boolean;
payload?: unknown;
error?: unknown;
};
export type GatewayEventFrame = { type: "event"; event: string; seq?: number; payload?: unknown };
export type GatewayFrame =
| GatewayReqFrame
| GatewayResFrame
| GatewayEventFrame
| { type: string; [key: string]: unknown };
export function createArgReader(argv = process.argv.slice(2)) {
const get = (flag: string) => {
const idx = argv.indexOf(flag);
if (idx !== -1 && idx + 1 < argv.length) {
return argv[idx + 1];
}
return undefined;
};
const has = (flag: string) => argv.includes(flag);
return { argv, get, has };
}
export function resolveGatewayUrl(urlRaw: string): URL {
const url = new URL(urlRaw.includes("://") ? urlRaw : `wss://${urlRaw}`);
if (!url.port) {
url.port = url.protocol === "wss:" ? "443" : "80";
}
return url;
}
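// Normalization sketch: a bare host gains the wss scheme and a default port.
// resolveGatewayUrl("gw.example.com").toString() === "wss://gw.example.com:443/"
// resolveGatewayUrl("ws://localhost:18789").toString() === "ws://localhost:18789/"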
function toText(data: WebSocket.RawData): string {
if (typeof data === "string") {
return data;
}
if (data instanceof ArrayBuffer) {
return Buffer.from(data).toString("utf8");
}
if (Array.isArray(data)) {
return Buffer.concat(data.map((chunk) => Buffer.from(chunk))).toString("utf8");
}
return Buffer.from(data as Buffer).toString("utf8");
}
export function createGatewayWsClient(params: {
url: string;
handshakeTimeoutMs?: number;
openTimeoutMs?: number;
onEvent?: (evt: GatewayEventFrame) => void;
}) {
const ws = new WebSocket(params.url, { handshakeTimeout: params.handshakeTimeoutMs ?? 8000 });
const pending = new Map<
string,
{
resolve: (res: GatewayResFrame) => void;
reject: (err: Error) => void;
timeout: ReturnType<typeof setTimeout>;
}
>();
const request = (method: string, paramsObj?: unknown, timeoutMs = 12_000) =>
new Promise<GatewayResFrame>((resolve, reject) => {
const id = randomUUID();
const frame: GatewayReqFrame = { type: "req", id, method, params: paramsObj };
const timeout = setTimeout(() => {
pending.delete(id);
reject(new Error(`timeout waiting for ${method}`));
}, timeoutMs);
pending.set(id, { resolve, reject, timeout });
ws.send(JSON.stringify(frame));
});
const waitOpen = () =>
new Promise<void>((resolve, reject) => {
const t = setTimeout(
() => reject(new Error("ws open timeout")),
params.openTimeoutMs ?? 8000,
);
ws.once("open", () => {
clearTimeout(t);
resolve();
});
ws.once("error", (err) => {
clearTimeout(t);
reject(err instanceof Error ? err : new Error(String(err)));
});
});
ws.on("message", (data) => {
const text = toText(data);
let frame: GatewayFrame | null = null;
try {
frame = JSON.parse(text) as GatewayFrame;
} catch {
return;
}
if (!frame || typeof frame !== "object" || !("type" in frame)) {
return;
}
if (frame.type === "res") {
const res = frame as GatewayResFrame;
const waiter = pending.get(res.id);
if (waiter) {
pending.delete(res.id);
clearTimeout(waiter.timeout);
waiter.resolve(res);
}
return;
}
if (frame.type === "event") {
const evt = frame as GatewayEventFrame;
params.onEvent?.(evt);
}
});
const close = () => {
for (const waiter of pending.values()) {
clearTimeout(waiter.timeout);
}
pending.clear();
ws.close();
};
return { ws, request, waitOpen, close };
}
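// Minimal usage sketch (assumes a reachable gateway speaking the req/res/event
// frame protocol above):
// const { request, waitOpen, close } = createGatewayWsClient({ url });
// await waitOpen();
// const res = await request("health"); // resolves with the matching GatewayResFrame
// close();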

View File

@@ -0,0 +1,283 @@
import { randomUUID } from "node:crypto";
import { createArgReader, createGatewayWsClient, resolveGatewayUrl } from "./gateway-ws-client.ts";
type NodeListPayload = {
ts?: number;
nodes?: Array<{
nodeId: string;
displayName?: string;
platform?: string;
connected?: boolean;
paired?: boolean;
commands?: string[];
permissions?: unknown;
}>;
};
type NodeListNode = NonNullable<NodeListPayload["nodes"]>[number];
const { get: getArg, has: hasFlag } = createArgReader();
const urlRaw = getArg("--url") ?? process.env.OPENCLAW_GATEWAY_URL;
const token = getArg("--token") ?? process.env.OPENCLAW_GATEWAY_TOKEN;
const nodeHint = getArg("--node");
const dangerous = hasFlag("--dangerous") || process.env.OPENCLAW_RUN_DANGEROUS === "1";
const jsonOut = hasFlag("--json");
if (!urlRaw || !token) {
// eslint-disable-next-line no-console
console.error(
"Usage: bun scripts/dev/ios-node-e2e.ts --url <wss://host[:port]> --token <gateway.auth.token> [--node <id|name-substring>] [--dangerous] [--json]\n" +
"Or set env: OPENCLAW_GATEWAY_URL / OPENCLAW_GATEWAY_TOKEN",
);
process.exit(1);
}
const url = resolveGatewayUrl(urlRaw);
const isoNow = () => new Date().toISOString();
const isoMinusMs = (ms: number) => new Date(Date.now() - ms).toISOString();
type TestCase = {
id: string;
command: string;
params?: unknown;
timeoutMs?: number;
dangerous?: boolean;
};
function formatErr(err: unknown): string {
if (!err) {
return "error";
}
if (typeof err === "string") {
return err;
}
if (err instanceof Error) {
return err.message || String(err);
}
try {
return JSON.stringify(err);
} catch {
return Object.prototype.toString.call(err);
}
}
function pickIosNode(list: NodeListPayload, hint?: string): NodeListNode | null {
const nodes = (list.nodes ?? []).filter((n) => n && n.connected);
const ios = nodes.filter((n) => (n.platform ?? "").toLowerCase().includes("ios"));
if (ios.length === 0) {
return null;
}
if (!hint) {
return ios[0] ?? null;
}
const h = hint.toLowerCase();
return (
ios.find((n) => n.nodeId.toLowerCase() === h) ??
ios.find((n) => (n.displayName ?? "").toLowerCase().includes(h)) ??
ios.find((n) => n.nodeId.toLowerCase().includes(h)) ??
ios[0] ??
null
);
}
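// Hint resolution order: exact nodeId match, then displayName substring, then
// nodeId substring, falling back to the first connected iOS node.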
async function main() {
const { request, waitOpen, close } = createGatewayWsClient({ url: url.toString() });
await waitOpen();
const connectRes = await request("connect", {
minProtocol: 3,
maxProtocol: 3,
client: {
id: "cli",
displayName: "openclaw ios node e2e",
version: "dev",
platform: "dev",
mode: "cli",
instanceId: "openclaw-dev-ios-node-e2e",
},
locale: "en-US",
userAgent: "ios-node-e2e",
role: "operator",
scopes: ["operator.read", "operator.write", "operator.admin"],
caps: [],
auth: { token },
});
if (!connectRes.ok) {
// eslint-disable-next-line no-console
console.error("connect failed:", connectRes.error);
close();
process.exit(2);
}
const healthRes = await request("health");
if (!healthRes.ok) {
// eslint-disable-next-line no-console
console.error("health failed:", healthRes.error);
close();
process.exit(3);
}
const nodesRes = await request("node.list");
if (!nodesRes.ok) {
// eslint-disable-next-line no-console
console.error("node.list failed:", nodesRes.error);
close();
process.exit(4);
}
const listPayload = (nodesRes.payload ?? {}) as NodeListPayload;
let node = pickIosNode(listPayload, nodeHint);
if (!node) {
const waitSeconds = Number.parseInt(getArg("--wait-seconds") ?? "25", 10);
const deadline = Date.now() + Math.max(1, waitSeconds) * 1000;
while (!node && Date.now() < deadline) {
await new Promise((r) => setTimeout(r, 1000));
const res = await request("node.list").catch(() => null);
if (!res?.ok) {
continue;
}
node = pickIosNode((res.payload ?? {}) as NodeListPayload, nodeHint);
}
}
if (!node) {
// eslint-disable-next-line no-console
console.error("No connected iOS nodes found. (Is the iOS app connected to the gateway?)");
close();
process.exit(5);
}
const tests: TestCase[] = [
{ id: "device.info", command: "device.info" },
{ id: "device.status", command: "device.status" },
{
id: "system.notify",
command: "system.notify",
params: { title: "OpenClaw E2E", body: `ios-node-e2e @ ${isoNow()}`, delivery: "system" },
},
{
id: "contacts.search",
command: "contacts.search",
params: { query: null, limit: 5 },
},
{
id: "calendar.events",
command: "calendar.events",
params: { startISO: isoMinusMs(6 * 60 * 60 * 1000), endISO: isoNow(), limit: 10 },
},
{
id: "reminders.list",
command: "reminders.list",
params: { status: "incomplete", limit: 10 },
},
{
id: "motion.pedometer",
command: "motion.pedometer",
params: { startISO: isoMinusMs(60 * 60 * 1000), endISO: isoNow() },
},
{
id: "photos.latest",
command: "photos.latest",
params: { limit: 1, maxWidth: 512, quality: 0.7 },
},
{
id: "camera.snap",
command: "camera.snap",
params: { facing: "back", maxWidth: 768, quality: 0.7, format: "jpeg" },
dangerous: true,
timeoutMs: 20_000,
},
{
id: "screen.record",
command: "screen.record",
params: { durationMs: 2_000, fps: 15, includeAudio: false },
dangerous: true,
timeoutMs: 30_000,
},
];
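// camera.snap and screen.record are flagged dangerous: they trigger visible
// capture on the device, so they only run with --dangerous (or
// OPENCLAW_RUN_DANGEROUS=1).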
const run = tests.filter((t) => dangerous || !t.dangerous);
const results: Array<{
id: string;
ok: boolean;
error?: unknown;
payload?: unknown;
}> = [];
for (const t of run) {
const invokeRes = await request(
"node.invoke",
{
nodeId: node.nodeId,
command: t.command,
params: t.params,
timeoutMs: t.timeoutMs ?? 12_000,
idempotencyKey: randomUUID(),
},
(t.timeoutMs ?? 12_000) + 2_000,
).catch((err) => {
results.push({ id: t.id, ok: false, error: formatErr(err) });
return null;
});
if (!invokeRes) {
continue;
}
if (!invokeRes.ok) {
results.push({ id: t.id, ok: false, error: invokeRes.error });
continue;
}
results.push({ id: t.id, ok: true, payload: invokeRes.payload });
}
if (jsonOut) {
// eslint-disable-next-line no-console
console.log(
JSON.stringify(
{
gateway: url.toString(),
node: {
nodeId: node.nodeId,
displayName: node.displayName,
platform: node.platform,
},
dangerous,
results,
},
null,
2,
),
);
} else {
const pad = (s: string, n: number) => (s.length >= n ? s : s + " ".repeat(n - s.length));
const rows = results.map((r) => ({
cmd: r.id,
ok: r.ok ? "ok" : "fail",
note: r.ok ? "" : formatErr(r.error ?? "error"),
}));
const width = Math.min(64, Math.max(12, ...rows.map((r) => r.cmd.length)));
// eslint-disable-next-line no-console
console.log(`node: ${node.displayName ?? node.nodeId} (${node.platform ?? "unknown"})`);
// eslint-disable-next-line no-console
console.log(`dangerous: ${dangerous ? "on" : "off"}`);
// eslint-disable-next-line no-console
console.log("");
for (const r of rows) {
// eslint-disable-next-line no-console
console.log(`${pad(r.cmd, width)} ${pad(r.ok, 4)} ${r.note}`);
}
}
const failed = results.filter((r) => !r.ok);
close();
if (failed.length > 0) {
process.exit(10);
}
}
await main();

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
set -euo pipefail
DEVICE_UDID="${1:-00008130-000630CE0146001C}"
BUNDLE_ID="${2:-ai.openclaw.ios.dev.mariano.test}"
DEST="${3:-/tmp/openclaw-gateway.log}"
xcrun devicectl device copy from \
--device "$DEVICE_UDID" \
--domain-type appDataContainer \
--domain-identifier "$BUNDLE_ID" \
--source Documents/openclaw-gateway.log \
--destination "$DEST" >/dev/null
echo "Pulled to: $DEST"
tail -n 200 "$DEST"

View File

@@ -0,0 +1,62 @@
import { loadConfig } from "../../src/config/config.js";
import { matchPluginCommand, executePluginCommand } from "../../src/plugins/commands.js";
import { loadOpenClawPlugins } from "../../src/plugins/loader.js";
import { sendMessageTelegram } from "../../src/telegram/send.js";
const args = process.argv.slice(2);
const getArg = (flag: string, short?: string) => {
const idx = args.indexOf(flag);
if (idx !== -1 && idx + 1 < args.length) {
return args[idx + 1];
}
if (short) {
const sidx = args.indexOf(short);
if (sidx !== -1 && sidx + 1 < args.length) {
return args[sidx + 1];
}
}
return undefined;
};
const chatId = getArg("--chat", "-c");
const accountId = getArg("--account", "-a");
if (!chatId) {
// eslint-disable-next-line no-console
console.error(
"Usage: bun scripts/dev/test-device-pair-telegram.ts --chat <telegram-chat-id> [--account <accountId>]",
);
process.exit(1);
}
const cfg = loadConfig();
loadOpenClawPlugins({ config: cfg });
const match = matchPluginCommand("/pair");
if (!match) {
// eslint-disable-next-line no-console
console.error("/pair plugin command not registered.");
process.exit(1);
}
const result = await executePluginCommand({
command: match.command,
args: match.args,
senderId: chatId,
channel: "telegram",
channelId: "telegram",
isAuthorizedSender: true,
commandBody: "/pair",
config: cfg,
from: `telegram:${chatId}`,
to: `telegram:${chatId}`,
accountId: accountId,
});
if (result.text) {
await sendMessageTelegram(chatId, result.text, {
accountId: accountId,
});
}
// eslint-disable-next-line no-console
console.log("Sent split /pair messages to", chatId, accountId ? `(${accountId})` : "");

View File

@@ -0,0 +1,19 @@
FROM node:22-bookworm-slim@sha256:3cfe526ec8dd62013b8843e8e5d4877e297b886e5aace4a59fec25dc20736e45
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
bash \
ca-certificates \
git \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /repo
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./
RUN corepack enable \
&& pnpm install --frozen-lockfile
COPY . .
COPY scripts/docker/cleanup-smoke/run.sh /usr/local/bin/openclaw-cleanup-smoke
RUN chmod +x /usr/local/bin/openclaw-cleanup-smoke
ENTRYPOINT ["/usr/local/bin/openclaw-cleanup-smoke"]
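# Build/run sketch (tag and Dockerfile path assumed; the build context must be
# the repo root so the COPY steps above resolve):
#   docker build -f scripts/docker/cleanup-smoke/Dockerfile -t openclaw-cleanup-smoke .
#   docker run --rm openclaw-cleanup-smoke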

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -euo pipefail
cd /repo
export OPENCLAW_STATE_DIR="/tmp/openclaw-test"
export OPENCLAW_CONFIG_PATH="${OPENCLAW_STATE_DIR}/openclaw.json"
echo "==> Build"
pnpm build
echo "==> Seed state"
mkdir -p "${OPENCLAW_STATE_DIR}/credentials"
mkdir -p "${OPENCLAW_STATE_DIR}/agents/main/sessions"
echo '{}' >"${OPENCLAW_CONFIG_PATH}"
echo 'creds' >"${OPENCLAW_STATE_DIR}/credentials/marker.txt"
echo 'session' >"${OPENCLAW_STATE_DIR}/agents/main/sessions/sessions.json"
echo "==> Reset (config+creds+sessions)"
pnpm openclaw reset --scope config+creds+sessions --yes --non-interactive
test ! -f "${OPENCLAW_CONFIG_PATH}"
test ! -d "${OPENCLAW_STATE_DIR}/credentials"
test ! -d "${OPENCLAW_STATE_DIR}/agents/main/sessions"
echo "==> Recreate minimal config"
mkdir -p "${OPENCLAW_STATE_DIR}/credentials"
echo '{}' >"${OPENCLAW_CONFIG_PATH}"
echo "==> Uninstall (state only)"
pnpm openclaw uninstall --state --yes --non-interactive
test ! -d "${OPENCLAW_STATE_DIR}"
echo "OK"

View File

@@ -0,0 +1,17 @@
FROM node:22-bookworm-slim@sha256:3cfe526ec8dd62013b8843e8e5d4877e297b886e5aace4a59fec25dc20736e45
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
bash \
ca-certificates \
curl \
git \
&& rm -rf /var/lib/apt/lists/*
COPY run.sh /usr/local/bin/openclaw-install-e2e
RUN chmod +x /usr/local/bin/openclaw-install-e2e
RUN useradd --create-home --shell /bin/bash appuser
USER appuser
ENTRYPOINT ["/usr/local/bin/openclaw-install-e2e"]

View File

@@ -0,0 +1,535 @@
#!/usr/bin/env bash
set -euo pipefail
INSTALL_URL="${OPENCLAW_INSTALL_URL:-${CLAWDBOT_INSTALL_URL:-https://openclaw.bot/install.sh}}"
MODELS_MODE="${OPENCLAW_E2E_MODELS:-${CLAWDBOT_E2E_MODELS:-both}}" # both|openai|anthropic
INSTALL_TAG="${OPENCLAW_INSTALL_TAG:-${CLAWDBOT_INSTALL_TAG:-latest}}"
E2E_PREVIOUS_VERSION="${OPENCLAW_INSTALL_E2E_PREVIOUS:-${CLAWDBOT_INSTALL_E2E_PREVIOUS:-}}"
SKIP_PREVIOUS="${OPENCLAW_INSTALL_E2E_SKIP_PREVIOUS:-${CLAWDBOT_INSTALL_E2E_SKIP_PREVIOUS:-0}}"
OPENAI_API_KEY="${OPENAI_API_KEY:-}"
ANTHROPIC_API_KEY="${ANTHROPIC_API_KEY:-}"
ANTHROPIC_API_TOKEN="${ANTHROPIC_API_TOKEN:-}"
if [[ "$MODELS_MODE" != "both" && "$MODELS_MODE" != "openai" && "$MODELS_MODE" != "anthropic" ]]; then
echo "ERROR: OPENCLAW_E2E_MODELS must be one of: both|openai|anthropic" >&2
exit 2
fi
if [[ "$MODELS_MODE" == "both" ]]; then
if [[ -z "$OPENAI_API_KEY" ]]; then
echo "ERROR: OPENCLAW_E2E_MODELS=both requires OPENAI_API_KEY." >&2
exit 2
fi
if [[ -z "$ANTHROPIC_API_TOKEN" && -z "$ANTHROPIC_API_KEY" ]]; then
echo "ERROR: OPENCLAW_E2E_MODELS=both requires ANTHROPIC_API_TOKEN or ANTHROPIC_API_KEY." >&2
exit 2
fi
elif [[ "$MODELS_MODE" == "openai" && -z "$OPENAI_API_KEY" ]]; then
echo "ERROR: OPENCLAW_E2E_MODELS=openai requires OPENAI_API_KEY." >&2
exit 2
elif [[ "$MODELS_MODE" == "anthropic" && -z "$ANTHROPIC_API_TOKEN" && -z "$ANTHROPIC_API_KEY" ]]; then
echo "ERROR: OPENCLAW_E2E_MODELS=anthropic requires ANTHROPIC_API_TOKEN or ANTHROPIC_API_KEY." >&2
exit 2
fi
echo "==> Resolve npm versions"
EXPECTED_VERSION="$(npm view "openclaw@${INSTALL_TAG}" version)"
if [[ -z "$EXPECTED_VERSION" || "$EXPECTED_VERSION" == "undefined" || "$EXPECTED_VERSION" == "null" ]]; then
echo "ERROR: unable to resolve openclaw@${INSTALL_TAG} version" >&2
exit 2
fi
if [[ -n "$E2E_PREVIOUS_VERSION" ]]; then
PREVIOUS_VERSION="$E2E_PREVIOUS_VERSION"
else
PREVIOUS_VERSION="$(node - <<'NODE'
const { execSync } = require("node:child_process");
const versions = JSON.parse(execSync("npm view openclaw versions --json", { encoding: "utf8" }));
if (!Array.isArray(versions) || versions.length === 0) process.exit(1);
process.stdout.write(versions.length >= 2 ? versions[versions.length - 2] : versions[0]);
NODE
)"
fi
echo "expected=$EXPECTED_VERSION previous=$PREVIOUS_VERSION"
if [[ "$SKIP_PREVIOUS" == "1" ]]; then
echo "==> Skip preinstall previous (OPENCLAW_INSTALL_E2E_SKIP_PREVIOUS=1)"
else
echo "==> Preinstall previous (forces installer upgrade path; avoids read() prompt)"
npm install -g "openclaw@${PREVIOUS_VERSION}"
fi
echo "==> Run official installer one-liner"
if [[ "$INSTALL_TAG" == "beta" ]]; then
OPENCLAW_BETA=1 CLAWDBOT_BETA=1 curl -fsSL "$INSTALL_URL" | bash
elif [[ "$INSTALL_TAG" != "latest" ]]; then
OPENCLAW_VERSION="$INSTALL_TAG" CLAWDBOT_VERSION="$INSTALL_TAG" curl -fsSL "$INSTALL_URL" | bash
else
curl -fsSL "$INSTALL_URL" | bash
fi
echo "==> Verify installed version"
INSTALLED_VERSION="$(openclaw --version 2>/dev/null | head -n 1 | tr -d '\r')"
echo "installed=$INSTALLED_VERSION expected=$EXPECTED_VERSION"
if [[ "$INSTALLED_VERSION" != "$EXPECTED_VERSION" ]]; then
echo "ERROR: expected openclaw@$EXPECTED_VERSION, got openclaw@$INSTALLED_VERSION" >&2
exit 1
fi
set_image_model() {
local profile="$1"
shift
local candidate
for candidate in "$@"; do
if openclaw --profile "$profile" models set-image "$candidate" >/dev/null 2>&1; then
echo "$candidate"
return 0
fi
done
echo "ERROR: could not set an image model (tried: $*)" >&2
return 1
}
set_agent_model() {
local profile="$1"
local candidate
shift
for candidate in "$@"; do
if openclaw --profile "$profile" models set "$candidate" >/dev/null 2>&1; then
echo "$candidate"
return 0
fi
done
echo "ERROR: could not set agent model (tried: $*)" >&2
return 1
}
write_png_lr_rg() {
local out="$1"
node - <<'NODE' "$out"
const fs = require("node:fs");
const zlib = require("node:zlib");
const out = process.argv[2];
const width = 96;
const height = 64;
const crcTable = (() => {
const table = new Uint32Array(256);
for (let i = 0; i < 256; i++) {
let c = i;
for (let k = 0; k < 8; k++) c = (c & 1) ? (0xedb88320 ^ (c >>> 1)) : (c >>> 1);
table[i] = c >>> 0;
}
return table;
})();
function crc32(buf) {
let c = 0xffffffff;
for (let i = 0; i < buf.length; i++) c = crcTable[(c ^ buf[i]) & 0xff] ^ (c >>> 8);
return (c ^ 0xffffffff) >>> 0;
}
function chunk(type, data) {
const typeBuf = Buffer.from(type, "ascii");
const len = Buffer.alloc(4);
len.writeUInt32BE(data.length, 0);
const crcBuf = Buffer.alloc(4);
crcBuf.writeUInt32BE(crc32(Buffer.concat([typeBuf, data])), 0);
return Buffer.concat([len, typeBuf, data, crcBuf]);
}
const sig = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
const ihdr = Buffer.alloc(13);
ihdr.writeUInt32BE(width, 0);
ihdr.writeUInt32BE(height, 4);
ihdr[8] = 8; // bit depth
ihdr[9] = 2; // color type: truecolor
ihdr[10] = 0; // compression
ihdr[11] = 0; // filter
ihdr[12] = 0; // interlace
const rows = [];
for (let y = 0; y < height; y++) {
const row = Buffer.alloc(1 + width * 3);
row[0] = 0; // filter: none
for (let x = 0; x < width; x++) {
const i = 1 + x * 3;
const left = x < width / 2;
row[i + 0] = left ? 255 : 0;
row[i + 1] = left ? 0 : 255;
row[i + 2] = 0;
}
rows.push(row);
}
const raw = Buffer.concat(rows);
const idat = zlib.deflateSync(raw, { level: 9 });
const png = Buffer.concat([
sig,
chunk("IHDR", ihdr),
chunk("IDAT", idat),
chunk("IEND", Buffer.alloc(0)),
]);
fs.writeFileSync(out, png);
NODE
}
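# The fixture above is a 96x64 truecolor PNG: left half solid red, right half
# solid green, assembled by hand (IHDR + one zlib IDAT + IEND, CRC32 computed
# inline) so the image-tool turn below has a deterministic expected answer.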
run_agent_turn() {
local profile="$1"
local session_id="$2"
local prompt="$3"
local out_json="$4"
openclaw --profile "$profile" agent \
--session-id "$session_id" \
--message "$prompt" \
--thinking off \
--json >"$out_json"
}
assert_agent_json_has_text() {
local path="$1"
node - <<'NODE' "$path"
const fs = require("node:fs");
const p = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
const payloads =
Array.isArray(p?.result?.payloads) ? p.result.payloads :
Array.isArray(p?.payloads) ? p.payloads :
[];
const texts = payloads.map((x) => String(x?.text ?? "").trim()).filter(Boolean);
if (texts.length === 0) process.exit(1);
NODE
}
assert_agent_json_ok() {
local json_path="$1"
local expect_provider="$2"
node - <<'NODE' "$json_path" "$expect_provider"
const fs = require("node:fs");
const jsonPath = process.argv[2];
const expectProvider = process.argv[3];
const p = JSON.parse(fs.readFileSync(jsonPath, "utf8"));
if (typeof p?.status === "string" && p.status !== "ok" && p.status !== "accepted") {
console.error(`ERROR: gateway status=${p.status}`);
process.exit(1);
}
const result = p?.result ?? p;
const payloads = Array.isArray(result?.payloads) ? result.payloads : [];
const anyError = payloads.some((pl) => pl && pl.isError === true);
const combinedText = payloads.map((pl) => String(pl?.text ?? "")).filter(Boolean).join("\n").trim();
if (anyError) {
console.error(`ERROR: agent returned error payload: ${combinedText}`);
process.exit(1);
}
if (/rate_limit_error/i.test(combinedText) || /^429\b/.test(combinedText)) {
console.error(`ERROR: agent rate limited: ${combinedText}`);
process.exit(1);
}
const meta = result?.meta;
const provider =
(typeof meta?.agentMeta?.provider === "string" && meta.agentMeta.provider.trim()) ||
(typeof meta?.provider === "string" && meta.provider.trim()) ||
"";
if (expectProvider && provider && provider !== expectProvider) {
console.error(`ERROR: expected provider=${expectProvider}, got provider=${provider}`);
process.exit(1);
}
NODE
}
extract_matching_text() {
local path="$1"
local expected="$2"
node - <<'NODE' "$path" "$expected"
const fs = require("node:fs");
const p = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
const expected = String(process.argv[3] ?? "");
const payloads =
Array.isArray(p?.result?.payloads) ? p.result.payloads :
Array.isArray(p?.payloads) ? p.payloads :
[];
const texts = payloads.map((x) => String(x?.text ?? "").trim()).filter(Boolean);
const match = texts.find((text) => text === expected);
process.stdout.write(match ?? texts[0] ?? "");
NODE
}
assert_session_used_tools() {
local jsonl="$1"
shift
node - <<'NODE' "$jsonl" "$@"
const fs = require("node:fs");
const jsonl = process.argv[2];
const required = new Set(process.argv.slice(3));
const raw = fs.readFileSync(jsonl, "utf8");
const lines = raw.split("\n").map((l) => l.trim()).filter(Boolean);
const seen = new Set();
const toolTypes = new Set([
"tool_use",
"tool_result",
"tool",
"tool-call",
"tool_call",
"tooluse",
"tool-use",
"toolresult",
"tool-result",
]);
function walk(node, parent) {
if (!node) return;
if (Array.isArray(node)) {
for (const item of node) walk(item, node);
return;
}
if (typeof node !== "object") return;
const obj = node;
const t = typeof obj.type === "string" ? obj.type : null;
if (t && (toolTypes.has(t) || /tool/i.test(t))) {
const name =
typeof obj.name === "string" ? obj.name :
typeof obj.toolName === "string" ? obj.toolName :
typeof obj.tool_name === "string" ? obj.tool_name :
(obj.tool && typeof obj.tool.name === "string") ? obj.tool.name :
null;
if (name) seen.add(name);
}
if (typeof obj.name === "string" && typeof obj.input === "object" && obj.input) {
// Many tool-use blocks look like { type: "...", name: "exec", input: {...} }
// but some transcripts omit/rename type.
seen.add(obj.name);
}
// OpenAI-ish tool call shapes.
if (Array.isArray(obj.tool_calls)) {
for (const c of obj.tool_calls) {
const fn = c?.function;
if (fn && typeof fn.name === "string") seen.add(fn.name);
}
}
if (obj.function && typeof obj.function.name === "string") seen.add(obj.function.name);
for (const v of Object.values(obj)) walk(v, obj);
}
for (const line of lines) {
try {
const entry = JSON.parse(line);
walk(entry, null);
} catch {
// ignore unparsable lines
}
}
const missing = [...required].filter((t) => !seen.has(t));
if (missing.length > 0) {
console.error(`Missing tools in transcript: ${missing.join(", ")}`);
console.error(`Seen tools: ${[...seen].sort().join(", ")}`);
console.error("Transcript head:");
console.error(lines.slice(0, 5).join("\n"));
process.exit(1);
}
NODE
}
run_profile() {
local profile="$1"
local port="$2"
local workspace="$3"
local agent_model_provider="$4" # "openai"|"anthropic"
echo "==> Onboard ($profile)"
if [[ "$agent_model_provider" == "openai" ]]; then
openclaw --profile "$profile" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--auth-choice openai-api-key \
--openai-api-key "$OPENAI_API_KEY" \
--gateway-port "$port" \
--gateway-bind loopback \
--gateway-auth token \
--workspace "$workspace" \
--skip-health
elif [[ -n "$ANTHROPIC_API_TOKEN" ]]; then
openclaw --profile "$profile" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--auth-choice token \
--token-provider anthropic \
--token "$ANTHROPIC_API_TOKEN" \
--gateway-port "$port" \
--gateway-bind loopback \
--gateway-auth token \
--workspace "$workspace" \
--skip-health
else
openclaw --profile "$profile" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--auth-choice apiKey \
--anthropic-api-key "$ANTHROPIC_API_KEY" \
--gateway-port "$port" \
--gateway-bind loopback \
--gateway-auth token \
--workspace "$workspace" \
--skip-health
fi
echo "==> Verify workspace identity files ($profile)"
test -f "$workspace/AGENTS.md"
test -f "$workspace/IDENTITY.md"
test -f "$workspace/USER.md"
test -f "$workspace/SOUL.md"
test -f "$workspace/TOOLS.md"
echo "==> Configure models ($profile)"
local agent_model
local image_model
if [[ "$agent_model_provider" == "openai" ]]; then
agent_model="$(set_agent_model "$profile" \
"openai/gpt-4.1-mini" \
"openai/gpt-4.1" \
"openai/gpt-4o-mini" \
"openai/gpt-4o")"
image_model="$(set_image_model "$profile" \
"openai/gpt-4.1" \
"openai/gpt-4o-mini" \
"openai/gpt-4o" \
"openai/gpt-4.1-mini")"
else
agent_model="$(set_agent_model "$profile" \
"anthropic/claude-opus-4-6" \
"claude-opus-4-6" \
"anthropic/claude-opus-4-5" \
"claude-opus-4-5")"
image_model="$(set_image_model "$profile" \
"anthropic/claude-opus-4-6" \
"claude-opus-4-6" \
"anthropic/claude-opus-4-5" \
"claude-opus-4-5")"
fi
echo "model=$agent_model"
echo "imageModel=$image_model"
echo "==> Prepare tool fixtures ($profile)"
PROOF_TXT="$workspace/proof.txt"
PROOF_COPY="$workspace/copy.txt"
HOSTNAME_TXT="$workspace/hostname.txt"
IMAGE_PNG="$workspace/proof.png"
IMAGE_TXT="$workspace/image.txt"
SESSION_ID="e2e-tools-${profile}"
SESSION_JSONL="/root/.openclaw-${profile}/agents/main/sessions/${SESSION_ID}.jsonl"
PROOF_VALUE="$(node -e 'console.log(require("node:crypto").randomBytes(16).toString("hex"))')"
echo -n "$PROOF_VALUE" >"$PROOF_TXT"
write_png_lr_rg "$IMAGE_PNG"
EXPECTED_HOSTNAME="$(cat /etc/hostname | tr -d '\r\n')"
echo "==> Start gateway ($profile)"
GATEWAY_LOG="$workspace/gateway.log"
openclaw --profile "$profile" gateway --port "$port" --bind loopback >"$GATEWAY_LOG" 2>&1 &
GATEWAY_PID="$!"
cleanup_profile() {
if kill -0 "$GATEWAY_PID" 2>/dev/null; then
kill "$GATEWAY_PID" 2>/dev/null || true
wait "$GATEWAY_PID" 2>/dev/null || true
fi
}
trap cleanup_profile EXIT
echo "==> Wait for health ($profile)"
for _ in $(seq 1 60); do
if openclaw --profile "$profile" health --timeout 2000 --json >/dev/null 2>&1; then
break
fi
sleep 0.25
done
openclaw --profile "$profile" health --timeout 10000 --json >/dev/null
echo "==> Agent turns ($profile)"
TURN1_JSON="/tmp/agent-${profile}-1.json"
TURN2_JSON="/tmp/agent-${profile}-2.json"
TURN3_JSON="/tmp/agent-${profile}-3.json"
TURN4_JSON="/tmp/agent-${profile}-4.json"
run_agent_turn "$profile" "$SESSION_ID" \
"Use the read tool (not exec) to read proof.txt. Reply with the exact contents only (no extra whitespace)." \
"$TURN1_JSON"
assert_agent_json_has_text "$TURN1_JSON"
assert_agent_json_ok "$TURN1_JSON" "$agent_model_provider"
local reply1
reply1="$(extract_matching_text "$TURN1_JSON" "$PROOF_VALUE" | tr -d '\r\n')"
if [[ "$reply1" != "$PROOF_VALUE" ]]; then
echo "ERROR: agent did not read proof.txt correctly ($profile): $reply1" >&2
exit 1
fi
local prompt2
prompt2=$'Use the write tool (not exec) to write exactly this string into copy.txt:\n'"${reply1}"$'\nThen use the read tool (not exec) to read copy.txt and reply with the exact contents only (no extra whitespace).'
run_agent_turn "$profile" "$SESSION_ID" "$prompt2" "$TURN2_JSON"
assert_agent_json_has_text "$TURN2_JSON"
assert_agent_json_ok "$TURN2_JSON" "$agent_model_provider"
local copy_value
copy_value="$(cat "$PROOF_COPY" 2>/dev/null | tr -d '\r\n' || true)"
if [[ "$copy_value" != "$PROOF_VALUE" ]]; then
echo "ERROR: copy.txt did not match proof.txt ($profile)" >&2
exit 1
fi
local reply2
reply2="$(extract_matching_text "$TURN2_JSON" "$PROOF_VALUE" | tr -d '\r\n')"
if [[ "$reply2" != "$PROOF_VALUE" ]]; then
echo "ERROR: agent did not read copy.txt correctly ($profile): $reply2" >&2
exit 1
fi
local prompt3
prompt3=$'Use the exec tool to run: cat /etc/hostname\nThen use the write tool to write the exact stdout (trim trailing newline) into hostname.txt. Reply with the hostname only.'
run_agent_turn "$profile" "$SESSION_ID" "$prompt3" "$TURN3_JSON"
assert_agent_json_has_text "$TURN3_JSON"
assert_agent_json_ok "$TURN3_JSON" "$agent_model_provider"
if [[ "$(cat "$HOSTNAME_TXT" 2>/dev/null | tr -d '\r\n' || true)" != "$EXPECTED_HOSTNAME" ]]; then
echo "ERROR: hostname.txt did not match /etc/hostname ($profile)" >&2
exit 1
fi
run_agent_turn "$profile" "$SESSION_ID" \
"Use the image tool on proof.png. Determine which color is on the left half and which is on the right half. Then use the write tool to write exactly: LEFT=RED RIGHT=GREEN into image.txt. Reply with exactly: LEFT=RED RIGHT=GREEN" \
"$TURN4_JSON"
assert_agent_json_has_text "$TURN4_JSON"
assert_agent_json_ok "$TURN4_JSON" "$agent_model_provider"
if [[ "$(cat "$IMAGE_TXT" 2>/dev/null | tr -d '\r\n' || true)" != "LEFT=RED RIGHT=GREEN" ]]; then
echo "ERROR: image.txt did not contain expected marker ($profile)" >&2
exit 1
fi
local reply4
reply4="$(extract_matching_text "$TURN4_JSON" "LEFT=RED RIGHT=GREEN")"
if [[ "$reply4" != "LEFT=RED RIGHT=GREEN" ]]; then
echo "ERROR: agent reply did not contain expected marker ($profile): $reply4" >&2
exit 1
fi
echo "==> Verify tool usage via session transcript ($profile)"
# Give the gateway a moment to flush transcripts.
sleep 1
if [[ ! -f "$SESSION_JSONL" ]]; then
echo "ERROR: missing session transcript ($profile): $SESSION_JSONL" >&2
ls -la "/root/.openclaw-${profile}/agents/main/sessions" >&2 || true
exit 1
fi
assert_session_used_tools "$SESSION_JSONL" read write exec image
cleanup_profile
trap - EXIT
}
if [[ "$MODELS_MODE" == "openai" || "$MODELS_MODE" == "both" ]]; then
run_profile "e2e-openai" "18789" "/tmp/openclaw-e2e-openai" "openai"
fi
if [[ "$MODELS_MODE" == "anthropic" || "$MODELS_MODE" == "both" ]]; then
run_profile "e2e-anthropic" "18799" "/tmp/openclaw-e2e-anthropic" "anthropic"
fi
echo "OK"

View File

@@ -0,0 +1,32 @@
FROM ubuntu:24.04@sha256:cd1dba651b3080c3686ecf4e3c4220f026b521fb76978881737d24f200828b2b
RUN set -eux; \
for attempt in 1 2 3; do \
if apt-get update -o Acquire::Retries=3; then break; fi; \
echo "apt-get update failed (attempt ${attempt})" >&2; \
if [ "${attempt}" -eq 3 ]; then exit 1; fi; \
sleep 3; \
done; \
apt-get -o Acquire::Retries=3 install -y --no-install-recommends \
bash \
ca-certificates \
curl \
g++ \
make \
python3 \
sudo \
&& rm -rf /var/lib/apt/lists/*
RUN useradd -m -s /bin/bash app \
&& echo "app ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/app
USER app
WORKDIR /home/app
ENV NPM_CONFIG_FUND=false
ENV NPM_CONFIG_AUDIT=false
COPY run.sh /usr/local/bin/openclaw-install-nonroot
RUN sudo chmod +x /usr/local/bin/openclaw-install-nonroot
ENTRYPOINT ["/usr/local/bin/openclaw-install-nonroot"]

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env bash
set -euo pipefail
INSTALL_URL="${OPENCLAW_INSTALL_URL:-https://openclaw.bot/install.sh}"
DEFAULT_PACKAGE="openclaw"
PACKAGE_NAME="${OPENCLAW_INSTALL_PACKAGE:-$DEFAULT_PACKAGE}"
echo "==> Pre-flight: ensure git absent"
if command -v git >/dev/null; then
echo "git is present unexpectedly" >&2
exit 1
fi
echo "==> Run installer (non-root user)"
curl -fsSL "$INSTALL_URL" | bash
# Ensure PATH picks up user npm prefix
export PATH="$HOME/.npm-global/bin:$PATH"
echo "==> Verify git installed"
command -v git >/dev/null
EXPECTED_VERSION="${OPENCLAW_INSTALL_EXPECT_VERSION:-}"
if [[ -n "$EXPECTED_VERSION" ]]; then
LATEST_VERSION="$EXPECTED_VERSION"
else
LATEST_VERSION="$(npm view "$PACKAGE_NAME" version)"
fi
CLI_NAME="$PACKAGE_NAME"
CMD_PATH="$(command -v "$CLI_NAME" || true)"
if [[ -z "$CMD_PATH" && -x "$HOME/.npm-global/bin/$PACKAGE_NAME" ]]; then
CLI_NAME="$PACKAGE_NAME"
CMD_PATH="$HOME/.npm-global/bin/$PACKAGE_NAME"
fi
ENTRY_PATH=""
if [[ -z "$CMD_PATH" ]]; then
NPM_ROOT="$(npm root -g 2>/dev/null || true)"
if [[ -n "$NPM_ROOT" && -f "$NPM_ROOT/$PACKAGE_NAME/dist/entry.js" ]]; then
ENTRY_PATH="$NPM_ROOT/$PACKAGE_NAME/dist/entry.js"
fi
fi
if [[ -z "$CMD_PATH" && -z "$ENTRY_PATH" ]]; then
echo "$PACKAGE_NAME is not on PATH" >&2
exit 1
fi
echo "==> Verify CLI installed: $CLI_NAME"
if [[ -n "$CMD_PATH" ]]; then
INSTALLED_VERSION="$("$CMD_PATH" --version 2>/dev/null | head -n 1 | tr -d '\r')"
else
INSTALLED_VERSION="$(node "$ENTRY_PATH" --version 2>/dev/null | head -n 1 | tr -d '\r')"
fi
echo "cli=$CLI_NAME installed=$INSTALLED_VERSION expected=$LATEST_VERSION"
if [[ "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
echo "ERROR: expected ${CLI_NAME}@${LATEST_VERSION}, got ${CLI_NAME}@${INSTALLED_VERSION}" >&2
exit 1
fi
echo "==> Sanity: CLI runs"
if [[ -n "$CMD_PATH" ]]; then
"$CMD_PATH" --help >/dev/null
else
node "$ENTRY_PATH" --help >/dev/null
fi
echo "OK"

View File

@@ -0,0 +1,24 @@
FROM node:22-bookworm-slim@sha256:3cfe526ec8dd62013b8843e8e5d4877e297b886e5aace4a59fec25dc20736e45
RUN set -eux; \
for attempt in 1 2 3; do \
if apt-get update -o Acquire::Retries=3; then break; fi; \
echo "apt-get update failed (attempt ${attempt})" >&2; \
if [ "${attempt}" -eq 3 ]; then exit 1; fi; \
sleep 3; \
done; \
apt-get -o Acquire::Retries=3 install -y --no-install-recommends \
bash \
ca-certificates \
curl \
git \
g++ \
make \
python3 \
sudo \
&& rm -rf /var/lib/apt/lists/*
COPY run.sh /usr/local/bin/openclaw-install-smoke
RUN chmod +x /usr/local/bin/openclaw-install-smoke
ENTRYPOINT ["/usr/local/bin/openclaw-install-smoke"]
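# Build/run sketch (tag and context path assumed):
#   docker build -t openclaw-install-smoke scripts/docker/install-smoke
#   docker run --rm -e OPENCLAW_INSTALL_SMOKE_SKIP_PREVIOUS=1 openclaw-install-smoke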

View File

@@ -0,0 +1,92 @@
#!/usr/bin/env bash
set -euo pipefail
INSTALL_URL="${OPENCLAW_INSTALL_URL:-https://openclaw.bot/install.sh}"
SMOKE_PREVIOUS_VERSION="${OPENCLAW_INSTALL_SMOKE_PREVIOUS:-}"
SKIP_PREVIOUS="${OPENCLAW_INSTALL_SMOKE_SKIP_PREVIOUS:-0}"
DEFAULT_PACKAGE="openclaw"
PACKAGE_NAME="${OPENCLAW_INSTALL_PACKAGE:-$DEFAULT_PACKAGE}"
echo "==> Resolve npm versions"
LATEST_VERSION="$(npm view "$PACKAGE_NAME" version)"
if [[ -n "$SMOKE_PREVIOUS_VERSION" ]]; then
PREVIOUS_VERSION="$SMOKE_PREVIOUS_VERSION"
else
VERSIONS_JSON="$(npm view "$PACKAGE_NAME" versions --json)"
PREVIOUS_VERSION="$(VERSIONS_JSON="$VERSIONS_JSON" LATEST_VERSION="$LATEST_VERSION" node - <<'NODE'
const raw = process.env.VERSIONS_JSON || "[]";
const latest = process.env.LATEST_VERSION || "";
let versions;
try {
versions = JSON.parse(raw);
} catch {
versions = raw ? [raw] : [];
}
if (!Array.isArray(versions)) {
versions = [versions];
}
if (versions.length === 0) {
process.exit(1);
}
const latestIndex = latest ? versions.lastIndexOf(latest) : -1;
if (latestIndex > 0) {
process.stdout.write(String(versions[latestIndex - 1]));
process.exit(0);
}
process.stdout.write(String(latest || versions[versions.length - 1]));
NODE
)"
fi
echo "package=$PACKAGE_NAME latest=$LATEST_VERSION previous=$PREVIOUS_VERSION"
if [[ "$SKIP_PREVIOUS" == "1" ]]; then
echo "==> Skip preinstall previous (OPENCLAW_INSTALL_SMOKE_SKIP_PREVIOUS=1)"
else
echo "==> Preinstall previous (forces installer upgrade path)"
npm install -g "${PACKAGE_NAME}@${PREVIOUS_VERSION}"
fi
echo "==> Run official installer one-liner"
curl -fsSL "$INSTALL_URL" | bash
echo "==> Verify installed version"
CLI_NAME="$PACKAGE_NAME"
CMD_PATH="$(command -v "$CLI_NAME" || true)"
if [[ -z "$CMD_PATH" && -x "$HOME/.npm-global/bin/$PACKAGE_NAME" ]]; then
CMD_PATH="$HOME/.npm-global/bin/$PACKAGE_NAME"
fi
ENTRY_PATH=""
if [[ -z "$CMD_PATH" ]]; then
NPM_ROOT="$(npm root -g 2>/dev/null || true)"
if [[ -n "$NPM_ROOT" && -f "$NPM_ROOT/$PACKAGE_NAME/dist/entry.js" ]]; then
ENTRY_PATH="$NPM_ROOT/$PACKAGE_NAME/dist/entry.js"
fi
fi
if [[ -z "$CMD_PATH" && -z "$ENTRY_PATH" ]]; then
echo "ERROR: $PACKAGE_NAME is not on PATH" >&2
exit 1
fi
if [[ -n "${OPENCLAW_INSTALL_LATEST_OUT:-}" ]]; then
printf "%s" "$LATEST_VERSION" > "${OPENCLAW_INSTALL_LATEST_OUT:-}"
fi
if [[ -n "$CMD_PATH" ]]; then
INSTALLED_VERSION="$("$CMD_PATH" --version 2>/dev/null | head -n 1 | tr -d '\r')"
else
INSTALLED_VERSION="$(node "$ENTRY_PATH" --version 2>/dev/null | head -n 1 | tr -d '\r')"
fi
echo "cli=$CLI_NAME installed=$INSTALLED_VERSION expected=$LATEST_VERSION"
if [[ "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
echo "ERROR: expected ${CLI_NAME}@${LATEST_VERSION}, got ${CLI_NAME}@${INSTALLED_VERSION}" >&2
exit 1
fi
echo "==> Sanity: CLI runs"
if [[ -n "$CMD_PATH" ]]; then
"$CMD_PATH" --help >/dev/null
else
node "$ENTRY_PATH" --help >/dev/null
fi
echo "OK"

View File

@@ -0,0 +1,272 @@
package main
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"gopkg.in/yaml.v3"
)
const (
frontmatterTagStart = "<frontmatter>"
frontmatterTagEnd = "</frontmatter>"
bodyTagStart = "<body>"
bodyTagEnd = "</body>"
)
func processFileDoc(ctx context.Context, translator *PiTranslator, docsRoot, filePath, srcLang, tgtLang string, overwrite bool) (bool, error) {
absPath, relPath, err := resolveDocsPath(docsRoot, filePath)
if err != nil {
return false, err
}
content, err := os.ReadFile(absPath)
if err != nil {
return false, err
}
currentHash := hashBytes(content)
outputPath := filepath.Join(docsRoot, tgtLang, relPath)
if !overwrite {
skip, err := shouldSkipDoc(outputPath, currentHash)
if err != nil {
return false, err
}
if skip {
return true, nil
}
}
sourceFront, sourceBody := splitFrontMatter(string(content))
frontData := map[string]any{}
if strings.TrimSpace(sourceFront) != "" {
if err := yaml.Unmarshal([]byte(sourceFront), &frontData); err != nil {
return false, fmt.Errorf("frontmatter parse failed for %s: %w", relPath, err)
}
}
frontTemplate, markers := buildFrontmatterTemplate(frontData)
taggedInput := formatTaggedDocument(frontTemplate, sourceBody)
translatedDoc, err := translator.TranslateRaw(ctx, taggedInput, srcLang, tgtLang)
if err != nil {
return false, fmt.Errorf("translate failed (%s): %w", relPath, err)
}
translatedFront, translatedBody, err := parseTaggedDocument(translatedDoc)
if err != nil {
return false, fmt.Errorf("tagged output invalid for %s: %w", relPath, err)
}
if sourceFront != "" && strings.TrimSpace(translatedFront) == "" {
return false, fmt.Errorf("translation removed frontmatter for %s", relPath)
}
if err := applyFrontmatterTranslations(frontData, markers, translatedFront); err != nil {
return false, fmt.Errorf("frontmatter translation failed for %s: %w", relPath, err)
}
updatedFront, err := encodeFrontMatter(frontData, relPath, content)
if err != nil {
return false, err
}
if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil {
return false, err
}
output := updatedFront + translatedBody
return false, os.WriteFile(outputPath, []byte(output), 0o644)
}
func formatTaggedDocument(frontMatter, body string) string {
return fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s", frontmatterTagStart, frontMatter, frontmatterTagEnd, bodyTagStart, body, bodyTagEnd)
}
func parseTaggedDocument(text string) (string, string, error) {
frontStart := strings.Index(text, frontmatterTagStart)
if frontStart == -1 {
return "", "", fmt.Errorf("missing %s", frontmatterTagStart)
}
frontStart += len(frontmatterTagStart)
frontEnd := strings.Index(text[frontStart:], frontmatterTagEnd)
if frontEnd == -1 {
return "", "", fmt.Errorf("missing %s", frontmatterTagEnd)
}
frontEnd += frontStart
bodyStart := strings.Index(text[frontEnd:], bodyTagStart)
if bodyStart == -1 {
return "", "", fmt.Errorf("missing %s", bodyTagStart)
}
bodyStart += frontEnd + len(bodyTagStart)
bodyEnd := strings.Index(text[bodyStart:], bodyTagEnd)
if bodyEnd == -1 {
return "", "", fmt.Errorf("missing %s", bodyTagEnd)
}
bodyEnd += bodyStart
prefix := strings.TrimSpace(text[:frontStart-len(frontmatterTagStart)])
suffix := strings.TrimSpace(text[bodyEnd+len(bodyTagEnd):])
if prefix != "" || suffix != "" {
return "", "", fmt.Errorf("unexpected text outside tagged sections")
}
frontMatter := trimTagNewlines(text[frontStart:frontEnd])
body := trimTagNewlines(text[bodyStart:bodyEnd])
return frontMatter, body, nil
}
func trimTagNewlines(value string) string {
value = strings.TrimPrefix(value, "\n")
value = strings.TrimSuffix(value, "\n")
return value
}
type frontmatterMarker struct {
Field string
Index int
Start string
End string
}
func buildFrontmatterTemplate(data map[string]any) (string, []frontmatterMarker) {
if len(data) == 0 {
return "", nil
}
markers := []frontmatterMarker{}
lines := []string{}
if summary, ok := data["summary"].(string); ok {
start, end := markerPair("SUMMARY", 0)
markers = append(markers, frontmatterMarker{Field: "summary", Index: 0, Start: start, End: end})
lines = append(lines, fmt.Sprintf("summary: %s%s%s", start, summary, end))
}
if title, ok := data["title"].(string); ok {
start, end := markerPair("TITLE", 0)
markers = append(markers, frontmatterMarker{Field: "title", Index: 0, Start: start, End: end})
lines = append(lines, fmt.Sprintf("title: %s%s%s", start, title, end))
}
if readWhen, ok := data["read_when"].([]any); ok {
lines = append(lines, "read_when:")
for idx, item := range readWhen {
textValue, ok := item.(string)
if !ok {
lines = append(lines, fmt.Sprintf(" - %v", item))
continue
}
start, end := markerPair("READ_WHEN", idx)
markers = append(markers, frontmatterMarker{Field: "read_when", Index: idx, Start: start, End: end})
lines = append(lines, fmt.Sprintf(" - %s%s%s", start, textValue, end))
}
}
return strings.Join(lines, "\n"), markers
}
func markerPair(field string, index int) (string, string) {
return fmt.Sprintf("[[[FM_%s_%d_START]]]", field, index), fmt.Sprintf("[[[FM_%s_%d_END]]]", field, index)
}
func applyFrontmatterTranslations(data map[string]any, markers []frontmatterMarker, translatedFront string) error {
if len(markers) == 0 {
return nil
}
for _, marker := range markers {
value, err := extractMarkerValue(translatedFront, marker.Start, marker.End)
if err != nil {
return err
}
value = strings.TrimSpace(value)
switch marker.Field {
case "summary":
data["summary"] = value
case "title":
data["title"] = value
case "read_when":
data["read_when"] = setReadWhenValue(data["read_when"], marker.Index, value)
}
}
return nil
}
func extractMarkerValue(text, start, end string) (string, error) {
startIndex := strings.Index(text, start)
if startIndex == -1 {
return "", fmt.Errorf("missing marker %s", start)
}
startIndex += len(start)
endIndex := strings.Index(text[startIndex:], end)
if endIndex == -1 {
return "", fmt.Errorf("missing marker %s", end)
}
endIndex += startIndex
return text[startIndex:endIndex], nil
}
func setReadWhenValue(existing any, index int, value string) []any {
readWhen, ok := existing.([]any)
if !ok {
readWhen = []any{}
}
for len(readWhen) <= index {
readWhen = append(readWhen, "")
}
readWhen[index] = value
return readWhen
}
func shouldSkipDoc(outputPath string, sourceHash string) (bool, error) {
data, err := os.ReadFile(outputPath)
if err != nil {
if os.IsNotExist(err) {
return false, nil
}
return false, err
}
frontMatter, _ := splitFrontMatter(string(data))
if frontMatter == "" {
return false, nil
}
frontData := map[string]any{}
if err := yaml.Unmarshal([]byte(frontMatter), &frontData); err != nil {
return false, nil
}
storedHash := extractSourceHash(frontData)
if storedHash == "" {
return false, nil
}
return strings.EqualFold(storedHash, sourceHash), nil
}
func extractSourceHash(frontData map[string]any) string {
xi, ok := frontData["x-i18n"].(map[string]any)
if !ok {
return ""
}
value, ok := xi["source_hash"].(string)
if !ok {
return ""
}
return strings.TrimSpace(value)
}
func resolveDocsPath(docsRoot, filePath string) (string, string, error) {
absPath, err := filepath.Abs(filePath)
if err != nil {
return "", "", err
}
relPath, err := filepath.Rel(docsRoot, absPath)
if err != nil {
return "", "", err
}
if relPath == "." || relPath == "" {
return "", "", fmt.Errorf("file %s resolves to docs root %s", absPath, docsRoot)
}
if filepath.IsAbs(relPath) || relPath == ".." || strings.HasPrefix(relPath, ".."+string(filepath.Separator)) {
return "", "", fmt.Errorf("file %s not under docs root %s", absPath, docsRoot)
}
return absPath, relPath, nil
}
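
For reference, the tagged wire format that formatTaggedDocument emits and parseTaggedDocument expects back looks like this hypothetical sample (field values are illustrative; the [[[FM_*]]] markers come from buildFrontmatterTemplate above):

<frontmatter>
summary: [[[FM_SUMMARY_0_START]]]One-line page summary[[[FM_SUMMARY_0_END]]]
title: [[[FM_TITLE_0_START]]]Page title[[[FM_TITLE_0_END]]]
</frontmatter>
<body>
Body prose that the model should translate.
</body>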


@@ -0,0 +1,29 @@
package main
import (
"encoding/json"
"errors"
"fmt"
"os"
)
type GlossaryEntry struct {
Source string `json:"source"`
Target string `json:"target"`
}
func LoadGlossary(path string) ([]GlossaryEntry, error) {
data, err := os.ReadFile(path)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
return nil, nil
}
return nil, err
}
var entries []GlossaryEntry
if err := json.Unmarshal(data, &entries); err != nil {
return nil, fmt.Errorf("glossary parse failed: %w", err)
}
return entries, nil
}
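
The glossary file loaded here is a flat JSON array of these entries; a minimal hypothetical docs/.i18n/glossary.zh-CN.json (pairs illustrative):

[
{"source": "session", "target": "会话"},
{"source": "sandbox", "target": "沙箱"}
]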


@@ -0,0 +1,10 @@
module github.com/openclaw/openclaw/scripts/docs-i18n
go 1.24.0
require (
github.com/joshp123/pi-golang v0.0.4
github.com/yuin/goldmark v1.7.8
golang.org/x/net v0.50.0
gopkg.in/yaml.v3 v3.0.1
)


@@ -0,0 +1,10 @@
github.com/joshp123/pi-golang v0.0.4 h1:82HISyKNN8bIl2lvAd65462LVCQIsjhaUFQxyQgg5Xk=
github.com/joshp123/pi-golang v0.0.4/go.mod h1:9mHEQkeJELYzubXU3b86/T8yedI/iAOKx0Tz0c41qes=
github.com/yuin/goldmark v1.7.8 h1:iERMLn0/QJeHFhxSt3p6PeN9mGnvIKSpG9YYorDMnic=
github.com/yuin/goldmark v1.7.8/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=


@@ -0,0 +1,160 @@
package main
import (
"context"
"io"
"strings"
"github.com/yuin/goldmark"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/extension"
"github.com/yuin/goldmark/text"
"golang.org/x/net/html"
"sort"
)
type htmlReplacement struct {
Start int
Stop int
Value string
}
func translateHTMLBlocks(ctx context.Context, translator *PiTranslator, body, srcLang, tgtLang string) (string, error) {
source := []byte(body)
r := text.NewReader(source)
md := goldmark.New(
goldmark.WithExtensions(extension.GFM),
)
doc := md.Parser().Parse(r)
replacements := make([]htmlReplacement, 0, 8)
walkErr := ast.Walk(doc, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
if !entering {
return ast.WalkContinue, nil
}
block, ok := n.(*ast.HTMLBlock)
if !ok {
return ast.WalkContinue, nil
}
start, stop, ok := htmlBlockSpan(block, source)
if !ok {
return ast.WalkSkipChildren, nil
}
htmlText := string(source[start:stop])
translated, err := translateHTMLBlock(ctx, translator, htmlText, srcLang, tgtLang)
if err != nil {
return ast.WalkStop, err
}
replacements = append(replacements, htmlReplacement{Start: start, Stop: stop, Value: translated})
return ast.WalkSkipChildren, nil
})
if walkErr != nil {
return "", walkErr
}
if len(replacements) == 0 {
return body, nil
}
return applyHTMLReplacements(body, replacements), nil
}
func htmlBlockSpan(block *ast.HTMLBlock, source []byte) (int, int, bool) {
lines := block.Lines()
if lines.Len() == 0 {
return 0, 0, false
}
start := lines.At(0).Start
stop := lines.At(lines.Len() - 1).Stop
if start >= stop {
return 0, 0, false
}
return start, stop, true
}
func applyHTMLReplacements(body string, replacements []htmlReplacement) string {
if len(replacements) == 0 {
return body
}
sortHTMLReplacements(replacements)
var out strings.Builder
last := 0
for _, rep := range replacements {
if rep.Start < last {
continue
}
out.WriteString(body[last:rep.Start])
out.WriteString(rep.Value)
last = rep.Stop
}
out.WriteString(body[last:])
return out.String()
}
func sortHTMLReplacements(replacements []htmlReplacement) {
sort.Slice(replacements, func(i, j int) bool {
return replacements[i].Start < replacements[j].Start
})
}
func translateHTMLBlock(ctx context.Context, translator *PiTranslator, htmlText, srcLang, tgtLang string) (string, error) {
tokenizer := html.NewTokenizer(strings.NewReader(htmlText))
var out strings.Builder
skipDepth := 0
for {
tt := tokenizer.Next()
if tt == html.ErrorToken {
if err := tokenizer.Err(); err != nil && err != io.EOF {
return "", err
}
break
}
raw := string(tokenizer.Raw())
tok := tokenizer.Token()
switch tt {
case html.StartTagToken:
out.WriteString(raw)
if isSkipTag(strings.ToLower(tok.Data)) {
skipDepth++
}
case html.EndTagToken:
out.WriteString(raw)
if isSkipTag(strings.ToLower(tok.Data)) && skipDepth > 0 {
skipDepth--
}
case html.SelfClosingTagToken:
out.WriteString(raw)
case html.TextToken:
if shouldTranslateHTMLText(skipDepth, raw) {
translated, err := translator.Translate(ctx, raw, srcLang, tgtLang)
if err != nil {
return "", err
}
out.WriteString(translated)
} else {
out.WriteString(raw)
}
default:
out.WriteString(raw)
}
}
return out.String(), nil
}
func shouldTranslateHTMLText(skipDepth int, text string) bool {
if strings.TrimSpace(text) == "" {
return false
}
return skipDepth == 0
}
func isSkipTag(tag string) bool {
switch tag {
case "code", "pre", "script", "style":
return true
default:
return false
}
}
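
In effect only text nodes outside code, pre, script, and style are sent to the translator, while tags and attributes pass through untouched; a hypothetical before/after (the Chinese rendering is illustrative):

input:  <div>Click <code>Run</code> to start.</div>
output: <div>点击 <code>Run</code> 开始。</div>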


@@ -0,0 +1,273 @@
package main
import (
"context"
"flag"
"fmt"
"log"
"os"
"path/filepath"
"sync"
"time"
)
type docJob struct {
index int
path string
rel string
}
type docResult struct {
index int
rel string
duration time.Duration
skipped bool
err error
}
func main() {
var (
targetLang = flag.String("lang", "zh-CN", "target language (e.g., zh-CN)")
sourceLang = flag.String("src", "en", "source language")
docsRoot = flag.String("docs", "docs", "docs root")
tmPath = flag.String("tm", "", "translation memory path")
mode = flag.String("mode", "segment", "translation mode (segment|doc)")
thinking = flag.String("thinking", "high", "thinking level (low|high)")
overwrite = flag.Bool("overwrite", false, "overwrite existing translations")
maxFiles = flag.Int("max", 0, "max files to process (0 = all)")
parallel = flag.Int("parallel", 1, "parallel workers for doc mode")
)
flag.Parse()
files := flag.Args()
if len(files) == 0 {
fatal(fmt.Errorf("no doc files provided"))
}
resolvedDocsRoot, err := filepath.Abs(*docsRoot)
if err != nil {
fatal(err)
}
if *tmPath == "" {
*tmPath = filepath.Join(resolvedDocsRoot, ".i18n", fmt.Sprintf("%s.tm.jsonl", *targetLang))
}
glossaryPath := filepath.Join(resolvedDocsRoot, ".i18n", fmt.Sprintf("glossary.%s.json", *targetLang))
glossary, err := LoadGlossary(glossaryPath)
if err != nil {
fatal(err)
}
translator, err := NewPiTranslator(*sourceLang, *targetLang, glossary, *thinking)
if err != nil {
fatal(err)
}
defer translator.Close()
tm, err := LoadTranslationMemory(*tmPath)
if err != nil {
fatal(err)
}
ordered, err := orderFiles(resolvedDocsRoot, files)
if err != nil {
fatal(err)
}
totalFiles := len(ordered)
preSkipped := 0
if *mode == "doc" && !*overwrite {
filtered, skipped, err := filterDocQueue(resolvedDocsRoot, *targetLang, ordered)
if err != nil {
fatal(err)
}
ordered = filtered
preSkipped = skipped
}
if *maxFiles > 0 && *maxFiles < len(ordered) {
ordered = ordered[:*maxFiles]
}
log.SetFlags(log.LstdFlags)
start := time.Now()
processed := 0
skipped := 0
if *parallel < 1 {
*parallel = 1
}
log.Printf("docs-i18n: mode=%s total=%d pending=%d pre_skipped=%d overwrite=%t thinking=%s parallel=%d", *mode, totalFiles, len(ordered), preSkipped, *overwrite, *thinking, *parallel)
switch *mode {
case "doc":
if *parallel > 1 {
proc, skip, err := runDocParallel(context.Background(), ordered, resolvedDocsRoot, *sourceLang, *targetLang, *overwrite, *parallel, glossary, *thinking)
if err != nil {
fatal(err)
}
processed += proc
skipped += skip
} else {
proc, skip, err := runDocSequential(context.Background(), ordered, translator, resolvedDocsRoot, *sourceLang, *targetLang, *overwrite)
if err != nil {
fatal(err)
}
processed += proc
skipped += skip
}
case "segment":
if *parallel > 1 {
fatal(fmt.Errorf("parallel processing is only supported in doc mode"))
}
proc, err := runSegmentSequential(context.Background(), ordered, translator, tm, resolvedDocsRoot, *sourceLang, *targetLang)
if err != nil {
fatal(err)
}
processed += proc
default:
fatal(fmt.Errorf("unknown mode: %s", *mode))
}
if err := tm.Save(); err != nil {
fatal(err)
}
elapsed := time.Since(start).Round(time.Millisecond)
log.Printf("docs-i18n: completed processed=%d skipped=%d elapsed=%s", processed, skipped, elapsed)
}
func runDocSequential(ctx context.Context, ordered []string, translator *PiTranslator, docsRoot, srcLang, tgtLang string, overwrite bool) (int, int, error) {
processed := 0
skipped := 0
for index, file := range ordered {
relPath := resolveRelPath(docsRoot, file)
log.Printf("docs-i18n: [%d/%d] start %s", index+1, len(ordered), relPath)
start := time.Now()
skip, err := processFileDoc(ctx, translator, docsRoot, file, srcLang, tgtLang, overwrite)
if err != nil {
return processed, skipped, err
}
if skip {
skipped++
log.Printf("docs-i18n: [%d/%d] skipped %s (%s)", index+1, len(ordered), relPath, time.Since(start).Round(time.Millisecond))
} else {
processed++
log.Printf("docs-i18n: [%d/%d] done %s (%s)", index+1, len(ordered), relPath, time.Since(start).Round(time.Millisecond))
}
}
return processed, skipped, nil
}
func runDocParallel(ctx context.Context, ordered []string, docsRoot, srcLang, tgtLang string, overwrite bool, parallel int, glossary []GlossaryEntry, thinking string) (int, int, error) {
jobs := make(chan docJob)
results := make(chan docResult, len(ordered))
ctx, cancel := context.WithCancel(ctx)
defer cancel()
var wg sync.WaitGroup
for worker := 0; worker < parallel; worker++ {
wg.Add(1)
go func(workerID int) {
defer wg.Done()
translator, err := NewPiTranslator(srcLang, tgtLang, glossary, thinking)
if err != nil {
results <- docResult{err: err}
return
}
defer translator.Close()
for job := range jobs {
if ctx.Err() != nil {
return
}
log.Printf("docs-i18n: [w%d %d/%d] start %s", workerID, job.index, len(ordered), job.rel)
start := time.Now()
skip, err := processFileDoc(ctx, translator, docsRoot, job.path, srcLang, tgtLang, overwrite)
results <- docResult{
index: job.index,
rel: job.rel,
duration: time.Since(start),
skipped: skip,
err: err,
}
if err != nil {
cancel()
return
}
}
}(worker + 1)
}
go func() {
for index, file := range ordered {
jobs <- docJob{index: index + 1, path: file, rel: resolveRelPath(docsRoot, file)}
}
close(jobs)
}()
processed := 0
skipped := 0
for i := 0; i < len(ordered); i++ {
result := <-results
if result.err != nil {
wg.Wait()
return processed, skipped, result.err
}
if result.skipped {
skipped++
log.Printf("docs-i18n: [w* %d/%d] skipped %s (%s)", result.index, len(ordered), result.rel, result.duration.Round(time.Millisecond))
} else {
processed++
log.Printf("docs-i18n: [w* %d/%d] done %s (%s)", result.index, len(ordered), result.rel, result.duration.Round(time.Millisecond))
}
}
wg.Wait()
return processed, skipped, nil
}
func runSegmentSequential(ctx context.Context, ordered []string, translator *PiTranslator, tm *TranslationMemory, docsRoot, srcLang, tgtLang string) (int, error) {
processed := 0
for index, file := range ordered {
relPath := resolveRelPath(docsRoot, file)
log.Printf("docs-i18n: [%d/%d] start %s", index+1, len(ordered), relPath)
start := time.Now()
if _, err := processFile(ctx, translator, tm, docsRoot, file, srcLang, tgtLang); err != nil {
return processed, err
}
processed++
log.Printf("docs-i18n: [%d/%d] done %s (%s)", index+1, len(ordered), relPath, time.Since(start).Round(time.Millisecond))
}
return processed, nil
}
func resolveRelPath(docsRoot, file string) string {
relPath := file
if _, rel, err := resolveDocsPath(docsRoot, file); err == nil {
relPath = rel
}
return relPath
}
func filterDocQueue(docsRoot, targetLang string, ordered []string) ([]string, int, error) {
pending := make([]string, 0, len(ordered))
skipped := 0
for _, file := range ordered {
absPath, relPath, err := resolveDocsPath(docsRoot, file)
if err != nil {
return nil, skipped, err
}
content, err := os.ReadFile(absPath)
if err != nil {
return nil, skipped, err
}
sourceHash := hashBytes(content)
outputPath := filepath.Join(docsRoot, targetLang, relPath)
skip, err := shouldSkipDoc(outputPath, sourceHash)
if err != nil {
return nil, skipped, err
}
if skip {
skipped++
continue
}
pending = append(pending, file)
}
return pending, skipped, nil
}
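
Putting the flags together, a doc-mode run from the repo root might look like this sketch (binary path and input file are illustrative):

(cd scripts/docs-i18n && go build -o /tmp/docs-i18n .)
/tmp/docs-i18n -lang zh-CN -mode doc -thinking high -parallel 4 docs/index.md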


@@ -0,0 +1,131 @@
package main
import (
"sort"
"strings"
"github.com/yuin/goldmark"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/extension"
"github.com/yuin/goldmark/text"
)
func extractSegments(body, relPath string) ([]Segment, error) {
source := []byte(body)
r := text.NewReader(source)
md := goldmark.New(
goldmark.WithExtensions(extension.GFM),
)
doc := md.Parser().Parse(r)
segments := make([]Segment, 0, 128)
skipDepth := 0
var lastBlock ast.Node
err := ast.Walk(doc, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
switch n.(type) {
case *ast.CodeBlock, *ast.FencedCodeBlock, *ast.CodeSpan, *ast.HTMLBlock, *ast.RawHTML:
if entering {
skipDepth++
} else {
skipDepth--
}
return ast.WalkContinue, nil
}
if !entering || skipDepth > 0 {
return ast.WalkContinue, nil
}
textNode, ok := n.(*ast.Text)
if !ok {
return ast.WalkContinue, nil
}
block := blockParent(textNode)
if block == nil {
return ast.WalkContinue, nil
}
textValue := string(textNode.Segment.Value(source))
if strings.TrimSpace(textValue) == "" {
return ast.WalkContinue, nil
}
start := textNode.Segment.Start
stop := textNode.Segment.Stop
if len(segments) > 0 && lastBlock == block {
last := &segments[len(segments)-1]
gap := string(source[last.Stop:start])
if strings.TrimSpace(gap) == "" {
last.Stop = stop
return ast.WalkContinue, nil
}
}
segments = append(segments, Segment{Start: start, Stop: stop})
lastBlock = block
return ast.WalkContinue, nil
})
if err != nil {
return nil, err
}
filtered := make([]Segment, 0, len(segments))
for _, seg := range segments {
textValue := string(source[seg.Start:seg.Stop])
trimmed := strings.TrimSpace(textValue)
if trimmed == "" {
continue
}
textHash := hashText(textValue)
segID := segmentID(relPath, textHash)
filtered = append(filtered, Segment{
Start: seg.Start,
Stop: seg.Stop,
Text: textValue,
TextHash: textHash,
SegmentID: segID,
})
}
sort.Slice(filtered, func(i, j int) bool {
return filtered[i].Start < filtered[j].Start
})
return filtered, nil
}
func blockParent(n ast.Node) ast.Node {
for node := n.Parent(); node != nil; node = node.Parent() {
if isTranslatableBlock(node) {
return node
}
}
return nil
}
func isTranslatableBlock(n ast.Node) bool {
switch n.(type) {
case *ast.Paragraph, *ast.Heading, *ast.ListItem:
return true
default:
return false
}
}
func applyTranslations(body string, segments []Segment) string {
if len(segments) == 0 {
return body
}
var out strings.Builder
last := 0
for _, seg := range segments {
if seg.Start < last {
continue
}
out.WriteString(body[last:seg.Start])
out.WriteString(seg.Translated)
last = seg.Stop
}
out.WriteString(body[last:])
return out.String()
}


@@ -0,0 +1,89 @@
package main
import (
"fmt"
"regexp"
"strings"
)
var (
inlineCodeRe = regexp.MustCompile("`[^`]+`")
angleLinkRe = regexp.MustCompile(`<https?://[^>]+>`)
linkURLRe = regexp.MustCompile(`\[[^\]]*\]\(([^)]+)\)`)
placeholderRe = regexp.MustCompile(`__OC_I18N_\d+__`)
)
func maskMarkdown(text string, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
masked := maskMatches(text, inlineCodeRe, nextPlaceholder, placeholders, mapping)
masked = maskMatches(masked, angleLinkRe, nextPlaceholder, placeholders, mapping)
masked = maskLinkURLs(masked, nextPlaceholder, placeholders, mapping)
return masked
}
func maskMatches(text string, re *regexp.Regexp, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
matches := re.FindAllStringIndex(text, -1)
if len(matches) == 0 {
return text
}
var out strings.Builder
pos := 0
for _, span := range matches {
start, end := span[0], span[1]
if start < pos {
continue
}
out.WriteString(text[pos:start])
placeholder := nextPlaceholder()
mapping[placeholder] = text[start:end]
*placeholders = append(*placeholders, placeholder)
out.WriteString(placeholder)
pos = end
}
out.WriteString(text[pos:])
return out.String()
}
func maskLinkURLs(text string, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
matches := linkURLRe.FindAllStringSubmatchIndex(text, -1)
if len(matches) == 0 {
return text
}
var out strings.Builder
pos := 0
for _, span := range matches {
fullStart := span[0]
urlStart, urlEnd := span[2], span[3]
if urlStart < 0 || urlEnd < 0 {
continue
}
if fullStart < pos {
continue
}
out.WriteString(text[pos:urlStart])
placeholder := nextPlaceholder()
mapping[placeholder] = text[urlStart:urlEnd]
*placeholders = append(*placeholders, placeholder)
out.WriteString(placeholder)
pos = urlEnd
}
out.WriteString(text[pos:])
return out.String()
}
func unmaskMarkdown(text string, placeholders []string, mapping map[string]string) string {
out := text
for _, placeholder := range placeholders {
original := mapping[placeholder]
out = strings.ReplaceAll(out, placeholder, original)
}
return out
}
func validatePlaceholders(text string, placeholders []string) error {
for _, placeholder := range placeholders {
if !strings.Contains(text, placeholder) {
return fmt.Errorf("placeholder missing: %s", placeholder)
}
}
return nil
}


@@ -0,0 +1,37 @@
package main
import (
"path/filepath"
"sort"
)
type orderedFile struct {
path string
rel string
}
func orderFiles(docsRoot string, files []string) ([]string, error) {
entries := make([]orderedFile, 0, len(files))
for _, file := range files {
abs, err := filepath.Abs(file)
if err != nil {
return nil, err
}
rel, err := filepath.Rel(docsRoot, abs)
if err != nil {
rel = abs
}
entries = append(entries, orderedFile{path: file, rel: rel})
}
if len(entries) == 0 {
return nil, nil
}
sort.Slice(entries, func(i, j int) bool {
return entries[i].rel < entries[j].rel
})
ordered := make([]string, 0, len(entries))
for _, entry := range entries {
ordered = append(ordered, entry.path)
}
return ordered, nil
}


@@ -0,0 +1,30 @@
package main
import (
"fmt"
)
type PlaceholderState struct {
counter int
used map[string]struct{}
}
func NewPlaceholderState(text string) *PlaceholderState {
used := map[string]struct{}{}
for _, hit := range placeholderRe.FindAllString(text, -1) {
used[hit] = struct{}{}
}
return &PlaceholderState{counter: 900000, used: used}
}
func (s *PlaceholderState) Next() string {
for {
candidate := fmt.Sprintf("__OC_I18N_%d__", s.counter)
s.counter++
if _, ok := s.used[candidate]; ok {
continue
}
s.used[candidate] = struct{}{}
return candidate
}
}
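
To make the masking concrete, a hypothetical before/after for one segment (placeholder numbering starts at 900000, and inline code is masked before link URLs):

input:  Use `openclaw send` to reach [the docs](/start/setup).
masked: Use __OC_I18N_900000__ to reach [the docs](__OC_I18N_900001__).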


@@ -0,0 +1,202 @@
package main
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"gopkg.in/yaml.v3"
)
func processFile(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, docsRoot, filePath, srcLang, tgtLang string) (bool, error) {
absPath, relPath, err := resolveDocsPath(docsRoot, filePath)
if err != nil {
return false, err
}
content, err := os.ReadFile(absPath)
if err != nil {
return false, err
}
frontMatter, body := splitFrontMatter(string(content))
frontData := map[string]any{}
if frontMatter != "" {
if err := yaml.Unmarshal([]byte(frontMatter), &frontData); err != nil {
return false, fmt.Errorf("frontmatter parse failed for %s: %w", relPath, err)
}
}
if err := translateFrontMatter(ctx, translator, tm, frontData, relPath, srcLang, tgtLang); err != nil {
return false, err
}
body, err = translateHTMLBlocks(ctx, translator, body, srcLang, tgtLang)
if err != nil {
return false, err
}
segments, err := extractSegments(body, relPath)
if err != nil {
return false, err
}
namespace := cacheNamespace()
for i := range segments {
seg := &segments[i]
seg.CacheKey = cacheKey(namespace, srcLang, tgtLang, seg.SegmentID, seg.TextHash)
if entry, ok := tm.Get(seg.CacheKey); ok {
seg.Translated = entry.Translated
continue
}
translated, err := translator.Translate(ctx, seg.Text, srcLang, tgtLang)
if err != nil {
return false, fmt.Errorf("translate failed (%s): %w", relPath, err)
}
seg.Translated = translated
entry := TMEntry{
CacheKey: seg.CacheKey,
SegmentID: seg.SegmentID,
SourcePath: relPath,
TextHash: seg.TextHash,
Text: seg.Text,
Translated: translated,
Provider: providerName,
Model: modelVersion,
SrcLang: srcLang,
TgtLang: tgtLang,
UpdatedAt: time.Now().UTC().Format(time.RFC3339),
}
tm.Put(entry)
}
translatedBody := applyTranslations(body, segments)
updatedFront, err := encodeFrontMatter(frontData, relPath, content)
if err != nil {
return false, err
}
outputPath := filepath.Join(docsRoot, tgtLang, relPath)
if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil {
return false, err
}
output := updatedFront + translatedBody
return false, os.WriteFile(outputPath, []byte(output), 0o644)
}
func splitFrontMatter(content string) (string, string) {
if !strings.HasPrefix(content, "---") {
return "", content
}
lines := strings.Split(content, "\n")
if len(lines) < 2 {
return "", content
}
endIndex := -1
for i := 1; i < len(lines); i++ {
if strings.TrimSpace(lines[i]) == "---" {
endIndex = i
break
}
}
if endIndex == -1 {
return "", content
}
front := strings.Join(lines[1:endIndex], "\n")
body := strings.Join(lines[endIndex+1:], "\n")
if strings.HasPrefix(body, "\n") {
body = body[1:]
}
return front, body
}
func encodeFrontMatter(frontData map[string]any, relPath string, source []byte) (string, error) {
if frontData == nil {
frontData = map[string]any{}
}
frontData["x-i18n"] = map[string]any{
"source_path": relPath,
"source_hash": hashBytes(source),
"provider": providerName,
"model": modelVersion,
"workflow": workflowVersion,
"generated_at": time.Now().UTC().Format(time.RFC3339),
}
encoded, err := yaml.Marshal(frontData)
if err != nil {
return "", err
}
return fmt.Sprintf("---\n%s---\n\n", string(encoded)), nil
}
func translateFrontMatter(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, data map[string]any, relPath, srcLang, tgtLang string) error {
if len(data) == 0 {
return nil
}
if summary, ok := data["summary"].(string); ok {
translated, err := translateSnippet(ctx, translator, tm, relPath+":frontmatter:summary", summary, srcLang, tgtLang)
if err != nil {
return err
}
data["summary"] = translated
}
if title, ok := data["title"].(string); ok {
translated, err := translateSnippet(ctx, translator, tm, relPath+":frontmatter:title", title, srcLang, tgtLang)
if err != nil {
return err
}
data["title"] = translated
}
if readWhen, ok := data["read_when"].([]any); ok {
translated := make([]any, 0, len(readWhen))
for idx, item := range readWhen {
textValue, ok := item.(string)
if !ok {
translated = append(translated, item)
continue
}
value, err := translateSnippet(ctx, translator, tm, fmt.Sprintf("%s:frontmatter:read_when:%d", relPath, idx), textValue, srcLang, tgtLang)
if err != nil {
return err
}
translated = append(translated, value)
}
data["read_when"] = translated
}
return nil
}
func translateSnippet(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, segmentID, textValue, srcLang, tgtLang string) (string, error) {
if strings.TrimSpace(textValue) == "" {
return textValue, nil
}
namespace := cacheNamespace()
textHash := hashText(textValue)
ck := cacheKey(namespace, srcLang, tgtLang, segmentID, textHash)
if entry, ok := tm.Get(ck); ok {
return entry.Translated, nil
}
translated, err := translator.Translate(ctx, textValue, srcLang, tgtLang)
if err != nil {
return "", err
}
entry := TMEntry{
CacheKey: ck,
SegmentID: segmentID,
SourcePath: segmentID,
TextHash: textHash,
Text: textValue,
Translated: translated,
Provider: providerName,
Model: modelVersion,
SrcLang: srcLang,
TgtLang: tgtLang,
UpdatedAt: time.Now().UTC().Format(time.RFC3339),
}
tm.Put(entry)
return translated, nil
}
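
A generated file therefore begins with a header along these lines (yaml.Marshal sorts keys alphabetically; the hash, timestamp, paths, and translated values are illustrative):

---
summary: 翻译后的摘要
title: 翻译后的标题
x-i18n:
    generated_at: "2026-02-27T12:00:00Z"
    model: claude-opus-4-6
    provider: pi
    source_hash: <sha256 of the source file>
    source_path: start/index.md
    workflow: 15
---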

View File

@@ -0,0 +1,146 @@
package main
import (
"fmt"
"strings"
)
func prettyLanguageLabel(lang string) string {
trimmed := strings.TrimSpace(lang)
if trimmed == "" {
return lang
}
switch {
case strings.EqualFold(trimmed, "en"):
return "English"
case strings.EqualFold(trimmed, "zh-CN"):
return "Simplified Chinese"
case strings.EqualFold(trimmed, "ja-JP"):
return "Japanese"
default:
return trimmed
}
}
func translationPrompt(srcLang, tgtLang string, glossary []GlossaryEntry) string {
srcLabel := prettyLanguageLabel(srcLang)
tgtLabel := prettyLanguageLabel(tgtLang)
glossaryBlock := buildGlossaryPrompt(glossary)
switch {
case strings.EqualFold(tgtLang, "zh-CN"):
// Keep this prompt as stable as possible; it has lots of tuning baked into the wording.
return strings.TrimSpace(fmt.Sprintf(zhCNPromptTemplate, srcLabel, tgtLabel, glossaryBlock))
case strings.EqualFold(tgtLang, "ja-JP"):
return strings.TrimSpace(fmt.Sprintf(jaJPPromptTemplate, srcLabel, tgtLabel, glossaryBlock))
default:
return strings.TrimSpace(fmt.Sprintf(genericPromptTemplate, srcLabel, tgtLabel, glossaryBlock))
}
}
const zhCNPromptTemplate = `You are a translation function, not a chat assistant.
Translate from %s to %s.
Rules:
- Output ONLY the translated text. No preamble, no questions, no commentary.
- Translate all English prose; do not leave English unless it is code, a URL, or a product name.
- All prose must be Chinese. If any English sentence remains outside code/URLs/product names, it is wrong.
- If the input contains <frontmatter> and <body> tags, keep them exactly and output exactly one of each.
- Translate only the contents inside those tags.
- Preserve YAML structure inside <frontmatter>; translate only values.
- Preserve all [[[FM_*]]] markers exactly and translate only the text between each START/END pair.
- Translate headings/labels like "Exit codes" and "Optional scripts".
- Preserve Markdown syntax exactly (headings, lists, tables, emphasis).
- Preserve HTML tags and attributes exactly.
- Do not translate code spans/blocks, config keys, CLI flags, or env vars.
- Do not alter URLs or anchors.
- Preserve placeholders exactly: __OC_I18N_####__.
- Do not remove, reorder, or summarize content.
- Use fluent, idiomatic technical Chinese; avoid slang or jokes.
- Use neutral documentation tone; prefer “你/你的”, avoid “您/您的”.
- Insert a space between Latin characters and CJK text (W3C CLREQ), e.g., “Gateway 网关”, “Skills 配置”.
- Use Chinese quotation marks “ and ” for Chinese prose; keep ASCII quotes inside code spans/blocks or literal CLI/keys.
- Keep product names in English: OpenClaw, Pi, WhatsApp, Telegram, Discord, iMessage, Slack, Microsoft Teams, Google Chat, Signal.
- For the OpenClaw Gateway, use “Gateway 网关”.
- Keep these terms in English: Skills, local loopback, Tailscale.
- Never output an empty response; if unsure, return the source text unchanged.
%s
If the input is empty, output empty.
If the input contains only placeholders, output it unchanged.`
const jaJPPromptTemplate = `You are a translation function, not a chat assistant.
Translate from %s to %s.
Rules:
- Output ONLY the translated text. No preamble, no questions, no commentary.
- Translate all English prose; do not leave English unless it is code, a URL, or a product name.
- All prose must be Japanese. If any English sentence remains outside code/URLs/product names, it is wrong.
- If the input contains <frontmatter> and <body> tags, keep them exactly and output exactly one of each.
- Translate only the contents inside those tags.
- Preserve YAML structure inside <frontmatter>; translate only values.
- Preserve all [[[FM_*]]] markers exactly and translate only the text between each START/END pair.
- Translate headings/labels like "Exit codes" and "Optional scripts".
- Preserve Markdown syntax exactly (headings, lists, tables, emphasis).
- Preserve HTML tags and attributes exactly.
- Do not translate code spans/blocks, config keys, CLI flags, or env vars.
- Do not alter URLs or anchors.
- Preserve placeholders exactly: __OC_I18N_####__.
- Do not remove, reorder, or summarize content.
- Use fluent, idiomatic technical Japanese; avoid slang or jokes.
- Use neutral documentation tone; avoid overly formal honorifics (e.g., avoid “〜でございます”).
- Use Japanese quotation marks 「 and 」 for Japanese prose; keep ASCII quotes inside code spans/blocks or literal CLI/keys.
- Do not add or remove spacing around Latin text just because it borders Japanese; keep spacing stable unless required by Japanese grammar.
- Keep product names in English: OpenClaw, Pi, WhatsApp, Telegram, Discord, iMessage, Slack, Microsoft Teams, Google Chat, Signal.
- Keep these terms in English: Skills, local loopback, Tailscale.
- Never output an empty response; if unsure, return the source text unchanged.
%s
If the input is empty, output empty.
If the input contains only placeholders, output it unchanged.`
const genericPromptTemplate = `You are a translation function, not a chat assistant.
Translate from %s to %s.
Rules:
- Output ONLY the translated text. No preamble, no questions, no commentary.
- Translate all English prose; do not leave English unless it is code, a URL, or a product name.
- If any English sentence remains outside code/URLs/product names, it is likely wrong.
- If the input contains <frontmatter> and <body> tags, keep them exactly and output exactly one of each.
- Translate only the contents inside those tags.
- Preserve YAML structure inside <frontmatter>; translate only values.
- Preserve all [[[FM_*]]] markers exactly and translate only the text between each START/END pair.
- Translate headings/labels like "Exit codes" and "Optional scripts".
- Preserve Markdown syntax exactly (headings, lists, tables, emphasis).
- Preserve HTML tags and attributes exactly.
- Do not translate code spans/blocks, config keys, CLI flags, or env vars.
- Do not alter URLs or anchors.
- Preserve placeholders exactly: __OC_I18N_####__.
- Do not remove, reorder, or summarize content.
- Use fluent, idiomatic technical language in the target language; avoid slang or jokes.
- Use neutral documentation tone.
- Keep product names in English: OpenClaw, Pi, WhatsApp, Telegram, Discord, iMessage, Slack, Microsoft Teams, Google Chat, Signal.
- Keep these terms in English: Skills, local loopback, Tailscale.
- Never output an empty response; if unsure, return the source text unchanged.
%s
If the input is empty, output empty.
If the input contains only placeholders, output it unchanged.`
func buildGlossaryPrompt(glossary []GlossaryEntry) string {
if len(glossary) == 0 {
return ""
}
var lines []string
lines = append(lines, "Preferred translations (use when natural):")
for _, entry := range glossary {
if entry.Source == "" || entry.Target == "" {
continue
}
lines = append(lines, fmt.Sprintf("- %s -> %s", entry.Source, entry.Target))
}
return strings.Join(lines, "\n")
}


@@ -0,0 +1,11 @@
package main
type Segment struct {
Start int
Stop int
Text string
TextHash string
SegmentID string
Translated string
CacheKey string
}


@@ -0,0 +1,132 @@
package main
import (
"bufio"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
)
type TMEntry struct {
CacheKey string `json:"cache_key"`
SegmentID string `json:"segment_id"`
SourcePath string `json:"source_path"`
TextHash string `json:"text_hash"`
Text string `json:"text"`
Translated string `json:"translated"`
Provider string `json:"provider"`
Model string `json:"model"`
SrcLang string `json:"src_lang"`
TgtLang string `json:"tgt_lang"`
UpdatedAt string `json:"updated_at"`
}
type TranslationMemory struct {
path string
entries map[string]TMEntry
}
func LoadTranslationMemory(path string) (*TranslationMemory, error) {
tm := &TranslationMemory{path: path, entries: map[string]TMEntry{}}
file, err := os.Open(path)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
return tm, nil
}
return nil, err
}
defer file.Close()
reader := bufio.NewReader(file)
for {
line, err := reader.ReadBytes('\n')
if len(line) > 0 {
trimmed := strings.TrimSpace(string(line))
if trimmed != "" {
var entry TMEntry
if err := json.Unmarshal([]byte(trimmed), &entry); err != nil {
return nil, fmt.Errorf("translation memory decode failed: %w", err)
}
if entry.CacheKey != "" && strings.TrimSpace(entry.Translated) != "" {
tm.entries[entry.CacheKey] = entry
}
}
}
if err != nil {
if errors.Is(err, io.EOF) {
break
}
return nil, err
}
}
return tm, nil
}
func (tm *TranslationMemory) Get(cacheKey string) (TMEntry, bool) {
entry, ok := tm.entries[cacheKey]
if !ok {
return TMEntry{}, false
}
if strings.TrimSpace(entry.Translated) == "" {
return TMEntry{}, false
}
return entry, true
}
func (tm *TranslationMemory) Put(entry TMEntry) {
if entry.CacheKey == "" {
return
}
tm.entries[entry.CacheKey] = entry
}
func (tm *TranslationMemory) Save() error {
if tm.path == "" {
return nil
}
if err := os.MkdirAll(filepath.Dir(tm.path), 0o755); err != nil {
return err
}
tmpPath := tm.path + ".tmp"
file, err := os.Create(tmpPath)
if err != nil {
return err
}
keys := make([]string, 0, len(tm.entries))
for key := range tm.entries {
keys = append(keys, key)
}
sort.Strings(keys)
writer := bufio.NewWriter(file)
for _, key := range keys {
entry := tm.entries[key]
payload, err := json.Marshal(entry)
if err != nil {
_ = file.Close()
return err
}
if _, err := writer.Write(payload); err != nil {
_ = file.Close()
return err
}
if _, err := writer.WriteString("\n"); err != nil {
_ = file.Close()
return err
}
}
if err := writer.Flush(); err != nil {
_ = file.Close()
return err
}
if err := file.Close(); err != nil {
return err
}
return os.Rename(tmpPath, tm.path)
}
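
Each line of the .tm.jsonl file is one serialized TMEntry; a hypothetical record with truncated hashes:

{"cache_key":"9c1d…","segment_id":"start/index.md:3f2a0b9e12c44d71","source_path":"start/index.md","text_hash":"3f2a…","text":"Run the installer.","translated":"运行安装程序。","provider":"pi","model":"claude-opus-4-6","src_lang":"en","tgt_lang":"zh-CN","updated_at":"2026-02-27T12:00:00Z"}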


@@ -0,0 +1,247 @@
package main
import (
"context"
"encoding/json"
"errors"
"fmt"
"strings"
"time"
pi "github.com/joshp123/pi-golang"
)
const (
translateMaxAttempts = 3
translateBaseDelay = 15 * time.Second
)
var errEmptyTranslation = errors.New("empty translation")
type PiTranslator struct {
client *pi.OneShotClient
}
func NewPiTranslator(srcLang, tgtLang string, glossary []GlossaryEntry, thinking string) (*PiTranslator, error) {
options := pi.DefaultOneShotOptions()
options.AppName = "openclaw-docs-i18n"
options.WorkDir = "/tmp"
options.Mode = pi.ModeDragons
options.Dragons = pi.DragonsOptions{
Provider: "anthropic",
Model: modelVersion,
Thinking: normalizeThinking(thinking),
}
options.SystemPrompt = translationPrompt(srcLang, tgtLang, glossary)
client, err := pi.StartOneShot(options)
if err != nil {
return nil, err
}
return &PiTranslator{client: client}, nil
}
func (t *PiTranslator) Translate(ctx context.Context, text, srcLang, tgtLang string) (string, error) {
return t.translate(ctx, text, t.translateMasked)
}
func (t *PiTranslator) TranslateRaw(ctx context.Context, text, srcLang, tgtLang string) (string, error) {
return t.translate(ctx, text, t.translateRaw)
}
func (t *PiTranslator) translate(ctx context.Context, text string, run func(context.Context, string) (string, error)) (string, error) {
if t.client == nil {
return "", errors.New("pi client unavailable")
}
prefix, core, suffix := splitWhitespace(text)
if core == "" {
return text, nil
}
translated, err := t.translateWithRetry(ctx, func(ctx context.Context) (string, error) {
return run(ctx, core)
})
if err != nil {
return "", err
}
return prefix + translated + suffix, nil
}
func (t *PiTranslator) translateWithRetry(ctx context.Context, run func(context.Context) (string, error)) (string, error) {
var lastErr error
for attempt := 0; attempt < translateMaxAttempts; attempt++ {
translated, err := run(ctx)
if err == nil {
return translated, nil
}
if !isRetryableTranslateError(err) {
return "", err
}
lastErr = err
if attempt+1 < translateMaxAttempts {
delay := translateBaseDelay * time.Duration(attempt+1)
if err := sleepWithContext(ctx, delay); err != nil {
return "", err
}
}
}
return "", lastErr
}
func (t *PiTranslator) translateMasked(ctx context.Context, core string) (string, error) {
state := NewPlaceholderState(core)
placeholders := make([]string, 0, 8)
mapping := map[string]string{}
masked := maskMarkdown(core, state.Next, &placeholders, mapping)
resText, err := runPrompt(ctx, t.client, masked)
if err != nil {
return "", err
}
translated := strings.TrimSpace(resText)
if translated == "" {
return "", errEmptyTranslation
}
if err := validatePlaceholders(translated, placeholders); err != nil {
return "", err
}
return unmaskMarkdown(translated, placeholders, mapping), nil
}
func (t *PiTranslator) translateRaw(ctx context.Context, core string) (string, error) {
resText, err := runPrompt(ctx, t.client, core)
if err != nil {
return "", err
}
translated := strings.TrimSpace(resText)
if translated == "" {
return "", errEmptyTranslation
}
return translated, nil
}
func isRetryableTranslateError(err error) bool {
if err == nil {
return false
}
if errors.Is(err, errEmptyTranslation) {
return true
}
message := strings.ToLower(err.Error())
return strings.Contains(message, "placeholder missing") || strings.Contains(message, "rate limit") || strings.Contains(message, "429")
}
func sleepWithContext(ctx context.Context, delay time.Duration) error {
timer := time.NewTimer(delay)
defer timer.Stop()
select {
case <-ctx.Done():
return ctx.Err()
case <-timer.C:
return nil
}
}
func (t *PiTranslator) Close() {
if t.client != nil {
_ = t.client.Close()
}
}
type agentEndPayload struct {
Messages []agentMessage `json:"messages"`
}
type agentMessage struct {
Role string `json:"role"`
Content json.RawMessage `json:"content"`
StopReason string `json:"stopReason,omitempty"`
ErrorMessage string `json:"errorMessage,omitempty"`
}
type contentBlock struct {
Type string `json:"type"`
Text string `json:"text,omitempty"`
}
func runPrompt(ctx context.Context, client *pi.OneShotClient, message string) (string, error) {
events, cancel := client.Subscribe(256)
defer cancel()
if err := client.Prompt(ctx, message); err != nil {
return "", err
}
for {
select {
case <-ctx.Done():
return "", ctx.Err()
case event, ok := <-events:
if !ok {
return "", errors.New("event stream closed")
}
if event.Type == "agent_end" {
return extractTranslationResult(event.Raw)
}
}
}
}
func extractTranslationResult(raw json.RawMessage) (string, error) {
var payload agentEndPayload
if err := json.Unmarshal(raw, &payload); err != nil {
return "", err
}
for index := len(payload.Messages) - 1; index >= 0; index-- {
message := payload.Messages[index]
if message.Role != "assistant" {
continue
}
if message.ErrorMessage != "" || strings.EqualFold(message.StopReason, "error") {
msg := strings.TrimSpace(message.ErrorMessage)
if msg == "" {
msg = "unknown error"
}
return "", fmt.Errorf("pi error: %s", msg)
}
text, err := extractContentText(message.Content)
if err != nil {
return "", err
}
return text, nil
}
return "", errors.New("assistant message not found")
}
func extractContentText(content json.RawMessage) (string, error) {
trimmed := strings.TrimSpace(string(content))
if trimmed == "" {
return "", nil
}
if strings.HasPrefix(trimmed, "\"") {
var text string
if err := json.Unmarshal(content, &text); err != nil {
return "", err
}
return text, nil
}
var blocks []contentBlock
if err := json.Unmarshal(content, &blocks); err != nil {
return "", err
}
var parts []string
for _, block := range blocks {
if block.Type == "text" && block.Text != "" {
parts = append(parts, block.Text)
}
}
return strings.Join(parts, ""), nil
}
func normalizeThinking(value string) string {
switch strings.ToLower(strings.TrimSpace(value)) {
case "low", "high":
return strings.ToLower(strings.TrimSpace(value))
default:
return "high"
}
}


@@ -0,0 +1,81 @@
package main
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"io"
"os"
"strings"
)
const (
workflowVersion = 15
providerName = "pi"
modelVersion = "claude-opus-4-6"
)
func cacheNamespace() string {
return fmt.Sprintf("wf=%d|provider=%s|model=%s", workflowVersion, providerName, modelVersion)
}
func cacheKey(namespace, srcLang, tgtLang, segmentID, textHash string) string {
raw := fmt.Sprintf("%s|%s|%s|%s|%s", namespace, srcLang, tgtLang, segmentID, textHash)
hash := sha256.Sum256([]byte(raw))
return hex.EncodeToString(hash[:])
}
func hashText(text string) string {
normalized := normalizeText(text)
hash := sha256.Sum256([]byte(normalized))
return hex.EncodeToString(hash[:])
}
func hashBytes(data []byte) string {
hash := sha256.Sum256(data)
return hex.EncodeToString(hash[:])
}
func normalizeText(text string) string {
return strings.Join(strings.Fields(strings.TrimSpace(text)), " ")
}
func segmentID(relPath, textHash string) string {
shortHash := textHash
if len(shortHash) > 16 {
shortHash = shortHash[:16]
}
return fmt.Sprintf("%s:%s", relPath, shortHash)
}
func splitWhitespace(text string) (string, string, string) {
if text == "" {
return "", "", ""
}
start := 0
for start < len(text) && isWhitespace(text[start]) {
start++
}
end := len(text)
for end > start && isWhitespace(text[end-1]) {
end--
}
return text[:start], text[start:end], text[end:]
}
func isWhitespace(b byte) bool {
switch b {
case ' ', '\t', '\n', '\r':
return true
default:
return false
}
}
func fatal(err error) {
if err == nil {
return
}
_, _ = io.WriteString(os.Stderr, err.Error()+"\n")
os.Exit(1)
}
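
The cache key is the SHA-256 hex of a pipe-joined preimage, so bumping workflowVersion (or changing model/provider) invalidates every cached translation at once; a hypothetical preimage (segment and hash illustrative):

wf=15|provider=pi|model=claude-opus-4-6|en|zh-CN|start/index.md:3f2a0b9e12c44d71|<full text hash>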


@@ -0,0 +1,233 @@
#!/usr/bin/env node
import fs from "node:fs";
import path from "node:path";
const ROOT = process.cwd();
const DOCS_DIR = path.join(ROOT, "docs");
const DOCS_JSON_PATH = path.join(DOCS_DIR, "docs.json");
if (!fs.existsSync(DOCS_DIR) || !fs.statSync(DOCS_DIR).isDirectory()) {
console.error("docs:check-links: missing docs directory; run from repo root.");
process.exit(1);
}
if (!fs.existsSync(DOCS_JSON_PATH)) {
console.error("docs:check-links: missing docs/docs.json.");
process.exit(1);
}
/** @param {string} dir */
function walk(dir) {
/** @type {string[]} */
const out = [];
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.name.startsWith(".")) {
continue;
}
const full = path.join(dir, entry.name);
if (entry.isDirectory()) {
out.push(...walk(full));
} else if (entry.isFile()) {
out.push(full);
}
}
return out;
}
/** @param {string} p */
function normalizeSlashes(p) {
return p.replace(/\\/g, "/");
}
/** @param {string} p */
function normalizeRoute(p) {
const stripped = p.replace(/^\/+|\/+$/g, "");
return stripped ? `/${stripped}` : "/";
}
/** @param {string} text */
function stripInlineCode(text) {
return text.replace(/`[^`]+`/g, "");
}
const docsConfig = JSON.parse(fs.readFileSync(DOCS_JSON_PATH, "utf8"));
const redirects = new Map();
for (const item of docsConfig.redirects || []) {
const source = normalizeRoute(String(item.source || ""));
const destination = normalizeRoute(String(item.destination || ""));
redirects.set(source, destination);
}
const allFiles = walk(DOCS_DIR);
const relAllFiles = new Set(allFiles.map((abs) => normalizeSlashes(path.relative(DOCS_DIR, abs))));
const markdownFiles = allFiles.filter((abs) => /\.(md|mdx)$/i.test(abs));
const routes = new Set();
for (const abs of markdownFiles) {
const rel = normalizeSlashes(path.relative(DOCS_DIR, abs));
const text = fs.readFileSync(abs, "utf8");
const slug = rel.replace(/\.(md|mdx)$/i, "");
const route = normalizeRoute(slug);
routes.add(route);
if (slug.endsWith("/index")) {
routes.add(normalizeRoute(slug.slice(0, -"/index".length)));
}
if (!text.startsWith("---")) {
continue;
}
const end = text.indexOf("\n---", 3);
if (end === -1) {
continue;
}
const frontMatter = text.slice(3, end);
const match = frontMatter.match(/^permalink:\s*(.+)\s*$/m);
if (!match) {
continue;
}
const permalink = String(match[1])
.trim()
.replace(/^['"]|['"]$/g, "");
routes.add(normalizeRoute(permalink));
}
/** @param {string} route */
function resolveRoute(route) {
let current = normalizeRoute(route);
if (current === "/") {
return { ok: true, terminal: "/" };
}
const seen = new Set([current]);
while (redirects.has(current)) {
current = redirects.get(current);
if (seen.has(current)) {
return { ok: false, terminal: current, loop: true };
}
seen.add(current);
}
return { ok: routes.has(current), terminal: current };
}
const markdownLinkRegex = /!?\[[^\]]*\]\(([^)]+)\)/g;
/** @type {{file: string; line: number; link: string; reason: string}[]} */
const broken = [];
let checked = 0;
for (const abs of markdownFiles) {
const rel = normalizeSlashes(path.relative(DOCS_DIR, abs));
const baseDir = normalizeSlashes(path.dirname(rel));
const rawText = fs.readFileSync(abs, "utf8");
const lines = rawText.split("\n");
// Track if we're inside a code fence
let inCodeFence = false;
for (let lineNum = 0; lineNum < lines.length; lineNum++) {
let line = lines[lineNum];
// Toggle code fence state
if (line.trim().startsWith("```")) {
inCodeFence = !inCodeFence;
continue;
}
if (inCodeFence) {
continue;
}
// Strip inline code to avoid false positives
line = stripInlineCode(line);
for (const match of line.matchAll(markdownLinkRegex)) {
const raw = match[1]?.trim();
if (!raw) {
continue;
}
// Skip external links, mailto, tel, data, and same-page anchors
if (/^(https?:|mailto:|tel:|data:|#)/i.test(raw)) {
continue;
}
const [pathPart] = raw.split("#");
const clean = pathPart.split("?")[0];
if (!clean) {
// Nothing left after stripping the anchor/query parts.
continue;
}
checked++;
if (clean.startsWith("/")) {
const route = normalizeRoute(clean);
const resolvedRoute = resolveRoute(route);
if (!resolvedRoute.ok) {
const staticRel = route.replace(/^\//, "");
if (!relAllFiles.has(staticRel)) {
broken.push({
file: rel,
line: lineNum + 1,
link: raw,
reason: `route/file not found (terminal: ${resolvedRoute.terminal})`,
});
continue;
}
}
// Skip anchor validation - Mintlify generates anchors from MDX components,
// accordions, and config schemas that we can't reliably extract from markdown.
continue;
}
// Relative placeholder strings used in code examples (for example "url")
// are intentionally skipped.
if (!clean.startsWith(".") && !clean.includes("/")) {
continue;
}
const normalizedRel = normalizeSlashes(path.normalize(path.join(baseDir, clean)));
if (/\.[a-zA-Z0-9]+$/.test(normalizedRel)) {
if (!relAllFiles.has(normalizedRel)) {
broken.push({
file: rel,
line: lineNum + 1,
link: raw,
reason: "relative file not found",
});
}
continue;
}
const candidates = [
normalizedRel,
`${normalizedRel}.md`,
`${normalizedRel}.mdx`,
`${normalizedRel}/index.md`,
`${normalizedRel}/index.mdx`,
];
if (!candidates.some((candidate) => relAllFiles.has(candidate))) {
broken.push({
file: rel,
line: lineNum + 1,
link: raw,
reason: "relative doc target not found",
});
}
}
}
}
console.log(`checked_internal_links=${checked}`);
console.log(`broken_links=${broken.length}`);
for (const item of broken) {
console.log(`${item.file}:${item.line} :: ${item.link} :: ${item.reason}`);
}
if (broken.length > 0) {
process.exit(1);
}
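
On failure the script prints one line per broken link before exiting non-zero; hypothetical output:

checked_internal_links=412
broken_links=1
concepts/sessions.md:88 :: /start/missing :: route/file not found (terminal: /start/missing)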


@@ -0,0 +1,173 @@
#!/usr/bin/env node
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
import { join, relative } from "node:path";
process.stdout.on("error", (error) => {
if (error?.code === "EPIPE") {
process.exit(0);
}
throw error;
});
const DOCS_DIR = join(process.cwd(), "docs");
if (!existsSync(DOCS_DIR)) {
console.error("docs:list: missing docs directory. Run from repo root.");
process.exit(1);
}
if (!statSync(DOCS_DIR).isDirectory()) {
console.error("docs:list: docs path is not a directory.");
process.exit(1);
}
const EXCLUDED_DIRS = new Set(["archive", "research"]);
/**
* @param {unknown[]} values
* @returns {string[]}
*/
function compactStrings(values) {
const result = [];
for (const value of values) {
if (value === null || value === undefined) {
continue;
}
const normalized =
typeof value === "string"
? value.trim()
: typeof value === "number" || typeof value === "boolean"
? String(value).trim()
: null;
if (normalized?.length > 0) {
result.push(normalized);
}
}
return result;
}
/**
* @param {string} dir
* @param {string} base
* @returns {string[]}
*/
function walkMarkdownFiles(dir, base = dir) {
const entries = readdirSync(dir, { withFileTypes: true });
const files = [];
for (const entry of entries) {
if (entry.name.startsWith(".")) {
continue;
}
const fullPath = join(dir, entry.name);
if (entry.isDirectory()) {
if (EXCLUDED_DIRS.has(entry.name)) {
continue;
}
files.push(...walkMarkdownFiles(fullPath, base));
} else if (entry.isFile() && entry.name.endsWith(".md")) {
files.push(relative(base, fullPath));
}
}
return files.toSorted((a, b) => a.localeCompare(b));
}
/**
* @param {string} fullPath
* @returns {{ summary: string | null; readWhen: string[]; error?: string }}
*/
function extractMetadata(fullPath) {
const content = readFileSync(fullPath, "utf8");
if (!content.startsWith("---")) {
return { summary: null, readWhen: [], error: "missing front matter" };
}
const endIndex = content.indexOf("\n---", 3);
if (endIndex === -1) {
return { summary: null, readWhen: [], error: "unterminated front matter" };
}
const frontMatter = content.slice(3, endIndex).trim();
const lines = frontMatter.split("\n");
let summaryLine = null;
const readWhen = [];
let collectingField = null;
for (const rawLine of lines) {
const line = rawLine.trim();
if (line.startsWith("summary:")) {
summaryLine = line;
collectingField = null;
continue;
}
if (line.startsWith("read_when:")) {
collectingField = "read_when";
const inline = line.slice("read_when:".length).trim();
if (inline.startsWith("[") && inline.endsWith("]")) {
try {
const parsed = JSON.parse(inline.replace(/'/g, '"'));
if (Array.isArray(parsed)) {
readWhen.push(...compactStrings(parsed));
}
} catch {
// ignore malformed inline arrays
}
}
continue;
}
if (collectingField === "read_when") {
if (line.startsWith("- ")) {
const hint = line.slice(2).trim();
if (hint) {
readWhen.push(hint);
}
} else if (line === "") {
// allow blank lines inside the list
} else {
collectingField = null;
}
}
}
if (!summaryLine) {
return { summary: null, readWhen, error: "summary key missing" };
}
const summaryValue = summaryLine.slice("summary:".length).trim();
const normalized = summaryValue
.replace(/^['"]|['"]$/g, "")
.replace(/\s+/g, " ")
.trim();
if (!normalized) {
return { summary: null, readWhen, error: "summary is empty" };
}
return { summary: normalized, readWhen };
}
console.log("Listing all markdown files in docs folder:");
const markdownFiles = walkMarkdownFiles(DOCS_DIR);
for (const relativePath of markdownFiles) {
const fullPath = join(DOCS_DIR, relativePath);
const { summary, readWhen, error } = extractMetadata(fullPath);
if (summary) {
console.log(`${relativePath} - ${summary}`);
if (readWhen.length > 0) {
console.log(` Read when: ${readWhen.join("; ")}`);
}
} else {
const reason = error ? ` - [${error}]` : "";
console.log(`${relativePath}${reason}`);
}
}
console.log(
'\nReminder: keep docs up to date as behavior changes. When your task matches any "Read when" hint above (React hooks, cache directives, database work, tests, etc.), read that doc before coding, and suggest new coverage when it is missing.',
);


@@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -euo pipefail
mode="${1:-}"
write_flag=()
if [[ "$mode" == "--write" ]]; then
write_flag=(-w)
fi
args=(
README.md
docs
"--skip=*.png,*.jpg,*.jpeg,*.gif,*.svg"
-D
-
-D
scripts/codespell-dictionary.txt
-I
scripts/codespell-ignore.txt
"${write_flag[@]}"
)
if command -v codespell >/dev/null 2>&1; then
codespell "${args[@]}"
exit 0
fi
if command -v python3 >/dev/null 2>&1; then
python3 -m pip install --user --disable-pip-version-check --break-system-packages codespell >/dev/null 2>&1 || \
python3 -m pip install --user --disable-pip-version-check codespell >/dev/null 2>&1
user_bin="$(python3 - <<'PY'
import site
print(f"{site.USER_BASE}/bin")
PY
)"
if [[ -x "${user_bin}/codespell" ]]; then
"${user_bin}/codespell" "${args[@]}"
exit 0
fi
fi
echo "codespell unavailable: install codespell or python3" >&2
exit 1


@@ -0,0 +1,29 @@
FROM node:22-bookworm@sha256:cd7bcd2e7a1e6f72052feb023c7f6b722205d3fcab7bbcbd2d1bfdab10b1e935
RUN corepack enable
WORKDIR /app
ENV NODE_OPTIONS="--disable-warning=ExperimentalWarning"
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml tsconfig.json tsconfig.plugin-sdk.dts.json tsdown.config.ts vitest.config.ts vitest.e2e.config.ts openclaw.mjs ./
COPY src ./src
COPY test ./test
COPY scripts ./scripts
COPY docs ./docs
COPY skills ./skills
COPY patches ./patches
COPY ui ./ui
COPY extensions/memory-core ./extensions/memory-core
COPY vendor/a2ui/renderers/lit ./vendor/a2ui/renderers/lit
COPY apps/shared/OpenClawKit/Tools/CanvasA2UI ./apps/shared/OpenClawKit/Tools/CanvasA2UI
RUN pnpm install --frozen-lockfile
RUN pnpm build
RUN pnpm ui:build
RUN useradd --create-home --shell /bin/bash appuser \
&& chown -R appuser:appuser /app
USER appuser
CMD ["bash"]

View File

@@ -0,0 +1,13 @@
FROM node:22-bookworm@sha256:cd7bcd2e7a1e6f72052feb023c7f6b722205d3fcab7bbcbd2d1bfdab10b1e935
RUN corepack enable
WORKDIR /app
COPY . .
RUN pnpm install --frozen-lockfile
RUN useradd --create-home --shell /bin/bash appuser \
&& chown -R appuser:appuser /app
USER appuser

View File

@@ -0,0 +1,158 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="openclaw-doctor-install-switch-e2e"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Running doctor install switch E2E..."
docker run --rm -e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 "$IMAGE_NAME" bash -lc '
set -euo pipefail
# Keep logs focused; the npm global install step can emit noisy deprecation warnings.
export npm_config_loglevel=error
export npm_config_fund=false
export npm_config_audit=false
# Stub systemd/loginctl so doctor + daemon flows work in Docker.
export PATH="/tmp/openclaw-bin:$PATH"
mkdir -p /tmp/openclaw-bin
cat > /tmp/openclaw-bin/systemctl <<"SYSTEMCTL"
#!/usr/bin/env bash
set -euo pipefail
args=("$@")
if [[ "${args[0]:-}" == "--user" ]]; then
args=("${args[@]:1}")
fi
cmd="${args[0]:-}"
case "$cmd" in
status)
exit 0
;;
is-enabled)
unit="${args[1]:-}"
unit_path="$HOME/.config/systemd/user/${unit}"
if [ -f "$unit_path" ]; then
exit 0
fi
exit 1
;;
show)
echo "ActiveState=inactive"
echo "SubState=dead"
echo "MainPID=0"
echo "ExecMainStatus=0"
echo "ExecMainCode=0"
exit 0
;;
*)
exit 0
;;
esac
SYSTEMCTL
chmod +x /tmp/openclaw-bin/systemctl
cat > /tmp/openclaw-bin/loginctl <<"LOGINCTL"
#!/usr/bin/env bash
set -euo pipefail
if [[ "$*" == *"show-user"* ]]; then
echo "Linger=yes"
exit 0
fi
if [[ "$*" == *"enable-linger"* ]]; then
exit 0
fi
exit 0
LOGINCTL
chmod +x /tmp/openclaw-bin/loginctl
# Install the npm-global variant from the local /app source.
# `npm pack` can emit script output; keep only the tarball name.
pkg_tgz="$(npm pack --silent /app | tail -n 1 | tr -d '\r')"
if [ ! -f "/app/$pkg_tgz" ]; then
echo "npm pack failed (expected /app/$pkg_tgz)"
exit 1
fi
npm install -g --prefix /tmp/npm-prefix "/app/$pkg_tgz"
npm_bin="/tmp/npm-prefix/bin/openclaw"
npm_root="/tmp/npm-prefix/lib/node_modules/openclaw"
if [ -f "$npm_root/dist/index.mjs" ]; then
npm_entry="$npm_root/dist/index.mjs"
else
npm_entry="$npm_root/dist/index.js"
fi
if [ -f "/app/dist/index.mjs" ]; then
git_entry="/app/dist/index.mjs"
else
git_entry="/app/dist/index.js"
fi
git_cli="/app/openclaw.mjs"
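# assert_entrypoint expects a unit line shaped like (illustrative paths):
#   ExecStart=/usr/bin/node "/path/to/entry.mjs" ...
# Field 2 is the entrypoint; surrounding quotes are stripped below.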
assert_entrypoint() {
local unit_path="$1"
local expected="$2"
local exec_line=""
exec_line=$(grep -m1 "^ExecStart=" "$unit_path" || true)
if [ -z "$exec_line" ]; then
echo "Missing ExecStart in $unit_path"
exit 1
fi
exec_line="${exec_line#ExecStart=}"
entrypoint=$(echo "$exec_line" | awk "{print \$2}")
entrypoint="${entrypoint%\"}"
entrypoint="${entrypoint#\"}"
if [ "$entrypoint" != "$expected" ]; then
echo "Expected entrypoint $expected, got $entrypoint"
exit 1
fi
}
# Each flow: install service with one variant, run doctor from the other,
# and verify ExecStart entrypoint switches accordingly.
run_flow() {
local name="$1"
local install_cmd="$2"
local install_expected="$3"
local doctor_cmd="$4"
local doctor_expected="$5"
echo "== Flow: $name =="
home_dir=$(mktemp -d "/tmp/openclaw-switch-${name}.XXXXXX")
export HOME="$home_dir"
export USER="testuser"
eval "$install_cmd"
unit_path="$HOME/.config/systemd/user/openclaw-gateway.service"
if [ ! -f "$unit_path" ]; then
echo "Missing unit file: $unit_path"
exit 1
fi
assert_entrypoint "$unit_path" "$install_expected"
eval "$doctor_cmd"
assert_entrypoint "$unit_path" "$doctor_expected"
}
run_flow \
"npm-to-git" \
"$npm_bin daemon install --force" \
"$npm_entry" \
"node $git_cli doctor --repair --force --yes" \
"$git_entry"
run_flow \
"git-to-npm" \
"node $git_cli daemon install --force" \
"$git_entry" \
"$npm_bin doctor --repair --force --yes" \
"$npm_entry"
'

View File

@@ -0,0 +1,145 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="openclaw-gateway-network-e2e"
PORT="18789"
TOKEN="e2e-$(date +%s)-$$"
NET_NAME="openclaw-net-e2e-$$"
GW_NAME="openclaw-gateway-e2e-$$"
cleanup() {
docker rm -f "$GW_NAME" >/dev/null 2>&1 || true
docker network rm "$NET_NAME" >/dev/null 2>&1 || true
}
trap cleanup EXIT
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Creating Docker network..."
docker network create "$NET_NAME" >/dev/null
echo "Starting gateway container..."
docker run -d \
--name "$GW_NAME" \
--network "$NET_NAME" \
-e "OPENCLAW_GATEWAY_TOKEN=$TOKEN" \
-e "OPENCLAW_SKIP_CHANNELS=1" \
-e "OPENCLAW_SKIP_GMAIL_WATCHER=1" \
-e "OPENCLAW_SKIP_CRON=1" \
-e "OPENCLAW_SKIP_CANVAS_HOST=1" \
"$IMAGE_NAME" \
bash -lc "set -euo pipefail; entry=dist/index.mjs; [ -f \"\$entry\" ] || entry=dist/index.js; node \"\$entry\" config set gateway.controlUi.enabled false >/dev/null; node \"\$entry\" gateway --port $PORT --bind lan --allow-unconfigured > /tmp/gateway-net-e2e.log 2>&1"
echo "Waiting for gateway to come up..."
ready=0
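# Readiness: prefer a TCP connect probe inside the container; fall back to the
# "listening on ws://" line in the gateway log.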
for _ in $(seq 1 40); do
if [ "$(docker inspect -f '{{.State.Running}}' "$GW_NAME" 2>/dev/null || echo false)" != "true" ]; then
break
fi
if docker exec "$GW_NAME" bash -lc "node --input-type=module -e '
import net from \"node:net\";
const socket = net.createConnection({ host: \"127.0.0.1\", port: $PORT });
const timeout = setTimeout(() => {
socket.destroy();
process.exit(1);
}, 400);
socket.on(\"connect\", () => {
clearTimeout(timeout);
socket.end();
process.exit(0);
});
socket.on(\"error\", () => {
clearTimeout(timeout);
process.exit(1);
});
' >/dev/null 2>&1"; then
ready=1
break
fi
if docker exec "$GW_NAME" bash -lc "grep -q \"listening on ws://\" /tmp/gateway-net-e2e.log"; then
ready=1
break
fi
sleep 0.5
done
if [ "$ready" -ne 1 ]; then
echo "Gateway failed to start"
if [ "$(docker inspect -f '{{.State.Running}}' "$GW_NAME" 2>/dev/null || echo false)" = "true" ]; then
docker exec "$GW_NAME" bash -lc "tail -n 80 /tmp/gateway-net-e2e.log" || true
else
docker logs "$GW_NAME" 2>&1 | tail -n 120 || true
fi
exit 1
fi
docker exec "$GW_NAME" bash -lc "tail -n 50 /tmp/gateway-net-e2e.log"
echo "Running client container (connect + health)..."
docker run --rm \
--network "$NET_NAME" \
-e "GW_URL=ws://$GW_NAME:$PORT" \
-e "GW_TOKEN=$TOKEN" \
"$IMAGE_NAME" \
bash -lc "node --import tsx - <<'NODE'
import { WebSocket } from \"ws\";
import { PROTOCOL_VERSION } from \"./src/gateway/protocol/index.ts\";
const url = process.env.GW_URL;
const token = process.env.GW_TOKEN;
if (!url || !token) throw new Error(\"missing GW_URL/GW_TOKEN\");
const ws = new WebSocket(url);
await new Promise((resolve, reject) => {
const t = setTimeout(() => reject(new Error(\"ws open timeout\")), 5000);
ws.once(\"open\", () => {
clearTimeout(t);
resolve();
});
});
function onceFrame(filter, timeoutMs = 5000) {
return new Promise((resolve, reject) => {
const t = setTimeout(() => reject(new Error(\"timeout\")), timeoutMs);
const handler = (data) => {
const obj = JSON.parse(String(data));
if (!filter(obj)) return;
clearTimeout(t);
ws.off(\"message\", handler);
resolve(obj);
};
ws.on(\"message\", handler);
});
}
ws.send(
JSON.stringify({
type: \"req\",
id: \"c1\",
method: \"connect\",
params: {
minProtocol: PROTOCOL_VERSION,
maxProtocol: PROTOCOL_VERSION,
client: {
id: \"test\",
displayName: \"docker-net-e2e\",
version: \"dev\",
platform: process.platform,
mode: \"test\",
},
caps: [],
auth: { token },
},
}),
);
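// The gateway answers with a res frame carrying the request id; ok=false includes error.message.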
const connectRes = await onceFrame((o) => o?.type === \"res\" && o?.id === \"c1\");
if (!connectRes.ok) throw new Error(\"connect failed: \" + (connectRes.error?.message ?? \"unknown\"));
ws.close();
console.log(\"ok\");
NODE"
echo "OK"

View File

@@ -0,0 +1,566 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="openclaw-onboard-e2e"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Running onboarding E2E..."
docker run --rm -t "$IMAGE_NAME" bash -lc '
set -euo pipefail
trap "" PIPE
export TERM=xterm-256color
ONBOARD_FLAGS="--flow quickstart --auth-choice skip --skip-channels --skip-skills --skip-daemon --skip-ui"
# tsdown may emit dist/index.js or dist/index.mjs depending on runtime/bundler.
if [ -f dist/index.mjs ]; then
OPENCLAW_ENTRY="dist/index.mjs"
elif [ -f dist/index.js ]; then
OPENCLAW_ENTRY="dist/index.js"
else
echo "Missing dist/index.(m)js (build output):"
ls -la dist || true
exit 1
fi
export OPENCLAW_ENTRY
# Provide a minimal trash shim to avoid noisy "missing trash" logs in containers.
export PATH="/tmp/openclaw-bin:$PATH"
mkdir -p /tmp/openclaw-bin
cat > /tmp/openclaw-bin/trash <<'"'"'TRASH'"'"'
#!/usr/bin/env bash
set -euo pipefail
trash_dir="$HOME/.Trash"
mkdir -p "$trash_dir"
for target in "$@"; do
[ -e "$target" ] || continue
base="$(basename "$target")"
dest="$trash_dir/$base"
if [ -e "$dest" ]; then
dest="$trash_dir/${base}-$(date +%s)-$$"
fi
mv "$target" "$dest"
done
TRASH
chmod +x /tmp/openclaw-bin/trash
send() {
local payload="$1"
local delay="${2:-0.4}"
# Let prompts render before sending keystrokes.
sleep "$delay"
printf "%b" "$payload" >&3 2>/dev/null || true
}
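# send() writes to fd 3, which run_wizard_cmd binds to the wizard input FIFO,
# so keystrokes reach the interactive prompts.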
wait_for_log() {
local needle="$1"
local timeout_s="${2:-45}"
local quiet_on_timeout="${3:-false}"
local needle_compact
needle_compact="$(printf "%s" "$needle" | tr -cd "[:alpha:]")"
local start_s
start_s="$(date +%s)"
while true; do
if [ -n "${WIZARD_LOG_PATH:-}" ] && [ -f "$WIZARD_LOG_PATH" ]; then
if grep -a -F -q "$needle" "$WIZARD_LOG_PATH"; then
return 0
fi
if NEEDLE=\"$needle_compact\" node --input-type=module -e "
import fs from \"node:fs\";
const file = process.env.WIZARD_LOG_PATH;
const needle = process.env.NEEDLE ?? \"\";
let text = \"\";
try { text = fs.readFileSync(file, \"utf8\"); } catch { process.exit(1); }
// Clack/script output can include lots of control sequences; keep a larger tail and strip ANSI more robustly.
if (text.length > 120000) text = text.slice(-120000);
const stripAnsi = (value) =>
value
// OSC: ESC ] ... BEL or ESC \\
.replace(/\\x1b\\][^\\x07]*(?:\\x07|\\x1b\\\\)/g, \"\")
// CSI: ESC [ ... cmd
.replace(/\\x1b\\[[0-?]*[ -/]*[@-~]/g, \"\");
// Letters-only: script output sometimes fragments ANSI sequences into digits/letters that
// can otherwise break substring matching.
const compact = (value) => stripAnsi(value).toLowerCase().replace(/[^a-z]+/g, \"\");
const haystack = compact(text);
const compactNeedle = compact(needle);
if (!compactNeedle) process.exit(1);
process.exit(haystack.includes(compactNeedle) ? 0 : 1);
"; then
return 0
fi
fi
if [ $(( $(date +%s) - start_s )) -ge "$timeout_s" ]; then
if [ "$quiet_on_timeout" = "true" ]; then
return 1
fi
echo "Timeout waiting for log: $needle"
if [ -n "${WIZARD_LOG_PATH:-}" ] && [ -f "$WIZARD_LOG_PATH" ]; then
tail -n 140 "$WIZARD_LOG_PATH" || true
fi
return 1
fi
sleep 0.2
done
}
start_gateway() {
node "$OPENCLAW_ENTRY" gateway --port 18789 --bind loopback --allow-unconfigured > /tmp/gateway-e2e.log 2>&1 &
GATEWAY_PID="$!"
}
wait_for_gateway() {
for _ in $(seq 1 20); do
if node --input-type=module -e "
import net from 'node:net';
const socket = net.createConnection({ host: '127.0.0.1', port: 18789 });
const timeout = setTimeout(() => {
socket.destroy();
process.exit(1);
}, 500);
socket.on('connect', () => {
clearTimeout(timeout);
socket.end();
process.exit(0);
});
socket.on('error', () => {
clearTimeout(timeout);
process.exit(1);
});
" >/dev/null 2>&1; then
return 0
fi
if [ -f /tmp/gateway-e2e.log ] && grep -E -q "listening on ws://[^ ]+:18789" /tmp/gateway-e2e.log; then
if [ -n "${GATEWAY_PID:-}" ] && kill -0 "$GATEWAY_PID" 2>/dev/null; then
return 0
fi
fi
sleep 1
done
echo "Gateway failed to start"
cat /tmp/gateway-e2e.log || true
return 1
}
stop_gateway() {
local gw_pid="$1"
if [ -n "$gw_pid" ]; then
kill "$gw_pid" 2>/dev/null || true
wait "$gw_pid" || true
fi
}
run_wizard_cmd() {
local case_name="$1"
local home_dir="$2"
local command="$3"
local send_fn="$4"
local with_gateway="${5:-false}"
local validate_fn="${6:-}"
echo "== Wizard case: $case_name =="
export HOME="$home_dir"
mkdir -p "$HOME"
input_fifo="$(mktemp -u "/tmp/openclaw-onboard-${case_name}.XXXXXX")"
mkfifo "$input_fifo"
local log_path="/tmp/openclaw-onboard-${case_name}.log"
WIZARD_LOG_PATH="$log_path"
export WIZARD_LOG_PATH
# Run under script to keep an interactive TTY for clack prompts.
script -q -f -c "$command" "$log_path" < "$input_fifo" &
wizard_pid=$!
exec 3> "$input_fifo"
local gw_pid=""
if [ "$with_gateway" = "true" ]; then
start_gateway
gw_pid="$GATEWAY_PID"
wait_for_gateway
fi
"$send_fn"
if ! wait "$wizard_pid"; then
wizard_status=$?
exec 3>&-
rm -f "$input_fifo"
stop_gateway "$gw_pid"
echo "Wizard exited with status $wizard_status"
if [ -f "$log_path" ]; then
tail -n 160 "$log_path" || true
fi
exit "$wizard_status"
fi
exec 3>&-
rm -f "$input_fifo"
stop_gateway "$gw_pid"
if [ -n "$validate_fn" ]; then
"$validate_fn" "$log_path"
fi
}
run_wizard() {
local case_name="$1"
local home_dir="$2"
local send_fn="$3"
local validate_fn="${4:-}"
# Default onboarding command wrapper.
run_wizard_cmd "$case_name" "$home_dir" "node \"$OPENCLAW_ENTRY\" onboard $ONBOARD_FLAGS" "$send_fn" true "$validate_fn"
}
make_home() {
mktemp -d "/tmp/openclaw-e2e-$1.XXXXXX"
}
assert_file() {
local file_path="$1"
if [ ! -f "$file_path" ]; then
echo "Missing file: $file_path"
exit 1
fi
}
assert_dir() {
local dir_path="$1"
if [ ! -d "$dir_path" ]; then
echo "Missing dir: $dir_path"
exit 1
fi
}
select_skip_hooks() {
# Hooks multiselect: pick "Skip for now".
wait_for_log "Enable hooks?" 60 true || true
send $'"'"' \r'"'"' 0.6
}
send_local_basic() {
# Risk acknowledgement (default is "No").
wait_for_log "Continue?" 60
send $'"'"'y\r'"'"' 0.6
# Non-interactive flow; no gateway-location prompt.
select_skip_hooks
}
send_reset_config_only() {
# Risk acknowledgement (default is "No").
wait_for_log "Continue?" 40 true || true
send $'"'"'y\r'"'"' 0.8
# Select reset flow for existing config.
wait_for_log "Config handling" 40 true || true
send $'"'"'\e[B'"'"' 0.3
send $'"'"'\e[B'"'"' 0.3
send $'"'"'\r'"'"' 0.4
# Reset scope -> Config only (default).
wait_for_log "Reset scope" 40 true || true
send $'"'"'\r'"'"' 0.4
select_skip_hooks
}
send_channels_flow() {
# Configure channels via configure wizard.
# Prompts are interactive; notes are not. Use conservative delays to stay in sync.
# Where will the Gateway run? -> Local (default)
send $'"'"'\r'"'"' 1.2
# Channels mode -> Configure/link (default)
send $'"'"'\r'"'"' 1.5
# Select a channel -> Finished (last option; clack wraps on Up)
send $'"'"'\e[A\r'"'"' 2.0
# Keep stdin open until wizard exits.
send "" 2.5
}
send_skills_flow() {
# configure --section skills still runs the configure wizard; the first prompt is gateway location.
# Avoid log-based synchronization here; clack output can fragment ANSI sequences and break matching.
send $'"'"'\r'"'"' 3.0
wait_for_log "Configure skills now?" 120 true || true
send $'"'"'n\r'"'"' 0.8
send "" 2.0
}
run_case_local_basic() {
local home_dir
home_dir="$(make_home local-basic)"
export HOME="$home_dir"
mkdir -p "$HOME"
node "$OPENCLAW_ENTRY" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--mode local \
--skip-channels \
--skip-skills \
--skip-daemon \
--skip-ui \
--skip-health
# Assert config + workspace scaffolding.
workspace_dir="$HOME/.openclaw/workspace"
config_path="$HOME/.openclaw/openclaw.json"
sessions_dir="$HOME/.openclaw/agents/main/sessions"
assert_file "$config_path"
assert_dir "$sessions_dir"
for file in AGENTS.md BOOTSTRAP.md IDENTITY.md SOUL.md TOOLS.md USER.md; do
assert_file "$workspace_dir/$file"
done
CONFIG_PATH="$config_path" WORKSPACE_DIR="$workspace_dir" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const expectedWorkspace = process.env.WORKSPACE_DIR;
const errors = [];
if (cfg?.agents?.defaults?.workspace !== expectedWorkspace) {
errors.push(
`agents.defaults.workspace mismatch (got ${cfg?.agents?.defaults?.workspace ?? "unset"})`,
);
}
if (cfg?.gateway?.mode !== "local") {
errors.push(`gateway.mode mismatch (got ${cfg?.gateway?.mode ?? "unset"})`);
}
if (cfg?.gateway?.bind !== "loopback") {
errors.push(`gateway.bind mismatch (got ${cfg?.gateway?.bind ?? "unset"})`);
}
if ((cfg?.gateway?.tailscale?.mode ?? "off") !== "off") {
errors.push(
`gateway.tailscale.mode mismatch (got ${cfg?.gateway?.tailscale?.mode ?? "unset"})`,
);
}
if (!cfg?.wizard?.lastRunAt) {
errors.push("wizard.lastRunAt missing");
}
if (!cfg?.wizard?.lastRunVersion) {
errors.push("wizard.lastRunVersion missing");
}
if (cfg?.wizard?.lastRunCommand !== "onboard") {
errors.push(
`wizard.lastRunCommand mismatch (got ${cfg?.wizard?.lastRunCommand ?? "unset"})`,
);
}
if (cfg?.wizard?.lastRunMode !== "local") {
errors.push(
`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`,
);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
run_case_remote_non_interactive() {
local home_dir
home_dir="$(make_home remote-non-interactive)"
export HOME="$home_dir"
mkdir -p "$HOME"
# Smoke test non-interactive remote config write.
node "$OPENCLAW_ENTRY" onboard --non-interactive --accept-risk \
--mode remote \
--remote-url ws://gateway.local:18789 \
--remote-token remote-token \
--skip-skills \
--skip-health
config_path="$HOME/.openclaw/openclaw.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.gateway?.mode !== "remote") {
errors.push(`gateway.mode mismatch (got ${cfg?.gateway?.mode ?? "unset"})`);
}
if (cfg?.gateway?.remote?.url !== "ws://gateway.local:18789") {
errors.push(`gateway.remote.url mismatch (got ${cfg?.gateway?.remote?.url ?? "unset"})`);
}
if (cfg?.gateway?.remote?.token !== "remote-token") {
errors.push(`gateway.remote.token mismatch (got ${cfg?.gateway?.remote?.token ?? "unset"})`);
}
if (cfg?.wizard?.lastRunMode !== "remote") {
errors.push(`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
run_case_reset() {
local home_dir
home_dir="$(make_home reset-config)"
export HOME="$home_dir"
mkdir -p "$HOME/.openclaw"
# Seed a remote config to exercise reset path.
cat > "$HOME/.openclaw/openclaw.json" <<'"'"'JSON'"'"'
{
"meta": {},
"agents": { "defaults": { "workspace": "/root/old" } },
"gateway": {
"mode": "remote",
"remote": { "url": "ws://old.example:18789", "token": "old-token" }
}
}
JSON
node "$OPENCLAW_ENTRY" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--mode local \
--reset \
--skip-channels \
--skip-skills \
--skip-daemon \
--skip-ui \
--skip-health
config_path="$HOME/.openclaw/openclaw.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.gateway?.mode !== "local") {
errors.push(`gateway.mode mismatch (got ${cfg?.gateway?.mode ?? "unset"})`);
}
if (cfg?.gateway?.remote?.url) {
errors.push(`gateway.remote.url should be cleared (got ${cfg?.gateway?.remote?.url})`);
}
if (cfg?.wizard?.lastRunMode !== "local") {
errors.push(`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
run_case_channels() {
local home_dir
home_dir="$(make_home channels)"
# Channels-only configure flow.
run_wizard_cmd channels "$home_dir" "node \"$OPENCLAW_ENTRY\" configure --section channels" send_channels_flow
config_path="$HOME/.openclaw/openclaw.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.telegram?.botToken) {
errors.push(`telegram.botToken should be unset (got ${cfg?.telegram?.botToken})`);
}
if (cfg?.discord?.token) {
errors.push(`discord.token should be unset (got ${cfg?.discord?.token})`);
}
if (cfg?.slack?.botToken || cfg?.slack?.appToken) {
errors.push(
`slack tokens should be unset (got bot=${cfg?.slack?.botToken ?? "unset"}, app=${cfg?.slack?.appToken ?? "unset"})`,
);
}
if (cfg?.wizard?.lastRunCommand !== "configure") {
errors.push(
`wizard.lastRunCommand mismatch (got ${cfg?.wizard?.lastRunCommand ?? "unset"})`,
);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
run_case_skills() {
local home_dir
home_dir="$(make_home skills)"
export HOME="$home_dir"
mkdir -p "$HOME/.openclaw"
# Seed skills config to ensure it survives the wizard.
cat > "$HOME/.openclaw/openclaw.json" <<'"'"'JSON'"'"'
{
"meta": {},
"skills": {
"allowBundled": ["__none__"],
"install": { "nodeManager": "bun" }
}
}
JSON
run_wizard_cmd skills "$home_dir" "node \"$OPENCLAW_ENTRY\" configure --section skills" send_skills_flow
config_path="$HOME/.openclaw/openclaw.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.skills?.install?.nodeManager !== "bun") {
errors.push(`skills.install.nodeManager mismatch (got ${cfg?.skills?.install?.nodeManager ?? "unset"})`);
}
if (!Array.isArray(cfg?.skills?.allowBundled) || cfg.skills.allowBundled[0] !== "__none__") {
errors.push("skills.allowBundled missing");
}
if (cfg?.wizard?.lastRunMode !== "local") {
errors.push(`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
assert_log_not_contains() {
local file_path="$1"
local needle="$2"
if grep -q "$needle" "$file_path"; then
echo "Unexpected log output: $needle"
exit 1
fi
}
validate_local_basic_log() {
local log_path="$1"
assert_log_not_contains "$log_path" "systemctl --user unavailable"
}
run_case_local_basic
run_case_remote_non_interactive
run_case_reset
run_case_channels
run_case_skills
'
echo "E2E complete."

View File

@@ -0,0 +1,224 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="openclaw-plugins-e2e"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Running plugins Docker E2E..."
docker run --rm -t "$IMAGE_NAME" bash -lc '
set -euo pipefail
if [ -f dist/index.mjs ]; then
OPENCLAW_ENTRY="dist/index.mjs"
elif [ -f dist/index.js ]; then
OPENCLAW_ENTRY="dist/index.js"
else
echo "Missing dist/index.(m)js (build output):"
ls -la dist || true
exit 1
fi
export OPENCLAW_ENTRY
home_dir=$(mktemp -d "/tmp/openclaw-plugins-e2e.XXXXXX")
export HOME="$home_dir"
mkdir -p "$HOME/.openclaw/extensions/demo-plugin"
cat > "$HOME/.openclaw/extensions/demo-plugin/index.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin",
name: "Demo Plugin",
description: "Docker E2E demo plugin",
register(api) {
api.registerTool(() => null, { name: "demo_tool" });
api.registerGatewayMethod("demo.ping", async () => ({ ok: true }));
api.registerCli(() => {}, { commands: ["demo"] });
api.registerService({ id: "demo-service", start: () => {} });
},
};
JS
cat > "$HOME/.openclaw/extensions/demo-plugin/openclaw.plugin.json" <<'"'"'JSON'"'"'
{
"id": "demo-plugin",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
node "$OPENCLAW_ENTRY" plugins list --json > /tmp/plugins.json
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin");
if (!plugin) throw new Error("plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
const assertIncludes = (list, value, label) => {
if (!Array.isArray(list) || !list.includes(value)) {
throw new Error(`${label} missing: ${value}`);
}
};
assertIncludes(plugin.toolNames, "demo_tool", "tool");
assertIncludes(plugin.gatewayMethods, "demo.ping", "gateway method");
assertIncludes(plugin.cliCommands, "demo", "cli command");
assertIncludes(plugin.services, "demo-service", "service");
const diagErrors = (data.diagnostics || []).filter((diag) => diag.level === "error");
if (diagErrors.length > 0) {
throw new Error(`diagnostics errors: ${diagErrors.map((diag) => diag.message).join("; ")}`);
}
console.log("ok");
NODE
echo "Testing tgz install flow..."
pack_dir="$(mktemp -d "/tmp/openclaw-plugin-pack.XXXXXX")"
mkdir -p "$pack_dir/package"
cat > "$pack_dir/package/package.json" <<'"'"'JSON'"'"'
{
"name": "@openclaw/demo-plugin-tgz",
"version": "0.0.1",
"openclaw": { "extensions": ["./index.js"] }
}
JSON
cat > "$pack_dir/package/index.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin-tgz",
name: "Demo Plugin TGZ",
register(api) {
api.registerGatewayMethod("demo.tgz", async () => ({ ok: true }));
},
};
JS
cat > "$pack_dir/package/openclaw.plugin.json" <<'"'"'JSON'"'"'
{
"id": "demo-plugin-tgz",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
tar -czf /tmp/demo-plugin-tgz.tgz -C "$pack_dir" package
node "$OPENCLAW_ENTRY" plugins install /tmp/demo-plugin-tgz.tgz
node "$OPENCLAW_ENTRY" plugins list --json > /tmp/plugins2.json
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins2.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin-tgz");
if (!plugin) throw new Error("tgz plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
if (!Array.isArray(plugin.gatewayMethods) || !plugin.gatewayMethods.includes("demo.tgz")) {
throw new Error("expected gateway method demo.tgz");
}
console.log("ok");
NODE
echo "Testing install from local folder (plugins.load.paths)..."
dir_plugin="$(mktemp -d "/tmp/openclaw-plugin-dir.XXXXXX")"
cat > "$dir_plugin/package.json" <<'"'"'JSON'"'"'
{
"name": "@openclaw/demo-plugin-dir",
"version": "0.0.1",
"openclaw": { "extensions": ["./index.js"] }
}
JSON
cat > "$dir_plugin/index.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin-dir",
name: "Demo Plugin DIR",
register(api) {
api.registerGatewayMethod("demo.dir", async () => ({ ok: true }));
},
};
JS
cat > "$dir_plugin/openclaw.plugin.json" <<'"'"'JSON'"'"'
{
"id": "demo-plugin-dir",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
node "$OPENCLAW_ENTRY" plugins install "$dir_plugin"
node "$OPENCLAW_ENTRY" plugins list --json > /tmp/plugins3.json
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins3.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin-dir");
if (!plugin) throw new Error("dir plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
if (!Array.isArray(plugin.gatewayMethods) || !plugin.gatewayMethods.includes("demo.dir")) {
throw new Error("expected gateway method demo.dir");
}
console.log("ok");
NODE
echo "Testing install from npm spec (file:)..."
file_pack_dir="$(mktemp -d "/tmp/openclaw-plugin-filepack.XXXXXX")"
mkdir -p "$file_pack_dir/package"
cat > "$file_pack_dir/package/package.json" <<'"'"'JSON'"'"'
{
"name": "@openclaw/demo-plugin-file",
"version": "0.0.1",
"openclaw": { "extensions": ["./index.js"] }
}
JSON
cat > "$file_pack_dir/package/index.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin-file",
name: "Demo Plugin FILE",
register(api) {
api.registerGatewayMethod("demo.file", async () => ({ ok: true }));
},
};
JS
cat > "$file_pack_dir/package/openclaw.plugin.json" <<'"'"'JSON'"'"'
{
"id": "demo-plugin-file",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
node "$OPENCLAW_ENTRY" plugins install "file:$file_pack_dir/package"
node "$OPENCLAW_ENTRY" plugins list --json > /tmp/plugins4.json
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins4.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin-file");
if (!plugin) throw new Error("file plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
if (!Array.isArray(plugin.gatewayMethods) || !plugin.gatewayMethods.includes("demo.file")) {
throw new Error("expected gateway method demo.file");
}
console.log("ok");
NODE
'
echo "OK"

View File

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="${OPENCLAW_QR_SMOKE_IMAGE:-${CLAWDBOT_QR_SMOKE_IMAGE:-openclaw-qr-smoke}}"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile.qr-import" "$ROOT_DIR"
echo "Running qrcode-terminal import smoke..."
docker run --rm -t "$IMAGE_NAME" node -e "import('qrcode-terminal').then((m)=>m.default.generate('qr-smoke',{small:true}))"

View File

@@ -0,0 +1,139 @@
import { extractReadableContent, fetchFirecrawlContent } from "../src/agents/tools/web-tools.js";
const DEFAULT_URLS = [
"https://en.wikipedia.org/wiki/Web_scraping",
"https://news.ycombinator.com/",
"https://www.apple.com/iphone/",
"https://www.nytimes.com/",
"https://www.reddit.com/r/javascript/",
];
const urls = process.argv.slice(2);
const targets = urls.length > 0 ? urls : DEFAULT_URLS;
const apiKey = process.env.FIRECRAWL_API_KEY;
const baseUrl = process.env.FIRECRAWL_BASE_URL ?? "https://api.firecrawl.dev";
const userAgent =
"Mozilla/5.0 (Macintosh; Intel Mac OS X 14_7_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36";
const timeoutMs = 30_000;
function truncate(value: string, max = 180): string {
if (!value) {
return "";
}
return value.length > max ? `${value.slice(0, max)}…` : value;
}
async function fetchHtml(url: string): Promise<{
ok: boolean;
status: number;
contentType: string;
finalUrl: string;
body: string;
}> {
const controller = new AbortController();
const timer = setTimeout(() => controller.abort(), timeoutMs);
try {
const res = await fetch(url, {
method: "GET",
headers: { Accept: "*/*", "User-Agent": userAgent },
signal: controller.signal,
});
const contentType = res.headers.get("content-type") ?? "application/octet-stream";
const body = await res.text();
return {
ok: res.ok,
status: res.status,
contentType,
finalUrl: res.url || url,
body,
};
} finally {
clearTimeout(timer);
}
}
async function run() {
if (!apiKey) {
console.log("FIRECRAWL_API_KEY not set. Firecrawl comparisons will be skipped.");
}
for (const url of targets) {
console.log(`\n=== ${url}`);
let localStatus = "skipped";
let localTitle = "";
let localText = "";
let localError: string | undefined;
try {
const res = await fetchHtml(url);
if (!res.ok) {
localStatus = `http ${res.status}`;
} else if (!res.contentType.includes("text/html")) {
localStatus = `non-html (${res.contentType})`;
} else {
const readable = await extractReadableContent({
html: res.body,
url: res.finalUrl,
extractMode: "markdown",
});
if (readable?.text) {
localStatus = "readability";
localTitle = readable.title ?? "";
localText = readable.text;
} else {
localStatus = "readability-empty";
}
}
} catch (error) {
localStatus = "error";
localError = error instanceof Error ? error.message : String(error);
}
console.log(`local: ${localStatus} len=${localText.length} title=${truncate(localTitle, 80)}`);
if (localError) {
console.log(`local error: ${localError}`);
}
if (localText) {
console.log(`local sample: ${truncate(localText)}`);
}
if (apiKey) {
try {
const firecrawl = await fetchFirecrawlContent({
url,
extractMode: "markdown",
apiKey,
baseUrl,
onlyMainContent: true,
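// 172_800_000 ms = 48 hours.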
maxAgeMs: 172_800_000,
proxy: "auto",
storeInCache: true,
timeoutSeconds: 60,
});
console.log(
`firecrawl: ok len=${firecrawl.text.length} title=${truncate(
firecrawl.title ?? "",
80,
)} status=${firecrawl.status ?? "n/a"}`,
);
if (firecrawl.warning) {
console.log(`firecrawl warning: ${firecrawl.warning}`);
}
if (firecrawl.text) {
console.log(`firecrawl sample: ${truncate(firecrawl.text)}`);
}
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
console.log(`firecrawl: error ${message}`);
}
}
}
process.exit(0);
}
run().catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env node
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
const args = new Set(process.argv.slice(2));
const checkOnly = args.has("--check");
const writeMode = args.has("--write") || !checkOnly;
if (checkOnly && args.has("--write")) {
console.error("Use either --check or --write, not both.");
process.exit(1);
}
const here = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(here, "..");
const policyPath = path.join(repoRoot, "src", "infra", "host-env-security-policy.json");
const outputPath = path.join(
repoRoot,
"apps",
"macos",
"Sources",
"OpenClaw",
"HostEnvSecurityPolicy.generated.swift",
);
/** @type {{blockedKeys: string[]; blockedOverrideKeys?: string[]; blockedPrefixes: string[]}} */
const policy = JSON.parse(fs.readFileSync(policyPath, "utf8"));
const renderSwiftStringArray = (items) => items.map((item) => ` "${item}"`).join(",\n");
const generated = `// Generated file. Do not edit directly.
// Source: src/infra/host-env-security-policy.json
// Regenerate: node scripts/generate-host-env-security-policy-swift.mjs --write
import Foundation
enum HostEnvSecurityPolicy {
static let blockedKeys: Set<String> = [
${renderSwiftStringArray(policy.blockedKeys)}
]
static let blockedOverrideKeys: Set<String> = [
${renderSwiftStringArray(policy.blockedOverrideKeys ?? [])}
]
static let blockedPrefixes: [String] = [
${renderSwiftStringArray(policy.blockedPrefixes)}
]
}
`;
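// Read any existing output so --check can report drift without writing.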
const current = fs.existsSync(outputPath) ? fs.readFileSync(outputPath, "utf8") : null;
if (checkOnly) {
if (current === generated) {
console.log(`OK ${path.relative(repoRoot, outputPath)}`);
process.exit(0);
}
console.error(
[
`Out of date ${path.relative(repoRoot, outputPath)}.`,
"Run: node scripts/generate-host-env-security-policy-swift.mjs --write",
].join("\n"),
);
process.exit(1);
}
if (writeMode) {
if (current !== generated) {
fs.writeFileSync(outputPath, generated);
console.log(`Wrote ${path.relative(repoRoot, outputPath)}`);
} else {
console.log(`Unchanged ${path.relative(repoRoot, outputPath)}`);
}
}

View File

@@ -0,0 +1,168 @@
#!/usr/bin/env node
import { execFileSync, spawnSync } from "node:child_process";
import crypto from "node:crypto";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
function usage() {
console.error(
[
"Usage:",
" node scripts/ghsa-patch.mjs --ghsa <GHSA-id-or-url> [--repo owner/name]",
" --summary <text> --severity <low|medium|high|critical>",
" --description-file <path>",
" --vulnerable-version-range <range>",
" --patched-versions <range-or-null>",
" [--package openclaw] [--ecosystem npm] [--cvss <vector>]",
].join("\n"),
);
}
function fail(message) {
console.error(message);
process.exit(1);
}
function parseArgs(argv) {
const out = {};
for (let i = 0; i < argv.length; i += 1) {
const arg = argv[i];
if (!arg.startsWith("--")) {
fail(`Unexpected argument: ${arg}`);
}
const key = arg.slice(2);
const value = argv[i + 1];
if (!value || value.startsWith("--")) {
fail(`Missing value for --${key}`);
}
out[key] = value;
i += 1;
}
return out;
}
function runGh(args) {
const proc = spawnSync("gh", args, { encoding: "utf8" });
if (proc.status !== 0) {
fail(proc.stderr.trim() || proc.stdout.trim() || `gh ${args.join(" ")} failed`);
}
return proc.stdout;
}
function deriveRepoFromOrigin() {
const remote = execFileSync("git", ["remote", "get-url", "origin"], { encoding: "utf8" }).trim();
const httpsMatch = remote.match(/github\.com[/:]([^/]+)\/([^/.]+)(?:\.git)?$/);
if (!httpsMatch) {
fail(`Could not parse origin remote: ${remote}`);
}
return `${httpsMatch[1]}/${httpsMatch[2]}`;
}
function parseGhsaId(value) {
const match = value.match(/GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}/i);
if (!match) {
fail(`Could not parse GHSA id from: ${value}`);
}
return match[0];
}
function writeTempJson(data) {
const file = path.join(os.tmpdir(), `ghsa-patch-${crypto.randomUUID()}.json`);
fs.writeFileSync(file, `${JSON.stringify(data, null, 2)}\n`);
return file;
}
const args = parseArgs(process.argv.slice(2));
if (!args.ghsa || !args.summary || !args.severity || !args["description-file"]) {
usage();
process.exit(1);
}
const repo = args.repo || deriveRepoFromOrigin();
const ghsaId = parseGhsaId(args.ghsa);
const advisoryPath = `/repos/${repo}/security-advisories/${ghsaId}`;
const descriptionPath = path.resolve(args["description-file"]);
if (!fs.existsSync(descriptionPath)) {
fail(`Description file does not exist: ${descriptionPath}`);
}
const current = JSON.parse(runGh(["api", "-H", "X-GitHub-Api-Version: 2022-11-28", advisoryPath]));
const restoredCvss = args.cvss || current?.cvss?.vector_string || null;
const ecosystem = args.ecosystem || "npm";
const packageName = args.package || "openclaw";
const vulnerableRange = args["vulnerable-version-range"];
const patchedVersionsRaw = args["patched-versions"];
if (!vulnerableRange) {
fail("Missing --vulnerable-version-range");
}
if (patchedVersionsRaw === undefined) {
fail("Missing --patched-versions");
}
const patchedVersions = patchedVersionsRaw === "null" ? null : patchedVersionsRaw;
const description = fs.readFileSync(descriptionPath, "utf8");
const payload = {
summary: args.summary,
severity: args.severity,
description,
vulnerabilities: [
{
package: {
ecosystem,
name: packageName,
},
vulnerable_version_range: vulnerableRange,
patched_versions: patchedVersions,
vulnerable_functions: [],
},
],
};
const patchFile = writeTempJson(payload);
runGh([
"api",
"-H",
"X-GitHub-Api-Version: 2022-11-28",
"-X",
"PATCH",
advisoryPath,
"--input",
patchFile,
]);
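// Re-apply the CVSS vector separately; the advisory PATCH above can otherwise
// clear it (restoredCvss falls back to the pre-patch value).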
if (restoredCvss) {
runGh([
"api",
"-H",
"X-GitHub-Api-Version: 2022-11-28",
"-X",
"PATCH",
advisoryPath,
"-f",
`cvss_vector_string=${restoredCvss}`,
]);
}
const refreshed = JSON.parse(
runGh(["api", "-H", "X-GitHub-Api-Version: 2022-11-28", advisoryPath]),
);
console.log(
JSON.stringify(
{
html_url: refreshed.html_url,
state: refreshed.state,
severity: refreshed.severity,
summary: refreshed.summary,
vulnerabilities: refreshed.vulnerabilities,
cvss: refreshed.cvss,
updated_at: refreshed.updated_at,
},
null,
2,
),
);

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
IOS_DIR="${ROOT_DIR}/apps/ios"
TEAM_ID_SCRIPT="${ROOT_DIR}/scripts/ios-team-id.sh"
LOCAL_SIGNING_FILE="${IOS_DIR}/.local-signing.xcconfig"
sanitize_identifier_segment() {
local raw="${1:-}"
raw="$(printf '%s' "$raw" | tr '[:upper:]' '[:lower:]')"
raw="$(printf '%s' "$raw" | sed -E 's/[^a-z0-9]+/-/g; s/^-+//; s/-+$//; s/-+/-/g')"
if [[ -z "$raw" ]]; then
raw="local"
fi
printf '%s\n' "$raw"
}
normalize_bundle_id() {
local raw="${1:-}"
raw="$(printf '%s' "$raw" | tr '[:upper:]' '[:lower:]')"
raw="$(printf '%s' "$raw" | sed -E 's/[^a-z0-9.-]+/-/g; s/\.+/./g; s/^-+//; s/[.-]+$//')"
if [[ -z "$raw" ]]; then
raw="ai.openclaw.ios.test.local"
fi
printf '%s\n' "$raw"
}
if [[ ! -x "${TEAM_ID_SCRIPT}" ]]; then
echo "ERROR: Missing team detection helper: ${TEAM_ID_SCRIPT}" >&2
exit 1
fi
team_id=""
if team_id="$("${TEAM_ID_SCRIPT}" 2>/dev/null)"; then
:
else
if [[ "${IOS_SIGNING_REQUIRED:-0}" == "1" ]]; then
"${TEAM_ID_SCRIPT}"
exit 1
fi
echo "WARN: Unable to detect an Apple Team ID; keeping existing iOS signing override (if any)." >&2
exit 0
fi
if [[ -n "${OPENCLAW_IOS_BUNDLE_SUFFIX:-}" ]]; then
identity_source="${OPENCLAW_IOS_BUNDLE_SUFFIX}"
else
identity_source="${USER:-}"
if [[ -z "${identity_source}" ]]; then
identity_source="$(id -un 2>/dev/null || true)"
fi
team_segment="$(sanitize_identifier_segment "${team_id}")"
identity_source="${identity_source}-${team_segment}"
fi
bundle_suffix="$(sanitize_identifier_segment "${identity_source}")"
bundle_base="${OPENCLAW_IOS_APP_BUNDLE_ID:-${OPENCLAW_IOS_BUNDLE_ID_BASE:-}}"
if [[ -z "${bundle_base}" ]]; then
bundle_base="ai.openclaw.ios.test.${bundle_suffix}"
fi
bundle_base="$(normalize_bundle_id "${bundle_base}")"
share_bundle_id="${OPENCLAW_IOS_SHARE_BUNDLE_ID:-${bundle_base}.share}"
watch_app_bundle_id="${OPENCLAW_IOS_WATCH_APP_BUNDLE_ID:-${bundle_base}.watchkitapp}"
watch_extension_bundle_id="${OPENCLAW_IOS_WATCH_EXTENSION_BUNDLE_ID:-${watch_app_bundle_id}.extension}"
code_sign_style="${OPENCLAW_IOS_CODE_SIGN_STYLE:-Automatic}"
app_profile="${OPENCLAW_IOS_APP_PROFILE:-}"
share_profile="${OPENCLAW_IOS_SHARE_PROFILE:-}"
tmp_file="$(mktemp "${TMPDIR:-/tmp}/openclaw-ios-signing.XXXXXX")"
cat >"${tmp_file}" <<EOF
// Auto-generated by scripts/ios-configure-signing.sh.
// This file is local-only and should not be committed.
// Override values with env vars if needed:
// OPENCLAW_IOS_APP_BUNDLE_ID / OPENCLAW_IOS_BUNDLE_ID_BASE
// OPENCLAW_IOS_SHARE_BUNDLE_ID / OPENCLAW_IOS_WATCH_APP_BUNDLE_ID / OPENCLAW_IOS_WATCH_EXTENSION_BUNDLE_ID
// OPENCLAW_IOS_CODE_SIGN_STYLE / OPENCLAW_IOS_APP_PROFILE / OPENCLAW_IOS_SHARE_PROFILE
OPENCLAW_CODE_SIGN_STYLE = ${code_sign_style}
OPENCLAW_DEVELOPMENT_TEAM = ${team_id}
// Keep legacy key for compatibility with older signing config paths.
OPENCLAW_IOS_SELECTED_TEAM = ${team_id}
OPENCLAW_APP_BUNDLE_ID = ${bundle_base}
OPENCLAW_SHARE_BUNDLE_ID = ${share_bundle_id}
OPENCLAW_WATCH_APP_BUNDLE_ID = ${watch_app_bundle_id}
OPENCLAW_WATCH_EXTENSION_BUNDLE_ID = ${watch_extension_bundle_id}
OPENCLAW_APP_PROFILE = ${app_profile}
OPENCLAW_SHARE_PROFILE = ${share_profile}
EOF
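# Only replace the local xcconfig when its contents changed so Xcode does not
# see spurious modifications.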
if [[ -f "${LOCAL_SIGNING_FILE}" ]] && cmp -s "${tmp_file}" "${LOCAL_SIGNING_FILE}"; then
rm -f "${tmp_file}"
echo "iOS signing config already up to date: team=${team_id} app=${bundle_base}"
exit 0
fi
mv "${tmp_file}" "${LOCAL_SIGNING_FILE}"
echo "Configured iOS signing: team=${team_id} app=${bundle_base}"

View File

@@ -0,0 +1,207 @@
#!/usr/bin/env bash
set -euo pipefail
if [[ -n "${IOS_DEVELOPMENT_TEAM:-}" ]]; then
printf '%s\n' "${IOS_DEVELOPMENT_TEAM}"
exit 0
fi
preferred_team="${IOS_PREFERRED_TEAM_ID:-${OPENCLAW_IOS_DEFAULT_TEAM_ID:-Y5PE65HELJ}}"
preferred_team_name="${IOS_PREFERRED_TEAM_NAME:-}"
allow_keychain_fallback="${IOS_ALLOW_KEYCHAIN_TEAM_FALLBACK:-0}"
prefer_non_free_team="${IOS_PREFER_NON_FREE_TEAM:-1}"
preferred_team="${preferred_team//$'\r'/}"
preferred_team_name="${preferred_team_name//$'\r'/}"
declare -a team_ids=()
declare -a team_is_free=()
declare -a team_names=()
python_cmd=""
detect_python() {
local candidate
for candidate in "${IOS_PYTHON_BIN:-}" python3 python /usr/bin/python3; do
[[ -n "$candidate" ]] || continue
if command -v "$candidate" >/dev/null 2>&1; then
printf '%s\n' "$candidate"
return 0
fi
done
return 1
}
python_cmd="$(detect_python || true)"
append_team() {
local candidate_id="$1"
local candidate_is_free="$2"
local candidate_name="$3"
candidate_id="${candidate_id//$'\r'/}"
candidate_is_free="${candidate_is_free//$'\r'/}"
candidate_name="${candidate_name//$'\r'/}"
[[ -z "$candidate_id" ]] && return
local i
for i in "${!team_ids[@]}"; do
if [[ "${team_ids[$i]}" == "$candidate_id" ]]; then
return
fi
done
team_ids+=("$candidate_id")
team_is_free+=("$candidate_is_free")
team_names+=("$candidate_name")
}
load_teams_from_xcode_preferences() {
local plist_path="${HOME}/Library/Preferences/com.apple.dt.Xcode.plist"
[[ -f "$plist_path" ]] || return 0
[[ -n "$python_cmd" ]] || return 0
while IFS=$'\t' read -r team_id is_free team_name; do
[[ -z "$team_id" ]] && continue
append_team "$team_id" "${is_free:-0}" "${team_name:-}"
done < <(
plutil -extract IDEProvisioningTeams json -o - "$plist_path" 2>/dev/null \
| "$python_cmd" -c '
import json
import sys
try:
data = json.load(sys.stdin)
except Exception:
raise SystemExit(0)
if not isinstance(data, dict):
raise SystemExit(0)
for teams in data.values():
if not isinstance(teams, list):
continue
for team in teams:
if not isinstance(team, dict):
continue
team_id = str(team.get("teamID", "")).strip()
if not team_id:
continue
is_free = "1" if bool(team.get("isFreeProvisioningTeam", False)) else "0"
team_name = str(team.get("teamName", "")).replace("\t", " ").strip()
print(f"{team_id}\t{is_free}\t{team_name}")
'
)
}
load_teams_from_legacy_defaults_key() {
while IFS= read -r team; do
[[ -z "$team" ]] && continue
append_team "$team" "0" ""
done < <(
defaults read com.apple.dt.Xcode IDEProvisioningTeamIdentifiers 2>/dev/null \
| grep -Eo '[A-Z0-9]{10}' || true
)
}
load_teams_from_xcode_managed_profiles() {
local profiles_dir="${HOME}/Library/MobileDevice/Provisioning Profiles"
[[ -d "$profiles_dir" ]] || return 0
[[ -n "$python_cmd" ]] || return 0
while IFS= read -r team; do
[[ -z "$team" ]] && continue
append_team "$team" "0" ""
done < <(
for p in "${profiles_dir}"/*.mobileprovision; do
[[ -f "$p" ]] || continue
security cms -D -i "$p" 2>/dev/null \
| "$python_cmd" -c '
import plistlib, sys
try:
raw = sys.stdin.buffer.read()
if not raw:
raise SystemExit(0)
d = plistlib.loads(raw)
for tid in d.get("TeamIdentifier", []):
print(tid)
except Exception:
pass
' 2>/dev/null
done | sort -u
)
}
has_xcode_account() {
local plist_path="${HOME}/Library/Preferences/com.apple.dt.Xcode.plist"
[[ -f "$plist_path" ]] || return 1
local accts
accts="$(defaults read com.apple.dt.Xcode DVTDeveloperAccountManagerAppleIDLists 2>/dev/null || true)"
[[ -n "$accts" ]] && [[ "$accts" != *"does not exist"* ]] && grep -q 'identifier' <<< "$accts"
}
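# Team resolution order: Xcode provisioning teams plist, then the legacy
# defaults key, then Xcode-managed profiles; keychain identities are consulted
# only when IOS_ALLOW_KEYCHAIN_TEAM_FALLBACK=1.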
load_teams_from_xcode_preferences
load_teams_from_legacy_defaults_key
if [[ ${#team_ids[@]} -eq 0 ]]; then
load_teams_from_xcode_managed_profiles
fi
if [[ ${#team_ids[@]} -eq 0 && "$allow_keychain_fallback" == "1" ]]; then
while IFS= read -r team; do
[[ -z "$team" ]] && continue
append_team "$team" "0" ""
done < <(
security find-identity -p codesigning -v 2>/dev/null \
| grep -Eo '\([A-Z0-9]{10}\)' \
| tr -d '()' || true
)
fi
if [[ ${#team_ids[@]} -eq 0 ]]; then
if has_xcode_account; then
echo "An Apple account is signed in to Xcode, but no Team ID could be resolved." >&2
echo "" >&2
echo "On Xcode 16+, team data is not written until you build a project." >&2
echo "To fix this, do ONE of the following:" >&2
echo "" >&2
echo " 1. Open the iOS project in Xcode, select your Team in Signing &" >&2
echo " Capabilities, and build once. Then re-run this script." >&2
echo "" >&2
echo " 2. Set your Team ID directly:" >&2
echo " export IOS_DEVELOPMENT_TEAM=<your-10-char-team-id>" >&2
echo " Find your Team ID at: https://developer.apple.com/account#MembershipDetailsCard" >&2
elif [[ "$allow_keychain_fallback" == "1" ]]; then
echo "No Apple Team ID found. Open Xcode or install signing certificates first." >&2
else
echo "No Apple Team ID found in Xcode accounts. Open Xcode → Settings → Accounts and sign in, then retry." >&2
echo "(Set IOS_ALLOW_KEYCHAIN_TEAM_FALLBACK=1 to allow keychain-only team detection.)" >&2
fi
exit 1
fi
for i in "${!team_ids[@]}"; do
if [[ "${team_ids[$i]}" == "$preferred_team" ]]; then
printf '%s\n' "${team_ids[$i]}"
exit 0
fi
done
if [[ -n "$preferred_team_name" ]]; then
preferred_team_name_lc="$(printf '%s' "$preferred_team_name" | tr '[:upper:]' '[:lower:]')"
for i in "${!team_ids[@]}"; do
team_name_lc="$(printf '%s' "${team_names[$i]}" | tr '[:upper:]' '[:lower:]')"
if [[ "$team_name_lc" == "$preferred_team_name_lc" ]]; then
printf '%s\n' "${team_ids[$i]}"
exit 0
fi
done
fi
if [[ "$prefer_non_free_team" == "1" ]]; then
for i in "${!team_ids[@]}"; do
if [[ "${team_is_free[$i]}" == "0" ]]; then
printf '%s\n' "${team_ids[$i]}"
exit 0
fi
done
fi
printf '%s\n' "${team_ids[0]}"

View File

@@ -0,0 +1,912 @@
import { execFileSync } from "node:child_process";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { dirname, join } from "node:path";
const BUG_LABEL = "bug";
const ENHANCEMENT_LABEL = "enhancement";
const SUPPORT_LABEL = "r: support";
const SKILL_LABEL = "r: skill";
const DEFAULT_MODEL = "gpt-5.2-codex";
const MAX_BODY_CHARS = 6000;
const GH_MAX_BUFFER = 50 * 1024 * 1024;
const PAGE_SIZE = 50;
const WORK_BATCH_SIZE = 500;
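// Open items are fetched in PAGE_SIZE GraphQL pages and yielded to the
// classifier in WORK_BATCH_SIZE chunks.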
const STATE_VERSION = 1;
const STATE_FILE_NAME = "issue-labeler-state.json";
const CONFIG_BASE_DIR = process.env.XDG_CONFIG_HOME ?? join(homedir(), ".config");
const STATE_FILE_PATH = join(CONFIG_BASE_DIR, "openclaw", STATE_FILE_NAME);
const ISSUE_QUERY = `
query($owner: String!, $name: String!, $after: String, $pageSize: Int!) {
repository(owner: $owner, name: $name) {
issues(states: OPEN, first: $pageSize, after: $after, orderBy: { field: CREATED_AT, direction: DESC }) {
nodes {
number
title
body
labels(first: 100) {
nodes {
name
}
}
}
pageInfo {
hasNextPage
endCursor
}
totalCount
}
}
}
`;
const PULL_REQUEST_QUERY = `
query($owner: String!, $name: String!, $after: String, $pageSize: Int!) {
repository(owner: $owner, name: $name) {
pullRequests(states: OPEN, first: $pageSize, after: $after, orderBy: { field: CREATED_AT, direction: DESC }) {
nodes {
number
title
body
labels(first: 100) {
nodes {
name
}
}
}
pageInfo {
hasNextPage
endCursor
}
totalCount
}
}
}
`;
type IssueLabel = { name: string };
type LabelItem = {
number: number;
title: string;
body?: string | null;
labels: IssueLabel[];
};
type Issue = LabelItem;
type PullRequest = LabelItem;
type Classification = {
category: "bug" | "enhancement";
isSupport: boolean;
isSkillOnly: boolean;
};
type ScriptOptions = {
limit: number;
dryRun: boolean;
model: string;
};
type OpenAIResponse = {
output_text?: string;
output?: OpenAIResponseOutput[];
};
type OpenAIResponseOutput = {
type?: string;
content?: OpenAIResponseContent[];
};
type OpenAIResponseContent = {
type?: string;
text?: string;
};
type RepoInfo = {
owner: string;
name: string;
};
type IssuePageInfo = {
hasNextPage: boolean;
endCursor?: string | null;
};
type IssuePage = {
nodes: Array<{
number: number;
title: string;
body?: string | null;
labels?: { nodes?: IssueLabel[] | null } | null;
}>;
pageInfo: IssuePageInfo;
totalCount: number;
};
type IssueQueryResponse = {
data?: {
repository?: {
issues?: IssuePage | null;
} | null;
};
errors?: Array<{ message?: string }>;
};
type PullRequestPage = {
nodes: Array<{
number: number;
title: string;
body?: string | null;
labels?: { nodes?: IssueLabel[] | null } | null;
}>;
pageInfo: IssuePageInfo;
totalCount: number;
};
type PullRequestQueryResponse = {
data?: {
repository?: {
pullRequests?: PullRequestPage | null;
} | null;
};
errors?: Array<{ message?: string }>;
};
type IssueBatch = {
batchIndex: number;
issues: Issue[];
totalCount: number;
fetchedCount: number;
};
type PullRequestBatch = {
batchIndex: number;
pullRequests: PullRequest[];
totalCount: number;
fetchedCount: number;
};
type ScriptState = {
version: number;
issues: number[];
pullRequests: number[];
};
type LoadedState = {
state: ScriptState;
issueSet: Set<number>;
pullRequestSet: Set<number>;
};
type LabelTarget = "issue" | "pr";
function parseArgs(argv: string[]): ScriptOptions {
let limit = Number.POSITIVE_INFINITY;
let dryRun = false;
let model = DEFAULT_MODEL;
for (let index = 0; index < argv.length; index++) {
const arg = argv[index];
if (arg === "--dry-run") {
dryRun = true;
continue;
}
if (arg === "--limit") {
const next = argv[index + 1];
if (!next || Number.isNaN(Number(next))) {
throw new Error("Missing/invalid --limit value");
}
const parsed = Number(next);
if (parsed <= 0) {
throw new Error("--limit must be greater than 0");
}
limit = parsed;
index++;
continue;
}
if (arg === "--model") {
const next = argv[index + 1];
if (!next) {
throw new Error("Missing --model value");
}
model = next;
index++;
continue;
}
}
return { limit, dryRun, model };
}
function logHeader(title: string) {
// eslint-disable-next-line no-console
console.log(`\n${title}`);
// eslint-disable-next-line no-console
console.log("=".repeat(title.length));
}
function logStep(message: string) {
// eslint-disable-next-line no-console
console.log(message);
}
function logSuccess(message: string) {
// eslint-disable-next-line no-console
console.log(message);
}
function logInfo(message: string) {
// eslint-disable-next-line no-console
console.log(` ${message}`);
}
function createEmptyState(): LoadedState {
const state: ScriptState = {
version: STATE_VERSION,
issues: [],
pullRequests: [],
};
return {
state,
issueSet: new Set(),
pullRequestSet: new Set(),
};
}
function loadState(statePath: string): LoadedState {
if (!existsSync(statePath)) {
return createEmptyState();
}
const raw = readFileSync(statePath, "utf8");
const parsed = JSON.parse(raw) as Partial<ScriptState>;
const issues = Array.isArray(parsed.issues)
? parsed.issues.filter(
(value): value is number => typeof value === "number" && Number.isFinite(value),
)
: [];
const pullRequests = Array.isArray(parsed.pullRequests)
? parsed.pullRequests.filter(
(value): value is number => typeof value === "number" && Number.isFinite(value),
)
: [];
const state: ScriptState = {
version: STATE_VERSION,
issues,
pullRequests,
};
return {
state,
issueSet: new Set(issues),
pullRequestSet: new Set(pullRequests),
};
}
function saveState(statePath: string, state: ScriptState): void {
mkdirSync(dirname(statePath), { recursive: true });
writeFileSync(statePath, `${JSON.stringify(state, null, 2)}\n`);
}
function buildStateSnapshot(issueSet: Set<number>, pullRequestSet: Set<number>): ScriptState {
return {
version: STATE_VERSION,
issues: Array.from(issueSet).toSorted((a, b) => a - b),
pullRequests: Array.from(pullRequestSet).toSorted((a, b) => a - b),
};
}
function runGh(args: string[]): string {
return execFileSync("gh", args, {
encoding: "utf8",
maxBuffer: GH_MAX_BUFFER,
});
}
function resolveRepo(): RepoInfo {
const remote = execFileSync("git", ["config", "--get", "remote.origin.url"], {
encoding: "utf8",
}).trim();
if (!remote) {
throw new Error("Unable to determine repository from git remote.");
}
const normalized = remote.replace(/\.git$/, "");
if (normalized.startsWith("git@github.com:")) {
const slug = normalized.replace("git@github.com:", "");
const [owner, name] = slug.split("/");
if (owner && name) {
return { owner, name };
}
}
if (normalized.startsWith("https://github.com/")) {
const slug = normalized.replace("https://github.com/", "");
const [owner, name] = slug.split("/");
if (owner && name) {
return { owner, name };
}
}
throw new Error(`Unsupported GitHub remote: ${remote}`);
}
function fetchIssuePage(repo: RepoInfo, after: string | null): IssuePage {
const args = [
"api",
"graphql",
"-f",
`query=${ISSUE_QUERY}`,
"-f",
`owner=${repo.owner}`,
"-f",
`name=${repo.name}`,
];
if (after) {
args.push("-f", `after=${after}`);
}
args.push("-F", `pageSize=${PAGE_SIZE}`);
const stdout = runGh(args);
const payload = JSON.parse(stdout) as IssueQueryResponse;
if (payload.errors?.length) {
const message = payload.errors.map((error) => error.message ?? "Unknown error").join("; ");
throw new Error(`GitHub API error: ${message}`);
}
const issues = payload.data?.repository?.issues;
if (!issues) {
throw new Error("GitHub API response missing issues data.");
}
return issues;
}
function fetchPullRequestPage(repo: RepoInfo, after: string | null): PullRequestPage {
const args = [
"api",
"graphql",
"-f",
`query=${PULL_REQUEST_QUERY}`,
"-f",
`owner=${repo.owner}`,
"-f",
`name=${repo.name}`,
];
if (after) {
args.push("-f", `after=${after}`);
}
args.push("-F", `pageSize=${PAGE_SIZE}`);
const stdout = runGh(args);
const payload = JSON.parse(stdout) as PullRequestQueryResponse;
if (payload.errors?.length) {
const message = payload.errors.map((error) => error.message ?? "Unknown error").join("; ");
throw new Error(`GitHub API error: ${message}`);
}
const pullRequests = payload.data?.repository?.pullRequests;
if (!pullRequests) {
throw new Error("GitHub API response missing pull request data.");
}
return pullRequests;
}
function* fetchOpenIssueBatches(limit: number): Generator<IssueBatch> {
const repo = resolveRepo();
const results: Issue[] = [];
let page = 1;
let after: string | null = null;
let totalCount = 0;
let fetchedCount = 0;
let batchIndex = 1;
logStep(`Repository: ${repo.owner}/${repo.name}`);
while (fetchedCount < limit) {
const pageData = fetchIssuePage(repo, after);
const nodes = pageData.nodes ?? [];
totalCount = pageData.totalCount ?? totalCount;
if (page === 1) {
logSuccess(`Found ${totalCount} open issues.`);
}
logInfo(`Fetched page ${page} (${nodes.length} issues).`);
for (const node of nodes) {
if (fetchedCount >= limit) {
break;
}
results.push({
number: node.number,
title: node.title,
body: node.body ?? "",
labels: node.labels?.nodes ?? [],
});
fetchedCount += 1;
if (results.length >= WORK_BATCH_SIZE) {
yield {
batchIndex,
issues: results.splice(0, results.length),
totalCount,
fetchedCount,
};
batchIndex += 1;
}
}
if (!pageData.pageInfo.hasNextPage) {
break;
}
after = pageData.pageInfo.endCursor ?? null;
page += 1;
}
if (results.length) {
yield {
batchIndex,
issues: results,
totalCount,
fetchedCount,
};
}
}
function* fetchOpenPullRequestBatches(limit: number): Generator<PullRequestBatch> {
const repo = resolveRepo();
const results: PullRequest[] = [];
let page = 1;
let after: string | null = null;
let totalCount = 0;
let fetchedCount = 0;
let batchIndex = 1;
logStep(`Repository: ${repo.owner}/${repo.name}`);
while (fetchedCount < limit) {
const pageData = fetchPullRequestPage(repo, after);
const nodes = pageData.nodes ?? [];
totalCount = pageData.totalCount ?? totalCount;
if (page === 1) {
logSuccess(`Found ${totalCount} open pull requests.`);
}
logInfo(`Fetched page ${page} (${nodes.length} pull requests).`);
for (const node of nodes) {
if (fetchedCount >= limit) {
break;
}
results.push({
number: node.number,
title: node.title,
body: node.body ?? "",
labels: node.labels?.nodes ?? [],
});
fetchedCount += 1;
if (results.length >= WORK_BATCH_SIZE) {
yield {
batchIndex,
pullRequests: results.splice(0, results.length),
totalCount,
fetchedCount,
};
batchIndex += 1;
}
}
if (!pageData.pageInfo.hasNextPage) {
break;
}
after = pageData.pageInfo.endCursor ?? null;
page += 1;
}
if (results.length) {
yield {
batchIndex,
pullRequests: results,
totalCount,
fetchedCount,
};
}
}
function truncateBody(body: string): string {
if (body.length <= MAX_BODY_CHARS) {
return body;
}
return `${body.slice(0, MAX_BODY_CHARS)}\n\n[truncated]`;
}
function buildItemPrompt(item: LabelItem, kind: "issue" | "pull request"): string {
const body = truncateBody(item.body?.trim() ?? "");
return `Type: ${kind}\nTitle:\n${item.title.trim()}\n\nBody:\n${body}`;
}
function extractResponseText(payload: OpenAIResponse): string {
if (payload.output_text && payload.output_text.trim()) {
return payload.output_text.trim();
}
const chunks: string[] = [];
for (const item of payload.output ?? []) {
if (item.type !== "message") {
continue;
}
for (const content of item.content ?? []) {
if (content.type === "output_text" && typeof content.text === "string") {
chunks.push(content.text);
}
}
}
return chunks.join("\n").trim();
}
function isRecord(value: unknown): value is Record<string, unknown> {
return typeof value === "object" && value !== null;
}
function fallbackCategory(issueText: string): "bug" | "enhancement" {
const lower = issueText.toLowerCase();
const bugSignals = [
"bug",
"error",
"crash",
"broken",
"regression",
"fails",
"failure",
"incorrect",
];
return bugSignals.some((signal) => lower.includes(signal)) ? "bug" : "enhancement";
}
function normalizeClassification(raw: unknown, issueText: string): Classification {
const fallback = fallbackCategory(issueText);
if (!isRecord(raw)) {
return { category: fallback, isSupport: false, isSkillOnly: false };
}
const categoryRaw = raw.category;
const category = categoryRaw === "bug" || categoryRaw === "enhancement" ? categoryRaw : fallback;
const isSupport = raw.isSupport === true;
const isSkillOnly = raw.isSkillOnly === true;
return { category, isSupport, isSkillOnly };
}
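// Illustrative behavior (not from the source): normalizeClassification({ category: "bug", isSupport: true }, text)
// yields { category: "bug", isSupport: true, isSkillOnly: false }; any non-object or
// unrecognized payload falls back to fallbackCategory(text) with both flags false.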
async function classifyItem(
item: LabelItem,
kind: "issue" | "pull request",
options: { apiKey: string; model: string },
): Promise<Classification> {
const itemText = buildItemPrompt(item, kind);
const response = await fetch("https://api.openai.com/v1/responses", {
method: "POST",
headers: {
Authorization: `Bearer ${options.apiKey}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
model: options.model,
max_output_tokens: 200,
text: {
format: {
type: "json_schema",
name: "issue_classification",
schema: {
type: "object",
additionalProperties: false,
properties: {
category: { type: "string", enum: ["bug", "enhancement"] },
isSupport: { type: "boolean" },
isSkillOnly: { type: "boolean" },
},
required: ["category", "isSupport", "isSkillOnly"],
},
},
},
input: [
{
role: "system",
content:
"You classify GitHub issues and pull requests for OpenClaw. Respond with JSON only, no extra text.",
},
{
role: "user",
content: [
"Determine classification:\n",
"- category: 'bug' if the item reports incorrect behavior, errors, crashes, or regressions; otherwise 'enhancement'.\n",
"- isSupport: true if the item is primarily a support request or troubleshooting/how-to question, not a change request.\n",
"- isSkillOnly: true if the item solely requests or delivers adding/updating skills (no other feature/bug work).\n\n",
itemText,
"\n\nReturn JSON with keys: category, isSupport, isSkillOnly.",
].join(""),
},
],
}),
});
if (!response.ok) {
const text = await response.text();
throw new Error(`OpenAI request failed (${response.status}): ${text}`);
}
const payload = (await response.json()) as OpenAIResponse;
const rawText = extractResponseText(payload);
let parsed: unknown = undefined;
if (rawText) {
try {
parsed = JSON.parse(rawText);
} catch (error) {
throw new Error(`Failed to parse OpenAI response: ${String(error)} (raw: ${rawText})`, {
cause: error,
});
}
}
return normalizeClassification(parsed, itemText);
}
function applyLabels(
target: LabelTarget,
item: LabelItem,
labelsToAdd: string[],
dryRun: boolean,
): boolean {
if (!labelsToAdd.length) {
return false;
}
if (dryRun) {
logInfo(`Would add labels: ${labelsToAdd.join(", ")}`);
return true;
}
const ghTarget = target === "issue" ? "issue" : "pr";
execFileSync(
"gh",
[ghTarget, "edit", String(item.number), "--add-label", labelsToAdd.join(",")],
{ stdio: "inherit" },
);
return true;
}
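// Illustrative shell equivalent of the execFileSync call above (1234 stands in for
// item.number; gh accepts a single comma-joined --add-label argument):
//   gh issue edit 1234 --add-label "bug,r: support"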
async function main() {
// Makes `... | head` safe.
process.stdout.on("error", (error: NodeJS.ErrnoException) => {
if (error.code === "EPIPE") {
process.exit(0);
}
throw error;
});
const { limit, dryRun, model } = parseArgs(process.argv.slice(2));
const apiKey = process.env.OPENAI_API_KEY;
if (!apiKey) {
throw new Error("OPENAI_API_KEY is required to classify issues and pull requests.");
}
logHeader("OpenClaw Issue Label Audit");
logStep(`Mode: ${dryRun ? "dry-run" : "apply labels"}`);
logStep(`Model: ${model}`);
logStep(`Issue limit: ${Number.isFinite(limit) ? limit : "unlimited"}`);
logStep(`PR limit: ${Number.isFinite(limit) ? limit : "unlimited"}`);
logStep(`Batch size: ${WORK_BATCH_SIZE}`);
logStep(`State file: ${STATE_FILE_PATH}`);
if (dryRun) {
logInfo("Dry-run enabled: state file will not be updated.");
}
let loadedState: LoadedState;
try {
loadedState = loadState(STATE_FILE_PATH);
} catch (error) {
logInfo(`State file unreadable (${String(error)}); starting fresh.`);
loadedState = createEmptyState();
}
logInfo(
`State entries: ${loadedState.issueSet.size} issues, ${loadedState.pullRequestSet.size} pull requests.`,
);
const issueState = loadedState.issueSet;
const pullRequestState = loadedState.pullRequestSet;
logHeader("Issues");
let updatedCount = 0;
let supportCount = 0;
let skillCount = 0;
let categoryAddedCount = 0;
let scannedCount = 0;
let processedCount = 0;
let skippedCount = 0;
let totalCount = 0;
let batches = 0;
for (const batch of fetchOpenIssueBatches(limit)) {
batches += 1;
scannedCount += batch.issues.length;
totalCount = batch.totalCount ?? totalCount;
const pendingIssues = batch.issues.filter((issue) => !issueState.has(issue.number));
const skippedInBatch = batch.issues.length - pendingIssues.length;
skippedCount += skippedInBatch;
logHeader(`Issue Batch ${batch.batchIndex}`);
logInfo(`Fetched ${batch.issues.length} issues (${skippedInBatch} already processed).`);
logInfo(`Processing ${pendingIssues.length} issues (scanned so far: ${scannedCount}).`);
for (const issue of pendingIssues) {
// eslint-disable-next-line no-console
console.log(`\n#${issue.number}: ${issue.title}`);
const labels = new Set(issue.labels.map((label) => label.name));
logInfo(`Existing labels: ${Array.from(labels).toSorted().join(", ") || "none"}`);
const classification = await classifyItem(issue, "issue", { apiKey, model });
logInfo(
`Classification: category=${classification.category}, support=${classification.isSupport ? "yes" : "no"}, skill-only=${classification.isSkillOnly ? "yes" : "no"}.`,
);
const toAdd: string[] = [];
if (!labels.has(BUG_LABEL) && !labels.has(ENHANCEMENT_LABEL)) {
toAdd.push(classification.category);
categoryAddedCount += 1;
}
if (classification.isSupport && !labels.has(SUPPORT_LABEL)) {
toAdd.push(SUPPORT_LABEL);
supportCount += 1;
}
if (classification.isSkillOnly && !labels.has(SKILL_LABEL)) {
toAdd.push(SKILL_LABEL);
skillCount += 1;
}
const changed = applyLabels("issue", issue, toAdd, dryRun);
if (changed) {
updatedCount += 1;
logSuccess(`Labels added: ${toAdd.join(", ")}`);
} else {
logInfo("No label changes needed.");
}
issueState.add(issue.number);
processedCount += 1;
}
if (!dryRun && pendingIssues.length > 0) {
saveState(STATE_FILE_PATH, buildStateSnapshot(issueState, pullRequestState));
logInfo("State checkpoint saved.");
}
}
logHeader("Pull Requests");
let prUpdatedCount = 0;
let prSkillCount = 0;
let prScannedCount = 0;
let prProcessedCount = 0;
let prSkippedCount = 0;
let prTotalCount = 0;
let prBatches = 0;
for (const batch of fetchOpenPullRequestBatches(limit)) {
prBatches += 1;
prScannedCount += batch.pullRequests.length;
prTotalCount = batch.totalCount ?? prTotalCount;
const pendingPullRequests = batch.pullRequests.filter(
(pullRequest) => !pullRequestState.has(pullRequest.number),
);
const skippedInBatch = batch.pullRequests.length - pendingPullRequests.length;
prSkippedCount += skippedInBatch;
logHeader(`PR Batch ${batch.batchIndex}`);
logInfo(
`Fetched ${batch.pullRequests.length} pull requests (${skippedInBatch} already processed).`,
);
logInfo(
`Processing ${pendingPullRequests.length} pull requests (scanned so far: ${prScannedCount}).`,
);
for (const pullRequest of pendingPullRequests) {
// eslint-disable-next-line no-console
console.log(`\n#${pullRequest.number}: ${pullRequest.title}`);
const labels = new Set(pullRequest.labels.map((label) => label.name));
logInfo(`Existing labels: ${Array.from(labels).toSorted().join(", ") || "none"}`);
if (labels.has(SKILL_LABEL)) {
logInfo("Skill label already present; skipping classification.");
pullRequestState.add(pullRequest.number);
prProcessedCount += 1;
continue;
}
const classification = await classifyItem(pullRequest, "pull request", { apiKey, model });
logInfo(
`Classification: category=${classification.category}, support=${classification.isSupport ? "yes" : "no"}, skill-only=${classification.isSkillOnly ? "yes" : "no"}.`,
);
const toAdd: string[] = [];
if (classification.isSkillOnly && !labels.has(SKILL_LABEL)) {
toAdd.push(SKILL_LABEL);
prSkillCount += 1;
}
const changed = applyLabels("pr", pullRequest, toAdd, dryRun);
if (changed) {
prUpdatedCount += 1;
logSuccess(`Labels added: ${toAdd.join(", ")}`);
} else {
logInfo("No label changes needed.");
}
pullRequestState.add(pullRequest.number);
prProcessedCount += 1;
}
if (!dryRun && pendingPullRequests.length > 0) {
saveState(STATE_FILE_PATH, buildStateSnapshot(issueState, pullRequestState));
logInfo("State checkpoint saved.");
}
}
logHeader("Summary");
logInfo(`Issues scanned: ${scannedCount}`);
if (totalCount) {
logInfo(`Total open issues: ${totalCount}`);
}
logInfo(`Issue batches processed: ${batches}`);
logInfo(`Issues processed: ${processedCount}`);
logInfo(`Issues skipped (state): ${skippedCount}`);
logInfo(`Issues updated: ${updatedCount}`);
logInfo(`Added bug/enhancement labels: ${categoryAddedCount}`);
logInfo(`Added r: support labels: ${supportCount}`);
logInfo(`Added r: skill labels (issues): ${skillCount}`);
logInfo(`Pull requests scanned: ${prScannedCount}`);
if (prTotalCount) {
logInfo(`Total open pull requests: ${prTotalCount}`);
}
logInfo(`PR batches processed: ${prBatches}`);
logInfo(`Pull requests processed: ${prProcessedCount}`);
logInfo(`Pull requests skipped (state): ${prSkippedCount}`);
logInfo(`Pull requests updated: ${prUpdatedCount}`);
logInfo(`Added r: skill labels (PRs): ${prSkillCount}`);
}
await main();

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT=$(cd "$(dirname "$0")/.." && pwd)
ZIP=${1:?"Usage: $0 OpenClaw-<ver>.zip"}
FEED_URL=${2:-"https://raw.githubusercontent.com/openclaw/openclaw/main/appcast.xml"}
PRIVATE_KEY_FILE=${SPARKLE_PRIVATE_KEY_FILE:-}
if [[ -z "$PRIVATE_KEY_FILE" ]]; then
echo "Set SPARKLE_PRIVATE_KEY_FILE to your ed25519 private key (Sparkle)." >&2
exit 1
fi
if [[ ! -f "$ZIP" ]]; then
echo "Zip not found: $ZIP" >&2
exit 1
fi
ZIP_DIR=$(cd "$(dirname "$ZIP")" && pwd)
ZIP_NAME=$(basename "$ZIP")
ZIP_BASE="${ZIP_NAME%.zip}"
VERSION=${SPARKLE_RELEASE_VERSION:-}
if [[ -z "$VERSION" ]]; then
# Accept legacy calver suffixes like -1 and prerelease forms like -beta.1 / .beta.1.
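# Illustrative matches: OpenClaw-1.2.3.zip -> 1.2.3; OpenClaw-2026.2-1.zip -> 2026.2-1;
# OpenClaw-2.0.0-beta.1.zip -> 2.0.0-beta.1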
if [[ "$ZIP_NAME" =~ ^OpenClaw-([0-9]+(\.[0-9]+){1,2}([-.][0-9A-Za-z]+([.-][0-9A-Za-z]+)*)?)\.zip$ ]]; then
VERSION="${BASH_REMATCH[1]}"
else
echo "Could not infer version from $ZIP_NAME; set SPARKLE_RELEASE_VERSION." >&2
exit 1
fi
fi
TMP_DIR="$(mktemp -d)"
cleanup() {
rm -rf "$TMP_DIR"
if [[ "${KEEP_SPARKLE_NOTES:-0}" != "1" ]]; then
rm -f "$NOTES_HTML"
fi
}
trap cleanup EXIT
cp -f "$ZIP" "$TMP_DIR/$ZIP_NAME"
if [[ -f "$ROOT/appcast.xml" ]]; then
cp -f "$ROOT/appcast.xml" "$TMP_DIR/appcast.xml"
fi
NOTES_HTML="${ZIP_DIR}/${ZIP_BASE}.html"
if [[ -x "$ROOT/scripts/changelog-to-html.sh" ]]; then
"$ROOT/scripts/changelog-to-html.sh" "$VERSION" >"$NOTES_HTML"
else
echo "Missing scripts/changelog-to-html.sh; cannot generate HTML release notes." >&2
exit 1
fi
cp -f "$NOTES_HTML" "$TMP_DIR/${ZIP_BASE}.html"
DOWNLOAD_URL_PREFIX=${SPARKLE_DOWNLOAD_URL_PREFIX:-"https://github.com/openclaw/openclaw/releases/download/v${VERSION}/"}
export PATH="$ROOT/apps/macos/.build/artifacts/sparkle/Sparkle/bin:$PATH"
if ! command -v generate_appcast >/dev/null; then
echo "generate_appcast not found in PATH. Build Sparkle tools via SwiftPM." >&2
exit 1
fi
generate_appcast \
--ed-key-file "$PRIVATE_KEY_FILE" \
--download-url-prefix "$DOWNLOAD_URL_PREFIX" \
--embed-release-notes \
--link "$FEED_URL" \
"$TMP_DIR"
cp -f "$TMP_DIR/appcast.xml" "$ROOT/appcast.xml"
echo "Appcast generated (appcast.xml). Upload alongside $ZIP at $FEED_URL"

View File

@@ -0,0 +1,84 @@
#!/bin/bash
# Mobile-friendly Claude Code re-authentication
# Designed for use via SSH from Termux
#
# This script handles the authentication flow in a way that works
# from a mobile device by:
# 1. Checking if auth is needed
# 2. Running claude setup-token for long-lived auth
# 3. Outputting URLs that can be easily opened on phone
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'
echo "=== Claude Code Mobile Re-Auth ==="
echo ""
# Check current auth status
echo "Checking auth status..."
AUTH_STATUS=$("$SCRIPT_DIR/claude-auth-status.sh" simple 2>/dev/null || echo "ERROR")
case "$AUTH_STATUS" in
OK)
echo -e "${GREEN}Auth is valid!${NC}"
"$SCRIPT_DIR/claude-auth-status.sh" full
exit 0
;;
CLAUDE_EXPIRING|OPENCLAW_EXPIRING|CLAWDBOT_EXPIRING)
echo -e "${YELLOW}Auth is expiring soon.${NC}"
echo ""
;;
*)
echo -e "${RED}Auth needs refresh.${NC}"
echo ""
;;
esac
echo "Starting long-lived token setup..."
echo ""
echo -e "${CYAN}Instructions:${NC}"
echo "1. Open this URL on your phone:"
echo ""
echo -e " ${CYAN}https://console.anthropic.com/settings/api-keys${NC}"
echo ""
echo "2. Sign in if needed"
echo "3. Create a new API key or use existing 'Claude Code' key"
echo "4. Copy the key (starts with sk-ant-...)"
echo "5. When prompted below, paste the key"
echo ""
echo "Press Enter when ready to continue..."
read -r
# Run setup-token interactively
echo ""
echo "Running 'claude setup-token'..."
echo "(Follow the prompts and paste your API key when asked)"
echo ""
if claude setup-token; then
echo ""
echo -e "${GREEN}Authentication successful!${NC}"
echo ""
"$SCRIPT_DIR/claude-auth-status.sh" full
# Restart openclaw service if running
if systemctl --user is-active openclaw >/dev/null 2>&1; then
echo ""
echo "Restarting openclaw service..."
systemctl --user restart openclaw
echo -e "${GREEN}Service restarted.${NC}"
fi
else
echo ""
echo -e "${RED}Authentication failed.${NC}"
echo "Please try again or check the Claude Code documentation."
exit 1
fi

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
set -euo pipefail
# Notarize a macOS artifact (zip/dmg/pkg) and optionally staple the app bundle.
#
# Usage:
# STAPLE_APP_PATH=dist/OpenClaw.app scripts/notarize-mac-artifact.sh <artifact>
#
# Auth (pick one):
# NOTARYTOOL_PROFILE keychain profile created via `xcrun notarytool store-credentials`
# NOTARYTOOL_KEY path to App Store Connect API key (.p8)
# NOTARYTOOL_KEY_ID API key ID
# NOTARYTOOL_ISSUER API issuer ID
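#
# Example one-time profile setup for NOTARYTOOL_PROFILE (illustrative names/paths):
#   xcrun notarytool store-credentials "openclaw-notary" \
#     --key /path/to/AuthKey_ABC123.p8 --key-id ABC123 --issuer <issuer-uuid>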
ARTIFACT="${1:-}"
STAPLE_APP_PATH="${STAPLE_APP_PATH:-}"
if [[ -z "$ARTIFACT" ]]; then
echo "Usage: $0 <artifact>" >&2
exit 1
fi
if [[ ! -e "$ARTIFACT" ]]; then
echo "Error: artifact not found: $ARTIFACT" >&2
exit 1
fi
if ! command -v xcrun >/dev/null 2>&1; then
echo "Error: xcrun not found; install Xcode command line tools." >&2
exit 1
fi
auth_args=()
if [[ -n "${NOTARYTOOL_PROFILE:-}" ]]; then
auth_args+=(--keychain-profile "$NOTARYTOOL_PROFILE")
elif [[ -n "${NOTARYTOOL_KEY:-}" && -n "${NOTARYTOOL_KEY_ID:-}" && -n "${NOTARYTOOL_ISSUER:-}" ]]; then
auth_args+=(--key "$NOTARYTOOL_KEY" --key-id "$NOTARYTOOL_KEY_ID" --issuer "$NOTARYTOOL_ISSUER")
else
echo "Error: Notary auth missing. Set NOTARYTOOL_PROFILE or NOTARYTOOL_KEY/NOTARYTOOL_KEY_ID/NOTARYTOOL_ISSUER." >&2
exit 1
fi
echo "🧾 Notarizing: $ARTIFACT"
xcrun notarytool submit "$ARTIFACT" "${auth_args[@]}" --wait
case "$ARTIFACT" in
*.dmg|*.pkg)
echo "📌 Stapling artifact: $ARTIFACT"
xcrun stapler staple "$ARTIFACT"
xcrun stapler validate "$ARTIFACT"
;;
*)
;;
esac
if [[ -n "$STAPLE_APP_PATH" ]]; then
if [[ -d "$STAPLE_APP_PATH" ]]; then
echo "📌 Stapling app: $STAPLE_APP_PATH"
xcrun stapler staple "$STAPLE_APP_PATH"
xcrun stapler validate "$STAPLE_APP_PATH"
else
echo "Warn: STAPLE_APP_PATH not found: $STAPLE_APP_PATH" >&2
fi
fi
echo "✅ Notarization complete"

View File

@@ -0,0 +1,261 @@
#!/usr/bin/env bash
set -euo pipefail
# Build and bundle OpenClaw into a minimal .app we can open.
# Outputs to dist/OpenClaw.app
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
APP_ROOT="$ROOT_DIR/dist/OpenClaw.app"
BUILD_ROOT="$ROOT_DIR/apps/macos/.build"
PRODUCT="OpenClaw"
BUNDLE_ID="${BUNDLE_ID:-ai.openclaw.mac.debug}"
PKG_VERSION="$(cd "$ROOT_DIR" && node -p "require('./package.json').version" 2>/dev/null || echo "0.0.0")"
BUILD_TS=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
GIT_COMMIT=$(cd "$ROOT_DIR" && git rev-parse --short HEAD 2>/dev/null || echo "unknown")
GIT_BUILD_NUMBER=$(cd "$ROOT_DIR" && git rev-list --count HEAD 2>/dev/null || echo "0")
APP_VERSION="${APP_VERSION:-$PKG_VERSION}"
APP_BUILD="${APP_BUILD:-$GIT_BUILD_NUMBER}"
BUILD_CONFIG="${BUILD_CONFIG:-debug}"
BUILD_ARCHS_VALUE="${BUILD_ARCHS:-$(uname -m)}"
if [[ "${BUILD_ARCHS_VALUE}" == "all" ]]; then
BUILD_ARCHS_VALUE="arm64 x86_64"
fi
IFS=' ' read -r -a BUILD_ARCHS <<< "$BUILD_ARCHS_VALUE"
PRIMARY_ARCH="${BUILD_ARCHS[0]}"
SPARKLE_PUBLIC_ED_KEY="${SPARKLE_PUBLIC_ED_KEY:-AGCY8w5vHirVfGGDGc8Szc5iuOqupZSh9pMj/Qs67XI=}"
SPARKLE_FEED_URL="${SPARKLE_FEED_URL:-https://raw.githubusercontent.com/openclaw/openclaw/main/appcast.xml}"
AUTO_CHECKS=true
if [[ "$BUNDLE_ID" == *.debug ]]; then
SPARKLE_FEED_URL=""
AUTO_CHECKS=false
fi
if [[ "$AUTO_CHECKS" == "true" && ! "$APP_BUILD" =~ ^[0-9]+$ ]]; then
echo "ERROR: APP_BUILD must be numeric for Sparkle compare (CFBundleVersion). Got: $APP_BUILD" >&2
exit 1
fi
build_path_for_arch() {
echo "$BUILD_ROOT/$1"
}
bin_for_arch() {
echo "$(build_path_for_arch "$1")/$BUILD_CONFIG/$PRODUCT"
}
sparkle_framework_for_arch() {
echo "$(build_path_for_arch "$1")/$BUILD_CONFIG/Sparkle.framework"
}
merge_framework_machos() {
local primary="$1"
local dest="$2"
shift 2
local others=("$@")
archs_for() {
/usr/bin/lipo -info "$1" | /usr/bin/sed -E 's/.*are: //; s/.*architecture: //'
}
arch_in_list() {
local needle="$1"
shift
for item in "$@"; do
if [[ "$item" == "$needle" ]]; then
return 0
fi
done
return 1
}
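# Walk every Mach-O in the primary framework copy; any architecture that exists only
# in the other builds (e.g. an x86_64 slice when the primary is arm64) is thinned out
# with lipo -thin and merged into the destination file with lipo -create.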
while IFS= read -r -d '' file; do
if /usr/bin/file "$file" | /usr/bin/grep -q "Mach-O"; then
local rel="${file#$primary/}"
local primary_archs
primary_archs=$(archs_for "$file")
IFS=' ' read -r -a primary_arch_array <<< "$primary_archs"
local missing_files=()
local tmp_dir
tmp_dir=$(mktemp -d)
for fw in "${others[@]}"; do
local other_file="$fw/$rel"
if [[ ! -f "$other_file" ]]; then
echo "ERROR: Missing $rel in $fw" >&2
rm -rf "$tmp_dir"
exit 1
fi
if /usr/bin/file "$other_file" | /usr/bin/grep -q "Mach-O"; then
local other_archs
other_archs=$(archs_for "$other_file")
IFS=' ' read -r -a other_arch_array <<< "$other_archs"
for arch in "${other_arch_array[@]}"; do
if ! arch_in_list "$arch" "${primary_arch_array[@]}"; then
local thin_file="$tmp_dir/$(echo "$rel" | tr '/' '_')-$arch"
/usr/bin/lipo -thin "$arch" "$other_file" -output "$thin_file"
missing_files+=("$thin_file")
primary_arch_array+=("$arch")
fi
done
fi
done
if [[ "${#missing_files[@]}" -gt 0 ]]; then
/usr/bin/lipo -create "$file" "${missing_files[@]}" -output "$dest/$rel"
fi
rm -rf "$tmp_dir"
fi
done < <(find "$primary" -type f -print0)
}
echo "📦 Ensuring deps (pnpm install)"
(cd "$ROOT_DIR" && pnpm install --no-frozen-lockfile --config.node-linker=hoisted)
if [[ "${SKIP_TSC:-0}" != "1" ]]; then
echo "📦 Building JS (pnpm build)"
(cd "$ROOT_DIR" && pnpm build)
else
echo "📦 Skipping JS build (SKIP_TSC=1)"
fi
if [[ "${SKIP_UI_BUILD:-0}" != "1" ]]; then
echo "🖥 Building Control UI (ui:build)"
(cd "$ROOT_DIR" && node scripts/ui.js build)
else
echo "🖥 Skipping Control UI build (SKIP_UI_BUILD=1)"
fi
cd "$ROOT_DIR/apps/macos"
echo "🔨 Building $PRODUCT ($BUILD_CONFIG) [${BUILD_ARCHS[*]}]"
for arch in "${BUILD_ARCHS[@]}"; do
BUILD_PATH="$(build_path_for_arch "$arch")"
swift build -c "$BUILD_CONFIG" --product "$PRODUCT" --build-path "$BUILD_PATH" --arch "$arch" -Xlinker -rpath -Xlinker @executable_path/../Frameworks
done
BIN_PRIMARY="$(bin_for_arch "$PRIMARY_ARCH")"
echo "pkg: binary $BIN_PRIMARY" >&2
echo "🧹 Cleaning old app bundle"
rm -rf "$APP_ROOT"
mkdir -p "$APP_ROOT/Contents/MacOS"
mkdir -p "$APP_ROOT/Contents/Resources"
mkdir -p "$APP_ROOT/Contents/Frameworks"
echo "📄 Copying Info.plist template"
INFO_PLIST_SRC="$ROOT_DIR/apps/macos/Sources/OpenClaw/Resources/Info.plist"
if [ ! -f "$INFO_PLIST_SRC" ]; then
echo "ERROR: Info.plist template missing at $INFO_PLIST_SRC" >&2
exit 1
fi
cp "$INFO_PLIST_SRC" "$APP_ROOT/Contents/Info.plist"
/usr/libexec/PlistBuddy -c "Set :CFBundleIdentifier ${BUNDLE_ID}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :CFBundleShortVersionString ${APP_VERSION}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :CFBundleVersion ${APP_BUILD}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :OpenClawBuildTimestamp ${BUILD_TS}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :OpenClawGitCommit ${GIT_COMMIT}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :SUFeedURL ${SPARKLE_FEED_URL}" "$APP_ROOT/Contents/Info.plist" \
|| /usr/libexec/PlistBuddy -c "Add :SUFeedURL string ${SPARKLE_FEED_URL}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :SUPublicEDKey ${SPARKLE_PUBLIC_ED_KEY}" "$APP_ROOT/Contents/Info.plist" \
|| /usr/libexec/PlistBuddy -c "Add :SUPublicEDKey string ${SPARKLE_PUBLIC_ED_KEY}" "$APP_ROOT/Contents/Info.plist" || true
if /usr/libexec/PlistBuddy -c "Set :SUEnableAutomaticChecks ${AUTO_CHECKS}" "$APP_ROOT/Contents/Info.plist"; then
true
else
/usr/libexec/PlistBuddy -c "Add :SUEnableAutomaticChecks bool ${AUTO_CHECKS}" "$APP_ROOT/Contents/Info.plist" || true
fi
echo "🚚 Copying binary"
cp "$BIN_PRIMARY" "$APP_ROOT/Contents/MacOS/OpenClaw"
if [[ "${#BUILD_ARCHS[@]}" -gt 1 ]]; then
BIN_INPUTS=()
for arch in "${BUILD_ARCHS[@]}"; do
BIN_INPUTS+=("$(bin_for_arch "$arch")")
done
/usr/bin/lipo -create "${BIN_INPUTS[@]}" -output "$APP_ROOT/Contents/MacOS/OpenClaw"
fi
chmod +x "$APP_ROOT/Contents/MacOS/OpenClaw"
# SwiftPM outputs ad-hoc signed binaries; strip the signature before install_name_tool to avoid warnings.
/usr/bin/codesign --remove-signature "$APP_ROOT/Contents/MacOS/OpenClaw" 2>/dev/null || true
SPARKLE_FRAMEWORK_PRIMARY="$(sparkle_framework_for_arch "$PRIMARY_ARCH")"
if [ -d "$SPARKLE_FRAMEWORK_PRIMARY" ]; then
echo "✨ Embedding Sparkle.framework"
cp -R "$SPARKLE_FRAMEWORK_PRIMARY" "$APP_ROOT/Contents/Frameworks/"
if [[ "${#BUILD_ARCHS[@]}" -gt 1 ]]; then
OTHER_FRAMEWORKS=()
for arch in "${BUILD_ARCHS[@]}"; do
if [[ "$arch" == "$PRIMARY_ARCH" ]]; then
continue
fi
OTHER_FRAMEWORKS+=("$(sparkle_framework_for_arch "$arch")")
done
merge_framework_machos "$SPARKLE_FRAMEWORK_PRIMARY" "$APP_ROOT/Contents/Frameworks/Sparkle.framework" "${OTHER_FRAMEWORKS[@]}"
fi
chmod -R a+rX "$APP_ROOT/Contents/Frameworks/Sparkle.framework"
fi
echo "📦 Copying Swift 6.2 compatibility libraries"
SWIFT_COMPAT_LIB="$(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-6.2/macosx/libswiftCompatibilitySpan.dylib"
if [ -f "$SWIFT_COMPAT_LIB" ]; then
cp "$SWIFT_COMPAT_LIB" "$APP_ROOT/Contents/Frameworks/"
chmod +x "$APP_ROOT/Contents/Frameworks/libswiftCompatibilitySpan.dylib"
else
echo "WARN: Swift compatibility library not found at $SWIFT_COMPAT_LIB (continuing)" >&2
fi
echo "🖼 Copying app icon"
cp "$ROOT_DIR/apps/macos/Sources/OpenClaw/Resources/OpenClaw.icns" "$APP_ROOT/Contents/Resources/OpenClaw.icns"
echo "📦 Copying device model resources"
rm -rf "$APP_ROOT/Contents/Resources/DeviceModels"
cp -R "$ROOT_DIR/apps/macos/Sources/OpenClaw/Resources/DeviceModels" "$APP_ROOT/Contents/Resources/DeviceModels"
echo "📦 Copying model catalog"
MODEL_CATALOG_SRC="$ROOT_DIR/node_modules/@mariozechner/pi-ai/dist/models.generated.js"
MODEL_CATALOG_DEST="$APP_ROOT/Contents/Resources/models.generated.js"
if [ -f "$MODEL_CATALOG_SRC" ]; then
cp "$MODEL_CATALOG_SRC" "$MODEL_CATALOG_DEST"
else
echo "WARN: model catalog missing at $MODEL_CATALOG_SRC (continuing)" >&2
fi
echo "📦 Copying OpenClawKit resources"
OPENCLAWKIT_BUNDLE="$(build_path_for_arch "$PRIMARY_ARCH")/$BUILD_CONFIG/OpenClawKit_OpenClawKit.bundle"
if [ -d "$OPENCLAWKIT_BUNDLE" ]; then
rm -rf "$APP_ROOT/Contents/Resources/OpenClawKit_OpenClawKit.bundle"
cp -R "$OPENCLAWKIT_BUNDLE" "$APP_ROOT/Contents/Resources/OpenClawKit_OpenClawKit.bundle"
else
echo "WARN: OpenClawKit resource bundle not found at $OPENCLAWKIT_BUNDLE (continuing)" >&2
fi
echo "📦 Copying Textual resources"
TEXTUAL_BUNDLE_DIR="$(build_path_for_arch "$PRIMARY_ARCH")/$BUILD_CONFIG"
TEXTUAL_BUNDLE=""
for candidate in \
"$TEXTUAL_BUNDLE_DIR/textual_Textual.bundle" \
"$TEXTUAL_BUNDLE_DIR/Textual_Textual.bundle"
do
if [ -d "$candidate" ]; then
TEXTUAL_BUNDLE="$candidate"
break
fi
done
if [ -z "$TEXTUAL_BUNDLE" ]; then
TEXTUAL_BUNDLE="$(find "$BUILD_ROOT" -type d \( -name "textual_Textual.bundle" -o -name "Textual_Textual.bundle" \) -print -quit)"
fi
if [ -n "$TEXTUAL_BUNDLE" ] && [ -d "$TEXTUAL_BUNDLE" ]; then
rm -rf "$APP_ROOT/Contents/Resources/$(basename "$TEXTUAL_BUNDLE")"
cp -R "$TEXTUAL_BUNDLE" "$APP_ROOT/Contents/Resources/"
else
if [[ "${ALLOW_MISSING_TEXTUAL_BUNDLE:-0}" == "1" ]]; then
echo "WARN: Textual resource bundle not found (continuing due to ALLOW_MISSING_TEXTUAL_BUNDLE=1)" >&2
else
echo "ERROR: Textual resource bundle not found. Set ALLOW_MISSING_TEXTUAL_BUNDLE=1 to bypass." >&2
exit 1
fi
fi
echo "⏹ Stopping any running OpenClaw"
killall -q OpenClaw 2>/dev/null || true
echo "🔏 Signing bundle (auto-selects signing identity if SIGN_IDENTITY is unset)"
"$ROOT_DIR/scripts/codesign-mac-app.sh" "$APP_ROOT"
echo "✅ Bundle ready at $APP_ROOT"

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env bash
set -euo pipefail
# Build the mac app bundle, then create a zip (Sparkle) + styled DMG (humans).
#
# Output:
# - dist/OpenClaw.app
# - dist/OpenClaw-<version>.zip
# - dist/OpenClaw-<version>.dmg
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
BUILD_ROOT="$ROOT_DIR/apps/macos/.build"
PRODUCT="OpenClaw"
BUILD_CONFIG="${BUILD_CONFIG:-release}"
# Default to universal binary for distribution builds (supports both Apple Silicon and Intel Macs)
export BUILD_ARCHS="${BUILD_ARCHS:-all}"
# Use release bundle ID (not .debug) so Sparkle auto-update works.
# The .debug suffix in package-mac-app.sh blanks SUFeedURL intentionally for dev builds.
export BUNDLE_ID="${BUNDLE_ID:-ai.openclaw.mac}"
"$ROOT_DIR/scripts/package-mac-app.sh"
APP="$ROOT_DIR/dist/OpenClaw.app"
if [[ ! -d "$APP" ]]; then
echo "Error: missing app bundle at $APP" >&2
exit 1
fi
VERSION=$(/usr/libexec/PlistBuddy -c "Print CFBundleShortVersionString" "$APP/Contents/Info.plist" 2>/dev/null || echo "0.0.0")
ZIP="$ROOT_DIR/dist/OpenClaw-$VERSION.zip"
DMG="$ROOT_DIR/dist/OpenClaw-$VERSION.dmg"
NOTARY_ZIP="$ROOT_DIR/dist/OpenClaw-$VERSION.notary.zip"
DSYM_ZIP="$ROOT_DIR/dist/OpenClaw-$VERSION.dSYM.zip"
SKIP_NOTARIZE="${SKIP_NOTARIZE:-0}"
NOTARIZE=1
SKIP_DSYM="${SKIP_DSYM:-0}"
if [[ "$SKIP_NOTARIZE" == "1" ]]; then
NOTARIZE=0
fi
if [[ "$NOTARIZE" == "1" ]]; then
echo "📦 Notary zip: $NOTARY_ZIP"
rm -f "$NOTARY_ZIP"
ditto -c -k --sequesterRsrc --keepParent "$APP" "$NOTARY_ZIP"
STAPLE_APP_PATH="$APP" "$ROOT_DIR/scripts/notarize-mac-artifact.sh" "$NOTARY_ZIP"
rm -f "$NOTARY_ZIP"
fi
echo "📦 Zip: $ZIP"
rm -f "$ZIP"
ditto -c -k --sequesterRsrc --keepParent "$APP" "$ZIP"
echo "💿 DMG: $DMG"
"$ROOT_DIR/scripts/create-dmg.sh" "$APP" "$DMG"
if [[ "$NOTARIZE" == "1" ]]; then
if [[ -n "${SIGN_IDENTITY:-}" ]]; then
echo "🔏 Signing DMG: $DMG"
/usr/bin/codesign --force --sign "$SIGN_IDENTITY" --timestamp "$DMG"
fi
"$ROOT_DIR/scripts/notarize-mac-artifact.sh" "$DMG"
fi
if [[ "$SKIP_DSYM" != "1" ]]; then
DSYM_ARM64="$(find "$BUILD_ROOT/arm64" -type d -path "*/$BUILD_CONFIG/$PRODUCT.dSYM" -print -quit)"
DSYM_X86="$(find "$BUILD_ROOT/x86_64" -type d -path "*/$BUILD_CONFIG/$PRODUCT.dSYM" -print -quit)"
if [[ -n "$DSYM_ARM64" || -n "$DSYM_X86" ]]; then
TMP_DSYM="$ROOT_DIR/dist/$PRODUCT.dSYM"
rm -rf "$TMP_DSYM"
if [[ -n "$DSYM_ARM64" && -n "$DSYM_X86" ]]; then
cp -R "$DSYM_ARM64" "$TMP_DSYM"
DWARF_OUT="$TMP_DSYM/Contents/Resources/DWARF/$PRODUCT"
DWARF_ARM="$DSYM_ARM64/Contents/Resources/DWARF/$PRODUCT"
DWARF_X86="$DSYM_X86/Contents/Resources/DWARF/$PRODUCT"
if [[ -f "$DWARF_ARM" && -f "$DWARF_X86" ]]; then
/usr/bin/lipo -create "$DWARF_ARM" "$DWARF_X86" -output "$DWARF_OUT"
else
echo "WARN: Missing DWARF binaries for dSYM merge (continuing)" >&2
fi
else
cp -R "${DSYM_ARM64:-$DSYM_X86}" "$TMP_DSYM"
fi
echo "🧩 dSYM: $DSYM_ZIP"
rm -f "$DSYM_ZIP"
ditto -c -k --keepParent "$TMP_DSYM" "$DSYM_ZIP"
rm -rf "$TMP_DSYM"
else
echo "WARN: dSYM not found; skipping zip (set SKIP_DSYM=1 to silence)" >&2
fi
fi

View File

@@ -0,0 +1,26 @@
# OpenClaw gateway — Podman Quadlet (rootless)
# Installed by setup-podman.sh into openclaw's ~/.config/containers/systemd/
# {{OPENCLAW_HOME}} is replaced at install time.
[Unit]
Description=OpenClaw gateway (rootless Podman)
[Container]
Image=openclaw:local
ContainerName=openclaw
UserNS=keep-id
Volume={{OPENCLAW_HOME}}/.openclaw:/home/node/.openclaw
EnvironmentFile={{OPENCLAW_HOME}}/.openclaw/.env
Environment=HOME=/home/node
Environment=TERM=xterm-256color
PublishPort=18789:18789
PublishPort=18790:18790
Pull=never
Exec=node dist/index.js gateway --bind lan --port 18789
[Service]
TimeoutStartSec=300
Restart=on-failure
[Install]
WantedBy=default.target
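# Illustrative activation (assumes this unit file is installed as openclaw.container,
# which Quadlet turns into openclaw.service):
#   systemctl --user daemon-reload
#   systemctl --user start openclaw.service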

1279
openclaw/scripts/pr Normal file

File diff suppressed because it is too large

44
openclaw/scripts/pr-merge Normal file
View File

@@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -euo pipefail
script_dir="$(cd "$(dirname "$0")" && pwd)"
base="$script_dir/pr"
if common_git_dir=$(git -C "$script_dir" rev-parse --path-format=absolute --git-common-dir 2>/dev/null); then
canonical_base="$(dirname "$common_git_dir")/scripts/pr"
if [ -x "$canonical_base" ]; then
base="$canonical_base"
fi
fi
usage() {
cat <<USAGE
Usage:
scripts/pr-merge <PR> # verify only (backward compatible)
scripts/pr-merge verify <PR> # verify only
scripts/pr-merge run <PR> # verify + merge + post-merge checks + cleanup
USAGE
}
if [ "$#" -eq 1 ]; then
exec "$base" merge-verify "$1"
fi
if [ "$#" -eq 2 ]; then
mode="$1"
pr="$2"
case "$mode" in
verify)
exec "$base" merge-verify "$pr"
;;
run)
exec "$base" merge-run "$pr"
;;
*)
usage
exit 2
;;
esac
fi
usage
exit 2

View File

@@ -0,0 +1,40 @@
#!/usr/bin/env bash
set -euo pipefail
if [ "$#" -ne 2 ]; then
echo "Usage: scripts/pr-prepare <init|validate-commit|gates|push|run> <PR>"
exit 2
fi
mode="$1"
pr="$2"
script_dir="$(cd "$(dirname "$0")" && pwd)"
base="$script_dir/pr"
if common_git_dir=$(git -C "$script_dir" rev-parse --path-format=absolute --git-common-dir 2>/dev/null); then
canonical_base="$(dirname "$common_git_dir")/scripts/pr"
if [ -x "$canonical_base" ]; then
base="$canonical_base"
fi
fi
case "$mode" in
init)
exec "$base" prepare-init "$pr"
;;
validate-commit)
exec "$base" prepare-validate-commit "$pr"
;;
gates)
exec "$base" prepare-gates "$pr"
;;
push)
exec "$base" prepare-push "$pr"
;;
run)
exec "$base" prepare-run "$pr"
;;
*)
echo "Usage: scripts/pr-prepare <init|validate-commit|gates|push|run> <PR>"
exit 2
;;
esac

View File

@@ -0,0 +1,13 @@
#!/usr/bin/env bash
set -euo pipefail
script_dir="$(cd "$(dirname "$0")" && pwd)"
base="$script_dir/pr"
if common_git_dir=$(git -C "$script_dir" rev-parse --path-format=absolute --git-common-dir 2>/dev/null); then
canonical_base="$(dirname "$common_git_dir")/scripts/pr"
if [ -x "$canonical_base" ]; then
base="$canonical_base"
fi
fi
exec "$base" review-init "$@"

View File

@@ -0,0 +1,39 @@
#!/usr/bin/env node
import path from "node:path";
/**
* Prints selected files as NUL-delimited tokens to stdout.
*
* Usage:
* node scripts/pre-commit/filter-staged-files.mjs lint -- <files...>
* node scripts/pre-commit/filter-staged-files.mjs format -- <files...>
*
* Keep this dependency-free: the pre-commit hook runs in many environments.
*/
const mode = process.argv[2];
const rawArgs = process.argv.slice(3);
const files = rawArgs[0] === "--" ? rawArgs.slice(1) : rawArgs;
if (mode !== "lint" && mode !== "format") {
process.stderr.write("usage: filter-staged-files.mjs <lint|format> -- <files...>\n");
process.exit(2);
}
const lintExts = new Set([".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"]);
const formatExts = new Set([".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".json", ".md", ".mdx"]);
const shouldSelect = (filePath) => {
const ext = path.extname(filePath).toLowerCase();
if (mode === "lint") {
return lintExts.has(ext);
}
return formatExts.has(ext);
};
for (const file of files) {
if (shouldSelect(file)) {
process.stdout.write(file);
process.stdout.write("\0");
}
}
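// Illustrative consumer (eslint stands in for whatever linter the hook actually runs;
// the NUL delimiters pair with xargs -0):
//   git diff --cached --name-only -z \
//     | xargs -0 node scripts/pre-commit/filter-staged-files.mjs lint -- \
//     | xargs -0 eslint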

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
if [[ $# -lt 1 ]]; then
echo "usage: run-node-tool.sh <tool> [args...]" >&2
exit 2
fi
tool="$1"
shift
if [[ -f "$ROOT_DIR/pnpm-lock.yaml" ]] && command -v pnpm >/dev/null 2>&1; then
exec pnpm exec "$tool" "$@"
fi
if { [[ -f "$ROOT_DIR/bun.lockb" ]] || [[ -f "$ROOT_DIR/bun.lock" ]]; } && command -v bun >/dev/null 2>&1; then
exec bunx --bun "$tool" "$@"
fi
if command -v npm >/dev/null 2>&1; then
exec npm exec -- "$tool" "$@"
fi
if command -v npx >/dev/null 2>&1; then
exec npx "$tool" "$@"
fi
echo "Missing package manager: pnpm, bun, or npm required." >&2
exit 1

View File

@@ -0,0 +1,247 @@
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { ErrorCodes, PROTOCOL_VERSION, ProtocolSchemas } from "../src/gateway/protocol/schema.js";
type JsonSchema = {
type?: string | string[];
properties?: Record<string, JsonSchema>;
required?: string[];
items?: JsonSchema;
enum?: string[];
patternProperties?: Record<string, JsonSchema>;
};
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(__dirname, "..");
const outPaths = [
path.join(repoRoot, "apps", "macos", "Sources", "OpenClawProtocol", "GatewayModels.swift"),
path.join(
repoRoot,
"apps",
"shared",
"OpenClawKit",
"Sources",
"OpenClawProtocol",
"GatewayModels.swift",
),
];
const header = `// Generated by scripts/protocol-gen-swift.ts — do not edit by hand\n// swiftlint:disable file_length\nimport Foundation\n\npublic let GATEWAY_PROTOCOL_VERSION = ${PROTOCOL_VERSION}\n\npublic enum ErrorCode: String, Codable, Sendable {\n${Object.values(
ErrorCodes,
)
.map((c) => ` case ${camelCase(c)} = "${c}"`)
.join("\n")}\n}\n`;
const reserved = new Set([
"associatedtype",
"class",
"deinit",
"enum",
"extension",
"fileprivate",
"func",
"import",
"init",
"inout",
"internal",
"let",
"open",
"operator",
"private",
"precedencegroup",
"protocol",
"public",
"rethrows",
"static",
"struct",
"subscript",
"typealias",
"var",
]);
function camelCase(input: string) {
return input
.replace(/[^a-zA-Z0-9]+/g, " ")
.trim()
.toLowerCase()
.split(/\s+/)
.map((p, i) => (i === 0 ? p : p[0].toUpperCase() + p.slice(1)))
.join("");
}
function safeName(name: string) {
const cc = camelCase(name.replace(/-/g, "_"));
if (reserved.has(cc)) {
return `_${cc}`;
}
return cc;
}
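// Illustrative: camelCase("NOT_FOUND") -> "notFound"; safeName("session-id") -> "sessionId";
// safeName("class") -> "_class" (reserved word escaped).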
// filled later once schemas are loaded
const schemaNameByObject = new Map<object, string>();
function swiftType(schema: JsonSchema, required: boolean): string {
const t = schema.type;
const isOptional = !required;
let base: string;
const named = schemaNameByObject.get(schema as object);
if (named) {
base = named;
} else if (t === "string") {
base = "String";
} else if (t === "integer") {
base = "Int";
} else if (t === "number") {
base = "Double";
} else if (t === "boolean") {
base = "Bool";
} else if (t === "array") {
base = `[${swiftType(schema.items ?? { type: "Any" }, true)}]`;
} else if (schema.enum) {
base = "String";
} else if (schema.patternProperties) {
base = "[String: AnyCodable]";
} else if (t === "object") {
base = "[String: AnyCodable]";
} else {
base = "AnyCodable";
}
return isOptional ? `${base}?` : base;
}
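// Illustrative mappings: { type: "string" } -> String; a non-required
// { type: "array", items: { type: "integer" } } -> [Int]?; schemas with no
// recognized type fall back to AnyCodable.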
function emitStruct(name: string, schema: JsonSchema): string {
const props = schema.properties ?? {};
const required = new Set(schema.required ?? []);
const lines: string[] = [];
if (Object.keys(props).length === 0) {
return `public struct ${name}: Codable, Sendable {}\n`;
}
lines.push(`public struct ${name}: Codable, Sendable {`);
const codingKeys: string[] = [];
for (const [key, propSchema] of Object.entries(props)) {
const propName = safeName(key);
const propType = swiftType(propSchema, required.has(key));
lines.push(` public let ${propName}: ${propType}`);
if (propName !== key) {
codingKeys.push(` case ${propName} = "${key}"`);
} else {
codingKeys.push(` case ${propName}`);
}
}
lines.push(
"\n public init(\n" +
Object.entries(props)
.map(([key, prop]) => {
const propName = safeName(key);
const req = required.has(key);
return ` ${propName}: ${swiftType(prop, true)}${req ? "" : "?"}`;
})
.join(",\n") +
")\n" +
" {\n" +
Object.entries(props)
.map(([key]) => {
const propName = safeName(key);
return ` self.${propName} = ${propName}`;
})
.join("\n") +
"\n }\n\n" +
" private enum CodingKeys: String, CodingKey {\n" +
codingKeys.join("\n") +
"\n }\n}",
);
lines.push("");
return lines.join("\n");
}
function emitGatewayFrame(): string {
const cases = ["req", "res", "event"];
const associated: Record<string, string> = {
req: "RequestFrame",
res: "ResponseFrame",
event: "EventFrame",
};
const caseLines = cases.map((c) => ` case ${safeName(c)}(${associated[c]})`);
const initLines = `
private enum CodingKeys: String, CodingKey {
case type
}
public init(from decoder: Decoder) throws {
let typeContainer = try decoder.container(keyedBy: CodingKeys.self)
let type = try typeContainer.decode(String.self, forKey: .type)
switch type {
case "req":
self = try .req(RequestFrame(from: decoder))
case "res":
self = try .res(ResponseFrame(from: decoder))
case "event":
self = try .event(EventFrame(from: decoder))
default:
let container = try decoder.singleValueContainer()
let raw = try container.decode([String: AnyCodable].self)
self = .unknown(type: type, raw: raw)
}
}
public func encode(to encoder: Encoder) throws {
switch self {
case let .req(v):
try v.encode(to: encoder)
case let .res(v):
try v.encode(to: encoder)
case let .event(v):
try v.encode(to: encoder)
case let .unknown(_, raw):
var container = encoder.singleValueContainer()
try container.encode(raw)
}
}
`;
return [
"public enum GatewayFrame: Codable, Sendable {",
...caseLines,
" case unknown(type: String, raw: [String: AnyCodable])",
initLines.trimEnd(),
"}",
"",
].join("\n");
}
async function generate() {
const definitions = Object.entries(ProtocolSchemas) as Array<[string, JsonSchema]>;
for (const [name, schema] of definitions) {
schemaNameByObject.set(schema as object, name);
}
const parts: string[] = [];
parts.push(header);
// Value structs
for (const [name, schema] of definitions) {
if (name === "GatewayFrame") {
continue;
}
if (schema.type === "object") {
parts.push(emitStruct(name, schema));
}
}
// Frame enum must come after payload structs
parts.push(emitGatewayFrame());
const content = parts.join("\n");
for (const outPath of outPaths) {
await fs.mkdir(path.dirname(outPath), { recursive: true });
await fs.writeFile(outPath, content);
console.log(`wrote ${outPath}`);
}
}
generate().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,51 @@
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { ProtocolSchemas } from "../src/gateway/protocol/schema.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(__dirname, "..");
async function writeJsonSchema() {
const definitions: Record<string, unknown> = {};
for (const [name, schema] of Object.entries(ProtocolSchemas)) {
definitions[name] = schema;
}
const rootSchema = {
$schema: "http://json-schema.org/draft-07/schema#",
$id: "https://openclaw.ai/protocol.schema.json",
title: "OpenClaw Gateway Protocol",
description: "Handshake, request/response, and event frames for the Gateway WebSocket.",
oneOf: [
{ $ref: "#/definitions/RequestFrame" },
{ $ref: "#/definitions/ResponseFrame" },
{ $ref: "#/definitions/EventFrame" },
],
discriminator: {
propertyName: "type",
mapping: {
req: "#/definitions/RequestFrame",
res: "#/definitions/ResponseFrame",
event: "#/definitions/EventFrame",
},
},
definitions,
};
const distDir = path.join(repoRoot, "dist");
await fs.mkdir(distDir, { recursive: true });
const jsonSchemaPath = path.join(distDir, "protocol.schema.json");
await fs.writeFile(jsonSchemaPath, JSON.stringify(rootSchema, null, 2));
console.log(`wrote ${jsonSchemaPath}`);
return { jsonSchemaPath, schemaString: JSON.stringify(rootSchema) };
}
async function main() {
await writeJsonSchema();
}
main().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,66 @@
import { createWebFetchTool } from "../src/agents/tools/web-tools.js";
const DEFAULT_URLS = [
"https://example.com/",
"https://news.ycombinator.com/",
"https://www.reddit.com/r/javascript/",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent",
"https://httpbin.org/html",
];
const urls = process.argv.slice(2);
const targets = urls.length > 0 ? urls : DEFAULT_URLS;
async function runFetch(url: string, readability: boolean) {
if (!readability) {
throw new Error("Basic extraction removed. Set readability=true or enable Firecrawl.");
}
const tool = createWebFetchTool({
config: {
tools: {
web: { fetch: { readability, cacheTtlMinutes: 0, firecrawl: { enabled: false } } },
},
},
sandboxed: false,
});
if (!tool) {
throw new Error("web_fetch tool is disabled");
}
const result = await tool.execute("test", { url, extractMode: "markdown" });
return result.details as {
text?: string;
title?: string;
extractor?: string;
length?: number;
truncated?: boolean;
};
}
function truncate(value: string, max = 160): string {
if (!value) {
return "";
}
return value.length > max ? `${value.slice(0, max)}…` : value;
}
async function run() {
for (const url of targets) {
console.log(`\n=== ${url}`);
const readable = await runFetch(url, true);
console.log(
`readability: ${readable.extractor ?? "unknown"} len=${readable.length ?? 0} title=${truncate(
readable.title ?? "",
80,
)}`,
);
if (readable.text) {
console.log(`readability sample: ${truncate(readable.text)}`);
}
}
}
run().catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,191 @@
#!/usr/bin/env bash
# Scan for orphaned coding agent processes after a gateway restart.
#
# Background coding agents (Claude Code, Codex CLI) spawned by the gateway
# can outlive the session that started them when the gateway restarts.
# This script finds them and reports their state.
#
# Usage:
# recover-orphaned-processes.sh
#
# Output: JSON object with `orphaned` array and `ts` timestamp.
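#
# Example output (illustrative values):
#   {"orphaned":[{"pid":4242,"cmd":"claude --resume abc123","cwd":"/home/user/repo",
#   "started":"Fri Feb 27 12:00:00 2026"}],"ts":"2026-02-27T12:34:56.789Z"}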
set -euo pipefail
usage() {
cat <<'USAGE'
Usage: recover-orphaned-processes.sh
Scans for likely orphaned coding agent processes and prints JSON.
USAGE
}
if [ "${1:-}" = "--help" ] || [ "${1:-}" = "-h" ]; then
usage
exit 0
fi
if [ "$#" -gt 0 ]; then
usage >&2
exit 2
fi
if ! command -v node &>/dev/null; then
_ts="unknown"
command -v date &>/dev/null && _ts="$(date -u +%Y-%m-%dT%H:%M:%SZ 2>/dev/null)" || true
[ -z "$_ts" ] && _ts="unknown"
printf '{"error":"node not found on PATH","orphaned":[],"ts":"%s"}\n' "$_ts"
exit 0
fi
node <<'NODE'
const { execFileSync } = require("node:child_process");
const fs = require("node:fs");
let username = process.env.USER || process.env.LOGNAME || "";
if (username && !/^[a-zA-Z0-9._-]+$/.test(username)) {
username = "";
}
function runFile(file, args) {
try {
return execFileSync(file, args, {
encoding: "utf8",
stdio: ["ignore", "pipe", "ignore"],
});
} catch (err) {
if (err && typeof err.stdout === "string") {
return err.stdout;
}
if (err && err.stdout && Buffer.isBuffer(err.stdout)) {
return err.stdout.toString("utf8");
}
return "";
}
}
function resolveStarted(pid) {
const started = runFile("ps", ["-o", "lstart=", "-p", String(pid)]).trim();
return started.length > 0 ? started : "unknown";
}
function resolveCwd(pid) {
if (process.platform === "linux") {
try {
return fs.readlinkSync(`/proc/${pid}/cwd`);
} catch {
return "unknown";
}
}
const lsof = runFile("lsof", ["-a", "-d", "cwd", "-p", String(pid), "-Fn"]);
const match = lsof.match(/^n(.+)$/m);
return match ? match[1] : "unknown";
}
function sanitizeCommand(cmd) {
// Avoid leaking obvious secrets when this diagnostic output is shared.
return cmd
.replace(
/(--(?:token|api[-_]?key|password|secret|authorization)\s+)([^\s]+)/gi,
"$1<redacted>",
)
.replace(
/((?:token|api[-_]?key|password|secret|authorization)=)([^\s]+)/gi,
"$1<redacted>",
)
.replace(/(Bearer\s+)[A-Za-z0-9._~+/=-]+/g, "$1<redacted>");
}
// Pre-filter candidate PIDs using pgrep to avoid scanning all processes.
// Only falls back to a full ps scan when pgrep is genuinely unavailable
// (ENOENT), not when it simply finds no matches (exit code 1).
let pgrepUnavailable = false;
const pgrepResult = (() => {
const args =
username.length > 0
? ["-u", username, "-f", "codex|claude"]
: ["-f", "codex|claude"];
try {
return execFileSync("pgrep", args, {
encoding: "utf8",
stdio: ["ignore", "pipe", "ignore"],
});
} catch (err) {
if (err && err.code === "ENOENT") {
pgrepUnavailable = true;
return "";
}
// pgrep exit code 1 = no matches — return stdout (empty)
if (err && typeof err.stdout === "string") return err.stdout;
return "";
}
})();
const candidatePids = pgrepResult
.split("\n")
.map((s) => s.trim())
.filter((s) => s.length > 0 && /^\d+$/.test(s));
let lines;
if (candidatePids.length > 0) {
// Fetch command info only for candidate PIDs.
lines = runFile("ps", ["-o", "pid=,command=", "-p", candidatePids.join(",")]).split("\n");
} else if (pgrepUnavailable && username.length > 0) {
// pgrep not installed — fall back to user-scoped ps scan.
lines = runFile("ps", ["-U", username, "-o", "pid=,command="]).split("\n");
} else if (pgrepUnavailable) {
// pgrep not installed and no username — full scan as last resort.
lines = runFile("ps", ["-axo", "pid=,command="]).split("\n");
} else {
// pgrep ran successfully but found no matches — no orphans.
lines = [];
}
const includePattern = /codex|claude/i;
const excludePatterns = [
/openclaw-gateway/i,
/signal-cli/i,
/node_modules\/\.bin\/openclaw/i,
/recover-orphaned-processes\.sh/i,
];
const orphaned = [];
for (const rawLine of lines) {
const line = rawLine.trim();
if (!line) {
continue;
}
const match = line.match(/^(\d+)\s+(.+)$/);
if (!match) {
continue;
}
const pid = Number(match[1]);
const cmd = match[2];
if (!Number.isInteger(pid) || pid <= 0 || pid === process.pid) {
continue;
}
if (!includePattern.test(cmd)) {
continue;
}
if (excludePatterns.some((pattern) => pattern.test(cmd))) {
continue;
}
orphaned.push({
pid,
cmd: sanitizeCommand(cmd),
cwd: resolveCwd(pid),
started: resolveStarted(pid),
});
}
process.stdout.write(
JSON.stringify({
orphaned,
ts: new Date().toISOString(),
}) + "\n",
);
NODE

View File

@@ -0,0 +1,128 @@
#!/usr/bin/env -S node --import tsx
import { execSync } from "node:child_process";
import { readdirSync, readFileSync } from "node:fs";
import { join, resolve } from "node:path";
type PackFile = { path: string };
type PackResult = { files?: PackFile[] };
const requiredPathGroups = [
["dist/index.js", "dist/index.mjs"],
["dist/entry.js", "dist/entry.mjs"],
"dist/plugin-sdk/index.js",
"dist/plugin-sdk/index.d.ts",
"dist/build-info.json",
];
const forbiddenPrefixes = ["dist/OpenClaw.app/"];
type PackageJson = {
name?: string;
version?: string;
};
function normalizePluginSyncVersion(version: string): string {
const normalized = version.trim().replace(/^v/, "");
const base = /^([0-9]+\.[0-9]+\.[0-9]+)/.exec(normalized)?.[1];
if (base) {
return base;
}
return normalized.replace(/[-+].*$/, "");
}
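// Illustrative: normalizePluginSyncVersion("v1.2.3-beta.1") -> "1.2.3";
// normalizePluginSyncVersion("2.0.0+build.5") -> "2.0.0".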
function runPackDry(): PackResult[] {
const raw = execSync("npm pack --dry-run --json --ignore-scripts", {
encoding: "utf8",
stdio: ["ignore", "pipe", "pipe"],
maxBuffer: 1024 * 1024 * 100,
});
return JSON.parse(raw) as PackResult[];
}
function checkPluginVersions() {
const rootPackagePath = resolve("package.json");
const rootPackage = JSON.parse(readFileSync(rootPackagePath, "utf8")) as PackageJson;
const targetVersion = rootPackage.version;
const targetBaseVersion = targetVersion ? normalizePluginSyncVersion(targetVersion) : null;
if (!targetVersion || !targetBaseVersion) {
console.error("release-check: root package.json missing version.");
process.exit(1);
}
const extensionsDir = resolve("extensions");
const entries = readdirSync(extensionsDir, { withFileTypes: true }).filter((entry) =>
entry.isDirectory(),
);
const mismatches: string[] = [];
for (const entry of entries) {
const packagePath = join(extensionsDir, entry.name, "package.json");
let pkg: PackageJson;
try {
pkg = JSON.parse(readFileSync(packagePath, "utf8")) as PackageJson;
} catch {
continue;
}
if (!pkg.name || !pkg.version) {
continue;
}
if (normalizePluginSyncVersion(pkg.version) !== targetBaseVersion) {
mismatches.push(`${pkg.name} (${pkg.version})`);
}
}
if (mismatches.length > 0) {
console.error(
`release-check: plugin versions must match release base ${targetBaseVersion} (root ${targetVersion}):`,
);
for (const item of mismatches) {
console.error(` - ${item}`);
}
console.error("release-check: run `pnpm plugins:sync` to align plugin versions.");
process.exit(1);
}
}
function main() {
checkPluginVersions();
const results = runPackDry();
const files = results.flatMap((entry) => entry.files ?? []);
const paths = new Set(files.map((file) => file.path));
const missing = requiredPathGroups
.flatMap((group) => {
if (Array.isArray(group)) {
return group.some((path) => paths.has(path)) ? [] : [group.join(" or ")];
}
return paths.has(group) ? [] : [group];
})
.toSorted();
const forbidden = [...paths].filter((path) =>
forbiddenPrefixes.some((prefix) => path.startsWith(prefix)),
);
if (missing.length > 0 || forbidden.length > 0) {
if (missing.length > 0) {
console.error("release-check: missing files in npm pack:");
for (const path of missing) {
console.error(` - ${path}`);
}
}
if (forbidden.length > 0) {
console.error("release-check: forbidden files in npm pack:");
for (const path of forbidden) {
console.error(` - ${path}`);
}
}
process.exit(1);
}
console.log("release-check: npm pack contents look OK.");
}
main();

View File

@@ -0,0 +1,3 @@
import "../../src/logging/subsystem.js";
console.log("tsx-name-repro: loaded logging/subsystem");

View File

@@ -0,0 +1,269 @@
#!/usr/bin/env bash
# Reset OpenClaw like Trimmy: kill running instances, rebuild, repackage, relaunch, verify.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
APP_BUNDLE="${OPENCLAW_APP_BUNDLE:-}"
APP_PROCESS_PATTERN="OpenClaw.app/Contents/MacOS/OpenClaw"
DEBUG_PROCESS_PATTERN="${ROOT_DIR}/apps/macos/.build/debug/OpenClaw"
LOCAL_PROCESS_PATTERN="${ROOT_DIR}/apps/macos/.build-local/debug/OpenClaw"
RELEASE_PROCESS_PATTERN="${ROOT_DIR}/apps/macos/.build/release/OpenClaw"
LAUNCH_AGENT="${HOME}/Library/LaunchAgents/ai.openclaw.mac.plist"
LOCK_KEY="$(printf '%s' "${ROOT_DIR}" | shasum -a 256 | cut -c1-8)"
LOCK_DIR="${TMPDIR:-/tmp}/openclaw-restart-${LOCK_KEY}"
LOCK_PID_FILE="${LOCK_DIR}/pid"
WAIT_FOR_LOCK=0
LOG_PATH="${OPENCLAW_RESTART_LOG:-/tmp/openclaw-restart.log}"
NO_SIGN=0
SIGN=0
AUTO_DETECT_SIGNING=1
GATEWAY_WAIT_SECONDS="${OPENCLAW_GATEWAY_WAIT_SECONDS:-0}"
LAUNCHAGENT_DISABLE_MARKER="${HOME}/.openclaw/disable-launchagent"
ATTACH_ONLY=1
log() { printf '%s\n' "$*"; }
fail() { printf 'ERROR: %s\n' "$*" >&2; exit 1; }
# Ensure local node binaries (rolldown, pnpm) are discoverable for the steps below.
export PATH="${ROOT_DIR}/node_modules/.bin:${PATH}"
run_step() {
local label="$1"; shift
log "==> ${label}"
if ! "$@"; then
fail "${label} failed"
fi
}
cleanup() {
if [[ -d "${LOCK_DIR}" ]]; then
rm -rf "${LOCK_DIR}"
fi
}
acquire_lock() {
while true; do
if mkdir "${LOCK_DIR}" 2>/dev/null; then
echo "$$" > "${LOCK_PID_FILE}"
return 0
fi
local existing_pid=""
if [[ -f "${LOCK_PID_FILE}" ]]; then
existing_pid="$(cat "${LOCK_PID_FILE}" 2>/dev/null || true)"
fi
if [[ -n "${existing_pid}" ]] && kill -0 "${existing_pid}" 2>/dev/null; then
if [[ "${WAIT_FOR_LOCK}" == "1" ]]; then
log "==> Another restart is running (pid ${existing_pid}); waiting..."
while kill -0 "${existing_pid}" 2>/dev/null; do
sleep 1
done
continue
fi
log "==> Another restart is running (pid ${existing_pid}); re-run with --wait."
exit 0
fi
rm -rf "${LOCK_DIR}"
done
}
check_signing_keys() {
security find-identity -p codesigning -v 2>/dev/null \
| grep -Eq '(Developer ID Application|Apple Distribution|Apple Development)'
}
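# Matching identities print lines like (illustrative):
#   1) 0123ABCD... "Developer ID Application: Jane Doe (TEAM123456)"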
trap cleanup EXIT INT TERM
for arg in "$@"; do
case "${arg}" in
--wait|-w) WAIT_FOR_LOCK=1 ;;
--no-sign) NO_SIGN=1; AUTO_DETECT_SIGNING=0 ;;
--sign) SIGN=1; AUTO_DETECT_SIGNING=0 ;;
--attach-only) ATTACH_ONLY=1 ;;
--no-attach-only) ATTACH_ONLY=0 ;;
--help|-h)
log "Usage: $(basename "$0") [--wait] [--no-sign] [--sign] [--attach-only|--no-attach-only]"
log " --wait Wait for other restart to complete instead of exiting"
log " --no-sign Force no code signing (fastest for development)"
log " --sign Force code signing (will fail if no signing key available)"
log " --attach-only Launch app with --attach-only (skip launchd install)"
log " --no-attach-only Launch app without attach-only override"
log ""
log "Env:"
log " OPENCLAW_GATEWAY_WAIT_SECONDS=0 Wait time before gateway port check (unsigned only)"
log ""
log "Unsigned recovery:"
log " node openclaw.mjs daemon install --force --runtime node"
log " node openclaw.mjs daemon restart"
log ""
log "Reset unsigned overrides:"
log " rm ~/.openclaw/disable-launchagent"
log ""
log "Default behavior: Auto-detect signing keys, fallback to --no-sign if none found"
exit 0
;;
*) ;;
esac
done
if [[ "$NO_SIGN" -eq 1 && "$SIGN" -eq 1 ]]; then
fail "Cannot use --sign and --no-sign together"
fi
mkdir -p "$(dirname "$LOG_PATH")"
rm -f "$LOG_PATH"
exec > >(tee "$LOG_PATH") 2>&1
log "==> Log: ${LOG_PATH}"
if [[ "$NO_SIGN" -eq 1 ]]; then
log "==> Using --no-sign (unsigned flow enabled)"
fi
if [[ "$ATTACH_ONLY" -eq 1 ]]; then
log "==> Using --attach-only (skip launchd install)"
fi
acquire_lock
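# pkill -f matches the full command line for each known build/install path,
# while pkill -x catches a bare "OpenClaw" process name as a fallback; retry
# for ~3s (10 x 0.3s) since children can take a moment to exit.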
kill_all_openclaw() {
for _ in {1..10}; do
pkill -f "${APP_PROCESS_PATTERN}" 2>/dev/null || true
pkill -f "${DEBUG_PROCESS_PATTERN}" 2>/dev/null || true
pkill -f "${LOCAL_PROCESS_PATTERN}" 2>/dev/null || true
pkill -f "${RELEASE_PROCESS_PATTERN}" 2>/dev/null || true
pkill -x "OpenClaw" 2>/dev/null || true
if ! pgrep -f "${APP_PROCESS_PATTERN}" >/dev/null 2>&1 \
&& ! pgrep -f "${DEBUG_PROCESS_PATTERN}" >/dev/null 2>&1 \
&& ! pgrep -f "${LOCAL_PROCESS_PATTERN}" >/dev/null 2>&1 \
&& ! pgrep -f "${RELEASE_PROCESS_PATTERN}" >/dev/null 2>&1 \
&& ! pgrep -x "OpenClaw" >/dev/null 2>&1; then
return 0
fi
sleep 0.3
done
log "WARN: OpenClaw processes may still be running after repeated kill attempts"
}
stop_launch_agent() {
launchctl bootout gui/"$UID"/ai.openclaw.mac 2>/dev/null || true
}
# 1) Kill all running instances first.
log "==> Killing existing OpenClaw instances"
kill_all_openclaw
stop_launch_agent
# Bundle Gateway-hosted Canvas A2UI assets.
run_step "bundle canvas a2ui" bash -lc "cd '${ROOT_DIR}' && pnpm canvas:a2ui:bundle"
# 2) Rebuild into the same path the packager consumes (.build).
run_step "clean build cache" bash -lc "cd '${ROOT_DIR}/apps/macos' && rm -rf .build .build-swift .swiftpm 2>/dev/null || true"
run_step "swift build" bash -lc "cd '${ROOT_DIR}/apps/macos' && swift build -q --product OpenClaw"
if [ "$AUTO_DETECT_SIGNING" -eq 1 ]; then
if check_signing_keys; then
log "==> Signing keys detected, will code sign"
SIGN=1
else
log "==> No signing keys found, will skip code signing (--no-sign)"
NO_SIGN=1
fi
fi
if [ "$NO_SIGN" -eq 1 ]; then
export ALLOW_ADHOC_SIGNING=1
export SIGN_IDENTITY="-"
mkdir -p "${HOME}/.openclaw"
run_step "disable launchagent writes" /usr/bin/touch "${LAUNCHAGENT_DISABLE_MARKER}"
elif [ "$SIGN" -eq 1 ]; then
if ! check_signing_keys; then
fail "No signing identity found. Use --no-sign or install a signing key."
fi
unset ALLOW_ADHOC_SIGNING
unset SIGN_IDENTITY
fi
# 3) Package app (no embedded gateway).
run_step "package app" bash -lc "cd '${ROOT_DIR}' && SKIP_TSC=${SKIP_TSC:-1} '${ROOT_DIR}/scripts/package-mac-app.sh'"
choose_app_bundle() {
if [[ -n "${APP_BUNDLE}" && -d "${APP_BUNDLE}" ]]; then
return 0
fi
if [[ -d "/Applications/OpenClaw.app" ]]; then
APP_BUNDLE="/Applications/OpenClaw.app"
return 0
fi
if [[ -d "${ROOT_DIR}/dist/OpenClaw.app" ]]; then
APP_BUNDLE="${ROOT_DIR}/dist/OpenClaw.app"
if [[ ! -d "${APP_BUNDLE}/Contents/Frameworks/Sparkle.framework" ]]; then
fail "dist/OpenClaw.app missing Sparkle after packaging"
fi
return 0
fi
fail "App bundle not found. Set OPENCLAW_APP_BUNDLE to your installed OpenClaw.app"
}
choose_app_bundle
# When signed, clear any previous launchagent override marker.
if [[ "$NO_SIGN" -ne 1 && "$ATTACH_ONLY" -ne 1 && -f "${LAUNCHAGENT_DISABLE_MARKER}" ]]; then
run_step "clear launchagent disable marker" /bin/rm -f "${LAUNCHAGENT_DISABLE_MARKER}"
fi
# When unsigned, ensure the gateway LaunchAgent targets the repo CLI (before the app launches).
# This reduces noisy "could not connect" errors during app startup.
if [ "$NO_SIGN" -eq 1 ] && [ "$ATTACH_ONLY" -ne 1 ]; then
run_step "install gateway launch agent (unsigned)" bash -lc "cd '${ROOT_DIR}' && node openclaw.mjs daemon install --force --runtime node"
run_step "restart gateway daemon (unsigned)" bash -lc "cd '${ROOT_DIR}' && node openclaw.mjs daemon restart"
if [[ "${GATEWAY_WAIT_SECONDS}" -gt 0 ]]; then
run_step "wait for gateway (unsigned)" sleep "${GATEWAY_WAIT_SECONDS}"
fi
GATEWAY_PORT="$(
node -e '
const fs = require("node:fs");
const path = require("node:path");
try {
const raw = fs.readFileSync(path.join(process.env.HOME, ".openclaw", "openclaw.json"), "utf8");
const cfg = JSON.parse(raw);
const port = cfg && cfg.gateway && typeof cfg.gateway.port === "number" ? cfg.gateway.port : 18789;
process.stdout.write(String(port));
} catch {
process.stdout.write("18789");
}
'
)"
run_step "verify gateway port ${GATEWAY_PORT} (unsigned)" bash -lc "lsof -iTCP:${GATEWAY_PORT} -sTCP:LISTEN | head -n 5 || true"
fi
ATTACH_ONLY_ARGS=()
if [[ "$ATTACH_ONLY" -eq 1 ]]; then
ATTACH_ONLY_ARGS+=(--args --attach-only)
fi
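# The ${ATTACH_ONLY_ARGS[@]:+...} expansion below drops the argument entirely
# when the array is empty; under set -u, expanding an empty array directly
# trips "unbound variable" on the bash 3.2 that ships with macOS.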
# 4) Launch the installed app in the foreground so the menu bar extra appears.
# LaunchServices can inherit a huge environment from this shell (secrets, prompt vars, etc.).
# That can cause launchd spawn failures and is undesirable for a GUI app anyway.
run_step "launch app" env -i \
HOME="${HOME}" \
USER="${USER:-$(id -un)}" \
LOGNAME="${LOGNAME:-$(id -un)}" \
TMPDIR="${TMPDIR:-/tmp}" \
PATH="/usr/bin:/bin:/usr/sbin:/sbin" \
LANG="${LANG:-en_US.UTF-8}" \
/usr/bin/open "${APP_BUNDLE}" ${ATTACH_ONLY_ARGS[@]:+"${ATTACH_ONLY_ARGS[@]}"}
# 5) Verify the app is alive.
sleep 1.5
if pgrep -f "${APP_PROCESS_PATTERN}" >/dev/null 2>&1; then
log "OK: OpenClaw is running."
else
fail "App exited immediately. Check ${LOG_PATH} or Console.app (User Reports)."
fi
if [ "$NO_SIGN" -eq 1 ] && [ "$ATTACH_ONLY" -ne 1 ]; then
run_step "show gateway launch agent args (unsigned)" bash -lc "/usr/bin/plutil -p '${HOME}/Library/LaunchAgents/ai.openclaw.gateway.plist' | head -n 40 || true"
fi

View File

@@ -0,0 +1,22 @@
export const runNodeWatchedPaths: string[];
export function runNodeMain(params?: {
spawn?: (
cmd: string,
args: string[],
options: unknown,
) => {
on: (
event: "exit",
cb: (code: number | null, signal: string | null) => void,
) => void | undefined;
};
spawnSync?: unknown;
fs?: unknown;
stderr?: { write: (value: string) => void };
execPath?: string;
cwd?: string;
args?: string[];
env?: NodeJS.ProcessEnv;
platform?: NodeJS.Platform;
}): Promise<number>;

View File

@@ -0,0 +1,263 @@
#!/usr/bin/env node
import { spawn, spawnSync } from "node:child_process";
import fs from "node:fs";
import path from "node:path";
import process from "node:process";
import { pathToFileURL } from "node:url";
const compiler = "tsdown";
const compilerArgs = ["exec", compiler, "--no-clean"];
export const runNodeWatchedPaths = ["src", "tsconfig.json", "package.json"];
const statMtime = (filePath, fsImpl = fs) => {
try {
return fsImpl.statSync(filePath).mtimeMs;
} catch {
return null;
}
};
const isExcludedSource = (filePath, srcRoot) => {
const relativePath = path.relative(srcRoot, filePath);
if (relativePath.startsWith("..")) {
return false;
}
return (
relativePath.endsWith(".test.ts") ||
relativePath.endsWith(".test.tsx") ||
relativePath.endsWith("test-helpers.ts")
);
};
const findLatestMtime = (dirPath, shouldSkip, deps) => {
let latest = null;
const queue = [dirPath];
while (queue.length > 0) {
const current = queue.pop();
if (!current) {
continue;
}
let entries = [];
try {
entries = deps.fs.readdirSync(current, { withFileTypes: true });
} catch {
continue;
}
for (const entry of entries) {
const fullPath = path.join(current, entry.name);
if (entry.isDirectory()) {
queue.push(fullPath);
continue;
}
if (!entry.isFile()) {
continue;
}
if (shouldSkip?.(fullPath)) {
continue;
}
const mtime = statMtime(fullPath, deps.fs);
if (mtime == null) {
continue;
}
if (latest == null || mtime > latest) {
latest = mtime;
}
}
}
return latest;
};
const runGit = (gitArgs, deps) => {
try {
const result = deps.spawnSync("git", gitArgs, {
cwd: deps.cwd,
encoding: "utf8",
stdio: ["ignore", "pipe", "ignore"],
});
if (result.status !== 0) {
return null;
}
return (result.stdout ?? "").trim();
} catch {
return null;
}
};
const resolveGitHead = (deps) => {
const head = runGit(["rev-parse", "HEAD"], deps);
return head || null;
};
const hasDirtySourceTree = (deps) => {
const output = runGit(
["status", "--porcelain", "--untracked-files=normal", "--", ...runNodeWatchedPaths],
deps,
);
if (output === null) {
return null;
}
return output.length > 0;
};
const readBuildStamp = (deps) => {
const mtime = statMtime(deps.buildStampPath, deps.fs);
if (mtime == null) {
return { mtime: null, head: null };
}
try {
const raw = deps.fs.readFileSync(deps.buildStampPath, "utf8").trim();
if (!raw.startsWith("{")) {
return { mtime, head: null };
}
const parsed = JSON.parse(raw);
const head = typeof parsed?.head === "string" && parsed.head.trim() ? parsed.head.trim() : null;
return { mtime, head };
} catch {
return { mtime, head: null };
}
};
const hasSourceMtimeChanged = (stampMtime, deps) => {
const srcMtime = findLatestMtime(
deps.srcRoot,
(candidate) => isExcludedSource(candidate, deps.srcRoot),
deps,
);
return srcMtime != null && srcMtime > stampMtime;
};
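// Rebuild decision: a force flag, missing stamp, or missing dist entry always
// rebuilds, as do config files newer than the stamp. When git is available, a
// changed (or previously unrecorded) HEAD falls back to a source-mtime check
// and a dirty watched tree forces a rebuild; without git, comparing source
// mtimes against the stamp decides.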
const shouldBuild = (deps) => {
if (deps.env.OPENCLAW_FORCE_BUILD === "1") {
return true;
}
const stamp = readBuildStamp(deps);
if (stamp.mtime == null) {
return true;
}
if (statMtime(deps.distEntry, deps.fs) == null) {
return true;
}
for (const filePath of deps.configFiles) {
const mtime = statMtime(filePath, deps.fs);
if (mtime != null && mtime > stamp.mtime) {
return true;
}
}
const currentHead = resolveGitHead(deps);
if (currentHead && !stamp.head) {
return hasSourceMtimeChanged(stamp.mtime, deps);
}
if (currentHead && stamp.head && currentHead !== stamp.head) {
return hasSourceMtimeChanged(stamp.mtime, deps);
}
if (currentHead) {
const dirty = hasDirtySourceTree(deps);
if (dirty === true) {
return true;
}
if (dirty === false) {
return false;
}
}
if (hasSourceMtimeChanged(stamp.mtime, deps)) {
return true;
}
return false;
};
const logRunner = (message, deps) => {
if (deps.env.OPENCLAW_RUNNER_LOG === "0") {
return;
}
deps.stderr.write(`[openclaw] ${message}\n`);
};
const runOpenClaw = async (deps) => {
const nodeProcess = deps.spawn(deps.execPath, ["openclaw.mjs", ...deps.args], {
cwd: deps.cwd,
env: deps.env,
stdio: "inherit",
});
const res = await new Promise((resolve) => {
nodeProcess.on("exit", (exitCode, exitSignal) => {
resolve({ exitCode, exitSignal });
});
});
if (res.exitSignal) {
return 1;
}
return res.exitCode ?? 1;
};
const writeBuildStamp = (deps) => {
try {
deps.fs.mkdirSync(deps.distRoot, { recursive: true });
const stamp = {
builtAt: Date.now(),
head: resolveGitHead(deps),
};
deps.fs.writeFileSync(deps.buildStampPath, `${JSON.stringify(stamp)}\n`);
} catch (error) {
// Best-effort stamp; still allow the runner to start.
logRunner(`Failed to write build stamp: ${error?.message ?? "unknown error"}`, deps);
}
};
export async function runNodeMain(params = {}) {
const deps = {
spawn: params.spawn ?? spawn,
spawnSync: params.spawnSync ?? spawnSync,
fs: params.fs ?? fs,
stderr: params.stderr ?? process.stderr,
execPath: params.execPath ?? process.execPath,
cwd: params.cwd ?? process.cwd(),
args: params.args ?? process.argv.slice(2),
env: params.env ? { ...params.env } : { ...process.env },
platform: params.platform ?? process.platform,
};
deps.distRoot = path.join(deps.cwd, "dist");
deps.distEntry = path.join(deps.distRoot, "entry.js");
deps.buildStampPath = path.join(deps.distRoot, ".buildstamp");
deps.srcRoot = path.join(deps.cwd, "src");
deps.configFiles = [path.join(deps.cwd, "tsconfig.json"), path.join(deps.cwd, "package.json")];
if (!shouldBuild(deps)) {
return await runOpenClaw(deps);
}
logRunner("Building TypeScript (dist is stale).", deps);
const buildCmd = deps.platform === "win32" ? "cmd.exe" : "pnpm";
const buildArgs =
deps.platform === "win32" ? ["/d", "/s", "/c", "pnpm", ...compilerArgs] : compilerArgs;
const build = deps.spawn(buildCmd, buildArgs, {
cwd: deps.cwd,
env: deps.env,
stdio: "inherit",
});
const buildRes = await new Promise((resolve) => {
build.on("exit", (exitCode, exitSignal) => resolve({ exitCode, exitSignal }));
});
if (buildRes.exitSignal) {
return 1;
}
if (buildRes.exitCode !== 0 && buildRes.exitCode !== null) {
return buildRes.exitCode;
}
writeBuildStamp(deps);
return await runOpenClaw(deps);
}
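// ESM equivalent of `require.main === module`: only self-execute when this
// file is the entry script, so importing runNodeMain stays side-effect free.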
if (import.meta.url === pathToFileURL(process.argv[1] ?? "").href) {
void runNodeMain()
.then((code) => process.exit(code))
.catch((err) => {
console.error(err);
process.exit(1);
});
}

View File

@@ -0,0 +1,213 @@
#!/usr/bin/env bash
# Rootless OpenClaw in Podman: run after one-time setup.
#
# One-time setup (from repo root): ./setup-podman.sh
# Then:
# ./scripts/run-openclaw-podman.sh launch # Start gateway
# ./scripts/run-openclaw-podman.sh launch setup # Onboarding wizard
#
# As the openclaw user (no repo needed):
# sudo -u openclaw /home/openclaw/run-openclaw-podman.sh
# sudo -u openclaw /home/openclaw/run-openclaw-podman.sh setup
#
# Legacy: "setup-host" delegates to ../setup-podman.sh
set -euo pipefail
OPENCLAW_USER="${OPENCLAW_PODMAN_USER:-openclaw}"
resolve_user_home() {
local user="$1"
local home=""
if command -v getent >/dev/null 2>&1; then
home="$(getent passwd "$user" 2>/dev/null | cut -d: -f6 || true)"
fi
if [[ -z "$home" && -f /etc/passwd ]]; then
home="$(awk -F: -v u="$user" '$1==u {print $6}' /etc/passwd 2>/dev/null || true)"
fi
if [[ -z "$home" ]]; then
home="/home/$user"
fi
printf '%s' "$home"
}
OPENCLAW_HOME="$(resolve_user_home "$OPENCLAW_USER")"
OPENCLAW_UID="$(id -u "$OPENCLAW_USER" 2>/dev/null || true)"
LAUNCH_SCRIPT="$OPENCLAW_HOME/run-openclaw-podman.sh"
# Legacy: setup-host → run setup-podman.sh
if [[ "${1:-}" == "setup-host" ]]; then
shift
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
SETUP_PODMAN="$REPO_ROOT/setup-podman.sh"
if [[ -f "$SETUP_PODMAN" ]]; then
exec "$SETUP_PODMAN" "$@"
fi
echo "setup-podman.sh not found at $SETUP_PODMAN. Run from repo root: ./setup-podman.sh" >&2
exit 1
fi
# --- Step 2: launch (from repo: re-exec as openclaw in safe cwd; from openclaw home: run container) ---
if [[ "${1:-}" == "launch" ]]; then
shift
if [[ -n "${OPENCLAW_UID:-}" && "$(id -u)" -ne "$OPENCLAW_UID" ]]; then
# Exec as openclaw with cwd=/tmp so a nologin user never inherits an invalid cwd.
exec sudo -u "$OPENCLAW_USER" env HOME="$OPENCLAW_HOME" PATH="$PATH" TERM="${TERM:-}" \
bash -c 'cd /tmp && exec '"$LAUNCH_SCRIPT"' "$@"' _ "$@"
fi
# Already openclaw; fall through to container run (with remaining args, e.g. "setup")
fi
# --- Container run (script in openclaw home, run as openclaw) ---
EFFECTIVE_HOME="${HOME:-}"
if [[ -n "${OPENCLAW_UID:-}" && "$(id -u)" -eq "$OPENCLAW_UID" ]]; then
EFFECTIVE_HOME="$OPENCLAW_HOME"
export HOME="$OPENCLAW_HOME"
fi
if [[ -z "${EFFECTIVE_HOME:-}" ]]; then
EFFECTIVE_HOME="${OPENCLAW_HOME:-/tmp}"
fi
CONFIG_DIR="${OPENCLAW_CONFIG_DIR:-$EFFECTIVE_HOME/.openclaw}"
ENV_FILE="${OPENCLAW_PODMAN_ENV:-$CONFIG_DIR/.env}"
WORKSPACE_DIR="${OPENCLAW_WORKSPACE_DIR:-$CONFIG_DIR/workspace}"
CONTAINER_NAME="${OPENCLAW_PODMAN_CONTAINER:-openclaw}"
OPENCLAW_IMAGE="${OPENCLAW_PODMAN_IMAGE:-openclaw:local}"
PODMAN_PULL="${OPENCLAW_PODMAN_PULL:-never}"
HOST_GATEWAY_PORT="${OPENCLAW_PODMAN_GATEWAY_HOST_PORT:-${OPENCLAW_GATEWAY_PORT:-18789}}"
HOST_BRIDGE_PORT="${OPENCLAW_PODMAN_BRIDGE_HOST_PORT:-${OPENCLAW_BRIDGE_PORT:-18790}}"
# Keep Podman default local-only unless explicitly overridden.
# Non-loopback binds require gateway.controlUi.allowedOrigins (security hardening).
GATEWAY_BIND="${OPENCLAW_GATEWAY_BIND:-loopback}"
# Safe cwd for podman (openclaw is nologin; avoid inherited cwd from sudo)
cd "$EFFECTIVE_HOME" 2>/dev/null || cd /tmp 2>/dev/null || true
RUN_SETUP=false
if [[ "${1:-}" == "setup" || "${1:-}" == "onboard" ]]; then
RUN_SETUP=true
shift
fi
mkdir -p "$CONFIG_DIR" "$WORKSPACE_DIR"
# Subdirs the app may create at runtime (canvas, cron); create here so ownership is correct
mkdir -p "$CONFIG_DIR/canvas" "$CONFIG_DIR/cron"
chmod 700 "$CONFIG_DIR" "$WORKSPACE_DIR" 2>/dev/null || true
if [[ -f "$ENV_FILE" ]]; then
set -a
# shellcheck source=/dev/null
source "$ENV_FILE" 2>/dev/null || true
set +a
fi
upsert_env_var() {
local file="$1"
local key="$2"
local value="$3"
local tmp
tmp="$(mktemp)"
if [[ -f "$file" ]]; then
awk -v k="$key" -v v="$value" '
BEGIN { found = 0 }
$0 ~ ("^" k "=") { print k "=" v; found = 1; next }
{ print }
END { if (!found) print k "=" v }
' "$file" >"$tmp"
else
printf '%s=%s\n' "$key" "$value" >"$tmp"
fi
mv "$tmp" "$file"
chmod 600 "$file" 2>/dev/null || true
}
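# awk rewrites the matching KEY= line in place (or appends it), and the
# mktemp+mv pair keeps the update atomic. Hypothetical usage:
#   upsert_env_var "$ENV_FILE" "OPENCLAW_LOG_LEVEL" "debug"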
generate_token_hex_32() {
if command -v openssl >/dev/null 2>&1; then
openssl rand -hex 32
return 0
fi
if command -v python3 >/dev/null 2>&1; then
python3 - <<'PY'
import secrets
print(secrets.token_hex(32))
PY
return 0
fi
if command -v od >/dev/null 2>&1; then
od -An -N32 -tx1 /dev/urandom | tr -d " \n"
return 0
fi
echo "Missing dependency: need openssl or python3 (or od) to generate OPENCLAW_GATEWAY_TOKEN." >&2
exit 1
}
if [[ -z "${OPENCLAW_GATEWAY_TOKEN:-}" ]]; then
OPENCLAW_GATEWAY_TOKEN="$(generate_token_hex_32)"
export OPENCLAW_GATEWAY_TOKEN
mkdir -p "$(dirname "$ENV_FILE")"
upsert_env_var "$ENV_FILE" "OPENCLAW_GATEWAY_TOKEN" "$OPENCLAW_GATEWAY_TOKEN"
echo "Generated OPENCLAW_GATEWAY_TOKEN and wrote it to $ENV_FILE." >&2
fi
# The gateway refuses to start unless gateway.mode=local is set in config.
# Keep this minimal; users can run the wizard later to configure channels/providers.
CONFIG_JSON="$CONFIG_DIR/openclaw.json"
if [[ ! -f "$CONFIG_JSON" ]]; then
echo '{ "gateway": { "mode": "local" } }' >"$CONFIG_JSON"
chmod 600 "$CONFIG_JSON" 2>/dev/null || true
echo "Created $CONFIG_JSON (minimal gateway.mode=local)." >&2
fi
PODMAN_USERNS="${OPENCLAW_PODMAN_USERNS:-keep-id}"
USERNS_ARGS=()
RUN_USER_ARGS=()
case "$PODMAN_USERNS" in
""|auto) ;;
keep-id) USERNS_ARGS=(--userns=keep-id) ;;
host) USERNS_ARGS=(--userns=host) ;;
*)
echo "Unsupported OPENCLAW_PODMAN_USERNS=$PODMAN_USERNS (expected: keep-id, auto, host)." >&2
exit 2
;;
esac
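# keep-id maps the invoking uid/gid 1:1 into the container's user namespace,
# so files under the bind-mounted $CONFIG_DIR keep their host ownership.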
RUN_UID="$(id -u)"
RUN_GID="$(id -g)"
if [[ "$PODMAN_USERNS" == "keep-id" ]]; then
RUN_USER_ARGS=(--user "${RUN_UID}:${RUN_GID}")
echo "Starting container as uid=${RUN_UID} gid=${RUN_GID} (must match owner of $CONFIG_DIR)" >&2
else
echo "Starting container without --user (OPENCLAW_PODMAN_USERNS=$PODMAN_USERNS), mounts may require ownership fixes." >&2
fi
ENV_FILE_ARGS=()
[[ -f "$ENV_FILE" ]] && ENV_FILE_ARGS+=(--env-file "$ENV_FILE")
if [[ "$RUN_SETUP" == true ]]; then
exec podman run --pull="$PODMAN_PULL" --rm -it \
--init \
"${USERNS_ARGS[@]}" "${RUN_USER_ARGS[@]}" \
-e HOME=/home/node -e TERM=xterm-256color -e BROWSER=echo \
-e OPENCLAW_GATEWAY_TOKEN="$OPENCLAW_GATEWAY_TOKEN" \
-v "$CONFIG_DIR:/home/node/.openclaw:rw" \
-v "$WORKSPACE_DIR:/home/node/.openclaw/workspace:rw" \
"${ENV_FILE_ARGS[@]}" \
"$OPENCLAW_IMAGE" \
node dist/index.js onboard "$@"
fi
podman run --pull="$PODMAN_PULL" -d --replace \
--name "$CONTAINER_NAME" \
--init \
"${USERNS_ARGS[@]}" "${RUN_USER_ARGS[@]}" \
-e HOME=/home/node -e TERM=xterm-256color \
-e OPENCLAW_GATEWAY_TOKEN="$OPENCLAW_GATEWAY_TOKEN" \
"${ENV_FILE_ARGS[@]}" \
-v "$CONFIG_DIR:/home/node/.openclaw:rw" \
-v "$WORKSPACE_DIR:/home/node/.openclaw/workspace:rw" \
-p "${HOST_GATEWAY_PORT}:18789" \
-p "${HOST_BRIDGE_PORT}:18790" \
"$OPENCLAW_IMAGE" \
node dist/index.js gateway --bind "$GATEWAY_BIND" --port 18789
echo "Container $CONTAINER_NAME started. Dashboard: http://127.0.0.1:${HOST_GATEWAY_PORT}/"
echo "Logs: podman logs -f $CONTAINER_NAME"
echo "For auto-start/restarts, use: ./setup-podman.sh --quadlet (Quadlet + systemd user service)."

View File

@@ -0,0 +1,88 @@
#!/usr/bin/env bash
set -euo pipefail
export DISPLAY=:1
export HOME=/tmp/openclaw-home
export XDG_CONFIG_HOME="${HOME}/.config"
export XDG_CACHE_HOME="${HOME}/.cache"
CDP_PORT="${OPENCLAW_BROWSER_CDP_PORT:-${CLAWDBOT_BROWSER_CDP_PORT:-9222}}"
CDP_SOURCE_RANGE="${OPENCLAW_BROWSER_CDP_SOURCE_RANGE:-${CLAWDBOT_BROWSER_CDP_SOURCE_RANGE:-}}"
VNC_PORT="${OPENCLAW_BROWSER_VNC_PORT:-${CLAWDBOT_BROWSER_VNC_PORT:-5900}}"
NOVNC_PORT="${OPENCLAW_BROWSER_NOVNC_PORT:-${CLAWDBOT_BROWSER_NOVNC_PORT:-6080}}"
ENABLE_NOVNC="${OPENCLAW_BROWSER_ENABLE_NOVNC:-${CLAWDBOT_BROWSER_ENABLE_NOVNC:-1}}"
HEADLESS="${OPENCLAW_BROWSER_HEADLESS:-${CLAWDBOT_BROWSER_HEADLESS:-0}}"
ALLOW_NO_SANDBOX="${OPENCLAW_BROWSER_NO_SANDBOX:-${CLAWDBOT_BROWSER_NO_SANDBOX:-0}}"
NOVNC_PASSWORD="${OPENCLAW_BROWSER_NOVNC_PASSWORD:-${CLAWDBOT_BROWSER_NOVNC_PASSWORD:-}}"
mkdir -p "${HOME}" "${HOME}/.chrome" "${XDG_CONFIG_HOME}" "${XDG_CACHE_HOME}"
Xvfb :1 -screen 0 1280x800x24 -ac -nolisten tcp &
if [[ "${HEADLESS}" == "1" ]]; then
CHROME_ARGS=(
"--headless=new"
"--disable-gpu"
)
else
CHROME_ARGS=()
fi
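# The DevTools endpoint stays on loopback on an adjacent internal port (the
# +/-1 offset avoids colliding with the published port); socat later
# republishes it on ${CDP_PORT}, optionally restricted via CDP_SOURCE_RANGE.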
if [[ "${CDP_PORT}" -ge 65535 ]]; then
CHROME_CDP_PORT="$((CDP_PORT - 1))"
else
CHROME_CDP_PORT="$((CDP_PORT + 1))"
fi
CHROME_ARGS+=(
"--remote-debugging-address=127.0.0.1"
"--remote-debugging-port=${CHROME_CDP_PORT}"
"--user-data-dir=${HOME}/.chrome"
"--no-first-run"
"--no-default-browser-check"
"--disable-dev-shm-usage"
"--disable-background-networking"
"--disable-features=TranslateUI"
"--disable-breakpad"
"--disable-crash-reporter"
"--metrics-recording-only"
)
if [[ "${ALLOW_NO_SANDBOX}" == "1" ]]; then
CHROME_ARGS+=(
"--no-sandbox"
"--disable-setuid-sandbox"
)
fi
chromium "${CHROME_ARGS[@]}" about:blank &
for _ in $(seq 1 50); do
if curl -sS --max-time 1 "http://127.0.0.1:${CHROME_CDP_PORT}/json/version" >/dev/null; then
break
fi
sleep 0.1
done
SOCAT_LISTEN_ADDR="TCP-LISTEN:${CDP_PORT},fork,reuseaddr,bind=0.0.0.0"
if [[ -n "${CDP_SOURCE_RANGE}" ]]; then
SOCAT_LISTEN_ADDR="${SOCAT_LISTEN_ADDR},range=${CDP_SOURCE_RANGE}"
fi
socat "${SOCAT_LISTEN_ADDR}" "TCP:127.0.0.1:${CHROME_CDP_PORT}" &
if [[ "${ENABLE_NOVNC}" == "1" && "${HEADLESS}" != "1" ]]; then
# VNC auth passwords are max 8 chars; use a random default when not provided.
if [[ -z "${NOVNC_PASSWORD}" ]]; then
NOVNC_PASSWORD="$(< /proc/sys/kernel/random/uuid)"
NOVNC_PASSWORD="${NOVNC_PASSWORD//-/}"
NOVNC_PASSWORD="${NOVNC_PASSWORD:0:8}"
fi
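# -localhost keeps the raw VNC socket loopback-only; websockify is the sole
# externally reachable path, bridging ${NOVNC_PORT} to the VNC port.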
NOVNC_PASSWD_FILE="${HOME}/.vnc/passwd"
mkdir -p "${HOME}/.vnc"
x11vnc -storepasswd "${NOVNC_PASSWORD}" "${NOVNC_PASSWD_FILE}" >/dev/null
chmod 600 "${NOVNC_PASSWD_FILE}"
x11vnc -display :1 -rfbport "${VNC_PORT}" -shared -forever -rfbauth "${NOVNC_PASSWD_FILE}" -localhost &
websockify --web /usr/share/novnc/ "${NOVNC_PORT}" "localhost:${VNC_PORT}" &
fi
wait -n

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
IMAGE_NAME="openclaw-sandbox-browser:bookworm-slim"
docker build -t "${IMAGE_NAME}" -f Dockerfile.sandbox-browser .
echo "Built ${IMAGE_NAME}"

View File

@@ -0,0 +1,40 @@
#!/usr/bin/env bash
set -euo pipefail
BASE_IMAGE="${BASE_IMAGE:-openclaw-sandbox:bookworm-slim}"
TARGET_IMAGE="${TARGET_IMAGE:-openclaw-sandbox-common:bookworm-slim}"
PACKAGES="${PACKAGES:-curl wget jq coreutils grep nodejs npm python3 git ca-certificates golang-go rustc cargo unzip pkg-config libasound2-dev build-essential file}"
INSTALL_PNPM="${INSTALL_PNPM:-1}"
INSTALL_BUN="${INSTALL_BUN:-1}"
BUN_INSTALL_DIR="${BUN_INSTALL_DIR:-/opt/bun}"
INSTALL_BREW="${INSTALL_BREW:-1}"
BREW_INSTALL_DIR="${BREW_INSTALL_DIR:-/home/linuxbrew/.linuxbrew}"
FINAL_USER="${FINAL_USER:-sandbox}"
if ! docker image inspect "${BASE_IMAGE}" >/dev/null 2>&1; then
echo "Base image missing: ${BASE_IMAGE}"
echo "Building base image via scripts/sandbox-setup.sh..."
scripts/sandbox-setup.sh
fi
echo "Building ${TARGET_IMAGE} with: ${PACKAGES}"
docker build \
-t "${TARGET_IMAGE}" \
-f Dockerfile.sandbox-common \
--build-arg BASE_IMAGE="${BASE_IMAGE}" \
--build-arg PACKAGES="${PACKAGES}" \
--build-arg INSTALL_PNPM="${INSTALL_PNPM}" \
--build-arg INSTALL_BUN="${INSTALL_BUN}" \
--build-arg BUN_INSTALL_DIR="${BUN_INSTALL_DIR}" \
--build-arg INSTALL_BREW="${INSTALL_BREW}" \
--build-arg BREW_INSTALL_DIR="${BREW_INSTALL_DIR}" \
--build-arg FINAL_USER="${FINAL_USER}" \
.
cat <<NOTE
Built ${TARGET_IMAGE}.
To use it, set agents.defaults.sandbox.docker.image to "${TARGET_IMAGE}" and restart.
If you want a clean re-create, remove old sandbox containers:
docker rm -f \$(docker ps -aq --filter label=openclaw.sandbox=1)
NOTE

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
IMAGE_NAME="openclaw-sandbox:bookworm-slim"
docker build -t "${IMAGE_NAME}" -f Dockerfile.sandbox .
echo "Built ${IMAGE_NAME}"

Some files were not shown because too many files have changed in this diff.