fix(logging): cap file logs with configurable maxFileBytes
Co-authored-by: Xinhua Gu <562450+xinhuagu@users.noreply.github.com>
This commit is contained in:
@@ -31,6 +31,7 @@ Docs: https://docs.openclaw.ai
- Telegram/Polling: clear Telegram webhooks (`deleteWebhook`) before starting long-poll `getUpdates`, including retry handling for transient cleanup failures.
- Telegram/Webhook: add `channels.telegram.webhookPort` config support and pass it through plugin startup wiring to the monitor listener.
- Telegram/Media: send a user-facing Telegram reply when media download fails (non-size errors) instead of silently dropping the message.
- Logging: cap single log-file size with `logging.maxFileBytes` (default 500 MB) and suppress additional writes after the cap is hit to prevent disk exhaustion from repeated error storms.
- Signal/RPC: guard malformed Signal RPC JSON responses with a clear status-scoped error and add regression coverage for invalid JSON responses. (#22995) Thanks @adhitShet.
- Gateway/Subagents: guard gateway and subagent session-key/message trim paths against undefined inputs to prevent early `Cannot read properties of undefined (reading 'trim')` crashes during subagent spawn and wait flows.
- Agents/Workspace: guard `resolveUserPath` against undefined/null input to prevent `Cannot read properties of undefined (reading 'trim')` crashes when workspace paths are missing in embedded runner flows.
25
src/config/logging-max-file-bytes.test.ts
Normal file
25
src/config/logging-max-file-bytes.test.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { validateConfigObject } from "./config.js";
|
||||
|
||||
describe("logging.maxFileBytes config", () => {
|
||||
it("accepts a positive maxFileBytes", () => {
|
||||
const res = validateConfigObject({
|
||||
logging: {
|
||||
maxFileBytes: 1024,
|
||||
},
|
||||
});
|
||||
expect(res.ok).toBe(true);
|
||||
});
|
||||
|
||||
it("rejects non-positive maxFileBytes", () => {
|
||||
const res = validateConfigObject({
|
||||
logging: {
|
||||
maxFileBytes: 0,
|
||||
},
|
||||
});
|
||||
expect(res.ok).toBe(false);
|
||||
if (!res.ok) {
|
||||
expect(res.issues.some((issue) => issue.path === "logging.maxFileBytes")).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -142,6 +142,8 @@ export type SessionMaintenanceConfig = {
|
||||
export type LoggingConfig = {
|
||||
level?: "silent" | "fatal" | "error" | "warn" | "info" | "debug" | "trace";
|
||||
file?: string;
|
||||
/** Maximum size of a single log file in bytes before writes are suppressed. Default: 500 MB. */
|
||||
maxFileBytes?: number;
|
||||
consoleLevel?: "silent" | "fatal" | "error" | "warn" | "info" | "debug" | "trace";
|
||||
consoleStyle?: "pretty" | "compact" | "json";
|
||||
/** Redact sensitive tokens in tool summaries. Default: "tools". */
|
||||
|
||||
@@ -190,6 +190,7 @@ export const OpenClawSchema = z
|
||||
])
|
||||
.optional(),
|
||||
file: z.string().optional(),
|
||||
maxFileBytes: z.number().int().positive().optional(),
|
||||
consoleLevel: z
|
||||
.union([
|
||||
z.literal("silent"),
|
||||
|
||||
68
src/logging/log-file-size-cap.test.ts
Normal file
68
src/logging/log-file-size-cap.test.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import {
|
||||
getLogger,
|
||||
getResolvedLoggerSettings,
|
||||
resetLogger,
|
||||
setLoggerOverride,
|
||||
} from "../logging.js";
|
||||
|
||||
// 500 MB — must match the logger's DEFAULT_MAX_LOG_FILE_BYTES default.
const DEFAULT_MAX_FILE_BYTES = 500 * 1024 * 1024;

describe("log file size cap", () => {
  // Fresh, unique temp file per test so runs cannot interfere with each other.
  let logPath = "";

  beforeEach(() => {
    logPath = path.join(os.tmpdir(), `openclaw-log-cap-${crypto.randomUUID()}.log`);
    // Drop any cached logger/settings from a previous test before overriding.
    resetLogger();
    setLoggerOverride(null);
  });

  afterEach(() => {
    resetLogger();
    setLoggerOverride(null);
    vi.restoreAllMocks();
    try {
      fs.rmSync(logPath, { force: true });
    } catch {
      // ignore cleanup errors
    }
  });

  it("defaults maxFileBytes to 500 MB when unset", () => {
    setLoggerOverride({ level: "info", file: logPath });
    expect(getResolvedLoggerSettings().maxFileBytes).toBe(DEFAULT_MAX_FILE_BYTES);
  });

  it("uses configured maxFileBytes", () => {
    setLoggerOverride({ level: "info", file: logPath, maxFileBytes: 2048 });
    expect(getResolvedLoggerSettings().maxFileBytes).toBe(2048);
  });

  it("suppresses file writes after cap is reached and warns once", () => {
    // Spy must be installed before logging so the one-time cap warning is captured.
    const stderrSpy = vi.spyOn(process.stderr, "write").mockImplementation(
      () => true as unknown as ReturnType<typeof process.stderr.write>, // preserve stream contract in test spy
    );
    setLoggerOverride({ level: "info", file: logPath, maxFileBytes: 1024 });
    const logger = getLogger();

    // ~100-byte records; 200 of them comfortably overflow the 1 KiB cap.
    for (let i = 0; i < 200; i++) {
      logger.error(`network-failure-${i}-${"x".repeat(80)}`);
    }
    const sizeAfterCap = fs.statSync(logPath).size;
    // Writes issued after the cap must not grow the file at all.
    for (let i = 0; i < 20; i++) {
      logger.error(`post-cap-${i}-${"y".repeat(80)}`);
    }
    const sizeAfterExtraLogs = fs.statSync(logPath).size;

    expect(sizeAfterExtraLogs).toBe(sizeAfterCap);
    // Slack allows the last in-flight record plus the appended warning line.
    expect(sizeAfterCap).toBeLessThanOrEqual(1024 + 512);
    const capWarnings = stderrSpy.mock.calls
      .map(([firstArg]) => String(firstArg))
      .filter((line) => line.includes("log file size cap reached"));
    expect(capWarnings).toHaveLength(1);
  });
});
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
import { loggingState } from "./state.js";
|
||||
|
||||
const testLogPath = path.join(os.tmpdir(), "openclaw-test-env-log-level.log");
|
||||
const defaultMaxFileBytes = 500 * 1024 * 1024;
|
||||
|
||||
describe("OPENCLAW_LOG_LEVEL", () => {
|
||||
let originalEnv: string | undefined;
|
||||
@@ -46,6 +47,7 @@ describe("OPENCLAW_LOG_LEVEL", () => {
|
||||
expect(getResolvedLoggerSettings()).toEqual({
|
||||
level: "debug",
|
||||
file: testLogPath,
|
||||
maxFileBytes: defaultMaxFileBytes,
|
||||
});
|
||||
expect(getResolvedConsoleSettings()).toEqual({
|
||||
level: "debug",
|
||||
@@ -66,6 +68,7 @@ describe("OPENCLAW_LOG_LEVEL", () => {
|
||||
);
|
||||
|
||||
expect(getResolvedLoggerSettings().level).toBe("error");
|
||||
expect(getResolvedLoggerSettings().maxFileBytes).toBe(defaultMaxFileBytes);
|
||||
expect(getResolvedConsoleSettings().level).toBe("warn");
|
||||
expect(getResolvedLoggerSettings().level).toBe("error");
|
||||
|
||||
|
||||
@@ -16,12 +16,14 @@ export const DEFAULT_LOG_FILE = path.join(DEFAULT_LOG_DIR, "openclaw.log"); // l
|
||||
const LOG_PREFIX = "openclaw";
|
||||
const LOG_SUFFIX = ".log";
|
||||
const MAX_LOG_AGE_MS = 24 * 60 * 60 * 1000; // 24h
|
||||
const DEFAULT_MAX_LOG_FILE_BYTES = 500 * 1024 * 1024; // 500 MB
|
||||
|
||||
const requireConfig = resolveNodeRequireFromMeta(import.meta.url);
|
||||
|
||||
export type LoggerSettings = {
  /** Minimum level written to the log file. */
  level?: LogLevel;
  /** Log file path; when unset, the daily rolling path is used. */
  file?: string;
  /** Max size of a single log file in bytes before writes are suppressed. Default: 500 MB. */
  maxFileBytes?: number;
  /** Minimum level for console output — presumably resolved independently of the file level; confirm against the console resolver. */
  consoleLevel?: LogLevel;
  /** Console rendering style ("pretty" | "compact" | "json"). */
  consoleStyle?: ConsoleStyle;
};
|
||||
@@ -31,6 +33,7 @@ type LogObj = { date?: Date } & Record<string, unknown>;
|
||||
/** Fully-resolved file-logger settings: every optional `LoggerSettings` field filled with its default. */
type ResolvedSettings = {
  level: LogLevel;
  file: string;
  maxFileBytes: number;
};
// Public alias so external callers can name the resolved-settings shape.
export type LoggerResolvedSettings = ResolvedSettings;
// NOTE(review): presumably the shape of a structured record handed to log transports — confirm at call sites.
export type LogTransportRecord = Record<string, unknown>;
|
||||
@@ -72,14 +75,15 @@ function resolveSettings(): ResolvedSettings {
|
||||
const envLevel = resolveEnvLogLevelOverride();
|
||||
const level = envLevel ?? fromConfig;
|
||||
const file = cfg?.file ?? defaultRollingPathForToday();
|
||||
return { level, file };
|
||||
const maxFileBytes = resolveMaxLogFileBytes(cfg?.maxFileBytes);
|
||||
return { level, file, maxFileBytes };
|
||||
}
|
||||
|
||||
function settingsChanged(a: ResolvedSettings | null, b: ResolvedSettings) {
|
||||
if (!a) {
|
||||
return true;
|
||||
}
|
||||
return a.level !== b.level || a.file !== b.file;
|
||||
return a.level !== b.level || a.file !== b.file || a.maxFileBytes !== b.maxFileBytes;
|
||||
}
|
||||
|
||||
export function isFileLogLevelEnabled(level: LogLevel): boolean {
|
||||
@@ -99,6 +103,8 @@ function buildLogger(settings: ResolvedSettings): TsLogger<LogObj> {
|
||||
if (isRollingPath(settings.file)) {
|
||||
pruneOldRollingLogs(path.dirname(settings.file));
|
||||
}
|
||||
let currentFileBytes = getCurrentLogFileBytes(settings.file);
|
||||
let warnedAboutSizeCap = false;
|
||||
const logger = new TsLogger<LogObj>({
|
||||
name: "openclaw",
|
||||
minLevel: levelToMinLevel(settings.level),
|
||||
@@ -109,7 +115,28 @@ function buildLogger(settings: ResolvedSettings): TsLogger<LogObj> {
|
||||
try {
|
||||
const time = logObj.date?.toISOString?.() ?? new Date().toISOString();
|
||||
const line = JSON.stringify({ ...logObj, time });
|
||||
fs.appendFileSync(settings.file, `${line}\n`, { encoding: "utf8" });
|
||||
const payload = `${line}\n`;
|
||||
const payloadBytes = Buffer.byteLength(payload, "utf8");
|
||||
const nextBytes = currentFileBytes + payloadBytes;
|
||||
if (nextBytes > settings.maxFileBytes) {
|
||||
if (!warnedAboutSizeCap) {
|
||||
warnedAboutSizeCap = true;
|
||||
const warningLine = JSON.stringify({
|
||||
time: new Date().toISOString(),
|
||||
level: "warn",
|
||||
subsystem: "logging",
|
||||
message: `log file size cap reached; suppressing writes file=${settings.file} maxFileBytes=${settings.maxFileBytes}`,
|
||||
});
|
||||
appendLogLine(settings.file, `${warningLine}\n`);
|
||||
process.stderr.write(
|
||||
`[openclaw] log file size cap reached; suppressing writes file=${settings.file} maxFileBytes=${settings.maxFileBytes}\n`,
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (appendLogLine(settings.file, payload)) {
|
||||
currentFileBytes = nextBytes;
|
||||
}
|
||||
} catch {
|
||||
// never block on logging failures
|
||||
}
|
||||
@@ -121,6 +148,30 @@ function buildLogger(settings: ResolvedSettings): TsLogger<LogObj> {
|
||||
return logger;
|
||||
}
|
||||
|
||||
function resolveMaxLogFileBytes(raw: unknown): number {
|
||||
if (typeof raw === "number" && Number.isFinite(raw) && raw > 0) {
|
||||
return Math.floor(raw);
|
||||
}
|
||||
return DEFAULT_MAX_LOG_FILE_BYTES;
|
||||
}
|
||||
|
||||
function getCurrentLogFileBytes(file: string): number {
|
||||
try {
|
||||
return fs.statSync(file).size;
|
||||
} catch {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
function appendLogLine(file: string, line: string): boolean {
|
||||
try {
|
||||
fs.appendFileSync(file, line, { encoding: "utf8" });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function getLogger(): TsLogger<LogObj> {
|
||||
const settings = resolveSettings();
|
||||
const cachedLogger = loggingState.cachedLogger as TsLogger<LogObj> | null;
|
||||
|
||||
Reference in New Issue
Block a user