Files
openclaw/src/imessage/monitor/loop-rate-limiter.ts
OfflynAI adb9234d03 fix(imessage): prevent echo loop from leaking internal metadata and amplifying NO_REPLY into queue overflow (#33295)
* fix(imessage): prevent echo loop from leaking internal metadata and amplifying NO_REPLY into queue overflow

- Add outbound sanitization at channel boundary (sanitize-outbound.ts):
  strips thinking/reasoning tags, relevant-memories tags, model-specific
  separators (+#+#), and assistant role markers before iMessage delivery

- Add inbound reflection guard (reflection-guard.ts): detects and drops
  messages containing assistant-internal markers that indicate a reflected
  outbound message, preventing recursive echo amplification

- Harden echo cache: increase text TTL from 5s to 30s to catch delayed
  reflections that previously expired before the echo could be detected

- Add loop rate limiter (loop-rate-limiter.ts): per-conversation rapid-fire
  detection that suppresses conversations exceeding threshold within a
  time window, acting as a safety net against amplification

Closes #33281

* fix(imessage): address review — stricter reflection regex, loop-aware rate limiter

- Reflection guard: require closing > bracket on thinking/final/memory
  tag patterns to prevent false-positives on user phrases like
  '<final answer>' or '<thought experiment>' (#33295 review)

- Rate limiter: only record echo/reflection/from-me drops instead of
  all dispatches, so the limiter acts as a loop-specific escalation
  mechanism rather than a general throttle on normal conversation
  velocity (#33295 review)

* Changelog: add iMessage echo-loop hardening entry

* iMessage: restore short echo-text TTL

* iMessage: ignore reflection markers in code

---------

Co-authored-by: Vincent Koc <vincentkoc@ieee.org>
2026-03-06 19:19:57 -05:00

70 lines
1.9 KiB
TypeScript

/**
* Per-conversation rate limiter that detects rapid-fire identical echo
* patterns and suppresses them before they amplify into queue overflow.
*/
/** Sliding-window length (ms) used when the caller supplies no windowMs: 60s. */
const DEFAULT_WINDOW_MS = 60_000;
/** Hits inside the window that trip the limiter when maxHits is omitted. */
const DEFAULT_MAX_HITS = 5;
/** Minimum spacing (ms) between full sweeps of stale conversation entries: 2min. */
const CLEANUP_INTERVAL_MS = 120_000;
/** Per-conversation state: epoch-ms timestamps of recorded events. */
type ConversationWindow = {
timestamps: number[];
};
/**
 * Public surface of the limiter returned by createLoopRateLimiter.
 * Both methods key their state on an opaque per-conversation string;
 * distinct keys never affect each other.
 */
export type LoopRateLimiter = {
/** Returns true if this conversation has exceeded the rate limit. */
isRateLimited: (conversationKey: string) => boolean;
/** Record an inbound message for a conversation. */
record: (conversationKey: string) => void;
};
export function createLoopRateLimiter(opts?: {
windowMs?: number;
maxHits?: number;
}): LoopRateLimiter {
const windowMs = opts?.windowMs ?? DEFAULT_WINDOW_MS;
const maxHits = opts?.maxHits ?? DEFAULT_MAX_HITS;
const conversations = new Map<string, ConversationWindow>();
let lastCleanup = Date.now();
function cleanup() {
const now = Date.now();
if (now - lastCleanup < CLEANUP_INTERVAL_MS) {
return;
}
lastCleanup = now;
for (const [key, win] of conversations.entries()) {
const recent = win.timestamps.filter((ts) => now - ts <= windowMs);
if (recent.length === 0) {
conversations.delete(key);
} else {
win.timestamps = recent;
}
}
}
return {
record(conversationKey: string) {
cleanup();
let win = conversations.get(conversationKey);
if (!win) {
win = { timestamps: [] };
conversations.set(conversationKey, win);
}
win.timestamps.push(Date.now());
},
isRateLimited(conversationKey: string): boolean {
cleanup();
const win = conversations.get(conversationKey);
if (!win) {
return false;
}
const now = Date.now();
const recent = win.timestamps.filter((ts) => now - ts <= windowMs);
win.timestamps = recent;
return recent.length >= maxHits;
},
};
}