From c3e900866198b2707f27a8fe414f06e3ab508acd Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Thu, 11 Dec 2025 17:25:31 +0100 Subject: [PATCH 01/51] working through it --- CLAUDE.md | 79 ++++++++++++++++++++++++++++++++++++++++ index.ts | 62 +++++++++++++++---------------- lib/hooks.ts | 10 +++++ lib/pruning-tool.ts | 18 ++++----- lib/state/index.ts | 44 +++++++++++----------- lib/state/persistence.ts | 8 ++-- 6 files changed, 154 insertions(+), 67 deletions(-) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..023e83f --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,79 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Build & Development Commands + +```bash +npm run build # Clean, compile TypeScript, copy prompts to dist/ +npm run dev # Run plugin in development mode (opencode plugin dev) +npm run typecheck # Type check without emitting files +npm run test # Run tests (node --import tsx --test tests/*.test.ts) +npm run clean # Remove dist directory + +# Publishing +npm version patch # Bump version before publishing +npm publish # Publish to npm +``` + +## Architecture Overview + +This is an OpenCode plugin that reduces token usage by pruning obsolete tool outputs from conversation history. The plugin intercepts API requests via a global fetch wrapper and replaces pruned tool outputs with placeholder text. + +### Core Flow + +1. **Entry Point** (`index.ts`): Initializes the plugin, creates state, installs the fetch wrapper, and registers hooks +2. **Fetch Wrapper** (`lib/fetch-wrapper/`): Intercepts outgoing LLM API calls to replace pruned tool outputs. Supports multiple API formats: + - OpenAI Chat Completions / Anthropic (`openai-chat.ts`) + - Google/Gemini (`gemini.ts`) + - OpenAI Responses API (`openai-responses.ts`) +3. **Janitor** (`lib/janitor.ts`): Core pruning logic that orchestrates analysis and tracks pruned IDs + +### Pruning Strategies + +Two complementary strategies in `lib/`: +- **Deduplication** (`deduplicator.ts`): Fast, zero-cost removal of duplicate tool calls by matching tool name + parameters +- **AI Analysis**: Uses an LLM to semantically identify obsolete tool outputs (prompts in `lib/prompts/`) + +### State Management + +`lib/state.ts` defines `PluginState` with Maps for: +- `prunedIds`: Session → pruned tool call IDs +- `stats`: Session → token savings statistics +- `toolParameters`: Tool call ID → parameters (for display and deduplication) +- `model`: Session → model info cache +- `googleToolCallMapping`: Session → position-based ID mapping for Google/Gemini + +### Hook System + +`lib/hooks.ts` provides two hooks: +- `event`: Triggers pruning when session goes idle +- `chat.params`: Caches model info and builds Google tool call mappings + +### Model Selection + +`lib/model-selector.ts` handles dynamic model selection with fallback chain: +1. Config-specified model (`dcp.jsonc`) +2. Current session model +3. Provider fallback models (OpenAI → Anthropic → Google → etc.) + +### Configuration + +`lib/config.ts` loads config from: +1. `~/.config/opencode/dcp.jsonc` (global) +2. `.opencode/dcp.jsonc` (project, overrides global) + +Key config options: `enabled`, `debug`, `model`, `strategies.onIdle`, `strategies.onTool`, `protectedTools`, `nudge_freq` + +### Synthetic Instructions + +`lib/synth-instruction.ts` and `lib/prompt.ts` inject nudge reminders into conversations prompting the AI to call the `context_pruning` tool. 
+ +## Key Implementation Details + +- Session history is never modified; pruning happens only in outgoing API requests +- Tool outputs are replaced with: `[Output removed to save context - information superseded or no longer needed]` +- Protected tools (task, todowrite, todoread, context_pruning) are never pruned +- Google/Gemini requires position-based correlation since native format loses tool call IDs +- When working on this plugin, reference the OpenCode source code to understand the plugin API and hook system +- Debug logs are written to `~/.config/opencode/logs/dcp/` when `debug: true` in config \ No newline at end of file diff --git a/index.ts b/index.ts index 1328d3c..f44101f 100644 --- a/index.ts +++ b/index.ts @@ -6,7 +6,7 @@ import { checkForUpdates } from "./lib/version-checker" import { createPluginState } from "./lib/state" import { installFetchWrapper } from "./lib/fetch-wrapper" import { createPruningTool } from "./lib/pruning-tool" -import { createEventHandler, createChatParamsHandler } from "./lib/hooks" +import { createEventHandler, createChatParamsHandler, createChatMessageTransformHandler } from "./lib/hooks" import { createToolTracker } from "./lib/fetch-wrapper/tool-tracker" const plugin: Plugin = (async (ctx) => { @@ -25,25 +25,25 @@ const plugin: Plugin = (async (ctx) => { const logger = new Logger(config.debug) const state = createPluginState() - const janitorCtx = createJanitorContext( - ctx.client, - state, - logger, - { - protectedTools: config.protectedTools, - model: config.model, - showModelErrorToasts: config.showModelErrorToasts ?? true, - strictModelSelection: config.strictModelSelection ?? false, - pruningSummary: config.pruning_summary, - workingDirectory: ctx.directory - } - ) + // const janitorCtx = createJanitorContext( + // ctx.client, + // state, + // logger, + // { + // protectedTools: config.protectedTools, + // model: config.model, + // showModelErrorToasts: config.showModelErrorToasts ?? true, + // strictModelSelection: config.strictModelSelection ?? false, + // pruningSummary: config.pruning_summary, + // workingDirectory: ctx.directory + // } + // ) // Create tool tracker for nudge injection - const toolTracker = createToolTracker() + // const toolTracker = createToolTracker() // Install global fetch wrapper for context pruning and system message injection - installFetchWrapper(state, logger, ctx.client, config, toolTracker) + // installFetchWrapper(state, logger, ctx.client, config, toolTracker) // Log initialization logger.info("plugin", "DCP initialized", { @@ -52,9 +52,9 @@ const plugin: Plugin = (async (ctx) => { }) // Check for updates after a delay - setTimeout(() => { - checkForUpdates(ctx.client, logger, config.showUpdateToasts ?? true).catch(() => { }) - }, 5000) + // setTimeout(() => { + // checkForUpdates(ctx.client, logger, config.showUpdateToasts ?? true).catch(() => { }) + // }, 5000) // Show migration toast if there were config migrations if (migrations.length > 0) { @@ -75,18 +75,18 @@ const plugin: Plugin = (async (ctx) => { } return { - event: createEventHandler(ctx.client, janitorCtx, logger, config, toolTracker), - "chat.params": createChatParamsHandler(ctx.client, state, logger, toolTracker), - tool: config.strategies.onTool.length > 0 ? 
{ - prune: createPruningTool({ - client: ctx.client, - state, - logger, - config, - notificationCtx: janitorCtx.notificationCtx, - workingDirectory: ctx.directory - }, toolTracker), - } : undefined, + "experimental.chat.messages.transform": createChatMessageTransformHandler(), + // "chat.params": createChatParamsHandler(ctx.client, state, logger, toolTracker), + // tool: config.strategies.onTool.length > 0 ? { + // prune: createPruningTool({ + // client: ctx.client, + // state, + // logger, + // config, + // notificationCtx: janitorCtx.notificationCtx, + // workingDirectory: ctx.directory + // }, toolTracker), + // } : undefined, } }) satisfies Plugin diff --git a/lib/hooks.ts b/lib/hooks.ts index 617abe1..64d2c01 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -1,3 +1,4 @@ +import type { WithParts } from "@opencode-ai/plugin" import type { PluginState } from "./state" import type { Logger } from "./logger" import type { JanitorContext } from "./core/janitor" @@ -54,6 +55,15 @@ export function createEventHandler( } } +export function createChatMessageTransformHandler() { + return async( + input: {}, + output: { messages: WithParts[] } + ) => { + + } +} + /** * Creates the chat.params hook for model caching and Google tool call mapping. */ diff --git a/lib/pruning-tool.ts b/lib/pruning-tool.ts index 9c1ff13..806f32d 100644 --- a/lib/pruning-tool.ts +++ b/lib/pruning-tool.ts @@ -8,7 +8,7 @@ import { isSubagentSession, findCurrentAgent } from "./hooks" import { getActualId } from "./state/id-mapping" import { sendUnifiedNotification, type NotificationContext } from "./ui/notification" import { formatPruningResultForTool } from "./ui/display-utils" -import { ensureSessionRestored } from "./state" +import { ensureSessionInitialized } from "./state" import { saveSessionState } from "./state/persistence" import type { Logger } from "./logger" import { estimateTokensBatch } from "./tokenizer" @@ -60,23 +60,19 @@ export function createPruningTool( } // Parse reason from first element, numeric IDs from the rest - const firstElement = args.ids[0] + + const reason = args.ids[0]; const validReasons = ["completion", "noise", "consolidation"] as const - let reason: PruneReason | undefined - let numericIds: number[] - - if (typeof firstElement === "string" && validReasons.includes(firstElement as any)) { - reason = firstElement as PruneReason - numericIds = args.ids.slice(1).filter((id): id is number => typeof id === "number") - } else { - numericIds = args.ids.filter((id): id is number => typeof id === "number") + if (typeof reason !== "string" || !validReasons.includes(reason as any)) { + return "No valid pruning reason found. Use 'completion', 'noise', or 'consolidation' as the first element." } + const numericIds: number[] = args.ids.slice(1).filter((id): id is number => typeof id === "number") if (numericIds.length === 0) { return "No numeric IDs provided. Format: [reason, id1, id2, ...] where reason is 'completion', 'noise', or 'consolidation'." 
} - await ensureSessionRestored(state, sessionId, logger) + await ensureSessionInitialized(state, sessionId, logger) const prunedIds = numericIds .map(numId => getActualId(sessionId, numId)) diff --git a/lib/state/index.ts b/lib/state/index.ts index a3c2584..a8e65ce 100644 --- a/lib/state/index.ts +++ b/lib/state/index.ts @@ -3,16 +3,17 @@ import type { Logger } from "../logger" import { loadSessionState } from "./persistence" export interface PluginState { + sessionId: string | null prunedIds: Map stats: Map gcPending: Map - toolParameters: Map - model: Map - googleToolCallMapping: Map> - restoredSessions: Set - checkedSessions: Set - subagentSessions: Set - lastSeenSessionId: string | null + // toolParameters: Map + // model: Map + // googleToolCallMapping: Map> + // restoredSessions: Set + // checkedSessions: Set + // subagentSessions: Set + // lastSeenSessionId: string | null } export type ToolStatus = "pending" | "running" | "completed" | "error" @@ -31,29 +32,30 @@ export interface ModelInfo { export function createPluginState(): PluginState { return { + sessionId: null, prunedIds: new Map(), stats: new Map(), gcPending: new Map(), - toolParameters: new Map(), - model: new Map(), - googleToolCallMapping: new Map(), - restoredSessions: new Set(), - checkedSessions: new Set(), - subagentSessions: new Set(), - lastSeenSessionId: null, + // toolParameters: new Map(), + // model: new Map(), + // googleToolCallMapping: new Map(), + // restoredSessions: new Set(), + // checkedSessions: new Set(), + // subagentSessions: new Set(), + // lastSeenSessionId: null, } } -export async function ensureSessionRestored( +export async function ensureSessionInitialized( state: PluginState, sessionId: string, - logger?: Logger + logger: Logger ): Promise { - if (state.restoredSessions.has(sessionId)) { - return - } - - state.restoredSessions.add(sessionId) + // if (state.restoredSessions.has(sessionId)) { + // return + // } + // + // state.restoredSessions.add(sessionId) const persisted = await loadSessionState(sessionId, logger) if (persisted) { diff --git a/lib/state/persistence.ts b/lib/state/persistence.ts index b394ef2..0337c2b 100644 --- a/lib/state/persistence.ts +++ b/lib/state/persistence.ts @@ -74,7 +74,7 @@ export async function saveSessionState( export async function loadSessionState( sessionId: string, - logger?: Logger + logger: Logger ): Promise { try { const filePath = getSessionFilePath(sessionId); @@ -87,13 +87,13 @@ export async function loadSessionState( const state = JSON.parse(content) as PersistedSessionState; if (!state || !Array.isArray(state.prunedIds) || !state.stats) { - logger?.warn("persist", "Invalid session state file, ignoring", { + logger.warn("persist", "Invalid session state file, ignoring", { sessionId: sessionId.slice(0, 8), }); return null; } - logger?.info("persist", "Loaded session state from disk", { + logger.info("persist", "Loaded session state from disk", { sessionId: sessionId.slice(0, 8), prunedIds: state.prunedIds.length, totalTokensSaved: state.stats.totalTokensSaved, @@ -101,7 +101,7 @@ export async function loadSessionState( return state; } catch (error: any) { - logger?.warn("persist", "Failed to load session state", { + logger.warn("persist", "Failed to load session state", { sessionId: sessionId.slice(0, 8), error: error?.message, }); From f9fd11a71bd9f123b4c5c1ae9e8ae260eb39f243 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Thu, 11 Dec 2025 21:05:46 +0100 Subject: [PATCH 02/51] crazy refactor --- CLAUDE.md | 6 +- README.md | 6 +- index.ts | 52 
++--- lib/config.ts | 24 +-- lib/core/janitor.ts | 6 +- lib/fetch-wrapper/formats/anthropic.ts | 111 ----------- lib/fetch-wrapper/formats/bedrock.ts | 113 ----------- lib/fetch-wrapper/formats/gemini.ts | 160 ---------------- lib/fetch-wrapper/formats/index.ts | 5 - lib/fetch-wrapper/formats/openai-chat.ts | 115 ----------- lib/fetch-wrapper/formats/openai-responses.ts | 75 -------- lib/fetch-wrapper/gc-tracker.ts | 77 -------- lib/fetch-wrapper/handler.ts | 165 ---------------- lib/fetch-wrapper/index.ts | 134 ------------- lib/fetch-wrapper/prunable-list.ts | 60 ------ lib/fetch-wrapper/tool-tracker.ts | 19 -- lib/fetch-wrapper/types.ts | 76 -------- lib/hooks.ts | 6 +- lib/pruning-tool.ts | 116 ++++-------- lib/state/id-mapping.ts | 64 ------- lib/state/index.ts | 82 +------- lib/state/persistence.ts | 32 ++-- lib/state/state.ts | 66 +++++++ lib/state/tool-cache.ts | 6 +- lib/state/types.ts | 26 +++ lib/ui/display-utils.ts | 15 +- lib/ui/notification.ts | 178 ++++++++++-------- lib/utils.ts | 43 +++++ 28 files changed, 336 insertions(+), 1502 deletions(-) delete mode 100644 lib/fetch-wrapper/formats/anthropic.ts delete mode 100644 lib/fetch-wrapper/formats/bedrock.ts delete mode 100644 lib/fetch-wrapper/formats/gemini.ts delete mode 100644 lib/fetch-wrapper/formats/index.ts delete mode 100644 lib/fetch-wrapper/formats/openai-chat.ts delete mode 100644 lib/fetch-wrapper/formats/openai-responses.ts delete mode 100644 lib/fetch-wrapper/gc-tracker.ts delete mode 100644 lib/fetch-wrapper/handler.ts delete mode 100644 lib/fetch-wrapper/index.ts delete mode 100644 lib/fetch-wrapper/prunable-list.ts delete mode 100644 lib/fetch-wrapper/tool-tracker.ts delete mode 100644 lib/fetch-wrapper/types.ts delete mode 100644 lib/state/id-mapping.ts create mode 100644 lib/state/state.ts create mode 100644 lib/state/types.ts create mode 100644 lib/utils.ts diff --git a/CLAUDE.md b/CLAUDE.md index 023e83f..64c26ef 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -37,7 +37,7 @@ Two complementary strategies in `lib/`: ### State Management -`lib/state.ts` defines `PluginState` with Maps for: +`lib/state.ts` defines `SessionState` with Maps for: - `prunedIds`: Session → pruned tool call IDs - `stats`: Session → token savings statistics - `toolParameters`: Tool call ID → parameters (for display and deduplication) @@ -63,7 +63,7 @@ Two complementary strategies in `lib/`: 1. `~/.config/opencode/dcp.jsonc` (global) 2. 
`.opencode/dcp.jsonc` (project, overrides global) -Key config options: `enabled`, `debug`, `model`, `strategies.onIdle`, `strategies.onTool`, `protectedTools`, `nudge_freq` +Key config options: `enabled`, `debug`, `model`, `strategies.onIdle`, `strategies.onTool`, `protectedTools`, `nudgeFreq` ### Synthetic Instructions @@ -76,4 +76,4 @@ Key config options: `enabled`, `debug`, `model`, `strategies.onIdle`, `strategie - Protected tools (task, todowrite, todoread, context_pruning) are never pruned - Google/Gemini requires position-based correlation since native format loses tool call IDs - When working on this plugin, reference the OpenCode source code to understand the plugin API and hook system -- Debug logs are written to `~/.config/opencode/logs/dcp/` when `debug: true` in config \ No newline at end of file +- Debug logs are written to `~/.config/opencode/logs/dcp/` when `debug: true` in config diff --git a/README.md b/README.md index 113c8ff..6992da2 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ DCP uses two complementary techniques: When `strategies.onTool` is enabled, DCP exposes a `prune` tool to Opencode that the AI can call to trigger pruning on demand. -Adjust `nudge_freq` to control how aggressively the AI is prompted to prune — lower values trigger reminders sooner and more often. +Adjust `nudgeFreq` to control how aggressively the AI is prompted to prune — lower values trigger reminders sooner and more often. ## How It Works @@ -59,8 +59,8 @@ DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.j | `showModelErrorToasts` | `true` | Show notifications on model fallback | | `showUpdateToasts` | `true` | Show notifications when a new version is available | | `strictModelSelection` | `false` | Only run AI analysis with session or configured model (disables fallback models) | -| `pruning_summary` | `"detailed"` | `"off"`, `"minimal"`, or `"detailed"` | -| `nudge_freq` | `10` | How often to remind AI to prune (lower = more frequent) | +| `pruningSummary` | `"detailed"` | `"off"`, `"minimal"`, or `"detailed"` | +| `nudgeFreq` | `10` | How often to remind AI to prune (lower = more frequent) | | `protectedTools` | `["task", "todowrite", "todoread", "prune", "batch", "write", "edit"]` | Tools that are never pruned | | `strategies.onIdle` | `["ai-analysis"]` | Strategies for automatic pruning | | `strategies.onTool` | `["ai-analysis"]` | Strategies when AI calls `prune` | diff --git a/index.ts b/index.ts index 90cbe10..e645e31 100644 --- a/index.ts +++ b/index.ts @@ -1,13 +1,9 @@ import type { Plugin } from "@opencode-ai/plugin" import { getConfig } from "./lib/config" import { Logger } from "./lib/logger" -import { createJanitorContext } from "./lib/core/janitor" -import { checkForUpdates } from "./lib/version-checker" -import { createPluginState } from "./lib/state" -import { installFetchWrapper } from "./lib/fetch-wrapper" +import { createSessionState } from "./lib/state" import { createPruningTool } from "./lib/pruning-tool" import { createEventHandler, createChatParamsHandler, createChatMessageTransformHandler } from "./lib/hooks" -import { createToolTracker } from "./lib/fetch-wrapper/tool-tracker" const plugin: Plugin = (async (ctx) => { const { config, migrations } = getConfig(ctx) @@ -23,27 +19,7 @@ const plugin: Plugin = (async (ctx) => { // Initialize core components const logger = new Logger(config.debug) - const state = createPluginState() - - // const janitorCtx = createJanitorContext( - // ctx.client, - // state, - // logger, - // { - // 
protectedTools: config.protectedTools, - // model: config.model, - // showModelErrorToasts: config.showModelErrorToasts ?? true, - // strictModelSelection: config.strictModelSelection ?? false, - // pruningSummary: config.pruning_summary, - // workingDirectory: ctx.directory - // } - // ) - - // Create tool tracker for nudge injection - // const toolTracker = createToolTracker() - - // Install global fetch wrapper for context pruning and system message injection - // installFetchWrapper(state, logger, ctx.client, config, toolTracker) + const state = createSessionState() // Log initialization logger.info("plugin", "DCP initialized", { @@ -51,11 +27,6 @@ const plugin: Plugin = (async (ctx) => { model: config.model || "auto" }) - // Check for updates after a delay - // setTimeout(() => { - // checkForUpdates(ctx.client, logger, config.showUpdateToasts ?? true).catch(() => { }) - // }, 5000) - // Show migration toast if there were config migrations if (migrations.length > 0) { setTimeout(async () => { @@ -77,16 +48,15 @@ const plugin: Plugin = (async (ctx) => { return { "experimental.chat.messages.transform": createChatMessageTransformHandler(), // "chat.params": createChatParamsHandler(ctx.client, state, logger, toolTracker), - // tool: config.strategies.onTool.length > 0 ? { - // prune: createPruningTool({ - // client: ctx.client, - // state, - // logger, - // config, - // notificationCtx: janitorCtx.notificationCtx, - // workingDirectory: ctx.directory - // }, toolTracker), - // } : undefined, + tool: config.strategies.onTool.length > 0 ? { + prune: createPruningTool({ + client: ctx.client, + state, + logger, + config, + workingDirectory: ctx.directory + }), + } : undefined, // config: async (opencodeConfig) => { // // Add prune to primary_tools by mutating the opencode config // // This works because config is cached and passed by reference diff --git a/lib/config.ts b/lib/config.ts index 57a2093..a08a2aa 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -15,8 +15,8 @@ export interface PluginConfig { showModelErrorToasts?: boolean showUpdateToasts?: boolean strictModelSelection?: boolean - pruning_summary: "off" | "minimal" | "detailed" - nudge_freq: number + pruningSummary: "off" | "minimal" | "detailed" + nudgeFreq: number strategies: { onIdle: PruningStrategy[] onTool: PruningStrategy[] @@ -35,8 +35,8 @@ const defaultConfig: PluginConfig = { showModelErrorToasts: true, showUpdateToasts: true, strictModelSelection: false, - pruning_summary: 'detailed', - nudge_freq: 10, + pruningSummary: 'detailed', + nudgeFreq: 10, strategies: { onIdle: ['ai-analysis'], onTool: ['ai-analysis'] @@ -51,8 +51,8 @@ const VALID_CONFIG_KEYS = new Set([ 'showModelErrorToasts', 'showUpdateToasts', 'strictModelSelection', - 'pruning_summary', - 'nudge_freq', + 'pruningSummary', + 'nudgeFreq', 'strategies' ]) @@ -125,9 +125,9 @@ function createDefaultConfig(): void { "onTool": ["ai-analysis"] }, // Summary display: "off", "minimal", or "detailed" - "pruning_summary": "detailed", + "pruningSummary": "detailed", // How often to nudge the AI to prune (every N tool results, 0 = disabled) - "nudge_freq": 10 + "nudgeFreq": 10 // Additional tools to protect from pruning // "protectedTools": ["bash"] } @@ -207,8 +207,8 @@ export function getConfig(ctx?: PluginInput): ConfigResult { showUpdateToasts: globalConfig.showUpdateToasts ?? config.showUpdateToasts, strictModelSelection: globalConfig.strictModelSelection ?? 
config.strictModelSelection, strategies: mergeStrategies(config.strategies, globalConfig.strategies as any), - pruning_summary: globalConfig.pruning_summary ?? config.pruning_summary, - nudge_freq: globalConfig.nudge_freq ?? config.nudge_freq + pruningSummary: globalConfig.pruningSummary ?? config.pruningSummary, + nudgeFreq: globalConfig.nudgeFreq ?? config.nudgeFreq } logger.info('config', 'Loaded global config', { path: configPaths.global }) } @@ -239,8 +239,8 @@ export function getConfig(ctx?: PluginInput): ConfigResult { showUpdateToasts: projectConfig.showUpdateToasts ?? config.showUpdateToasts, strictModelSelection: projectConfig.strictModelSelection ?? config.strictModelSelection, strategies: mergeStrategies(config.strategies, projectConfig.strategies as any), - pruning_summary: projectConfig.pruning_summary ?? config.pruning_summary, - nudge_freq: projectConfig.nudge_freq ?? config.nudge_freq + pruningSummary: projectConfig.pruningSummary ?? config.pruningSummary, + nudgeFreq: projectConfig.nudgeFreq ?? config.nudgeFreq } logger.info('config', 'Loaded project config (overrides global)', { path: configPaths.project }) } diff --git a/lib/core/janitor.ts b/lib/core/janitor.ts index 55772ac..fc00a10 100644 --- a/lib/core/janitor.ts +++ b/lib/core/janitor.ts @@ -1,7 +1,7 @@ import { z } from "zod" import type { Logger } from "../logger" import type { PruningStrategy } from "../config" -import type { PluginState } from "../state" +import type { SessionState } from "../state" import type { ToolMetadata, SessionStats, GCStats, PruningResult } from "../fetch-wrapper/types" import { findCurrentAgent } from "../hooks" import { buildAnalysisPrompt } from "./prompt" @@ -32,7 +32,7 @@ export interface JanitorConfig { export interface JanitorContext { client: any - state: PluginState + state: SessionState logger: Logger config: JanitorConfig notificationCtx: NotificationContext @@ -44,7 +44,7 @@ export interface JanitorContext { export function createJanitorContext( client: any, - state: PluginState, + state: SessionState, logger: Logger, config: JanitorConfig ): JanitorContext { diff --git a/lib/fetch-wrapper/formats/anthropic.ts b/lib/fetch-wrapper/formats/anthropic.ts deleted file mode 100644 index a409e3e..0000000 --- a/lib/fetch-wrapper/formats/anthropic.ts +++ /dev/null @@ -1,111 +0,0 @@ -import type { FormatDescriptor, ToolOutput } from "../types" -import type { PluginState } from "../../state" - -/** - * Anthropic Messages API format with top-level `system` array. 
- * Tool calls: `tool_use` blocks in assistant content with `id` - * Tool results: `tool_result` blocks in user content with `tool_use_id` - */ -export const anthropicFormat: FormatDescriptor = { - name: 'anthropic', - - detect(body: any): boolean { - return ( - body.system !== undefined && - Array.isArray(body.messages) - ) - }, - - getDataArray(body: any): any[] | undefined { - return body.messages - }, - - injectSystemMessage(body: any, injection: string): boolean { - if (!injection) return false - - if (typeof body.system === 'string') { - body.system = [{ type: 'text', text: body.system }] - } else if (!Array.isArray(body.system)) { - body.system = [] - } - - body.system.push({ type: 'text', text: injection }) - return true - }, - - appendUserMessage(body: any, injection: string): boolean { - if (!injection || !body.messages) return false - body.messages.push({ role: 'user', content: [{ type: 'text', text: injection }] }) - return true - }, - - extractToolOutputs(data: any[], state: PluginState): ToolOutput[] { - const outputs: ToolOutput[] = [] - - for (const m of data) { - if (m.role === 'user' && Array.isArray(m.content)) { - for (const block of m.content) { - if (block.type === 'tool_result' && block.tool_use_id) { - const toolUseId = block.tool_use_id.toLowerCase() - const metadata = state.toolParameters.get(toolUseId) - outputs.push({ - id: toolUseId, - toolName: metadata?.tool - }) - } - } - } - } - - return outputs - }, - - replaceToolOutput(data: any[], toolId: string, prunedMessage: string, _state: PluginState): boolean { - const toolIdLower = toolId.toLowerCase() - let replaced = false - - for (let i = 0; i < data.length; i++) { - const m = data[i] - - if (m.role === 'user' && Array.isArray(m.content)) { - let messageModified = false - const newContent = m.content.map((block: any) => { - if (block.type === 'tool_result' && block.tool_use_id?.toLowerCase() === toolIdLower) { - messageModified = true - return { - ...block, - content: prunedMessage - } - } - return block - }) - if (messageModified) { - data[i] = { ...m, content: newContent } - replaced = true - } - } - } - - return replaced - }, - - hasToolOutputs(data: any[]): boolean { - for (const m of data) { - if (m.role === 'user' && Array.isArray(m.content)) { - for (const block of m.content) { - if (block.type === 'tool_result') return true - } - } - } - return false - }, - - getLogMetadata(data: any[], replacedCount: number, inputUrl: string): Record { - return { - url: inputUrl, - replacedCount, - totalMessages: data.length, - format: 'anthropic' - } - } -} diff --git a/lib/fetch-wrapper/formats/bedrock.ts b/lib/fetch-wrapper/formats/bedrock.ts deleted file mode 100644 index 4f4f7ce..0000000 --- a/lib/fetch-wrapper/formats/bedrock.ts +++ /dev/null @@ -1,113 +0,0 @@ -import type { FormatDescriptor, ToolOutput } from "../types" -import type { PluginState } from "../../state" - -/** - * Bedrock uses top-level `system` array + `inferenceConfig` (distinguishes from OpenAI/Anthropic). 
- * Tool calls: `toolUse` blocks in assistant content with `toolUseId` - * Tool results: `toolResult` blocks in user content with `toolUseId` - */ -export const bedrockFormat: FormatDescriptor = { - name: 'bedrock', - - detect(body: any): boolean { - return ( - Array.isArray(body.system) && - body.inferenceConfig !== undefined && - Array.isArray(body.messages) - ) - }, - - getDataArray(body: any): any[] | undefined { - return body.messages - }, - - injectSystemMessage(body: any, injection: string): boolean { - if (!injection) return false - - if (!Array.isArray(body.system)) { - body.system = [] - } - - body.system.push({ text: injection }) - return true - }, - - appendUserMessage(body: any, injection: string): boolean { - if (!injection || !body.messages) return false - body.messages.push({ role: 'user', content: [{ text: injection }] }) - return true - }, - - extractToolOutputs(data: any[], state: PluginState): ToolOutput[] { - const outputs: ToolOutput[] = [] - - for (const m of data) { - if (m.role === 'user' && Array.isArray(m.content)) { - for (const block of m.content) { - if (block.toolResult && block.toolResult.toolUseId) { - const toolUseId = block.toolResult.toolUseId.toLowerCase() - const metadata = state.toolParameters.get(toolUseId) - outputs.push({ - id: toolUseId, - toolName: metadata?.tool - }) - } - } - } - } - - return outputs - }, - - replaceToolOutput(data: any[], toolId: string, prunedMessage: string, _state: PluginState): boolean { - const toolIdLower = toolId.toLowerCase() - let replaced = false - - for (let i = 0; i < data.length; i++) { - const m = data[i] - - if (m.role === 'user' && Array.isArray(m.content)) { - let messageModified = false - const newContent = m.content.map((block: any) => { - if (block.toolResult && block.toolResult.toolUseId?.toLowerCase() === toolIdLower) { - messageModified = true - return { - ...block, - toolResult: { - ...block.toolResult, - content: [{ text: prunedMessage }] - } - } - } - return block - }) - if (messageModified) { - data[i] = { ...m, content: newContent } - replaced = true - } - } - } - - return replaced - }, - - hasToolOutputs(data: any[]): boolean { - for (const m of data) { - if (m.role === 'user' && Array.isArray(m.content)) { - for (const block of m.content) { - if (block.toolResult) return true - } - } - } - return false - }, - - getLogMetadata(data: any[], replacedCount: number, inputUrl: string): Record { - return { - url: inputUrl, - replacedCount, - totalMessages: data.length, - format: 'bedrock' - } - } -} diff --git a/lib/fetch-wrapper/formats/gemini.ts b/lib/fetch-wrapper/formats/gemini.ts deleted file mode 100644 index 46ec2ad..0000000 --- a/lib/fetch-wrapper/formats/gemini.ts +++ /dev/null @@ -1,160 +0,0 @@ -import type { FormatDescriptor, ToolOutput } from "../types" -import type { PluginState } from "../../state" - -/** - * Gemini doesn't include tool call IDs in its native format. - * We use position-based correlation via state.googleToolCallMapping which maps - * "toolName:index" -> "toolCallId" (populated by hooks.ts from message events). 
- */ -export const geminiFormat: FormatDescriptor = { - name: 'gemini', - - detect(body: any): boolean { - return body.contents && Array.isArray(body.contents) - }, - - getDataArray(body: any): any[] | undefined { - return body.contents - }, - - injectSystemMessage(body: any, injection: string): boolean { - if (!injection) return false - - if (!body.systemInstruction) { - body.systemInstruction = { parts: [] } - } - if (!Array.isArray(body.systemInstruction.parts)) { - body.systemInstruction.parts = [] - } - - body.systemInstruction.parts.push({ text: injection }) - return true - }, - - appendUserMessage(body: any, injection: string): boolean { - if (!injection || !body.contents) return false - body.contents.push({ role: 'user', parts: [{ text: injection }] }) - return true - }, - - extractToolOutputs(data: any[], state: PluginState): ToolOutput[] { - const outputs: ToolOutput[] = [] - - let positionMapping: Map | undefined - for (const [_sessionId, mapping] of state.googleToolCallMapping) { - if (mapping && mapping.size > 0) { - positionMapping = mapping - break - } - } - - if (!positionMapping) { - return outputs - } - - const toolPositionCounters = new Map() - - for (const content of data) { - if (!Array.isArray(content.parts)) continue - - for (const part of content.parts) { - if (part.functionResponse) { - const funcName = part.functionResponse.name?.toLowerCase() - if (funcName) { - const currentIndex = toolPositionCounters.get(funcName) || 0 - toolPositionCounters.set(funcName, currentIndex + 1) - - const positionKey = `${funcName}:${currentIndex}` - const toolCallId = positionMapping.get(positionKey) - - if (toolCallId) { - const metadata = state.toolParameters.get(toolCallId.toLowerCase()) - outputs.push({ - id: toolCallId.toLowerCase(), - toolName: metadata?.tool - }) - } - } - } - } - } - - return outputs - }, - - replaceToolOutput(data: any[], toolId: string, prunedMessage: string, state: PluginState): boolean { - let positionMapping: Map | undefined - for (const [_sessionId, mapping] of state.googleToolCallMapping) { - if (mapping && mapping.size > 0) { - positionMapping = mapping - break - } - } - - if (!positionMapping) { - return false - } - - const toolIdLower = toolId.toLowerCase() - const toolPositionCounters = new Map() - let replaced = false - - for (let i = 0; i < data.length; i++) { - const content = data[i] - if (!Array.isArray(content.parts)) continue - - let contentModified = false - const newParts = content.parts.map((part: any) => { - if (part.functionResponse) { - const funcName = part.functionResponse.name?.toLowerCase() - if (funcName) { - const currentIndex = toolPositionCounters.get(funcName) || 0 - toolPositionCounters.set(funcName, currentIndex + 1) - - const positionKey = `${funcName}:${currentIndex}` - const mappedToolId = positionMapping!.get(positionKey) - - if (mappedToolId?.toLowerCase() === toolIdLower) { - contentModified = true - replaced = true - // Preserve thoughtSignature if present (required for Gemini 3 Pro) - return { - ...part, - functionResponse: { - ...part.functionResponse, - response: { - name: part.functionResponse.name, - content: prunedMessage - } - } - } - } - } - } - return part - }) - - if (contentModified) { - data[i] = { ...content, parts: newParts } - } - } - - return replaced - }, - - hasToolOutputs(data: any[]): boolean { - return data.some((content: any) => - Array.isArray(content.parts) && - content.parts.some((part: any) => part.functionResponse) - ) - }, - - getLogMetadata(data: any[], replacedCount: number, inputUrl: 
string): Record { - return { - url: inputUrl, - replacedCount, - totalContents: data.length, - format: 'google-gemini' - } - } -} diff --git a/lib/fetch-wrapper/formats/index.ts b/lib/fetch-wrapper/formats/index.ts deleted file mode 100644 index 5e13d3f..0000000 --- a/lib/fetch-wrapper/formats/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { openaiChatFormat } from './openai-chat' -export { openaiResponsesFormat } from './openai-responses' -export { geminiFormat } from './gemini' -export { bedrockFormat } from './bedrock' -export { anthropicFormat } from './anthropic' diff --git a/lib/fetch-wrapper/formats/openai-chat.ts b/lib/fetch-wrapper/formats/openai-chat.ts deleted file mode 100644 index ca41dbf..0000000 --- a/lib/fetch-wrapper/formats/openai-chat.ts +++ /dev/null @@ -1,115 +0,0 @@ -import type { FormatDescriptor, ToolOutput } from "../types" -import type { PluginState } from "../../state" - -export const openaiChatFormat: FormatDescriptor = { - name: 'openai-chat', - - detect(body: any): boolean { - return body.messages && Array.isArray(body.messages) - }, - - getDataArray(body: any): any[] | undefined { - return body.messages - }, - - injectSystemMessage(body: any, injection: string): boolean { - if (!injection || !body.messages) return false - - let lastSystemIndex = -1 - for (let i = 0; i < body.messages.length; i++) { - if (body.messages[i].role === 'system') { - lastSystemIndex = i - } - } - - const insertIndex = lastSystemIndex + 1 - body.messages.splice(insertIndex, 0, { role: 'system', content: injection }) - return true - }, - - appendUserMessage(body: any, injection: string): boolean { - if (!injection || !body.messages) return false - body.messages.push({ role: 'user', content: injection }) - return true - }, - - extractToolOutputs(data: any[], state: PluginState): ToolOutput[] { - const outputs: ToolOutput[] = [] - - for (const m of data) { - if (m.role === 'tool' && m.tool_call_id) { - const metadata = state.toolParameters.get(m.tool_call_id.toLowerCase()) - outputs.push({ - id: m.tool_call_id.toLowerCase(), - toolName: metadata?.tool - }) - } - - if (m.role === 'user' && Array.isArray(m.content)) { - for (const part of m.content) { - if (part.type === 'tool_result' && part.tool_use_id) { - const metadata = state.toolParameters.get(part.tool_use_id.toLowerCase()) - outputs.push({ - id: part.tool_use_id.toLowerCase(), - toolName: metadata?.tool - }) - } - } - } - } - - return outputs - }, - - replaceToolOutput(data: any[], toolId: string, prunedMessage: string, _state: PluginState): boolean { - const toolIdLower = toolId.toLowerCase() - let replaced = false - - for (let i = 0; i < data.length; i++) { - const m = data[i] - - if (m.role === 'tool' && m.tool_call_id?.toLowerCase() === toolIdLower) { - data[i] = { ...m, content: prunedMessage } - replaced = true - } - - if (m.role === 'user' && Array.isArray(m.content)) { - let messageModified = false - const newContent = m.content.map((part: any) => { - if (part.type === 'tool_result' && part.tool_use_id?.toLowerCase() === toolIdLower) { - messageModified = true - return { ...part, content: prunedMessage } - } - return part - }) - if (messageModified) { - data[i] = { ...m, content: newContent } - replaced = true - } - } - } - - return replaced - }, - - hasToolOutputs(data: any[]): boolean { - for (const m of data) { - if (m.role === 'tool') return true - if (m.role === 'user' && Array.isArray(m.content)) { - for (const part of m.content) { - if (part.type === 'tool_result') return true - } - } - } - return false - }, - - 
getLogMetadata(data: any[], replacedCount: number, inputUrl: string): Record { - return { - url: inputUrl, - replacedCount, - totalMessages: data.length, - format: 'openai-chat' - } - } -} diff --git a/lib/fetch-wrapper/formats/openai-responses.ts b/lib/fetch-wrapper/formats/openai-responses.ts deleted file mode 100644 index 2cabafe..0000000 --- a/lib/fetch-wrapper/formats/openai-responses.ts +++ /dev/null @@ -1,75 +0,0 @@ -import type { FormatDescriptor, ToolOutput } from "../types" -import type { PluginState } from "../../state" - -export const openaiResponsesFormat: FormatDescriptor = { - name: 'openai-responses', - - detect(body: any): boolean { - return body.input && Array.isArray(body.input) - }, - - getDataArray(body: any): any[] | undefined { - return body.input - }, - - injectSystemMessage(body: any, injection: string): boolean { - if (!injection) return false - - if (body.instructions && typeof body.instructions === 'string') { - body.instructions = body.instructions + '\n\n' + injection - } else { - body.instructions = injection - } - return true - }, - - appendUserMessage(body: any, injection: string): boolean { - if (!injection || !body.input) return false - body.input.push({ type: 'message', role: 'user', content: injection }) - return true - }, - - extractToolOutputs(data: any[], state: PluginState): ToolOutput[] { - const outputs: ToolOutput[] = [] - - for (const item of data) { - if (item.type === 'function_call_output' && item.call_id) { - const metadata = state.toolParameters.get(item.call_id.toLowerCase()) - outputs.push({ - id: item.call_id.toLowerCase(), - toolName: metadata?.tool ?? item.name - }) - } - } - - return outputs - }, - - replaceToolOutput(data: any[], toolId: string, prunedMessage: string, _state: PluginState): boolean { - const toolIdLower = toolId.toLowerCase() - let replaced = false - - for (let i = 0; i < data.length; i++) { - const item = data[i] - if (item.type === 'function_call_output' && item.call_id?.toLowerCase() === toolIdLower) { - data[i] = { ...item, output: prunedMessage } - replaced = true - } - } - - return replaced - }, - - hasToolOutputs(data: any[]): boolean { - return data.some((item: any) => item.type === 'function_call_output') - }, - - getLogMetadata(data: any[], replacedCount: number, inputUrl: string): Record { - return { - url: inputUrl, - replacedCount, - totalItems: data.length, - format: 'openai-responses-api' - } - } -} diff --git a/lib/fetch-wrapper/gc-tracker.ts b/lib/fetch-wrapper/gc-tracker.ts deleted file mode 100644 index 950a21a..0000000 --- a/lib/fetch-wrapper/gc-tracker.ts +++ /dev/null @@ -1,77 +0,0 @@ -import type { PluginState } from "../state" -import type { Logger } from "../logger" - -export function accumulateGCStats( - state: PluginState, - sessionId: string, - prunedIds: string[], - body: any, - logger: Logger -): void { - if (prunedIds.length === 0) return - - const toolOutputs = extractToolOutputsFromBody(body, prunedIds) - const tokensCollected = estimateTokensFromOutputs(toolOutputs) - - const existing = state.gcPending.get(sessionId) ?? 
{ tokensCollected: 0, toolsDeduped: 0 } - - state.gcPending.set(sessionId, { - tokensCollected: existing.tokensCollected + tokensCollected, - toolsDeduped: existing.toolsDeduped + prunedIds.length - }) - - logger.debug("gc-tracker", "Accumulated GC stats", { - sessionId: sessionId.substring(0, 8), - newlyDeduped: prunedIds.length, - tokensThisCycle: tokensCollected, - pendingTotal: state.gcPending.get(sessionId) - }) -} - -function extractToolOutputsFromBody(body: any, prunedIds: string[]): string[] { - const outputs: string[] = [] - const prunedIdSet = new Set(prunedIds.map(id => id.toLowerCase())) - - // OpenAI Chat format - if (body.messages && Array.isArray(body.messages)) { - for (const m of body.messages) { - if (m.role === 'tool' && m.tool_call_id && prunedIdSet.has(m.tool_call_id.toLowerCase())) { - if (typeof m.content === 'string') { - outputs.push(m.content) - } - } - // Anthropic format - if (m.role === 'user' && Array.isArray(m.content)) { - for (const part of m.content) { - if (part.type === 'tool_result' && part.tool_use_id && prunedIdSet.has(part.tool_use_id.toLowerCase())) { - if (typeof part.content === 'string') { - outputs.push(part.content) - } - } - } - } - } - } - - // OpenAI Responses format - if (body.input && Array.isArray(body.input)) { - for (const item of body.input) { - if (item.type === 'function_call_output' && item.call_id && prunedIdSet.has(item.call_id.toLowerCase())) { - if (typeof item.output === 'string') { - outputs.push(item.output) - } - } - } - } - - return outputs -} - -// Character-based approximation (chars / 4) to avoid async tokenizer in fetch path -function estimateTokensFromOutputs(outputs: string[]): number { - let totalChars = 0 - for (const output of outputs) { - totalChars += output.length - } - return Math.round(totalChars / 4) -} diff --git a/lib/fetch-wrapper/handler.ts b/lib/fetch-wrapper/handler.ts deleted file mode 100644 index a4cd693..0000000 --- a/lib/fetch-wrapper/handler.ts +++ /dev/null @@ -1,165 +0,0 @@ -import type { FetchHandlerContext, FetchHandlerResult, FormatDescriptor, PrunedIdData } from "./types" -import { type PluginState, ensureSessionRestored } from "../state" -import type { Logger } from "../logger" -import { buildPrunableToolsList, buildEndInjection } from "./prunable-list" -import { syncToolCache } from "../state/tool-cache" -import { loadPrompt } from "../core/prompt" - -const SYNTHETIC_INSTRUCTION = loadPrompt("synthetic") -const PRUNED_CONTENT_MESSAGE = '[Output removed to save context - information superseded or no longer needed]' - -function getMostRecentActiveSession(allSessions: any): any | undefined { - const activeSessions = allSessions.data?.filter((s: any) => !s.parentID) || [] - return activeSessions.length > 0 ? activeSessions[0] : undefined -} - -async function fetchSessionMessages( - client: any, - sessionId: string -): Promise { - try { - const messagesResponse = await client.session.messages({ - path: { id: sessionId }, - query: { limit: 500 } - }) - return Array.isArray(messagesResponse.data) - ? messagesResponse.data - : Array.isArray(messagesResponse) ? 
messagesResponse : undefined - } catch (e) { - return undefined - } -} - -async function getAllPrunedIds( - client: any, - state: PluginState, - logger?: Logger -): Promise { - const allSessions = await client.session.list() - const allPrunedIds = new Set() - - const currentSession = getMostRecentActiveSession(allSessions) - if (currentSession) { - await ensureSessionRestored(state, currentSession.id, logger) - const prunedIds = state.prunedIds.get(currentSession.id) ?? [] - prunedIds.forEach((id: string) => allPrunedIds.add(id.toLowerCase())) - - if (logger && prunedIds.length > 0) { - logger.debug("fetch", "Loaded pruned IDs for replacement", { - sessionId: currentSession.id, - prunedCount: prunedIds.length - }) - } - } - - return { allSessions, allPrunedIds } -} - -export async function handleFormat( - body: any, - ctx: FetchHandlerContext, - inputUrl: string, - format: FormatDescriptor -): Promise { - const data = format.getDataArray(body) - if (!data) { - return { modified: false, body } - } - - let modified = false - - // Sync tool parameters from OpenCode's session API (single source of truth) - // Also tracks new tool results for nudge injection - const sessionId = ctx.state.lastSeenSessionId - const protectedSet = new Set(ctx.config.protectedTools) - if (sessionId) { - await ensureSessionRestored(ctx.state, sessionId, ctx.logger) - await syncToolCache(ctx.client, sessionId, ctx.state, ctx.toolTracker, protectedSet, ctx.logger) - } - - if (ctx.config.strategies.onTool.length > 0 && sessionId) { - const toolIds = Array.from(ctx.state.toolParameters.keys()) - const alreadyPruned = ctx.state.prunedIds.get(sessionId) ?? [] - const alreadyPrunedLower = new Set(alreadyPruned.map(id => id.toLowerCase())) - const unprunedIds = toolIds.filter(id => !alreadyPrunedLower.has(id.toLowerCase())) - - const { list: prunableList, numericIds } = buildPrunableToolsList( - sessionId, - unprunedIds, - ctx.state.toolParameters, - ctx.config.protectedTools - ) - - if (prunableList) { - const includeNudge = ctx.config.nudge_freq > 0 && ctx.toolTracker.toolResultCount > ctx.config.nudge_freq - if (format.injectSystemMessage(body, SYNTHETIC_INSTRUCTION)) { - modified = true - } - - const endInjection = buildEndInjection(prunableList, includeNudge) - - if (format.appendUserMessage && format.appendUserMessage(body, endInjection)) { - const nudgeMsg = includeNudge ? 
" with nudge" : "" - ctx.logger.debug("fetch", `Appended prunable tools list${nudgeMsg} as user message (${format.name})`, { - ids: numericIds, - nudge: includeNudge, - toolsSincePrune: ctx.toolTracker.toolResultCount - }) - modified = true - } - } - } - - if (!format.hasToolOutputs(data)) { - return { modified, body } - } - - const { allSessions, allPrunedIds } = await getAllPrunedIds(ctx.client, ctx.state, ctx.logger) - - if (allPrunedIds.size === 0) { - return { modified, body } - } - - const toolOutputs = format.extractToolOutputs(data, ctx.state) - let replacedCount = 0 - let prunableCount = 0 - - for (const output of toolOutputs) { - // Skip tools not in cache (protected tools are excluded from cache) - if (!output.toolName) continue - prunableCount++ - - if (allPrunedIds.has(output.id)) { - if (format.replaceToolOutput(data, output.id, PRUNED_CONTENT_MESSAGE, ctx.state)) { - replacedCount++ - } - } - } - - if (replacedCount > 0) { - ctx.logger.info("fetch", `Replaced pruned tool outputs (${format.name})`, { - replaced: replacedCount, - total: prunableCount - }) - - if (ctx.logger.enabled) { - const activeSessions = allSessions.data?.filter((s: any) => !s.parentID) || [] - let sessionMessages: any[] | undefined - if (activeSessions.length > 0) { - const mostRecentSession = activeSessions[0] - sessionMessages = await fetchSessionMessages(ctx.client, mostRecentSession.id) - } - - await ctx.logger.saveWrappedContext( - "global", - data, - format.getLogMetadata(data, replacedCount, inputUrl), - sessionMessages - ) - } - - return { modified: true, body } - } - - return { modified, body } -} diff --git a/lib/fetch-wrapper/index.ts b/lib/fetch-wrapper/index.ts deleted file mode 100644 index 244103b..0000000 --- a/lib/fetch-wrapper/index.ts +++ /dev/null @@ -1,134 +0,0 @@ -import type { PluginState } from "../state" -import type { Logger } from "../logger" -import type { FetchHandlerContext } from "./types" -import type { ToolTracker } from "./types" -import type { PluginConfig } from "../config" -import { openaiChatFormat, openaiResponsesFormat, geminiFormat, bedrockFormat, anthropicFormat } from "./formats" -import { handleFormat } from "./handler" -import { runStrategies } from "../core/strategies" -import { accumulateGCStats } from "./gc-tracker" -import { trimToolParametersCache } from "../state/tool-cache" - -export type { FetchHandlerContext, FetchHandlerResult } from "./types" - -/** - * Creates a wrapped global fetch that intercepts API calls and performs - * context pruning on tool outputs that have been marked for removal. - * - * Supports five API formats: - * 1. OpenAI Chat Completions (body.messages with role='tool') - * 2. Anthropic Messages API (body.system + body.messages with tool_result) - * 3. Google/Gemini (body.contents with functionResponse parts) - * 4. OpenAI Responses API (body.input with function_call_output items) - * 5. 
AWS Bedrock Converse API (body.system + body.messages with toolResult blocks) - */ -export function installFetchWrapper( - state: PluginState, - logger: Logger, - client: any, - config: PluginConfig, - toolTracker: ToolTracker -): () => void { - const originalGlobalFetch = globalThis.fetch - - const ctx: FetchHandlerContext = { - state, - logger, - client, - config, - toolTracker - } - - globalThis.fetch = async (input: any, init?: any) => { - if (state.lastSeenSessionId && state.subagentSessions.has(state.lastSeenSessionId)) { - logger.debug("fetch-wrapper", "Skipping DCP processing for subagent session", { - sessionId: state.lastSeenSessionId.substring(0, 8) - }) - return originalGlobalFetch(input, init) - } - - if (init?.body && typeof init.body === 'string') { - try { - const body = JSON.parse(init.body) - const inputUrl = typeof input === 'string' ? input : 'URL object' - let modified = false - - const toolIdsBefore = new Set(state.toolParameters.keys()) - - // Mutually exclusive format handlers - // Order matters: More specific formats first to avoid incorrect detection - // 1. OpenAI Responses API: has body.input (not body.messages) - // 2. Bedrock: has body.system + body.inferenceConfig + body.messages - // 3. Anthropic: has body.system + body.messages (no inferenceConfig) - // 4. OpenAI Chat: has body.messages (no top-level system) - // 5. Gemini: has body.contents - if (openaiResponsesFormat.detect(body)) { - const result = await handleFormat(body, ctx, inputUrl, openaiResponsesFormat) - if (result.modified) { - modified = true - } - } - else if (bedrockFormat.detect(body)) { - const result = await handleFormat(body, ctx, inputUrl, bedrockFormat) - if (result.modified) { - modified = true - } - } - else if (anthropicFormat.detect(body)) { - const result = await handleFormat(body, ctx, inputUrl, anthropicFormat) - if (result.modified) { - modified = true - } - } - else if (openaiChatFormat.detect(body)) { - const result = await handleFormat(body, ctx, inputUrl, openaiChatFormat) - if (result.modified) { - modified = true - } - } - else if (geminiFormat.detect(body)) { - const result = await handleFormat(body, ctx, inputUrl, geminiFormat) - if (result.modified) { - modified = true - } - } - - const sessionId = state.lastSeenSessionId - const toolIdsAfter = Array.from(state.toolParameters.keys()) - const newToolsCached = toolIdsAfter.filter(id => !toolIdsBefore.has(id)).length > 0 - - if (sessionId && newToolsCached && state.toolParameters.size > 0) { - const toolIds = Array.from(state.toolParameters.keys()) - const alreadyPruned = state.prunedIds.get(sessionId) ?? 
[] - const alreadyPrunedLower = new Set(alreadyPruned.map(id => id.toLowerCase())) - const unpruned = toolIds.filter(id => !alreadyPrunedLower.has(id.toLowerCase())) - if (unpruned.length > 1) { - const result = runStrategies( - state.toolParameters, - unpruned, - config.protectedTools - ) - if (result.prunedIds.length > 0) { - const normalizedIds = result.prunedIds.map(id => id.toLowerCase()) - state.prunedIds.set(sessionId, [...new Set([...alreadyPruned, ...normalizedIds])]) - accumulateGCStats(state, sessionId, result.prunedIds, body, logger) - } - } - - trimToolParametersCache(state) - } - - if (modified) { - init.body = JSON.stringify(body) - } - } catch (e) { - } - } - - return originalGlobalFetch(input, init) - } - - return () => { - globalThis.fetch = originalGlobalFetch - } -} diff --git a/lib/fetch-wrapper/prunable-list.ts b/lib/fetch-wrapper/prunable-list.ts deleted file mode 100644 index 677f8c9..0000000 --- a/lib/fetch-wrapper/prunable-list.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { extractParameterKey } from '../ui/display-utils' -import { getOrCreateNumericId } from '../state/id-mapping' -import { loadPrompt } from '../core/prompt' -import type { ToolMetadata } from './types' - -const NUDGE_INSTRUCTION = loadPrompt("nudge") - -export interface PrunableListResult { - list: string - numericIds: number[] -} - -export function buildPrunableToolsList( - sessionId: string, - unprunedToolCallIds: string[], - toolMetadata: Map, - protectedTools: string[] -): PrunableListResult { - const lines: string[] = [] - const numericIds: number[] = [] - - for (const actualId of unprunedToolCallIds) { - const metadata = toolMetadata.get(actualId) - if (!metadata) continue - if (protectedTools.includes(metadata.tool)) continue - - const numericId = getOrCreateNumericId(sessionId, actualId) - numericIds.push(numericId) - - const paramKey = extractParameterKey(metadata) - const description = paramKey ? `${metadata.tool}, ${paramKey}` : metadata.tool - lines.push(`${numericId}: ${description}`) - } - - if (lines.length === 0) { - return { list: '', numericIds: [] } - } - - return { - list: `\nThe following tools have been invoked and are available for pruning. This list does not mandate immediate action. Consider your current goals and the resources you need before discarding valuable tool outputs. 
Keep the context free of noise.\n${lines.join('\n')}\n`, - numericIds - } -} - -export function buildEndInjection( - prunableList: string, - includeNudge: boolean -): string { - if (!prunableList) { - return '' - } - - const parts = [prunableList] - - if (includeNudge) { - parts.push(NUDGE_INSTRUCTION) - } - - return parts.join('\n\n') -} diff --git a/lib/fetch-wrapper/tool-tracker.ts b/lib/fetch-wrapper/tool-tracker.ts deleted file mode 100644 index a195a67..0000000 --- a/lib/fetch-wrapper/tool-tracker.ts +++ /dev/null @@ -1,19 +0,0 @@ -export interface ToolTracker { - seenToolResultIds: Set - toolResultCount: number // Tools since last prune - skipNextIdle: boolean -} - -export function createToolTracker(): ToolTracker { - return { seenToolResultIds: new Set(), toolResultCount: 0, skipNextIdle: false } -} - -export function resetToolTrackerCount(tracker: ToolTracker): void { - tracker.toolResultCount = 0 -} - -export function clearToolTracker(tracker: ToolTracker): void { - tracker.seenToolResultIds.clear() - tracker.toolResultCount = 0 - tracker.skipNextIdle = false -} diff --git a/lib/fetch-wrapper/types.ts b/lib/fetch-wrapper/types.ts deleted file mode 100644 index d15b640..0000000 --- a/lib/fetch-wrapper/types.ts +++ /dev/null @@ -1,76 +0,0 @@ -import type { PluginState } from "../state" -import type { Logger } from "../logger" -import type { PluginConfig } from "../config" -import type { ToolTracker } from "./tool-tracker" -export type { ToolTracker } from "./tool-tracker" - -export interface ToolOutput { - id: string - toolName?: string -} - -export interface ToolMetadata { - tool: string - parameters?: any -} - -export interface FormatDescriptor { - name: string - detect(body: any): boolean - getDataArray(body: any): any[] | undefined - injectSystemMessage(body: any, injection: string): boolean - appendUserMessage?(body: any, injection: string): boolean - extractToolOutputs(data: any[], state: PluginState): ToolOutput[] - replaceToolOutput(data: any[], toolId: string, prunedMessage: string, state: PluginState): boolean - hasToolOutputs(data: any[]): boolean - getLogMetadata(data: any[], replacedCount: number, inputUrl: string): Record -} - -export interface FetchHandlerContext { - state: PluginState - logger: Logger - client: any - config: PluginConfig - toolTracker: ToolTracker -} - -export interface FetchHandlerResult { - modified: boolean - body: any -} - -export interface PrunedIdData { - allSessions: any - allPrunedIds: Set -} - -/** The 3 scenarios that trigger explicit LLM pruning */ -export type PruneReason = "completion" | "noise" | "consolidation" - -/** Human-readable labels for prune reasons */ -export const PRUNE_REASON_LABELS: Record = { - completion: "Task Complete", - noise: "Noise Removal", - consolidation: "Consolidation" -} - -export interface SessionStats { - totalToolsPruned: number - totalTokensSaved: number - totalGCTokens: number - totalGCTools: number -} - -export interface GCStats { - tokensCollected: number - toolsDeduped: number -} - -export interface PruningResult { - prunedCount: number - tokensSaved: number - llmPrunedIds: string[] - toolMetadata: Map - sessionStats: SessionStats - reason?: PruneReason -} diff --git a/lib/hooks.ts b/lib/hooks.ts index 5be020b..5cc4bfb 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -1,5 +1,5 @@ import type { WithParts } from "@opencode-ai/plugin" -import type { PluginState } from "./state" +import type { SessionState } from "./state" import type { Logger } from "./logger" import type { JanitorContext } from 
"./core/janitor" import { runOnIdle } from "./core/janitor" @@ -43,7 +43,7 @@ export function createEventHandler( try { const result = await runOnIdle(janitorCtx, event.properties.sessionID, config.strategies.onIdle) - if (result && result.prunedCount > 0 && toolTracker && config.nudge_freq > 0) { + if (result && result.prunedCount > 0 && toolTracker && config.nudgeFreq > 0) { if (toolStrategiesCoveredByIdle(config.strategies.onIdle, config.strategies.onTool)) { resetToolTrackerCount(toolTracker) } @@ -69,7 +69,7 @@ export function createChatMessageTransformHandler() { */ export function createChatParamsHandler( client: any, - state: PluginState, + state: SessionState, logger: Logger, toolTracker?: ToolTracker ) { diff --git a/lib/pruning-tool.ts b/lib/pruning-tool.ts index 2282acb..39713e3 100644 --- a/lib/pruning-tool.ts +++ b/lib/pruning-tool.ts @@ -1,18 +1,13 @@ import { tool } from "@opencode-ai/plugin" -import type { PluginState } from "./state" +import type { SessionState, ToolParameterEntry} from "./state" import type { PluginConfig } from "./config" -import type { ToolTracker } from "./fetch-wrapper/tool-tracker" -import type { ToolMetadata, PruneReason } from "./fetch-wrapper/types" -import { resetToolTrackerCount } from "./fetch-wrapper/tool-tracker" -import { findCurrentAgent } from "./hooks" -import { getActualId } from "./state/id-mapping" -import { sendUnifiedNotification, type NotificationContext } from "./ui/notification" +import { findCurrentAgent, buildToolIdList, getPrunedIds } from "./utils" +import { PruneReason, sendUnifiedNotification } from "./ui/notification" import { formatPruningResultForTool } from "./ui/display-utils" import { ensureSessionInitialized } from "./state" import { saveSessionState } from "./state/persistence" import type { Logger } from "./logger" import { estimateTokensBatch } from "./tokenizer" -import type { SessionStats, PruningResult } from "./core/janitor" import { loadPrompt } from "./core/prompt" /** Tool description loaded from prompts/tool.txt */ @@ -20,11 +15,10 @@ const TOOL_DESCRIPTION = loadPrompt("tool") export interface PruneToolContext { client: any - state: PluginState + state: SessionState logger: Logger config: PluginConfig - notificationCtx: NotificationContext - workingDirectory?: string + workingDirectory: string } /** @@ -33,7 +27,6 @@ export interface PruneToolContext { */ export function createPruningTool( ctx: PruneToolContext, - toolTracker: ToolTracker ): ReturnType { return tool({ description: TOOL_DESCRIPTION, @@ -48,7 +41,7 @@ export function createPruningTool( ), }, async execute(args, toolCtx) { - const { client, state, logger, config, notificationCtx } = ctx + const { client, state, logger, config, workingDirectory } = ctx const sessionId = toolCtx.sessionID if (!args.ids || args.ids.length === 0) { @@ -70,84 +63,55 @@ export function createPruningTool( await ensureSessionInitialized(state, sessionId, logger) - const prunedIds = numericIds - .map(numId => getActualId(sessionId, numId)) - .filter((id): id is string => id !== undefined) - - if (prunedIds.length === 0) { - return "None of the provided IDs were valid. Check the list for available IDs." 
- } - // Fetch messages to calculate tokens and find current agent - const messagesResponse = await client.session.messages({ - path: { id: sessionId }, - query: { limit: 200 } + const messages = await client.session.messages({ + path: { id: sessionId } }) - const messages = messagesResponse.data || messagesResponse + // const messages = messagesResponse.data || messagesResponse // Need this? - const currentAgent = findCurrentAgent(messages) + const currentAgent: string | undefined = findCurrentAgent(messages) + const toolIdList: string[] = buildToolIdList(messages) + const prunedIds: string[] = getPrunedIds(numericIds, toolIdList) const tokensSaved = await calculateTokensSavedFromMessages(messages, prunedIds) - const currentStats = state.stats.get(sessionId) ?? { - totalToolsPruned: 0, - totalTokensSaved: 0, - totalGCTokens: 0, - totalGCTools: 0 - } - const sessionStats: SessionStats = { - ...currentStats, - totalToolsPruned: currentStats.totalToolsPruned + prunedIds.length, - totalTokensSaved: currentStats.totalTokensSaved + tokensSaved - } - state.stats.set(sessionId, sessionStats) - - const alreadyPrunedIds = state.prunedIds.get(sessionId) ?? [] - const allPrunedIds = [...alreadyPrunedIds, ...prunedIds] - state.prunedIds.set(sessionId, allPrunedIds) + state.stats.totalTokensSaved += tokensSaved + state.stats.totalToolsPruned += prunedIds.length + state.prunedIds.push(...prunedIds) - saveSessionState(sessionId, new Set(allPrunedIds), sessionStats, logger) + saveSessionState(state, logger) .catch(err => logger.error("prune-tool", "Failed to persist state", { error: err.message })) - const toolMetadata = new Map() + const toolMetadata = new Map() for (const id of prunedIds) { - const meta = state.toolParameters.get(id.toLowerCase()) - if (meta) { - toolMetadata.set(id.toLowerCase(), meta) + const toolParameters = state.toolParameters.get(id) + if (toolParameters) { + toolMetadata.set(id, toolParameters) } else { - logger.debug("prune-tool", "No metadata found for ID", { - id, - idLower: id.toLowerCase(), - hasLower: state.toolParameters.has(id.toLowerCase()) - }) + logger.debug("prune-tool", "No metadata found for ID", { id }) } } - await sendUnifiedNotification(notificationCtx, sessionId, { - aiPrunedCount: prunedIds.length, - aiTokensSaved: tokensSaved, - aiPrunedIds: prunedIds, - toolMetadata, - gcPending: null, - sessionStats, - reason - }, currentAgent) - - toolTracker.skipNextIdle = true - - if (config.nudge_freq > 0) { - resetToolTrackerCount(toolTracker) - } - - const result: PruningResult = { - prunedCount: prunedIds.length, + await sendUnifiedNotification( + client, + logger, + config, + sessionId, + prunedIds.length, tokensSaved, - llmPrunedIds: prunedIds, + prunedIds, toolMetadata, - sessionStats, - reason - } - - return formatPruningResultForTool(result, ctx.workingDirectory) + null, + state.stats, + reason as PruneReason, + currentAgent, + workingDirectory + ) + + return formatPruningResultForTool( + prunedIds, + toolMetadata, + workingDirectory + ) }, }) } diff --git a/lib/state/id-mapping.ts b/lib/state/id-mapping.ts deleted file mode 100644 index 0f73eb4..0000000 --- a/lib/state/id-mapping.ts +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Numeric ID mapping system for tool call IDs. - * - * Maps simple incrementing numbers (1, 2, 3...) to actual provider tool call IDs - * (e.g., "call_abc123xyz..."). This allows the session AI to reference tools by - * simple numbers when using the prune tool. 
- * - * Design decisions: - * - IDs are monotonically increasing and never reused (avoids race conditions) - * - Mappings are rebuilt from session messages on restore (single source of truth) - * - Per-session mappings to isolate sessions from each other - */ - -export interface IdMapping { - numericToActual: Map // 1 → "call_abc123xyz..." - actualToNumeric: Map // "call_abc123xyz..." → 1 - nextId: number -} - -/** Per-session ID mappings */ -const sessionMappings = new Map() - -function getSessionMapping(sessionId: string): IdMapping { - let mapping = sessionMappings.get(sessionId) - if (!mapping) { - mapping = { - numericToActual: new Map(), - actualToNumeric: new Map(), - nextId: 1 - } - sessionMappings.set(sessionId, mapping) - } - return mapping -} - -/** - * Assigns a numeric ID to a tool call ID if it doesn't already have one. - * Returns the numeric ID (existing or newly assigned). - */ -export function getOrCreateNumericId(sessionId: string, actualId: string): number { - const mapping = getSessionMapping(sessionId) - - // Check if already mapped - const existing = mapping.actualToNumeric.get(actualId) - if (existing !== undefined) { - return existing - } - - // Assign new ID - const numericId = mapping.nextId++ - mapping.numericToActual.set(numericId, actualId) - mapping.actualToNumeric.set(actualId, numericId) - - return numericId -} - -export function getActualId(sessionId: string, numericId: number): string | undefined { - const mapping = sessionMappings.get(sessionId) - return mapping?.numericToActual.get(numericId) -} - -export function clearAllMappings(): void { - sessionMappings.clear() -} diff --git a/lib/state/index.ts b/lib/state/index.ts index a8e65ce..a665a74 100644 --- a/lib/state/index.ts +++ b/lib/state/index.ts @@ -1,79 +1,3 @@ -import type { SessionStats, GCStats } from "../core/janitor" -import type { Logger } from "../logger" -import { loadSessionState } from "./persistence" - -export interface PluginState { - sessionId: string | null - prunedIds: Map - stats: Map - gcPending: Map - // toolParameters: Map - // model: Map - // googleToolCallMapping: Map> - // restoredSessions: Set - // checkedSessions: Set - // subagentSessions: Set - // lastSeenSessionId: string | null -} - -export type ToolStatus = "pending" | "running" | "completed" | "error" - -export interface ToolParameterEntry { - tool: string - parameters: any - status?: ToolStatus - error?: string -} - -export interface ModelInfo { - providerID: string - modelID: string -} - -export function createPluginState(): PluginState { - return { - sessionId: null, - prunedIds: new Map(), - stats: new Map(), - gcPending: new Map(), - // toolParameters: new Map(), - // model: new Map(), - // googleToolCallMapping: new Map(), - // restoredSessions: new Set(), - // checkedSessions: new Set(), - // subagentSessions: new Set(), - // lastSeenSessionId: null, - } -} - -export async function ensureSessionInitialized( - state: PluginState, - sessionId: string, - logger: Logger -): Promise { - // if (state.restoredSessions.has(sessionId)) { - // return - // } - // - // state.restoredSessions.add(sessionId) - - const persisted = await loadSessionState(sessionId, logger) - if (persisted) { - if (!state.prunedIds.has(sessionId)) { - state.prunedIds.set(sessionId, persisted.prunedIds) - logger?.info("persist", "Restored prunedIds from disk", { - sessionId: sessionId.slice(0, 8), - count: persisted.prunedIds.length, - }) - } - if (!state.stats.has(sessionId)) { - const stats: SessionStats = { - totalToolsPruned: 
persisted.stats.totalToolsPruned, - totalTokensSaved: persisted.stats.totalTokensSaved, - totalGCTokens: persisted.stats.totalGCTokens ?? 0, - totalGCTools: persisted.stats.totalGCTools ?? 0 - } - state.stats.set(sessionId, stats) - } - } -} +export * from "./persistence" +export * from "./types" +export * from "./state" diff --git a/lib/state/persistence.ts b/lib/state/persistence.ts index 0337c2b..e16f020 100644 --- a/lib/state/persistence.ts +++ b/lib/state/persistence.ts @@ -8,7 +8,7 @@ import * as fs from "fs/promises"; import { existsSync } from "fs"; import { homedir } from "os"; import { join } from "path"; -import type { SessionStats } from "../core/janitor"; +import type { SessionState, SessionStats } from "./types" import type { Logger } from "../logger"; export interface PersistedSessionState { @@ -39,34 +39,36 @@ function getSessionFilePath(sessionId: string): string { } export async function saveSessionState( - sessionId: string, - prunedIds: Set, - stats: SessionStats, - logger?: Logger, + sessionState: SessionState, + logger: Logger, sessionName?: string ): Promise { try { + if (!sessionState.sessionId) { + return; + } + await ensureStorageDir(); const state: PersistedSessionState = { - ...(sessionName && { sessionName }), - prunedIds: Array.from(prunedIds), - stats, + sessionName: sessionName, + prunedIds: sessionState.prunedIds, + stats: sessionState.stats, lastUpdated: new Date().toISOString(), }; - const filePath = getSessionFilePath(sessionId); + const filePath = getSessionFilePath(sessionState.sessionId); const content = JSON.stringify(state, null, 2); await fs.writeFile(filePath, content, "utf-8"); - logger?.info("persist", "Saved session state to disk", { - sessionId: sessionId.slice(0, 8), - prunedIds: prunedIds.size, - totalTokensSaved: stats.totalTokensSaved, + logger.info("persist", "Saved session state to disk", { + sessionId: sessionState.sessionId.slice(0, 8), + prunedIds: state.prunedIds.length, + totalTokensSaved: state.stats.totalTokensSaved, }); } catch (error: any) { - logger?.error("persist", "Failed to save session state", { - sessionId: sessionId.slice(0, 8), + logger.error("persist", "Failed to save session state", { + sessionId: sessionState.sessionId?.slice(0, 8), error: error?.message, }); } diff --git a/lib/state/state.ts b/lib/state/state.ts new file mode 100644 index 0000000..ad15d1c --- /dev/null +++ b/lib/state/state.ts @@ -0,0 +1,66 @@ +import type { SessionState, ToolParameterEntry } from "./types" +import type { Logger } from "../logger" +import { loadSessionState } from "./persistence" + +export function createSessionState(): SessionState { + return { + sessionId: null, + prunedIds: [], + stats: { + totalToolsPruned: 0, + totalTokensSaved: 0, + totalGCTokens: 0, + totalGCTools: 0 + }, + gcPending: { + tokensCollected: 0, + toolsDeduped: 0 + }, + toolParameters: new Map() + } +} + +export function resetSessionState(state: SessionState): void { + state.sessionId = null + state.prunedIds = [] + state.stats = { + totalToolsPruned: 0, + totalTokensSaved: 0, + totalGCTokens: 0, + totalGCTools: 0 + } + state.gcPending = { + tokensCollected: 0, + toolsDeduped: 0 + } + state.toolParameters.clear() +} + +export async function ensureSessionInitialized( + state: SessionState, + sessionId: string, + logger: Logger +): Promise { + if (state.sessionId === sessionId) { + return; + } + + // Clear previous session data + resetSessionState(state) + state.sessionId = sessionId + + // Load session data from storage + const persisted = await 
loadSessionState(sessionId, logger) + if (persisted === null) { + return; + } + + // Populate state with loaded data + state.prunedIds = persisted.prunedIds || [] + state.stats = { + totalToolsPruned: persisted.stats.totalToolsPruned || 0, + totalTokensSaved: persisted.stats.totalTokensSaved || 0, + totalGCTokens: persisted.stats.totalGCTokens || 0, + totalGCTools: persisted.stats.totalGCTools || 0 + } +} diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index 2970720..0d8a471 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -1,4 +1,4 @@ -import type { PluginState, ToolStatus } from "./index" +import type { SessionState, ToolStatus } from "./index" import type { Logger } from "../logger" import type { ToolTracker } from "../fetch-wrapper/tool-tracker" @@ -13,7 +13,7 @@ const MAX_TOOL_CACHE_SIZE = 500 export async function syncToolCache( client: any, sessionId: string, - state: PluginState, + state: SessionState, tracker?: ToolTracker, protectedTools?: Set, logger?: Logger @@ -87,7 +87,7 @@ export async function syncToolCache( * Trim the tool parameters cache to prevent unbounded memory growth. * Uses FIFO eviction - removes oldest entries first. */ -export function trimToolParametersCache(state: PluginState): void { +export function trimToolParametersCache(state: SessionState): void { if (state.toolParameters.size <= MAX_TOOL_CACHE_SIZE) { return } diff --git a/lib/state/types.ts b/lib/state/types.ts new file mode 100644 index 0000000..bd6c5cf --- /dev/null +++ b/lib/state/types.ts @@ -0,0 +1,26 @@ +export interface ToolParameterEntry { + tool: string + parameters: any + status?: "pending" | "running" | "completed" | "error" + error?: string +} + +export interface GCStats { + tokensCollected: number + toolsDeduped: number +} + +export interface SessionStats { + totalToolsPruned: number + totalTokensSaved: number + totalGCTokens: number + totalGCTools: number +} + +export interface SessionState { + sessionId: string | null + prunedIds: string[] + stats: SessionStats + gcPending: GCStats + toolParameters: Map +} diff --git a/lib/ui/display-utils.ts b/lib/ui/display-utils.ts index 6ba7eb4..8c843f5 100644 --- a/lib/ui/display-utils.ts +++ b/lib/ui/display-utils.ts @@ -1,5 +1,5 @@ -import type { ToolMetadata } from "../fetch-wrapper/types" import type { PruningResult } from "../core/janitor" +import { ToolParameterEntry } from "../state" /** * Extracts a human-readable key from tool metadata for display purposes. @@ -110,7 +110,7 @@ function shortenSinglePath(path: string, workingDirectory?: string): string { */ export function formatPrunedItemsList( prunedIds: string[], - toolMetadata: Map, + toolMetadata: Map, workingDirectory?: string ): string[] { const lines: string[] = [] @@ -147,16 +147,17 @@ export function formatPrunedItemsList( * Formats a PruningResult into a human-readable string for the prune tool output. */ export function formatPruningResultForTool( - result: PruningResult, + prunedIds: string[], + toolMetadata: Map, workingDirectory?: string ): string { const lines: string[] = [] - lines.push(`Context pruning complete. Pruned ${result.prunedCount} tool outputs.`) + lines.push(`Context pruning complete. 
Pruned ${prunedIds.length} tool outputs.`) lines.push('') - if (result.llmPrunedIds.length > 0) { - lines.push(`Semantically pruned (${result.llmPrunedIds.length}):`) - lines.push(...formatPrunedItemsList(result.llmPrunedIds, result.toolMetadata, workingDirectory)) + if (prunedIds.length > 0) { + lines.push(`Semantically pruned (${prunedIds.length}):`) + lines.push(...formatPrunedItemsList(prunedIds, toolMetadata, workingDirectory)) } return lines.join('\n').trim() diff --git a/lib/ui/notification.ts b/lib/ui/notification.ts index a2507ad..9129a01 100644 --- a/lib/ui/notification.ts +++ b/lib/ui/notification.ts @@ -1,66 +1,128 @@ import type { Logger } from "../logger" import type { SessionStats, GCStats } from "../core/janitor" -import type { ToolMetadata, PruneReason } from "../fetch-wrapper/types" -import { PRUNE_REASON_LABELS } from "../fetch-wrapper/types" import { formatTokenCount } from "../tokenizer" import { formatPrunedItemsList } from "./display-utils" +import { ToolParameterEntry } from "../state" +import { PluginConfig } from "../config" + +export type PruneReason = "completion" | "noise" | "consolidation" +export const PRUNE_REASON_LABELS: Record = { + completion: "Task Complete", + noise: "Noise Removal", + consolidation: "Consolidation" +} -export type PruningSummaryLevel = "off" | "minimal" | "detailed" +function calculateStats( + tokensSaved: number, + gcPending: GCStats | null, + sessionStats: SessionStats +): { + justNowTokens: number + totalTokens: number +} { + const justNowTokens = tokensSaved + (gcPending?.tokensCollected ?? 0) -export interface NotificationConfig { - pruningSummary: PruningSummaryLevel - workingDirectory?: string + const totalTokens = sessionStats + ? sessionStats.totalTokensSaved + sessionStats.totalGCTokens + : justNowTokens + + return { justNowTokens, totalTokens } +} + +function formatStatsHeader( + totalTokens: number, + justNowTokens: number +): string { + const totalTokensStr = `~${formatTokenCount(totalTokens)}` + const justNowTokensStr = `~${formatTokenCount(justNowTokens)}` + + const maxTokenLen = Math.max(totalTokensStr.length, justNowTokensStr.length) + const totalTokensPadded = totalTokensStr.padStart(maxTokenLen) + + return [ + `▣ DCP | ${totalTokensPadded} saved total`, + ].join('\n') } -export interface NotificationContext { - client: any - logger: Logger - config: NotificationConfig +function buildMinimalMessage( + tokensSaved: number, + gcPending: GCStats | null, + sessionStats: SessionStats, + reason: PruneReason | undefined +): string { + const { justNowTokens, totalTokens } = calculateStats(tokensSaved, gcPending, sessionStats) + const reasonSuffix = reason ? ` [${PRUNE_REASON_LABELS[reason]}]` : '' + return formatStatsHeader(totalTokens, justNowTokens) + reasonSuffix } -export interface NotificationData { - aiPrunedCount: number - aiTokensSaved: number - aiPrunedIds: string[] - toolMetadata: Map - gcPending: GCStats | null - sessionStats: SessionStats | null - reason?: PruneReason +function buildDetailedMessage( + tokensSaved: number, + gcPending: GCStats | null, + sessionStats: SessionStats, + reason: PruneReason | undefined, + prunedIds: string[], + toolMetadata: Map, + workingDirectory?: string +): string { + const { justNowTokens, totalTokens } = calculateStats(tokensSaved, gcPending, sessionStats) + + let message = formatStatsHeader(totalTokens, justNowTokens) + + if (data.aiPrunedCount > 0) { + const justNowTokensStr = `~${formatTokenCount(justNowTokens)}` + const reasonLabel = reason ? 
` — ${PRUNE_REASON_LABELS[reason]}` : '' + message += `\n\n▣ Pruned tools (${justNowTokensStr})${reasonLabel}` + + const itemLines = formatPrunedItemsList(prunedIds, toolMetadata, workingDirectory) + message += '\n' + itemLines.join('\n') + } + + return message.trim() } export async function sendUnifiedNotification( - ctx: NotificationContext, - sessionID: string, - data: NotificationData, - agent?: string + client: any, + logger: Logger, + config: PluginConfig, + sessionId: string, + prunedCount: number, + tokensSaved: number, + prunedIds: string[], + toolMetadata: Map, + gcPending: GCStats | null, + sessionStats: SessionStats, + reason: PruneReason | undefined, + agent: string | undefined, + workingDirectory: string ): Promise { - const hasAiPruning = data.aiPrunedCount > 0 - const hasGcActivity = data.gcPending && data.gcPending.toolsDeduped > 0 + const hasPruned = prunedCount > 0 + const hasGcActivity = gcPending && gcPending.toolsDeduped > 0 - if (!hasAiPruning && !hasGcActivity) { + if (!hasPruned && !hasGcActivity) { return false } - if (ctx.config.pruningSummary === 'off') { + if (config.pruningSummary === 'off') { return false } - const message = ctx.config.pruningSummary === 'minimal' - ? buildMinimalMessage(data) - : buildDetailedMessage(data, ctx.config.workingDirectory) + const message = config.pruningSummary === 'minimal' + ? buildMinimalMessage(tokensSaved, gcPending, sessionStats, reason) + : buildDetailedMessage(tokensSaved, gcPending, sessionStats, reason, prunedIds, toolMetadata, workingDirectory) - await sendIgnoredMessage(ctx, sessionID, message, agent) + await sendIgnoredMessage(client, logger, sessionId, message, agent) return true } export async function sendIgnoredMessage( - ctx: NotificationContext, + client: any, + logger: Logger, sessionID: string, text: string, agent?: string ): Promise { try { - await ctx.client.session.prompt({ + await client.session.prompt({ path: { id: sessionID }, body: { noReply: true, @@ -73,57 +135,7 @@ export async function sendIgnoredMessage( } }) } catch (error: any) { - ctx.logger.error("notification", "Failed to send notification", { error: error.message }) + logger.error("notification", "Failed to send notification", { error: error.message }) } } -function buildMinimalMessage(data: NotificationData): string { - const { justNowTokens, totalTokens } = calculateStats(data) - const reasonSuffix = data.reason ? ` [${PRUNE_REASON_LABELS[data.reason]}]` : '' - return formatStatsHeader(totalTokens, justNowTokens) + reasonSuffix -} - -function buildDetailedMessage(data: NotificationData, workingDirectory?: string): string { - const { justNowTokens, totalTokens } = calculateStats(data) - - let message = formatStatsHeader(totalTokens, justNowTokens) - - if (data.aiPrunedCount > 0) { - const justNowTokensStr = `~${formatTokenCount(justNowTokens)}` - const reasonLabel = data.reason ? ` — ${PRUNE_REASON_LABELS[data.reason]}` : '' - message += `\n\n▣ Pruned tools (${justNowTokensStr})${reasonLabel}` - - const itemLines = formatPrunedItemsList(data.aiPrunedIds, data.toolMetadata, workingDirectory) - message += '\n' + itemLines.join('\n') - } - - return message.trim() -} - -function calculateStats(data: NotificationData): { - justNowTokens: number - totalTokens: number -} { - const justNowTokens = data.aiTokensSaved + (data.gcPending?.tokensCollected ?? 0) - - const totalTokens = data.sessionStats - ? 
data.sessionStats.totalTokensSaved + data.sessionStats.totalGCTokens - : justNowTokens - - return { justNowTokens, totalTokens } -} - -function formatStatsHeader( - totalTokens: number, - justNowTokens: number -): string { - const totalTokensStr = `~${formatTokenCount(totalTokens)}` - const justNowTokensStr = `~${formatTokenCount(justNowTokens)}` - - const maxTokenLen = Math.max(totalTokensStr.length, justNowTokensStr.length) - const totalTokensPadded = totalTokensStr.padStart(maxTokenLen) - - return [ - `▣ DCP | ${totalTokensPadded} saved total`, - ].join('\n') -} diff --git a/lib/utils.ts b/lib/utils.ts new file mode 100644 index 0000000..a6742bf --- /dev/null +++ b/lib/utils.ts @@ -0,0 +1,43 @@ +/** + * Finds the current agent from messages by scanning backward for user messages. + */ +export function findCurrentAgent(messages: any[]): string | undefined { + for (let i = messages.length - 1; i >= 0; i--) { + const msg = messages[i] + const info = msg.info + if (info?.role === 'user') { + return info.agent || 'build' + } + } + return undefined +} + +/** + * Builds a list of tool call IDs from messages. + */ +export function buildToolIdList(messages: any[]): string[] { + const toolIds: string[] = [] + for (const msg of messages) { + if (msg.parts) { + for (const part of msg.parts) { + if (part.type === 'tool' && part.callID && part.tool) { + toolIds.push(part.callID) + } + } + } + } + return toolIds +} + +/** + * Prunes numeric IDs to valid tool call IDs based on the provided tool ID list. + */ +export function getPrunedIds(numericIds: number[], toolIdList: string[]): string[] { + const prunedIds: string[] = [] + for (const index of numericIds) { + if (!isNaN(index) && index >= 0 && index < toolIdList.length) { + prunedIds.push(toolIdList[index]) + } + } + return prunedIds +} From db25bd260d2afcf83d7928334fdc9842c278e9f3 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Thu, 11 Dec 2025 22:38:27 +0100 Subject: [PATCH 03/51] couple of bug fixes --- index.ts | 27 +-------------------------- lib/ui/notification.ts | 2 +- 2 files changed, 2 insertions(+), 27 deletions(-) diff --git a/index.ts b/index.ts index e645e31..380beb7 100644 --- a/index.ts +++ b/index.ts @@ -3,7 +3,7 @@ import { getConfig } from "./lib/config" import { Logger } from "./lib/logger" import { createSessionState } from "./lib/state" import { createPruningTool } from "./lib/pruning-tool" -import { createEventHandler, createChatParamsHandler, createChatMessageTransformHandler } from "./lib/hooks" +import { createChatMessageTransformHandler } from "./lib/hooks" const plugin: Plugin = (async (ctx) => { const { config, migrations } = getConfig(ctx) @@ -47,7 +47,6 @@ const plugin: Plugin = (async (ctx) => { return { "experimental.chat.messages.transform": createChatMessageTransformHandler(), - // "chat.params": createChatParamsHandler(ctx.client, state, logger, toolTracker), tool: config.strategies.onTool.length > 0 ? { prune: createPruningTool({ client: ctx.client, @@ -57,30 +56,6 @@ const plugin: Plugin = (async (ctx) => { workingDirectory: ctx.directory }), } : undefined, - // config: async (opencodeConfig) => { - // // Add prune to primary_tools by mutating the opencode config - // // This works because config is cached and passed by reference - // if (config.strategies.onTool.length > 0) { - // const existingPrimaryTools = opencodeConfig.experimental?.primary_tools ?? 
[] - // opencodeConfig.experimental = { - // ...opencodeConfig.experimental, - // primary_tools: [...existingPrimaryTools, "prune"], - // } - // logger.info("plugin", "Added 'prune' to experimental.primary_tools via config mutation") - // } - // }, - // event: createEventHandler(ctx.client, janitorCtx, logger, config, toolTracker), - // "chat.params": createChatParamsHandler(ctx.client, state, logger, toolTracker), - // tool: config.strategies.onTool.length > 0 ? { - // prune: createPruningTool({ - // client: ctx.client, - // state, - // logger, - // config, - // notificationCtx: janitorCtx.notificationCtx, - // workingDirectory: ctx.directory - // }, toolTracker), - // } : undefined, } }) satisfies Plugin diff --git a/lib/ui/notification.ts b/lib/ui/notification.ts index 9129a01..53ca1eb 100644 --- a/lib/ui/notification.ts +++ b/lib/ui/notification.ts @@ -68,7 +68,7 @@ function buildDetailedMessage( let message = formatStatsHeader(totalTokens, justNowTokens) - if (data.aiPrunedCount > 0) { + if (prunedIds.length > 0) { const justNowTokensStr = `~${formatTokenCount(justNowTokens)}` const reasonLabel = reason ? ` — ${PRUNE_REASON_LABELS[reason]}` : '' message += `\n\n▣ Pruned tools (${justNowTokensStr})${reasonLabel}` From fb11f6aabdf09e316741824b092721b9dbd17435 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Thu, 11 Dec 2025 23:43:42 +0100 Subject: [PATCH 04/51] new config --- index.ts | 21 +--- lib/config.ts | 269 ++++++++++++++++++++++++++------------------------ 2 files changed, 141 insertions(+), 149 deletions(-) diff --git a/index.ts b/index.ts index 380beb7..98df16b 100644 --- a/index.ts +++ b/index.ts @@ -6,7 +6,7 @@ import { createPruningTool } from "./lib/pruning-tool" import { createChatMessageTransformHandler } from "./lib/hooks" const plugin: Plugin = (async (ctx) => { - const { config, migrations } = getConfig(ctx) + const config = getConfig(ctx) if (!config.enabled) { return {} @@ -24,27 +24,8 @@ const plugin: Plugin = (async (ctx) => { // Log initialization logger.info("plugin", "DCP initialized", { strategies: config.strategies, - model: config.model || "auto" }) - // Show migration toast if there were config migrations - if (migrations.length > 0) { - setTimeout(async () => { - try { - await ctx.client.tui.showToast({ - body: { - title: "DCP: Config upgraded", - message: migrations.join('\n'), - variant: "info", - duration: 8000 - } - }) - } catch { - // Silently ignore toast errors - } - }, 7000) - } - return { "experimental.chat.messages.transform": createChatMessageTransformHandler(), tool: config.strategies.onTool.length > 0 ? 
{ diff --git a/lib/config.ts b/lib/config.ts index a08a2aa..9507a8e 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -1,61 +1,72 @@ -import { readFileSync, writeFileSync, existsSync, mkdirSync, statSync, copyFileSync } from 'fs' +import { readFileSync, writeFileSync, existsSync, mkdirSync, statSync } from 'fs' import { join, dirname } from 'path' import { homedir } from 'os' import { parse } from 'jsonc-parser' -import { Logger } from './logger' import type { PluginInput } from '@opencode-ai/plugin' -export type PruningStrategy = "deduplication" | "ai-analysis" +export interface DeduplicationStrategy { + enabled: boolean +} -export interface PluginConfig { +export interface PruneThinkingBlocksStrategy { + enabled: boolean +} + +export interface OnIdleStrategy { enabled: boolean - debug: boolean - protectedTools: string[] model?: string showModelErrorToasts?: boolean - showUpdateToasts?: boolean strictModelSelection?: boolean + protectedTools: string[] +} + +export interface PruneToolStrategy { + enabled: boolean + protectedTools: string[] + nudgeFrequency: number +} + +export interface PluginConfig { + enabled: boolean + debug: boolean + showUpdateToasts?: boolean pruningSummary: "off" | "minimal" | "detailed" - nudgeFreq: number strategies: { - onIdle: PruningStrategy[] - onTool: PruningStrategy[] + deduplication: DeduplicationStrategy + pruneThinkingBlocks: PruneThinkingBlocksStrategy + onIdle: OnIdleStrategy + pruneTool: PruneToolStrategy } } -export interface ConfigResult { - config: PluginConfig - migrations: string[] -} +const DEFAULT_PROTECTED_TOOLS = ['task', 'todowrite', 'todoread', 'prune', 'batch', 'write', 'edit'] const defaultConfig: PluginConfig = { enabled: true, debug: false, - protectedTools: ['task', 'todowrite', 'todoread', 'prune', 'batch', 'write', 'edit'], - showModelErrorToasts: true, showUpdateToasts: true, - strictModelSelection: false, pruningSummary: 'detailed', - nudgeFreq: 10, strategies: { - onIdle: ['ai-analysis'], - onTool: ['ai-analysis'] + deduplication: { + enabled: true + }, + pruneThinkingBlocks: { + enabled: true + }, + onIdle: { + enabled: true, + showModelErrorToasts: true, + strictModelSelection: false, + protectedTools: [...DEFAULT_PROTECTED_TOOLS] + }, + pruneTool: { + enabled: false, + protectedTools: [...DEFAULT_PROTECTED_TOOLS], + nudgeFrequency: 10 + } } } -const VALID_CONFIG_KEYS = new Set([ - 'enabled', - 'debug', - 'protectedTools', - 'model', - 'showModelErrorToasts', - 'showUpdateToasts', - 'strictModelSelection', - 'pruningSummary', - 'nudgeFreq', - 'strategies' -]) - const GLOBAL_CONFIG_DIR = join(homedir(), '.config', 'opencode') const GLOBAL_CONFIG_PATH_JSONC = join(GLOBAL_CONFIG_DIR, 'dcp.jsonc') const GLOBAL_CONFIG_PATH_JSON = join(GLOBAL_CONFIG_DIR, 'dcp.json') @@ -109,30 +120,43 @@ function createDefaultConfig(): void { "enabled": true, // Enable debug logging to ~/.config/opencode/logs/dcp/ "debug": false, - // Override model for analysis (format: "provider/model", e.g. 
"anthropic/claude-haiku-4-5") - // "model": "anthropic/claude-haiku-4-5", - // Show toast notifications when model selection fails - "showModelErrorToasts": true, // Show toast notifications when a new version is available "showUpdateToasts": true, - // Only run AI analysis with session model or configured model (disables fallback models) - "strictModelSelection": false, - // AI analysis strategies (deduplication runs automatically on every request) - "strategies": { - // Strategies to run when session goes idle - "onIdle": ["ai-analysis"], - // Strategies to run when AI calls prune tool - "onTool": ["ai-analysis"] - }, // Summary display: "off", "minimal", or "detailed" "pruningSummary": "detailed", - // How often to nudge the AI to prune (every N tool results, 0 = disabled) - "nudgeFreq": 10 - // Additional tools to protect from pruning - // "protectedTools": ["bash"] + // Strategies for pruning tokens from chat history + "strategies": { + // Remove duplicate tool calls (same tool with same arguments) + "deduplication": { + "enabled": true + }, + // Remove thinking/reasoning LLM blocks + "pruneThinkingBlocks": { + "enabled": true + }, + // Run an LLM to analyze what tool calls are no longer relevant on idle + "onIdle": { + "enabled": true, + // Override model for analysis (format: "provider/model") + // "model": "anthropic/claude-haiku-4-5", + // Show toast notifications when model selection fails + "showModelErrorToasts": true, + // When true, fallback models are not permitted + "strictModelSelection": false, + // Additional tools to protect from pruning + "protectedTools": [] + }, + // Exposes a prune tool to your LLM to call when it determines pruning is necessary + "pruneTool": { + "enabled": false, + // Additional tools to protect from pruning + "protectedTools": [], + // How often to nudge the AI to prune (every N tool results, 0 = disabled) + "nudgeFrequency": 10 + } + } } ` - writeFileSync(GLOBAL_CONFIG_PATH_JSONC, configContent, 'utf-8') } @@ -145,109 +169,96 @@ function loadConfigFile(configPath: string): Record | null { } } -function getInvalidKeys(config: Record): string[] { - const invalidKeys: string[] = [] - for (const key of Object.keys(config)) { - if (!VALID_CONFIG_KEYS.has(key)) { - invalidKeys.push(key) - } - } - return invalidKeys -} - -function backupAndResetConfig(configPath: string, logger: Logger): string | null { - try { - const backupPath = configPath + '.bak' - copyFileSync(configPath, backupPath) - logger.info('config', 'Created config backup', { backup: backupPath }) - createDefaultConfig() - logger.info('config', 'Created fresh default config', { path: GLOBAL_CONFIG_PATH_JSONC }) - return backupPath - } catch (error: any) { - logger.error('config', 'Failed to backup/reset config', { error: error.message }) - return null - } -} - function mergeStrategies( base: PluginConfig['strategies'], override?: Partial ): PluginConfig['strategies'] { if (!override) return base + return { - onIdle: override.onIdle ?? base.onIdle, - onTool: override.onTool ?? base.onTool + deduplication: { + enabled: override.deduplication?.enabled ?? base.deduplication.enabled + }, + pruneThinkingBlocks: { + enabled: override.pruneThinkingBlocks?.enabled ?? base.pruneThinkingBlocks.enabled + }, + onIdle: { + enabled: override.onIdle?.enabled ?? base.onIdle.enabled, + model: override.onIdle?.model ?? base.onIdle.model, + showModelErrorToasts: override.onIdle?.showModelErrorToasts ?? base.onIdle.showModelErrorToasts, + strictModelSelection: override.onIdle?.strictModelSelection ?? 
base.onIdle.strictModelSelection, + protectedTools: [ + ...new Set([ + ...base.onIdle.protectedTools, + ...(override.onIdle?.protectedTools ?? []) + ]) + ] + }, + pruneTool: { + enabled: override.pruneTool?.enabled ?? base.pruneTool.enabled, + protectedTools: [ + ...new Set([ + ...base.pruneTool.protectedTools, + ...(override.pruneTool?.protectedTools ?? []) + ]) + ], + nudgeFrequency: override.pruneTool?.nudgeFrequency ?? base.pruneTool.nudgeFrequency + } + } +} + +function deepCloneConfig(config: PluginConfig): PluginConfig { + return { + ...config, + strategies: { + deduplication: { ...config.strategies.deduplication }, + pruneThinkingBlocks: { ...config.strategies.pruneThinkingBlocks }, + onIdle: { + ...config.strategies.onIdle, + protectedTools: [...config.strategies.onIdle.protectedTools] + }, + pruneTool: { + ...config.strategies.pruneTool, + protectedTools: [...config.strategies.pruneTool.protectedTools] + } + } } } -export function getConfig(ctx?: PluginInput): ConfigResult { - let config = { ...defaultConfig, protectedTools: [...defaultConfig.protectedTools] } +export function getConfig(ctx: PluginInput): PluginConfig { + let config = deepCloneConfig(defaultConfig) const configPaths = getConfigPaths(ctx) - const logger = new Logger(true) - const migrations: string[] = [] + // Load and merge global config if (configPaths.global) { const globalConfig = loadConfigFile(configPaths.global) if (globalConfig) { - const invalidKeys = getInvalidKeys(globalConfig) - - if (invalidKeys.length > 0) { - logger.info('config', 'Found invalid config keys', { keys: invalidKeys }) - const backupPath = backupAndResetConfig(configPaths.global, logger) - if (backupPath) { - migrations.push(`Old config backed up to ${backupPath}`) - } - } else { - config = { - enabled: globalConfig.enabled ?? config.enabled, - debug: globalConfig.debug ?? config.debug, - protectedTools: [...new Set([...config.protectedTools, ...(globalConfig.protectedTools ?? [])])], - model: globalConfig.model ?? config.model, - showModelErrorToasts: globalConfig.showModelErrorToasts ?? config.showModelErrorToasts, - showUpdateToasts: globalConfig.showUpdateToasts ?? config.showUpdateToasts, - strictModelSelection: globalConfig.strictModelSelection ?? config.strictModelSelection, - strategies: mergeStrategies(config.strategies, globalConfig.strategies as any), - pruningSummary: globalConfig.pruningSummary ?? config.pruningSummary, - nudgeFreq: globalConfig.nudgeFreq ?? config.nudgeFreq - } - logger.info('config', 'Loaded global config', { path: configPaths.global }) + config = { + enabled: globalConfig.enabled ?? config.enabled, + debug: globalConfig.debug ?? config.debug, + showUpdateToasts: globalConfig.showUpdateToasts ?? config.showUpdateToasts, + pruningSummary: globalConfig.pruningSummary ?? 
config.pruningSummary, + strategies: mergeStrategies(config.strategies, globalConfig.strategies as any) } } } else { + // No config exists, create default createDefaultConfig() - logger.info('config', 'Created default global config', { path: GLOBAL_CONFIG_PATH_JSONC }) } + // Load and merge project config (overrides global) if (configPaths.project) { const projectConfig = loadConfigFile(configPaths.project) if (projectConfig) { - const invalidKeys = getInvalidKeys(projectConfig) - - if (invalidKeys.length > 0) { - logger.warn('config', 'Project config has invalid keys (ignored)', { - path: configPaths.project, - keys: invalidKeys - }) - migrations.push(`Project config has invalid keys: ${invalidKeys.join(', ')}`) - } else { - config = { - enabled: projectConfig.enabled ?? config.enabled, - debug: projectConfig.debug ?? config.debug, - protectedTools: [...new Set([...config.protectedTools, ...(projectConfig.protectedTools ?? [])])], - model: projectConfig.model ?? config.model, - showModelErrorToasts: projectConfig.showModelErrorToasts ?? config.showModelErrorToasts, - showUpdateToasts: projectConfig.showUpdateToasts ?? config.showUpdateToasts, - strictModelSelection: projectConfig.strictModelSelection ?? config.strictModelSelection, - strategies: mergeStrategies(config.strategies, projectConfig.strategies as any), - pruningSummary: projectConfig.pruningSummary ?? config.pruningSummary, - nudgeFreq: projectConfig.nudgeFreq ?? config.nudgeFreq - } - logger.info('config', 'Loaded project config (overrides global)', { path: configPaths.project }) + config = { + enabled: projectConfig.enabled ?? config.enabled, + debug: projectConfig.debug ?? config.debug, + showUpdateToasts: projectConfig.showUpdateToasts ?? config.showUpdateToasts, + pruningSummary: projectConfig.pruningSummary ?? 
config.pruningSummary, + strategies: mergeStrategies(config.strategies, projectConfig.strategies as any) } } - } else if (ctx?.directory) { - logger.debug('config', 'No project config found', { searchedFrom: ctx.directory }) } - return { config, migrations } + return config } From a57ce231762b07e569cb83925d1e28b2a6668d2e Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Thu, 11 Dec 2025 23:50:44 +0100 Subject: [PATCH 05/51] show error toast if loading config file fails --- lib/config.ts | 70 ++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 52 insertions(+), 18 deletions(-) diff --git a/lib/config.ts b/lib/config.ts index 9507a8e..83440d0 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -160,12 +160,28 @@ function createDefaultConfig(): void { writeFileSync(GLOBAL_CONFIG_PATH_JSONC, configContent, 'utf-8') } -function loadConfigFile(configPath: string): Record | null { +interface ConfigLoadResult { + data: Record | null + parseError?: string +} + +function loadConfigFile(configPath: string): ConfigLoadResult { + let fileContent: string + try { + fileContent = readFileSync(configPath, 'utf-8') + } catch { + // File doesn't exist or can't be read - not a parse error + return { data: null } + } + try { - const fileContent = readFileSync(configPath, 'utf-8') - return parse(fileContent) + const parsed = parse(fileContent) + if (parsed === undefined || parsed === null) { + return { data: null, parseError: 'Config file is empty or invalid' } + } + return { data: parsed } } catch (error: any) { - return null + return { data: null, parseError: error.message || 'Failed to parse config' } } } @@ -231,14 +247,23 @@ export function getConfig(ctx: PluginInput): PluginConfig { // Load and merge global config if (configPaths.global) { - const globalConfig = loadConfigFile(configPaths.global) - if (globalConfig) { + const result = loadConfigFile(configPaths.global) + if (result.parseError) { + ctx.client.tui.showToast({ + body: { + title: "DCP: Invalid config", + message: `${configPaths.global}\n${result.parseError}\nUsing default values`, + variant: "warning", + duration: 7000 + } + }).catch(() => {}) + } else if (result.data) { config = { - enabled: globalConfig.enabled ?? config.enabled, - debug: globalConfig.debug ?? config.debug, - showUpdateToasts: globalConfig.showUpdateToasts ?? config.showUpdateToasts, - pruningSummary: globalConfig.pruningSummary ?? config.pruningSummary, - strategies: mergeStrategies(config.strategies, globalConfig.strategies as any) + enabled: result.data.enabled ?? config.enabled, + debug: result.data.debug ?? config.debug, + showUpdateToasts: result.data.showUpdateToasts ?? config.showUpdateToasts, + pruningSummary: result.data.pruningSummary ?? config.pruningSummary, + strategies: mergeStrategies(config.strategies, result.data.strategies as any) } } } else { @@ -248,14 +273,23 @@ export function getConfig(ctx: PluginInput): PluginConfig { // Load and merge project config (overrides global) if (configPaths.project) { - const projectConfig = loadConfigFile(configPaths.project) - if (projectConfig) { + const result = loadConfigFile(configPaths.project) + if (result.parseError) { + ctx.client.tui.showToast({ + body: { + title: "DCP: Invalid project config", + message: `${configPaths.project}\n${result.parseError}\nUsing global/default values`, + variant: "warning", + duration: 7000 + } + }).catch(() => {}) + } else if (result.data) { config = { - enabled: projectConfig.enabled ?? config.enabled, - debug: projectConfig.debug ?? 
config.debug, - showUpdateToasts: projectConfig.showUpdateToasts ?? config.showUpdateToasts, - pruningSummary: projectConfig.pruningSummary ?? config.pruningSummary, - strategies: mergeStrategies(config.strategies, projectConfig.strategies as any) + enabled: result.data.enabled ?? config.enabled, + debug: result.data.debug ?? config.debug, + showUpdateToasts: result.data.showUpdateToasts ?? config.showUpdateToasts, + pruningSummary: result.data.pruningSummary ?? config.pruningSummary, + strategies: mergeStrategies(config.strategies, result.data.strategies as any) } } } From 951dbcd5a3c78de3e9abb9dc9e1f695b99f57a50 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 00:53:27 +0100 Subject: [PATCH 06/51] building prototype --- index.ts | 9 +- lib/config.ts | 16 +- lib/core/janitor.ts | 445 --------------------------- lib/core/strategies/deduplication.ts | 4 +- lib/core/strategies/index.ts | 8 +- lib/hooks.ts | 173 +---------- lib/state/tool-cache.ts | 75 ++--- lib/state/types.ts | 11 +- lib/ui/display-utils.ts | 1 - lib/ui/notification.ts | 2 +- lib/utils.ts | 12 + 11 files changed, 70 insertions(+), 686 deletions(-) delete mode 100644 lib/core/janitor.ts diff --git a/index.ts b/index.ts index 98df16b..1dab272 100644 --- a/index.ts +++ b/index.ts @@ -27,8 +27,13 @@ const plugin: Plugin = (async (ctx) => { }) return { - "experimental.chat.messages.transform": createChatMessageTransformHandler(), - tool: config.strategies.onTool.length > 0 ? { + "experimental.chat.messages.transform": createChatMessageTransformHandler( + ctx.client, + state, + logger, + config + ), + tool: config.strategies.pruneTool.enabled ? { prune: createPruningTool({ client: ctx.client, state, diff --git a/lib/config.ts b/lib/config.ts index 83440d0..76fe79f 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -4,15 +4,15 @@ import { homedir } from 'os' import { parse } from 'jsonc-parser' import type { PluginInput } from '@opencode-ai/plugin' -export interface DeduplicationStrategy { +export interface Deduplication { enabled: boolean } -export interface PruneThinkingBlocksStrategy { +export interface PruneThinkingBlocks { enabled: boolean } -export interface OnIdleStrategy { +export interface OnIdle { enabled: boolean model?: string showModelErrorToasts?: boolean @@ -20,7 +20,7 @@ export interface OnIdleStrategy { protectedTools: string[] } -export interface PruneToolStrategy { +export interface PruneTool { enabled: boolean protectedTools: string[] nudgeFrequency: number @@ -32,10 +32,10 @@ export interface PluginConfig { showUpdateToasts?: boolean pruningSummary: "off" | "minimal" | "detailed" strategies: { - deduplication: DeduplicationStrategy - pruneThinkingBlocks: PruneThinkingBlocksStrategy - onIdle: OnIdleStrategy - pruneTool: PruneToolStrategy + deduplication: Deduplication + pruneThinkingBlocks: PruneThinkingBlocks + onIdle: OnIdle + pruneTool: PruneTool } } diff --git a/lib/core/janitor.ts b/lib/core/janitor.ts deleted file mode 100644 index fc00a10..0000000 --- a/lib/core/janitor.ts +++ /dev/null @@ -1,445 +0,0 @@ -import { z } from "zod" -import type { Logger } from "../logger" -import type { PruningStrategy } from "../config" -import type { SessionState } from "../state" -import type { ToolMetadata, SessionStats, GCStats, PruningResult } from "../fetch-wrapper/types" -import { findCurrentAgent } from "../hooks" -import { buildAnalysisPrompt } from "./prompt" -import { selectModel, extractModelFromSession } from "../model-selector" -import { estimateTokensBatch, formatTokenCount } from 
"../tokenizer" -import { saveSessionState } from "../state/persistence" -import { ensureSessionRestored } from "../state" -import { - sendUnifiedNotification, - type NotificationContext -} from "../ui/notification" - -export type { SessionStats, GCStats, PruningResult } - -export interface PruningOptions { - reason?: string - trigger: 'idle' | 'tool' -} - -export interface JanitorConfig { - protectedTools: string[] - model?: string - showModelErrorToasts: boolean - strictModelSelection: boolean - pruningSummary: "off" | "minimal" | "detailed" - workingDirectory?: string -} - -export interface JanitorContext { - client: any - state: SessionState - logger: Logger - config: JanitorConfig - notificationCtx: NotificationContext -} - -// ============================================================================ -// Context factory -// ============================================================================ - -export function createJanitorContext( - client: any, - state: SessionState, - logger: Logger, - config: JanitorConfig -): JanitorContext { - return { - client, - state, - logger, - config, - notificationCtx: { - client, - logger, - config: { - pruningSummary: config.pruningSummary, - workingDirectory: config.workingDirectory - } - } - } -} - -// ============================================================================ -// Public API -// ============================================================================ - -/** - * Run pruning on idle trigger. - * Note: onTool pruning is now handled directly by pruning-tool.ts - */ -export async function runOnIdle( - ctx: JanitorContext, - sessionID: string, - strategies: PruningStrategy[] -): Promise { - return runWithStrategies(ctx, sessionID, strategies, { trigger: 'idle' }) -} - -// ============================================================================ -// Core pruning logic (for onIdle only) -// ============================================================================ - -async function runWithStrategies( - ctx: JanitorContext, - sessionID: string, - strategies: PruningStrategy[], - options: PruningOptions -): Promise { - const { client, state, logger, config } = ctx - - try { - if (strategies.length === 0) { - return null - } - - // Ensure persisted state is restored before processing - await ensureSessionRestored(state, sessionID, logger) - - const [sessionInfoResponse, messagesResponse] = await Promise.all([ - client.session.get({ path: { id: sessionID } }), - client.session.messages({ path: { id: sessionID }, query: { limit: 500 } }) - ]) - - const sessionInfo = sessionInfoResponse.data - const messages = messagesResponse.data || messagesResponse - - if (!messages || messages.length < 3) { - return null - } - - const currentAgent = findCurrentAgent(messages) - const { toolCallIds, toolOutputs, toolMetadata } = parseMessages(messages, state.toolParameters) - - const alreadyPrunedIds = state.prunedIds.get(sessionID) ?? [] - const unprunedToolCallIds = toolCallIds.filter(id => !alreadyPrunedIds.includes(id)) - - const gcPending = state.gcPending.get(sessionID) ?? 
null - - if (unprunedToolCallIds.length === 0 && !gcPending) { - return null - } - - const candidateCount = unprunedToolCallIds.filter(id => { - const metadata = toolMetadata.get(id) - return !metadata || !config.protectedTools.includes(metadata.tool) - }).length - - // PHASE 1: LLM ANALYSIS - let llmPrunedIds: string[] = [] - - if (strategies.includes('ai-analysis') && unprunedToolCallIds.length > 0) { - llmPrunedIds = await runLlmAnalysis( - ctx, - sessionID, - sessionInfo, - messages, - unprunedToolCallIds, - alreadyPrunedIds, - toolMetadata, - options - ) - } - - const finalNewlyPrunedIds = llmPrunedIds.filter(id => !alreadyPrunedIds.includes(id)) - - if (finalNewlyPrunedIds.length === 0 && !gcPending) { - return null - } - - // Calculate stats & send notification - const tokensSaved = await calculateTokensSaved(finalNewlyPrunedIds, toolOutputs) - - const currentStats = state.stats.get(sessionID) ?? { - totalToolsPruned: 0, - totalTokensSaved: 0, - totalGCTokens: 0, - totalGCTools: 0 - } - - const sessionStats: SessionStats = { - totalToolsPruned: currentStats.totalToolsPruned + finalNewlyPrunedIds.length, - totalTokensSaved: currentStats.totalTokensSaved + tokensSaved, - totalGCTokens: currentStats.totalGCTokens + (gcPending?.tokensCollected ?? 0), - totalGCTools: currentStats.totalGCTools + (gcPending?.toolsDeduped ?? 0) - } - state.stats.set(sessionID, sessionStats) - - const notificationSent = await sendUnifiedNotification( - ctx.notificationCtx, - sessionID, - { - aiPrunedCount: llmPrunedIds.length, - aiTokensSaved: tokensSaved, - aiPrunedIds: llmPrunedIds, - toolMetadata, - gcPending, - sessionStats - }, - currentAgent - ) - - if (gcPending) { - state.gcPending.delete(sessionID) - } - - if (finalNewlyPrunedIds.length === 0) { - if (notificationSent) { - logger.info("janitor", `GC-only notification: ~${formatTokenCount(gcPending?.tokensCollected ?? 0)} tokens from ${gcPending?.toolsDeduped ?? 
0} deduped tools`, { - trigger: options.trigger - }) - } - return null - } - - // State update (only if something was pruned) - const allPrunedIds = [...new Set([...alreadyPrunedIds, ...llmPrunedIds])] - state.prunedIds.set(sessionID, allPrunedIds) - - const sessionName = sessionInfo?.title - saveSessionState(sessionID, new Set(allPrunedIds), sessionStats, logger, sessionName).catch(err => { - logger.error("janitor", "Failed to persist state", { error: err.message }) - }) - - const prunedCount = finalNewlyPrunedIds.length - const keptCount = candidateCount - prunedCount - - const logMeta: Record = { trigger: options.trigger } - if (options.reason) { - logMeta.reason = options.reason - } - if (gcPending) { - logMeta.gcTokens = gcPending.tokensCollected - logMeta.gcTools = gcPending.toolsDeduped - } - - logger.info("janitor", `Pruned ${prunedCount}/${candidateCount} tools, ${keptCount} kept (~${formatTokenCount(tokensSaved)} tokens)`, logMeta) - - return { - prunedCount: finalNewlyPrunedIds.length, - tokensSaved, - llmPrunedIds, - toolMetadata, - sessionStats - } - - } catch (error: any) { - ctx.logger.error("janitor", "Analysis failed", { - error: error.message, - trigger: options.trigger - }) - return null - } -} - -// ============================================================================ -// LLM Analysis -// ============================================================================ - -async function runLlmAnalysis( - ctx: JanitorContext, - sessionID: string, - sessionInfo: any, - messages: any[], - unprunedToolCallIds: string[], - alreadyPrunedIds: string[], - toolMetadata: Map, - options: PruningOptions -): Promise { - const { client, state, logger, config } = ctx - - const protectedToolCallIds: string[] = [] - const prunableToolCallIds = unprunedToolCallIds.filter(id => { - const metadata = toolMetadata.get(id) - if (metadata && config.protectedTools.includes(metadata.tool)) { - protectedToolCallIds.push(id) - return false - } - return true - }) - - if (prunableToolCallIds.length === 0) { - return [] - } - - const cachedModelInfo = state.model.get(sessionID) - const sessionModelInfo = extractModelFromSession(sessionInfo, logger) - const currentModelInfo = cachedModelInfo || sessionModelInfo - - const modelSelection = await selectModel(currentModelInfo, logger, config.model, config.workingDirectory) - - logger.info("janitor", `Model: ${modelSelection.modelInfo.providerID}/${modelSelection.modelInfo.modelID}`, { - source: modelSelection.source - }) - - if (modelSelection.failedModel && config.showModelErrorToasts) { - const skipAi = modelSelection.source === 'fallback' && config.strictModelSelection - try { - await client.tui.showToast({ - body: { - title: skipAi ? "DCP: AI analysis skipped" : "DCP: Model fallback", - message: skipAi - ? 
`${modelSelection.failedModel.providerID}/${modelSelection.failedModel.modelID} failed\nAI analysis skipped (strictModelSelection enabled)` - : `${modelSelection.failedModel.providerID}/${modelSelection.failedModel.modelID} failed\nUsing ${modelSelection.modelInfo.providerID}/${modelSelection.modelInfo.modelID}`, - variant: "info", - duration: 5000 - } - }) - } catch (toastError: any) { - // Ignore toast errors - } - } - - if (modelSelection.source === 'fallback' && config.strictModelSelection) { - logger.info("janitor", "Skipping AI analysis (fallback model, strictModelSelection enabled)") - return [] - } - - const { generateObject } = await import('ai') - - const sanitizedMessages = replacePrunedToolOutputs(messages, alreadyPrunedIds) - - const analysisPrompt = buildAnalysisPrompt( - prunableToolCallIds, - sanitizedMessages, - alreadyPrunedIds, - protectedToolCallIds, - options.reason - ) - - await logger.saveWrappedContext( - "janitor-shadow", - [{ role: "user", content: analysisPrompt }], - { - sessionID, - modelProvider: modelSelection.modelInfo.providerID, - modelID: modelSelection.modelInfo.modelID, - candidateToolCount: prunableToolCallIds.length, - alreadyPrunedCount: alreadyPrunedIds.length, - protectedToolCount: protectedToolCallIds.length, - trigger: options.trigger, - reason: options.reason - } - ) - - const result = await generateObject({ - model: modelSelection.model, - schema: z.object({ - pruned_tool_call_ids: z.array(z.string()), - reasoning: z.string(), - }), - prompt: analysisPrompt - }) - - const rawLlmPrunedIds = result.object.pruned_tool_call_ids - const llmPrunedIds = rawLlmPrunedIds.filter(id => - prunableToolCallIds.includes(id.toLowerCase()) - ) - - if (llmPrunedIds.length > 0) { - const reasoning = result.object.reasoning.replace(/\n+/g, ' ').replace(/\s+/g, ' ').trim() - logger.info("janitor", `LLM reasoning: ${reasoning.substring(0, 200)}${reasoning.length > 200 ? '...' : ''}`) - } - - return llmPrunedIds -} - -function replacePrunedToolOutputs(messages: any[], prunedIds: string[]): any[] { - if (prunedIds.length === 0) return messages - - const prunedIdsSet = new Set(prunedIds.map(id => id.toLowerCase())) - - return messages.map(msg => { - if (!msg.parts) return msg - - return { - ...msg, - parts: msg.parts.map((part: any) => { - if (part.type === 'tool' && - part.callID && - prunedIdsSet.has(part.callID.toLowerCase()) && - part.state?.output) { - return { - ...part, - state: { - ...part.state, - output: '[Output removed to save context - information superseded or no longer needed]' - } - } - } - return part - }) - } - }) -} - -// ============================================================================ -// Message parsing -// ============================================================================ - -interface ParsedMessages { - toolCallIds: string[] - toolOutputs: Map - toolMetadata: Map -} - -export function parseMessages( - messages: any[], - toolParametersCache: Map -): ParsedMessages { - const toolCallIds: string[] = [] - const toolOutputs = new Map() - const toolMetadata = new Map() - - for (const msg of messages) { - if (msg.parts) { - for (const part of msg.parts) { - if (part.type === "tool" && part.callID) { - const normalizedId = part.callID.toLowerCase() - toolCallIds.push(normalizedId) - - const cachedData = toolParametersCache.get(part.callID) || toolParametersCache.get(normalizedId) - const parameters = cachedData?.parameters ?? part.state?.input ?? 
part.parameters - - toolMetadata.set(normalizedId, { - tool: part.tool, - parameters: parameters - }) - - if (part.state?.status === "completed" && part.state.output) { - toolOutputs.set(normalizedId, part.state.output) - } - } - } - } - } - - return { toolCallIds, toolOutputs, toolMetadata } -} - -// ============================================================================ -// Helpers -// ============================================================================ - -async function calculateTokensSaved(prunedIds: string[], toolOutputs: Map): Promise { - const outputsToTokenize: string[] = [] - - for (const prunedId of prunedIds) { - const normalizedId = prunedId.toLowerCase() - const output = toolOutputs.get(normalizedId) - if (output) { - outputsToTokenize.push(output) - } - } - - if (outputsToTokenize.length > 0) { - const tokenCounts = await estimateTokensBatch(outputsToTokenize) - return tokenCounts.reduce((sum, count) => sum + count, 0) - } - - return 0 -} diff --git a/lib/core/strategies/deduplication.ts b/lib/core/strategies/deduplication.ts index ace7b3f..3bbcd28 100644 --- a/lib/core/strategies/deduplication.ts +++ b/lib/core/strategies/deduplication.ts @@ -1,5 +1,5 @@ import { extractParameterKey } from "../../ui/display-utils" -import type { PruningStrategy, StrategyResult, ToolMetadata } from "./index" +import type { PruningStrategy, StrategyResult, ToolParameterEntry } from "./index" /** * Deduplication strategy - prunes older tool calls that have identical @@ -9,7 +9,7 @@ export const deduplicationStrategy: PruningStrategy = { name: "deduplication", detect( - toolMetadata: Map, + toolMetadata: Map, unprunedIds: string[], protectedTools: string[] ): StrategyResult { diff --git a/lib/core/strategies/index.ts b/lib/core/strategies/index.ts index b4eb8af..7314192 100644 --- a/lib/core/strategies/index.ts +++ b/lib/core/strategies/index.ts @@ -2,10 +2,10 @@ * Strategy runner - executes all enabled pruning strategies and collects results. */ +import { ToolParameterEntry } from "../../state" import { deduplicationStrategy } from "./deduplication" -import type { ToolMetadata } from "../../fetch-wrapper/types" -export type { ToolMetadata } +export type { ToolParameterEntry} /** * Common interface for rule-based pruning strategies. 
@@ -40,7 +40,7 @@ export interface PruningStrategy { * @returns IDs to prune and optional details */ detect( - toolMetadata: Map, + toolMetadata: Map, unprunedIds: string[], protectedTools: string[] ): StrategyResult @@ -71,7 +71,7 @@ export interface RunStrategiesResult { * @param enabledStrategies - Strategy names to run (defaults to all) */ export function runStrategies( - toolMetadata: Map, + toolMetadata: Map, unprunedIds: string[], protectedTools: string[], enabledStrategies?: string[] diff --git a/lib/hooks.ts b/lib/hooks.ts index 5cc4bfb..eb7b56b 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -1,178 +1,21 @@ -import type { WithParts } from "@opencode-ai/plugin" -import type { SessionState } from "./state" +import type { SessionState, WithParts } from "./state" import type { Logger } from "./logger" -import type { JanitorContext } from "./core/janitor" -import { runOnIdle } from "./core/janitor" -import type { PluginConfig, PruningStrategy } from "./config" -import type { ToolTracker } from "./fetch-wrapper/tool-tracker" -import { resetToolTrackerCount, clearToolTracker } from "./fetch-wrapper/tool-tracker" -import { clearAllMappings } from "./state/id-mapping" +import type { PluginConfig } from "./config" +import { syncToolCache } from "./state/tool-cache" -export async function isSubagentSession(client: any, sessionID: string): Promise { - try { - const result = await client.session.get({ path: { id: sessionID } }) - return !!result.data?.parentID - } catch (error: any) { - return false - } -} - -function toolStrategiesCoveredByIdle(onIdle: PruningStrategy[], onTool: PruningStrategy[]): boolean { - return onTool.every(strategy => onIdle.includes(strategy)) -} -export function createEventHandler( +export function createChatMessageTransformHandler( client: any, - janitorCtx: JanitorContext, + state: SessionState, logger: Logger, - config: PluginConfig, - toolTracker?: ToolTracker + config: PluginConfig ) { - return async ({ event }: { event: any }) => { - if (event.type === "session.status" && event.properties.status.type === "idle") { - if (await isSubagentSession(client, event.properties.sessionID)) return - if (config.strategies.onIdle.length === 0) return - - if (toolTracker?.skipNextIdle) { - toolTracker.skipNextIdle = false - if (toolStrategiesCoveredByIdle(config.strategies.onIdle, config.strategies.onTool)) { - return - } - } - - try { - const result = await runOnIdle(janitorCtx, event.properties.sessionID, config.strategies.onIdle) - - if (result && result.prunedCount > 0 && toolTracker && config.nudgeFreq > 0) { - if (toolStrategiesCoveredByIdle(config.strategies.onIdle, config.strategies.onTool)) { - resetToolTrackerCount(toolTracker) - } - } - } catch (err: any) { - logger.error("janitor", "Failed", { error: err.message }) - } - } - } -} - -export function createChatMessageTransformHandler() { return async( - input: {}, + input: {}, output: { messages: WithParts[] } ) => { - + syncToolCache(state, logger, output.messages); } } -/** - * Creates the chat.params hook for model caching and Google tool call mapping. 
- */ -export function createChatParamsHandler( - client: any, - state: SessionState, - logger: Logger, - toolTracker?: ToolTracker -) { - return async (input: any, _output: any) => { - const sessionId = input.sessionID - let providerID = (input.provider as any)?.info?.id || input.provider?.id - const modelID = input.model?.id - if (!providerID && input.message?.model?.providerID) { - providerID = input.message.model.providerID - } - - if (state.lastSeenSessionId && state.lastSeenSessionId !== sessionId) { - logger.info("chat.params", "Session changed, resetting state", { - from: state.lastSeenSessionId, - to: sessionId - }) - clearAllMappings() - state.toolParameters.clear() - if (toolTracker) { - clearToolTracker(toolTracker) - } - } - - state.lastSeenSessionId = sessionId - - if (!state.checkedSessions.has(sessionId)) { - state.checkedSessions.add(sessionId) - const isSubagent = await isSubagentSession(client, sessionId) - if (isSubagent) { - state.subagentSessions.add(sessionId) - } - } - - // Cache model info for the session (used by janitor for model selection) - if (providerID && modelID) { - state.model.set(sessionId, { - providerID: providerID, - modelID: modelID - }) - } - - // Build position-based mapping for Gemini (which loses tool call IDs in native format) - if (providerID === 'google' || providerID === 'google-vertex') { - try { - const messagesResponse = await client.session.messages({ - path: { id: sessionId }, - query: { limit: 500 } - }) - const messages = messagesResponse.data || messagesResponse - - if (Array.isArray(messages)) { - const toolCallsByName = new Map() - - for (const msg of messages) { - if (msg.parts) { - for (const part of msg.parts) { - if (part.type === 'tool' && part.callID && part.tool) { - const toolName = part.tool.toLowerCase() - const callId = part.callID.toLowerCase() - - if (!toolCallsByName.has(toolName)) { - toolCallsByName.set(toolName, []) - } - toolCallsByName.get(toolName)!.push(callId) - } - } - } - } - - const positionMapping = new Map() - for (const [toolName, callIds] of toolCallsByName) { - callIds.forEach((callId, index) => { - positionMapping.set(`${toolName}:${index}`, callId) - }) - } - - state.googleToolCallMapping.set(sessionId, positionMapping) - logger.info("chat.params", "Built Google tool call mapping", { - sessionId: sessionId.substring(0, 8), - toolCount: positionMapping.size, - toolParamsCount: state.toolParameters.size - }) - } - } catch (error: any) { - logger.error("chat.params", "Failed to build Google tool call mapping", { - error: error.message - }) - } - } - } -} - -/** - * Finds the current agent from messages by scanning backward for user messages. - */ -export function findCurrentAgent(messages: any[]): string | undefined { - for (let i = messages.length - 1; i >= 0; i--) { - const msg = messages[i] - const info = msg.info - if (info?.role === 'user') { - return info.agent || 'build' - } - } - return undefined -} diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index 0d8a471..f75f619 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -1,8 +1,6 @@ -import type { SessionState, ToolStatus } from "./index" +import type { SessionState, ToolStatus, WithParts } from "./index" import type { Logger } from "../logger" -import type { ToolTracker } from "../fetch-wrapper/tool-tracker" -/** Maximum number of entries to keep in the tool parameters cache */ const MAX_TOOL_CACHE_SIZE = 500 /** @@ -11,73 +9,36 @@ const MAX_TOOL_CACHE_SIZE = 500 * format-specific parsing from LLM API requests. 
*/ export async function syncToolCache( - client: any, - sessionId: string, state: SessionState, - tracker?: ToolTracker, - protectedTools?: Set, - logger?: Logger + logger: Logger, + messages: WithParts[], ): Promise { try { - const messagesResponse = await client.session.messages({ - path: { id: sessionId }, - query: { limit: 500 } - }) - const messages = messagesResponse.data || messagesResponse - - if (!Array.isArray(messages)) { - return - } - - let synced = 0 - // Build lowercase set of pruned IDs for comparison (IDs in state may be mixed case) - const prunedIdsLower = tracker - ? new Set((state.prunedIds.get(sessionId) ?? []).map(id => id.toLowerCase())) - : null - for (const msg of messages) { - if (!msg.parts) continue + if (!msg.parts) { + continue + } for (const part of msg.parts) { - if (part.type !== "tool" || !part.callID) continue - - const id = part.callID.toLowerCase() - - // Track tool results for nudge injection - if (tracker && !tracker.seenToolResultIds.has(id)) { - tracker.seenToolResultIds.add(id) - // Only count non-protected tools toward nudge threshold - // Also skip already-pruned tools to avoid re-counting on restart - if ((!part.tool || !protectedTools?.has(part.tool)) && !prunedIdsLower?.has(id)) { - tracker.toolResultCount++ - } + if (part.type !== "tool" || !part.callID || state.toolParameters.has(part.callID)) { + continue } - if (state.toolParameters.has(id)) continue - if (part.tool && protectedTools?.has(part.tool)) continue - - const status = part.state?.status as ToolStatus | undefined - state.toolParameters.set(id, { - tool: part.tool, - parameters: part.state?.input ?? {}, - status, - error: status === "error" ? part.state?.error : undefined, - }) - synced++ + state.toolParameters.set( + part.callID, + { + tool: part.tool, + parameters: part.state?.input ?? {}, + status: part.state.status as ToolStatus | undefined, + error: part.state.status === "error" ? part.state.error : undefined, + } + ) } } trimToolParametersCache(state) - - if (logger && synced > 0) { - logger.debug("tool-cache", "Synced tool parameters from OpenCode", { - sessionId: sessionId.slice(0, 8), - synced - }) - } } catch (error) { - logger?.warn("tool-cache", "Failed to sync tool parameters from OpenCode", { - sessionId: sessionId.slice(0, 8), + logger.warn("tool-cache", "Failed to sync tool parameters from OpenCode", { error: error instanceof Error ? 
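// The trimToolParametersCache helper called above is not shown in this patch; a minimal
// sketch of what it could look like, assuming FIFO eviction via Map insertion order.
// The name suffix "Sketch" marks this as an illustration, not the plugin's actual code.
function trimToolParametersCacheSketch(state: SessionState): void {
    // Map preserves insertion order, so the first keys are the oldest cached tool calls
    while (state.toolParameters.size > MAX_TOOL_CACHE_SIZE) {
        const oldestKey = state.toolParameters.keys().next().value
        if (oldestKey === undefined) break
        state.toolParameters.delete(oldestKey)
    }
}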
error.message : String(error) }) } diff --git a/lib/state/types.ts b/lib/state/types.ts index bd6c5cf..bfb20e7 100644 --- a/lib/state/types.ts +++ b/lib/state/types.ts @@ -1,7 +1,16 @@ +import { Message, Part } from "@opencode-ai/sdk" + +export interface WithParts { + info: Message + parts: Part[] +} + +export type ToolStatus = "pending" | "running" | "completed" | "error" + export interface ToolParameterEntry { tool: string parameters: any - status?: "pending" | "running" | "completed" | "error" + status?: ToolStatus error?: string } diff --git a/lib/ui/display-utils.ts b/lib/ui/display-utils.ts index 8c843f5..aedea27 100644 --- a/lib/ui/display-utils.ts +++ b/lib/ui/display-utils.ts @@ -1,4 +1,3 @@ -import type { PruningResult } from "../core/janitor" import { ToolParameterEntry } from "../state" /** diff --git a/lib/ui/notification.ts b/lib/ui/notification.ts index 53ca1eb..3ae888b 100644 --- a/lib/ui/notification.ts +++ b/lib/ui/notification.ts @@ -1,5 +1,5 @@ import type { Logger } from "../logger" -import type { SessionStats, GCStats } from "../core/janitor" +import type { SessionStats, GCStats } from "../state" import { formatTokenCount } from "../tokenizer" import { formatPrunedItemsList } from "./display-utils" import { ToolParameterEntry } from "../state" diff --git a/lib/utils.ts b/lib/utils.ts index a6742bf..7d080ea 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -1,3 +1,15 @@ +/** + * Checks if a session is a subagent session by looking for a parentID. + */ +export async function isSubagentSession(client: any, sessionID: string): Promise { + try { + const result = await client.session.get({ path: { id: sessionID } }) + return !!result.data?.parentID + } catch (error: any) { + return false + } +} + /** * Finds the current agent from messages by scanning backward for user messages. */ From 90496a3b031d22e55df503416ca7129fd2393eee Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 02:01:47 +0100 Subject: [PATCH 07/51] config logic --- lib/config.ts | 213 ++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 197 insertions(+), 16 deletions(-) diff --git a/lib/config.ts b/lib/config.ts index 76fe79f..c321344 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -41,6 +41,175 @@ export interface PluginConfig { const DEFAULT_PROTECTED_TOOLS = ['task', 'todowrite', 'todoread', 'prune', 'batch', 'write', 'edit'] +// Valid config keys for validation against user config +export const VALID_CONFIG_KEYS = new Set([ + // Top-level keys + 'enabled', + 'debug', + 'showUpdateToasts', + 'pruningSummary', + 'strategies', + // strategies.deduplication + 'strategies.deduplication', + 'strategies.deduplication.enabled', + // strategies.pruneThinkingBlocks + 'strategies.pruneThinkingBlocks', + 'strategies.pruneThinkingBlocks.enabled', + // strategies.onIdle + 'strategies.onIdle', + 'strategies.onIdle.enabled', + 'strategies.onIdle.model', + 'strategies.onIdle.showModelErrorToasts', + 'strategies.onIdle.strictModelSelection', + 'strategies.onIdle.protectedTools', + // strategies.pruneTool + 'strategies.pruneTool', + 'strategies.pruneTool.enabled', + 'strategies.pruneTool.protectedTools', + 'strategies.pruneTool.nudgeFrequency', +]) + +// Extract all key paths from a config object for validation +function getConfigKeyPaths(obj: Record, prefix = ''): string[] { + const keys: string[] = [] + for (const key of Object.keys(obj)) { + const fullKey = prefix ? 
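// Example of the data model introduced above (hypothetical values): one tool part from a
// WithParts message, reduced to the ToolParameterEntry that syncToolCache stores per callID.
const exampleEntry: ToolParameterEntry = {
    tool: "read",
    parameters: { filePath: "src/index.ts" },
    status: "completed",
}
// state.toolParameters.set("call_abc123", exampleEntry)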
`${prefix}.${key}` : key + keys.push(fullKey) + if (obj[key] && typeof obj[key] === 'object' && !Array.isArray(obj[key])) { + keys.push(...getConfigKeyPaths(obj[key], fullKey)) + } + } + return keys +} + +// Returns invalid keys found in user config +export function getInvalidConfigKeys(userConfig: Record): string[] { + const userKeys = getConfigKeyPaths(userConfig) + return userKeys.filter(key => !VALID_CONFIG_KEYS.has(key)) +} + +// Type validators for config values +interface ValidationError { + key: string + expected: string + actual: string +} + +function validateConfigTypes(config: Record): ValidationError[] { + const errors: ValidationError[] = [] + + // Top-level validators + if (config.enabled !== undefined && typeof config.enabled !== 'boolean') { + errors.push({ key: 'enabled', expected: 'boolean', actual: typeof config.enabled }) + } + if (config.debug !== undefined && typeof config.debug !== 'boolean') { + errors.push({ key: 'debug', expected: 'boolean', actual: typeof config.debug }) + } + if (config.showUpdateToasts !== undefined && typeof config.showUpdateToasts !== 'boolean') { + errors.push({ key: 'showUpdateToasts', expected: 'boolean', actual: typeof config.showUpdateToasts }) + } + if (config.pruningSummary !== undefined) { + const validValues = ['off', 'minimal', 'detailed'] + if (!validValues.includes(config.pruningSummary)) { + errors.push({ key: 'pruningSummary', expected: '"off" | "minimal" | "detailed"', actual: JSON.stringify(config.pruningSummary) }) + } + } + + // Strategies validators + const strategies = config.strategies + if (strategies) { + // deduplication + if (strategies.deduplication?.enabled !== undefined && typeof strategies.deduplication.enabled !== 'boolean') { + errors.push({ key: 'strategies.deduplication.enabled', expected: 'boolean', actual: typeof strategies.deduplication.enabled }) + } + + // pruneThinkingBlocks + if (strategies.pruneThinkingBlocks?.enabled !== undefined && typeof strategies.pruneThinkingBlocks.enabled !== 'boolean') { + errors.push({ key: 'strategies.pruneThinkingBlocks.enabled', expected: 'boolean', actual: typeof strategies.pruneThinkingBlocks.enabled }) + } + + // onIdle + if (strategies.onIdle) { + if (strategies.onIdle.enabled !== undefined && typeof strategies.onIdle.enabled !== 'boolean') { + errors.push({ key: 'strategies.onIdle.enabled', expected: 'boolean', actual: typeof strategies.onIdle.enabled }) + } + if (strategies.onIdle.model !== undefined && typeof strategies.onIdle.model !== 'string') { + errors.push({ key: 'strategies.onIdle.model', expected: 'string', actual: typeof strategies.onIdle.model }) + } + if (strategies.onIdle.showModelErrorToasts !== undefined && typeof strategies.onIdle.showModelErrorToasts !== 'boolean') { + errors.push({ key: 'strategies.onIdle.showModelErrorToasts', expected: 'boolean', actual: typeof strategies.onIdle.showModelErrorToasts }) + } + if (strategies.onIdle.strictModelSelection !== undefined && typeof strategies.onIdle.strictModelSelection !== 'boolean') { + errors.push({ key: 'strategies.onIdle.strictModelSelection', expected: 'boolean', actual: typeof strategies.onIdle.strictModelSelection }) + } + if (strategies.onIdle.protectedTools !== undefined && !Array.isArray(strategies.onIdle.protectedTools)) { + errors.push({ key: 'strategies.onIdle.protectedTools', expected: 'string[]', actual: typeof strategies.onIdle.protectedTools }) + } + } + + // pruneTool + if (strategies.pruneTool) { + if (strategies.pruneTool.enabled !== undefined && typeof strategies.pruneTool.enabled 
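// Worked example of the validation helpers above, on a hypothetical user config with one
// unknown key and one type error.
const userConfig = {
    debug: "yes",                                              // wrong type: should be boolean
    strategies: { onIdle: { modle: "openai/gpt-4o-mini" } },   // "modle" is an unknown key
}
// getConfigKeyPaths(userConfig)   -> ["debug", "strategies", "strategies.onIdle", "strategies.onIdle.modle"]
// getInvalidConfigKeys(userConfig) -> ["strategies.onIdle.modle"]
// validateConfigTypes(userConfig)  -> [{ key: "debug", expected: "boolean", actual: "string" }]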
!== 'boolean') { + errors.push({ key: 'strategies.pruneTool.enabled', expected: 'boolean', actual: typeof strategies.pruneTool.enabled }) + } + if (strategies.pruneTool.protectedTools !== undefined && !Array.isArray(strategies.pruneTool.protectedTools)) { + errors.push({ key: 'strategies.pruneTool.protectedTools', expected: 'string[]', actual: typeof strategies.pruneTool.protectedTools }) + } + if (strategies.pruneTool.nudgeFrequency !== undefined && typeof strategies.pruneTool.nudgeFrequency !== 'number') { + errors.push({ key: 'strategies.pruneTool.nudgeFrequency', expected: 'number', actual: typeof strategies.pruneTool.nudgeFrequency }) + } + } + } + + return errors +} + +// Show validation warnings for a config file +function showConfigValidationWarnings( + ctx: PluginInput, + configPath: string, + configData: Record, + isProject: boolean +): void { + const invalidKeys = getInvalidConfigKeys(configData) + const typeErrors = validateConfigTypes(configData) + + if (invalidKeys.length === 0 && typeErrors.length === 0) { + return + } + + const configType = isProject ? 'project config' : 'config' + const messages: string[] = [] + + if (invalidKeys.length > 0) { + const keyList = invalidKeys.slice(0, 3).join(', ') + const suffix = invalidKeys.length > 3 ? ` (+${invalidKeys.length - 3} more)` : '' + messages.push(`Unknown keys: ${keyList}${suffix}`) + } + + if (typeErrors.length > 0) { + for (const err of typeErrors.slice(0, 2)) { + messages.push(`${err.key}: expected ${err.expected}, got ${err.actual}`) + } + if (typeErrors.length > 2) { + messages.push(`(+${typeErrors.length - 2} more type errors)`) + } + } + + setTimeout(() => { + try { + ctx.client.tui.showToast({ + body: { + title: `DCP: Invalid ${configType}`, + message: `${configPath}\n${messages.join('\n')}`, + variant: "warning", + duration: 7000 + } + }) + } catch {} + }, 7000) +} + const defaultConfig: PluginConfig = { enabled: true, debug: false, @@ -249,15 +418,21 @@ export function getConfig(ctx: PluginInput): PluginConfig { if (configPaths.global) { const result = loadConfigFile(configPaths.global) if (result.parseError) { - ctx.client.tui.showToast({ - body: { - title: "DCP: Invalid config", - message: `${configPaths.global}\n${result.parseError}\nUsing default values`, - variant: "warning", - duration: 7000 - } - }).catch(() => {}) + setTimeout(async () => { + try { + ctx.client.tui.showToast({ + body: { + title: "DCP: Invalid config", + message: `${configPaths.global}\n${result.parseError}\nUsing default values`, + variant: "warning", + duration: 7000 + } + }) + } catch {} + }, 7000) } else if (result.data) { + // Validate config keys and types + showConfigValidationWarnings(ctx, configPaths.global, result.data, false) config = { enabled: result.data.enabled ?? config.enabled, debug: result.data.debug ?? 
config.debug, @@ -275,15 +450,21 @@ export function getConfig(ctx: PluginInput): PluginConfig { if (configPaths.project) { const result = loadConfigFile(configPaths.project) if (result.parseError) { - ctx.client.tui.showToast({ - body: { - title: "DCP: Invalid project config", - message: `${configPaths.project}\n${result.parseError}\nUsing global/default values`, - variant: "warning", - duration: 7000 - } - }).catch(() => {}) + setTimeout(async () => { + try { + ctx.client.tui.showToast({ + body: { + title: "DCP: Invalid project config", + message: `${configPaths.project}\n${result.parseError}\nUsing global/default values`, + variant: "warning", + duration: 7000 + } + }) + } catch {} + }, 7000) } else if (result.data) { + // Validate config keys and types + showConfigValidationWarnings(ctx, configPaths.project, result.data, true) config = { enabled: result.data.enabled ?? config.enabled, debug: result.data.debug ?? config.debug, From befa392eac1729cec204a86a7f3d145b366ee001 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 20:59:56 +0100 Subject: [PATCH 08/51] small refactor and prepare strategies --- lib/core/strategies/deduplication.ts | 89 --------------------- lib/core/strategies/index.ts | 111 --------------------------- lib/hooks.ts | 4 + lib/{core => }/prompt.ts | 2 +- lib/pruning-tool.ts | 2 +- lib/state/types.ts | 6 +- lib/strategies/deduplication.ts | 12 +++ lib/strategies/index.ts | 2 + 8 files changed, 25 insertions(+), 203 deletions(-) delete mode 100644 lib/core/strategies/deduplication.ts delete mode 100644 lib/core/strategies/index.ts rename lib/{core => }/prompt.ts (98%) create mode 100644 lib/strategies/deduplication.ts create mode 100644 lib/strategies/index.ts diff --git a/lib/core/strategies/deduplication.ts b/lib/core/strategies/deduplication.ts deleted file mode 100644 index 3bbcd28..0000000 --- a/lib/core/strategies/deduplication.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { extractParameterKey } from "../../ui/display-utils" -import type { PruningStrategy, StrategyResult, ToolParameterEntry } from "./index" - -/** - * Deduplication strategy - prunes older tool calls that have identical - * tool name and parameters, keeping only the most recent occurrence. - */ -export const deduplicationStrategy: PruningStrategy = { - name: "deduplication", - - detect( - toolMetadata: Map, - unprunedIds: string[], - protectedTools: string[] - ): StrategyResult { - const signatureMap = new Map() - - const deduplicatableIds = unprunedIds.filter(id => { - const metadata = toolMetadata.get(id) - const protectedToolsLower = protectedTools.map(t => t.toLowerCase()) - return !metadata || !protectedToolsLower.includes(metadata.tool.toLowerCase()) - }) - - for (const id of deduplicatableIds) { - const metadata = toolMetadata.get(id) - if (!metadata) continue - - const signature = createToolSignature(metadata.tool, metadata.parameters) - if (!signatureMap.has(signature)) { - signatureMap.set(signature, []) - } - signatureMap.get(signature)!.push(id) - } - - const prunedIds: string[] = [] - const details = new Map() - - for (const [signature, ids] of signatureMap.entries()) { - if (ids.length > 1) { - const metadata = toolMetadata.get(ids[0])! 
- const idsToRemove = ids.slice(0, -1) // All except last - prunedIds.push(...idsToRemove) - - details.set(signature, { - toolName: metadata.tool, - parameterKey: extractParameterKey(metadata), - reason: `duplicate (${ids.length} occurrences, kept most recent)`, - duplicateCount: ids.length, - prunedIds: idsToRemove, - keptId: ids[ids.length - 1] - }) - } - } - - return { prunedIds, details } - } -} - -function createToolSignature(tool: string, parameters?: any): string { - if (!parameters) return tool - - const normalized = normalizeParameters(parameters) - const sorted = sortObjectKeys(normalized) - return `${tool}::${JSON.stringify(sorted)}` -} - -function normalizeParameters(params: any): any { - if (typeof params !== 'object' || params === null) return params - if (Array.isArray(params)) return params - - const normalized: any = {} - for (const [key, value] of Object.entries(params)) { - if (value !== undefined && value !== null) { - normalized[key] = value - } - } - return normalized -} - -function sortObjectKeys(obj: any): any { - if (typeof obj !== 'object' || obj === null) return obj - if (Array.isArray(obj)) return obj.map(sortObjectKeys) - - const sorted: any = {} - for (const key of Object.keys(obj).sort()) { - sorted[key] = sortObjectKeys(obj[key]) - } - return sorted -} diff --git a/lib/core/strategies/index.ts b/lib/core/strategies/index.ts deleted file mode 100644 index 7314192..0000000 --- a/lib/core/strategies/index.ts +++ /dev/null @@ -1,111 +0,0 @@ -/** - * Strategy runner - executes all enabled pruning strategies and collects results. - */ - -import { ToolParameterEntry } from "../../state" -import { deduplicationStrategy } from "./deduplication" - -export type { ToolParameterEntry} - -/** - * Common interface for rule-based pruning strategies. - * Each strategy analyzes tool metadata and returns IDs that should be pruned. - */ - -export interface StrategyResult { - /** Tool call IDs that should be pruned */ - prunedIds: string[] - /** Optional details about what was pruned and why */ - details?: Map -} - -export interface StrategyDetail { - toolName: string - parameterKey: string - reason: string - /** Additional info specific to the strategy */ - [key: string]: any -} - -export interface PruningStrategy { - /** Unique identifier for this strategy */ - name: string - - /** - * Analyze tool metadata and determine which tool calls should be pruned. - * - * @param toolMetadata - Map of tool call ID to metadata (tool name + parameters) - * @param unprunedIds - Tool call IDs that haven't been pruned yet (chronological order) - * @param protectedTools - Tool names that should never be pruned - * @returns IDs to prune and optional details - */ - detect( - toolMetadata: Map, - unprunedIds: string[], - protectedTools: string[] - ): StrategyResult -} - -/** All available strategies */ -const ALL_STRATEGIES: PruningStrategy[] = [ - deduplicationStrategy, - // Future strategies will be added here: - // errorPruningStrategy, - // writeReadStrategy, - // partialReadStrategy, -] - -export interface RunStrategiesResult { - /** All tool IDs that should be pruned (deduplicated) */ - prunedIds: string[] - /** Results keyed by strategy name */ - byStrategy: Map -} - -/** - * Run all enabled strategies and collect pruned IDs. 
- * - * @param toolMetadata - Map of tool call ID to metadata - * @param unprunedIds - Tool call IDs not yet pruned (chronological order) - * @param protectedTools - Tool names that should never be pruned - * @param enabledStrategies - Strategy names to run (defaults to all) - */ -export function runStrategies( - toolMetadata: Map, - unprunedIds: string[], - protectedTools: string[], - enabledStrategies?: string[] -): RunStrategiesResult { - const byStrategy = new Map() - const allPrunedIds = new Set() - - // Filter to enabled strategies (or all if not specified) - const strategies = enabledStrategies - ? ALL_STRATEGIES.filter(s => enabledStrategies.includes(s.name)) - : ALL_STRATEGIES - - // Track which IDs are still available for each strategy - let remainingIds = unprunedIds - - for (const strategy of strategies) { - const result = strategy.detect(toolMetadata, remainingIds, protectedTools) - - if (result.prunedIds.length > 0) { - byStrategy.set(strategy.name, result) - - // Add to overall pruned set - for (const id of result.prunedIds) { - allPrunedIds.add(id) - } - - // Remove pruned IDs from remaining for next strategy - const prunedSet = new Set(result.prunedIds.map(id => id.toLowerCase())) - remainingIds = remainingIds.filter(id => !prunedSet.has(id.toLowerCase())) - } - } - - return { - prunedIds: Array.from(allPrunedIds), - byStrategy - } -} diff --git a/lib/hooks.ts b/lib/hooks.ts index eb7b56b..ee457ed 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -2,6 +2,7 @@ import type { SessionState, WithParts } from "./state" import type { Logger } from "./logger" import type { PluginConfig } from "./config" import { syncToolCache } from "./state/tool-cache" +import { deduplicate } from "./strategies" export function createChatMessageTransformHandler( @@ -15,6 +16,9 @@ export function createChatMessageTransformHandler( output: { messages: WithParts[] } ) => { syncToolCache(state, logger, output.messages); + + deduplicate(state, logger, config, output.messages) + pruneTool(state, logger, config, output.messages) } } diff --git a/lib/core/prompt.ts b/lib/prompt.ts similarity index 98% rename from lib/core/prompt.ts rename to lib/prompt.ts index e7f44d4..e210284 100644 --- a/lib/core/prompt.ts +++ b/lib/prompt.ts @@ -2,7 +2,7 @@ import { readFileSync } from "fs" import { join } from "path" export function loadPrompt(name: string, vars?: Record): string { - const filePath = join(__dirname, "..", "prompts", `${name}.txt`) + const filePath = join(__dirname, "prompts", `${name}.txt`) let content = readFileSync(filePath, "utf8").trim() if (vars) { for (const [key, value] of Object.entries(vars)) { diff --git a/lib/pruning-tool.ts b/lib/pruning-tool.ts index 39713e3..c78cd97 100644 --- a/lib/pruning-tool.ts +++ b/lib/pruning-tool.ts @@ -8,7 +8,7 @@ import { ensureSessionInitialized } from "./state" import { saveSessionState } from "./state/persistence" import type { Logger } from "./logger" import { estimateTokensBatch } from "./tokenizer" -import { loadPrompt } from "./core/prompt" +import { loadPrompt } from "./prompt" /** Tool description loaded from prompts/tool.txt */ const TOOL_DESCRIPTION = loadPrompt("tool") diff --git a/lib/state/types.ts b/lib/state/types.ts index bfb20e7..e2c4b19 100644 --- a/lib/state/types.ts +++ b/lib/state/types.ts @@ -26,9 +26,13 @@ export interface SessionStats { totalGCTools: number } +export interface Prune { + toolIds: string[] +} + export interface SessionState { sessionId: string | null - prunedIds: string[] + prune: Prune stats: SessionStats gcPending: 
GCStats toolParameters: Map diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts new file mode 100644 index 0000000..cdcd83b --- /dev/null +++ b/lib/strategies/deduplication.ts @@ -0,0 +1,12 @@ +import { PluginConfig } from "../config" +import { Logger } from "../logger" +import type { SessionState, WithParts } from "../state" + +export const deduplicate = ( + state: SessionState, + logger: Logger, + config: PluginConfig, + messages: WithParts[] +) => { + +} diff --git a/lib/strategies/index.ts b/lib/strategies/index.ts new file mode 100644 index 0000000..0bd83ff --- /dev/null +++ b/lib/strategies/index.ts @@ -0,0 +1,2 @@ +export { deduplicate } from "./deduplication" + From 58145ba8ad1a200115319cb2024feadea8692610 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 22:52:34 +0100 Subject: [PATCH 09/51] refactor --- index.ts | 2 +- lib/config.ts | 27 +++++++++-- lib/hooks.ts | 3 -- lib/state/persistence.ts | 14 +++--- lib/state/state.ts | 12 +++-- lib/state/types.ts | 12 +---- lib/{ => strategies}/pruning-tool.ts | 45 ++++++++---------- lib/ui/notification.ts | 68 +++++++++------------------- lib/utils.ts | 12 ++--- 9 files changed, 89 insertions(+), 106 deletions(-) rename lib/{ => strategies}/pruning-tool.ts (79%) diff --git a/index.ts b/index.ts index 1dab272..4a93d95 100644 --- a/index.ts +++ b/index.ts @@ -2,7 +2,7 @@ import type { Plugin } from "@opencode-ai/plugin" import { getConfig } from "./lib/config" import { Logger } from "./lib/logger" import { createSessionState } from "./lib/state" -import { createPruningTool } from "./lib/pruning-tool" +import { createPruningTool } from "./lib/strategies/pruning-tool" import { createChatMessageTransformHandler } from "./lib/hooks" const plugin: Plugin = (async (ctx) => { diff --git a/lib/config.ts b/lib/config.ts index c321344..7487e47 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -6,6 +6,7 @@ import type { PluginInput } from '@opencode-ai/plugin' export interface Deduplication { enabled: boolean + protectedTools: string[] } export interface PruneThinkingBlocks { @@ -52,6 +53,7 @@ export const VALID_CONFIG_KEYS = new Set([ // strategies.deduplication 'strategies.deduplication', 'strategies.deduplication.enabled', + 'strategies.deduplication.protectedTools', // strategies.pruneThinkingBlocks 'strategies.pruneThinkingBlocks', 'strategies.pruneThinkingBlocks.enabled', @@ -122,6 +124,9 @@ function validateConfigTypes(config: Record): ValidationError[] { if (strategies.deduplication?.enabled !== undefined && typeof strategies.deduplication.enabled !== 'boolean') { errors.push({ key: 'strategies.deduplication.enabled', expected: 'boolean', actual: typeof strategies.deduplication.enabled }) } + if (strategies.deduplication?.protectedTools !== undefined && !Array.isArray(strategies.deduplication.protectedTools)) { + errors.push({ key: 'strategies.deduplication.protectedTools', expected: 'string[]', actual: typeof strategies.deduplication.protectedTools }) + } // pruneThinkingBlocks if (strategies.pruneThinkingBlocks?.enabled !== undefined && typeof strategies.pruneThinkingBlocks.enabled !== 'boolean') { @@ -217,7 +222,8 @@ const defaultConfig: PluginConfig = { pruningSummary: 'detailed', strategies: { deduplication: { - enabled: true + enabled: true, + protectedTools: [...DEFAULT_PROTECTED_TOOLS] }, pruneThinkingBlocks: { enabled: true @@ -297,7 +303,9 @@ function createDefaultConfig(): void { "strategies": { // Remove duplicate tool calls (same tool with same arguments) "deduplication": { - 
"enabled": true + "enabled": true, + // Additional tools to protect from pruning + "protectedTools": [] }, // Remove thinking/reasoning LLM blocks "pruneThinkingBlocks": { @@ -362,7 +370,13 @@ function mergeStrategies( return { deduplication: { - enabled: override.deduplication?.enabled ?? base.deduplication.enabled + enabled: override.deduplication?.enabled ?? base.deduplication.enabled, + protectedTools: [ + ...new Set([ + ...base.deduplication.protectedTools, + ...(override.deduplication?.protectedTools ?? []) + ]) + ] }, pruneThinkingBlocks: { enabled: override.pruneThinkingBlocks?.enabled ?? base.pruneThinkingBlocks.enabled @@ -396,7 +410,10 @@ function deepCloneConfig(config: PluginConfig): PluginConfig { return { ...config, strategies: { - deduplication: { ...config.strategies.deduplication }, + deduplication: { + ...config.strategies.deduplication, + protectedTools: [...config.strategies.deduplication.protectedTools] + }, pruneThinkingBlocks: { ...config.strategies.pruneThinkingBlocks }, onIdle: { ...config.strategies.onIdle, @@ -476,4 +493,4 @@ export function getConfig(ctx: PluginInput): PluginConfig { } return config -} + diff --git a/lib/hooks.ts b/lib/hooks.ts index ee457ed..0f39303 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -18,8 +18,5 @@ export function createChatMessageTransformHandler( syncToolCache(state, logger, output.messages); deduplicate(state, logger, config, output.messages) - pruneTool(state, logger, config, output.messages) } } - - diff --git a/lib/state/persistence.ts b/lib/state/persistence.ts index e16f020..f9b1e21 100644 --- a/lib/state/persistence.ts +++ b/lib/state/persistence.ts @@ -8,12 +8,12 @@ import * as fs from "fs/promises"; import { existsSync } from "fs"; import { homedir } from "os"; import { join } from "path"; -import type { SessionState, SessionStats } from "./types" +import type { SessionState, SessionStats, Prune } from "./types" import type { Logger } from "../logger"; export interface PersistedSessionState { sessionName?: string; - prunedIds: string[]; + prune: Prune stats: SessionStats; lastUpdated: string; } @@ -52,7 +52,7 @@ export async function saveSessionState( const state: PersistedSessionState = { sessionName: sessionName, - prunedIds: sessionState.prunedIds, + prune: sessionState.prune, stats: sessionState.stats, lastUpdated: new Date().toISOString(), }; @@ -63,7 +63,6 @@ export async function saveSessionState( logger.info("persist", "Saved session state to disk", { sessionId: sessionState.sessionId.slice(0, 8), - prunedIds: state.prunedIds.length, totalTokensSaved: state.stats.totalTokensSaved, }); } catch (error: any) { @@ -88,7 +87,11 @@ export async function loadSessionState( const content = await fs.readFile(filePath, "utf-8"); const state = JSON.parse(content) as PersistedSessionState; - if (!state || !Array.isArray(state.prunedIds) || !state.stats) { + if (!state || + !state.prune || + !Array.isArray(state.prune.toolIds) || + !state.stats + ) { logger.warn("persist", "Invalid session state file, ignoring", { sessionId: sessionId.slice(0, 8), }); @@ -97,7 +100,6 @@ export async function loadSessionState( logger.info("persist", "Loaded session state from disk", { sessionId: sessionId.slice(0, 8), - prunedIds: state.prunedIds.length, totalTokensSaved: state.stats.totalTokensSaved, }); diff --git a/lib/state/state.ts b/lib/state/state.ts index ad15d1c..fa0368e 100644 --- a/lib/state/state.ts +++ b/lib/state/state.ts @@ -5,7 +5,9 @@ import { loadSessionState } from "./persistence" export function createSessionState(): 
SessionState { return { sessionId: null, - prunedIds: [], + prune: { + toolIds: [] + }, stats: { totalToolsPruned: 0, totalTokensSaved: 0, @@ -22,7 +24,9 @@ export function createSessionState(): SessionState { export function resetSessionState(state: SessionState): void { state.sessionId = null - state.prunedIds = [] + state.prune = { + toolIds: [] + } state.stats = { totalToolsPruned: 0, totalTokensSaved: 0, @@ -56,7 +60,9 @@ export async function ensureSessionInitialized( } // Populate state with loaded data - state.prunedIds = persisted.prunedIds || [] + state.prune = { + toolIds: persisted.prune.toolIds || [] + } state.stats = { totalToolsPruned: persisted.stats.totalToolsPruned || 0, totalTokensSaved: persisted.stats.totalTokensSaved || 0, diff --git a/lib/state/types.ts b/lib/state/types.ts index e2c4b19..b0b6b69 100644 --- a/lib/state/types.ts +++ b/lib/state/types.ts @@ -14,16 +14,9 @@ export interface ToolParameterEntry { error?: string } -export interface GCStats { - tokensCollected: number - toolsDeduped: number -} - export interface SessionStats { - totalToolsPruned: number - totalTokensSaved: number - totalGCTokens: number - totalGCTools: number + pruneTokenCounter: number + totalPruneTokens: number } export interface Prune { @@ -34,6 +27,5 @@ export interface SessionState { sessionId: string | null prune: Prune stats: SessionStats - gcPending: GCStats toolParameters: Map } diff --git a/lib/pruning-tool.ts b/lib/strategies/pruning-tool.ts similarity index 79% rename from lib/pruning-tool.ts rename to lib/strategies/pruning-tool.ts index c78cd97..80509bd 100644 --- a/lib/pruning-tool.ts +++ b/lib/strategies/pruning-tool.ts @@ -1,14 +1,14 @@ import { tool } from "@opencode-ai/plugin" -import type { SessionState, ToolParameterEntry} from "./state" -import type { PluginConfig } from "./config" -import { findCurrentAgent, buildToolIdList, getPrunedIds } from "./utils" -import { PruneReason, sendUnifiedNotification } from "./ui/notification" -import { formatPruningResultForTool } from "./ui/display-utils" -import { ensureSessionInitialized } from "./state" -import { saveSessionState } from "./state/persistence" -import type { Logger } from "./logger" -import { estimateTokensBatch } from "./tokenizer" -import { loadPrompt } from "./prompt" +import type { SessionState, ToolParameterEntry} from "../state" +import type { PluginConfig } from "../config" +import { findCurrentAgent, buildToolIdList, getPruneToolIds } from "../utils" +import { PruneReason, sendUnifiedNotification } from "../ui/notification" +import { formatPruningResultForTool } from "../ui/display-utils" +import { ensureSessionInitialized } from "../state" +import { saveSessionState } from "../state/persistence" +import type { Logger } from "../logger" +import { estimateTokensBatch } from "../tokenizer" +import { loadPrompt } from "../prompt" /** Tool description loaded from prompts/tool.txt */ const TOOL_DESCRIPTION = loadPrompt("tool") @@ -56,8 +56,8 @@ export function createPruningTool( return "No valid pruning reason found. Use 'completion', 'noise', or 'consolidation' as the first element." } - const numericIds: number[] = args.ids.slice(1).filter((id): id is number => typeof id === "number") - if (numericIds.length === 0) { + const numericToolIds: number[] = args.ids.slice(1).filter((id): id is number => typeof id === "number") + if (numericToolIds.length === 0) { return "No numeric IDs provided. Format: [reason, id1, id2, ...] where reason is 'completion', 'noise', or 'consolidation'." 
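// Worked example of the expected ids argument (hypothetical values): the first element is the
// pruning reason, the remaining numeric entries index into the tool-call list shown to the model.
const exampleIds: (string | number)[] = ["completion", 0, 2]
const exampleToolIdList = ["call_read_1", "call_grep_2", "call_read_3"]
// getPruneToolIds([0, 2], exampleToolIdList) returns ["call_read_1", "call_read_3"];
// out-of-range or negative indexes are silently dropped by the bounds check in lib/utils.ts.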
} @@ -67,22 +67,20 @@ export function createPruningTool( const messages = await client.session.messages({ path: { id: sessionId } }) - // const messages = messagesResponse.data || messagesResponse // Need this? const currentAgent: string | undefined = findCurrentAgent(messages) const toolIdList: string[] = buildToolIdList(messages) - const prunedIds: string[] = getPrunedIds(numericIds, toolIdList) - const tokensSaved = await calculateTokensSavedFromMessages(messages, prunedIds) + const pruneToolIds: string[] = getPruneToolIds(numericToolIds, toolIdList) + const tokensSaved = await calculateTokensSavedFromMessages(messages, pruneToolIds) - state.stats.totalTokensSaved += tokensSaved - state.stats.totalToolsPruned += prunedIds.length - state.prunedIds.push(...prunedIds) + state.stats.pruneTokenCounter += tokensSaved + state.prune.toolIds.push(...pruneToolIds) saveSessionState(state, logger) .catch(err => logger.error("prune-tool", "Failed to persist state", { error: err.message })) const toolMetadata = new Map() - for (const id of prunedIds) { + for (const id of pruneToolIds) { const toolParameters = state.toolParameters.get(id) if (toolParameters) { toolMetadata.set(id, toolParameters) @@ -95,20 +93,17 @@ export function createPruningTool( client, logger, config, + state, sessionId, - prunedIds.length, - tokensSaved, - prunedIds, + pruneToolIds, toolMetadata, - null, - state.stats, reason as PruneReason, currentAgent, workingDirectory ) return formatPruningResultForTool( - prunedIds, + pruneToolIds, toolMetadata, workingDirectory ) diff --git a/lib/ui/notification.ts b/lib/ui/notification.ts index 3ae888b..a84d283 100644 --- a/lib/ui/notification.ts +++ b/lib/ui/notification.ts @@ -1,5 +1,5 @@ import type { Logger } from "../logger" -import type { SessionStats, GCStats } from "../state" +import type { SessionState } from "../state" import { formatTokenCount } from "../tokenizer" import { formatPrunedItemsList } from "./display-utils" import { ToolParameterEntry } from "../state" @@ -12,32 +12,15 @@ export const PRUNE_REASON_LABELS: Record = { consolidation: "Consolidation" } -function calculateStats( - tokensSaved: number, - gcPending: GCStats | null, - sessionStats: SessionStats -): { - justNowTokens: number - totalTokens: number -} { - const justNowTokens = tokensSaved + (gcPending?.tokensCollected ?? 0) - - const totalTokens = sessionStats - ? 
sessionStats.totalTokensSaved + sessionStats.totalGCTokens - : justNowTokens - - return { justNowTokens, totalTokens } -} - function formatStatsHeader( - totalTokens: number, - justNowTokens: number + totalTokensSaved: number, + pruneTokenCounter: number ): string { - const totalTokensStr = `~${formatTokenCount(totalTokens)}` - const justNowTokensStr = `~${formatTokenCount(justNowTokens)}` + const totalTokensSavedStr = `~${formatTokenCount(totalTokensSaved)}` + const pruneTokenCounterStr = `~${formatTokenCount(pruneTokenCounter)}` - const maxTokenLen = Math.max(totalTokensStr.length, justNowTokensStr.length) - const totalTokensPadded = totalTokensStr.padStart(maxTokenLen) + const maxTokenLen = Math.max(pruneTokenCounterStr.length, pruneTokenCounterStr.length) + const totalTokensPadded = totalTokensSavedStr.padStart(maxTokenLen) return [ `▣ DCP | ${totalTokensPadded} saved total`, @@ -45,31 +28,27 @@ function formatStatsHeader( } function buildMinimalMessage( - tokensSaved: number, - gcPending: GCStats | null, - sessionStats: SessionStats, + state: SessionState, reason: PruneReason | undefined ): string { - const { justNowTokens, totalTokens } = calculateStats(tokensSaved, gcPending, sessionStats) const reasonSuffix = reason ? ` [${PRUNE_REASON_LABELS[reason]}]` : '' - return formatStatsHeader(totalTokens, justNowTokens) + reasonSuffix + return formatStatsHeader( + state.stats.totalPruneTokens, + state.stats.pruneTokenCounter + ) + reasonSuffix } function buildDetailedMessage( - tokensSaved: number, - gcPending: GCStats | null, - sessionStats: SessionStats, + state: SessionState, reason: PruneReason | undefined, prunedIds: string[], toolMetadata: Map, workingDirectory?: string ): string { - const { justNowTokens, totalTokens } = calculateStats(tokensSaved, gcPending, sessionStats) - - let message = formatStatsHeader(totalTokens, justNowTokens) + let message = formatStatsHeader(state.stats.totalPruneTokens, state.stats.pruneTokenCounter) if (prunedIds.length > 0) { - const justNowTokensStr = `~${formatTokenCount(justNowTokens)}` + const justNowTokensStr = `~${formatTokenCount(state.stats.pruneTokenCounter)}` const reasonLabel = reason ? ` — ${PRUNE_REASON_LABELS[reason]}` : '' message += `\n\n▣ Pruned tools (${justNowTokensStr})${reasonLabel}` @@ -84,21 +63,16 @@ export async function sendUnifiedNotification( client: any, logger: Logger, config: PluginConfig, + state: SessionState, sessionId: string, - prunedCount: number, - tokensSaved: number, - prunedIds: string[], + pruneToolIds: string[], toolMetadata: Map, - gcPending: GCStats | null, - sessionStats: SessionStats, reason: PruneReason | undefined, agent: string | undefined, workingDirectory: string ): Promise { - const hasPruned = prunedCount > 0 - const hasGcActivity = gcPending && gcPending.toolsDeduped > 0 - - if (!hasPruned && !hasGcActivity) { + const hasPruned = pruneToolIds.length > 0 + if (!hasPruned) { return false } @@ -107,8 +81,8 @@ export async function sendUnifiedNotification( } const message = config.pruningSummary === 'minimal' - ? buildMinimalMessage(tokensSaved, gcPending, sessionStats, reason) - : buildDetailedMessage(tokensSaved, gcPending, sessionStats, reason, prunedIds, toolMetadata, workingDirectory) + ? 
buildMinimalMessage(state, reason) + : buildDetailedMessage(state, reason, pruneToolIds, toolMetadata, workingDirectory) await sendIgnoredMessage(client, logger, sessionId, message, agent) return true diff --git a/lib/utils.ts b/lib/utils.ts index 7d080ea..5dec9e4 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -42,14 +42,14 @@ export function buildToolIdList(messages: any[]): string[] { } /** - * Prunes numeric IDs to valid tool call IDs based on the provided tool ID list. + * Prunes numeric tool IDs to valid tool call IDs based on the provided tool ID list. */ -export function getPrunedIds(numericIds: number[], toolIdList: string[]): string[] { - const prunedIds: string[] = [] - for (const index of numericIds) { +export function getPruneToolIds(numericToolIds: number[], toolIdList: string[]): string[] { + const pruneToolIds: string[] = [] + for (const index of numericToolIds) { if (!isNaN(index) && index >= 0 && index < toolIdList.length) { - prunedIds.push(toolIdList[index]) + pruneToolIds.push(toolIdList[index]) } } - return prunedIds + return pruneToolIds } From 32d782dbafc04cff4c15945368f34c101a9a142e Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 22:53:56 +0100 Subject: [PATCH 10/51] delete claude.md --- CLAUDE.md | 79 ------------------------------------------------------- 1 file changed, 79 deletions(-) delete mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 100644 index 64c26ef..0000000 --- a/CLAUDE.md +++ /dev/null @@ -1,79 +0,0 @@ -# CLAUDE.md - -This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. - -## Build & Development Commands - -```bash -npm run build # Clean, compile TypeScript, copy prompts to dist/ -npm run dev # Run plugin in development mode (opencode plugin dev) -npm run typecheck # Type check without emitting files -npm run test # Run tests (node --import tsx --test tests/*.test.ts) -npm run clean # Remove dist directory - -# Publishing -npm version patch # Bump version before publishing -npm publish # Publish to npm -``` - -## Architecture Overview - -This is an OpenCode plugin that reduces token usage by pruning obsolete tool outputs from conversation history. The plugin intercepts API requests via a global fetch wrapper and replaces pruned tool outputs with placeholder text. - -### Core Flow - -1. **Entry Point** (`index.ts`): Initializes the plugin, creates state, installs the fetch wrapper, and registers hooks -2. **Fetch Wrapper** (`lib/fetch-wrapper/`): Intercepts outgoing LLM API calls to replace pruned tool outputs. Supports multiple API formats: - - OpenAI Chat Completions / Anthropic (`openai-chat.ts`) - - Google/Gemini (`gemini.ts`) - - OpenAI Responses API (`openai-responses.ts`) -3. 
**Janitor** (`lib/janitor.ts`): Core pruning logic that orchestrates analysis and tracks pruned IDs - -### Pruning Strategies - -Two complementary strategies in `lib/`: -- **Deduplication** (`deduplicator.ts`): Fast, zero-cost removal of duplicate tool calls by matching tool name + parameters -- **AI Analysis**: Uses an LLM to semantically identify obsolete tool outputs (prompts in `lib/prompts/`) - -### State Management - -`lib/state.ts` defines `SessionState` with Maps for: -- `prunedIds`: Session → pruned tool call IDs -- `stats`: Session → token savings statistics -- `toolParameters`: Tool call ID → parameters (for display and deduplication) -- `model`: Session → model info cache -- `googleToolCallMapping`: Session → position-based ID mapping for Google/Gemini - -### Hook System - -`lib/hooks.ts` provides two hooks: -- `event`: Triggers pruning when session goes idle -- `chat.params`: Caches model info and builds Google tool call mappings - -### Model Selection - -`lib/model-selector.ts` handles dynamic model selection with fallback chain: -1. Config-specified model (`dcp.jsonc`) -2. Current session model -3. Provider fallback models (OpenAI → Anthropic → Google → etc.) - -### Configuration - -`lib/config.ts` loads config from: -1. `~/.config/opencode/dcp.jsonc` (global) -2. `.opencode/dcp.jsonc` (project, overrides global) - -Key config options: `enabled`, `debug`, `model`, `strategies.onIdle`, `strategies.onTool`, `protectedTools`, `nudgeFreq` - -### Synthetic Instructions - -`lib/synth-instruction.ts` and `lib/prompt.ts` inject nudge reminders into conversations prompting the AI to call the `context_pruning` tool. - -## Key Implementation Details - -- Session history is never modified; pruning happens only in outgoing API requests -- Tool outputs are replaced with: `[Output removed to save context - information superseded or no longer needed]` -- Protected tools (task, todowrite, todoread, context_pruning) are never pruned -- Google/Gemini requires position-based correlation since native format loses tool call IDs -- When working on this plugin, reference the OpenCode source code to understand the plugin API and hook system -- Debug logs are written to `~/.config/opencode/logs/dcp/` when `debug: true` in config From da8520598ea2f1bdb1718c0aca5467af0affef8f Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 22:54:35 +0100 Subject: [PATCH 11/51] remove old log watch script --- watch-logs.sh | 58 --------------------------------------------------- 1 file changed, 58 deletions(-) delete mode 100755 watch-logs.sh diff --git a/watch-logs.sh b/watch-logs.sh deleted file mode 100755 index 2b833f1..0000000 --- a/watch-logs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -# Helper script to watch plugin logs in real time - -PLUGIN_DIR="$(cd "$(dirname "$0")" && pwd)" -LOG_DIR="$PLUGIN_DIR/logs" -LOG_FILE="$LOG_DIR/$(date +%Y-%m-%d).log" - -echo "Dynamic Context Pruning - Log Viewer" -echo "====================================" -echo "" - -# Check if debug is enabled -if grep -q "debug: false" "$PLUGIN_DIR/lib/config.ts"; then - echo "⚠️ WARNING: Debug logging is DISABLED" - echo " Edit lib/config.ts and set 'debug: true' to enable logging" - echo "" - exit 1 -fi - -echo "✓ Debug logging is enabled" -echo "" - -# Check if log file exists -if [ ! -f "$LOG_FILE" ]; then - echo "ℹ️ Log file not found: $LOG_FILE" - echo "" - echo " This means OpenCode hasn't been restarted since the plugin was updated." - echo "" - echo " To generate logs:" - echo " 1. 
Restart OpenCode to reload the plugin" - echo " 2. Logs will be created automatically" - echo "" - echo " Waiting for log file to appear..." - echo " (Press Ctrl+C to cancel)" - echo "" - - # Wait for file to be created - while [ ! -f "$LOG_FILE" ]; do - sleep 2 - done - - echo "✓ Log file created!" - echo "" -fi - -echo "📺 Watching: $LOG_FILE" -echo " Press Ctrl+C to stop" -echo "" -echo "----------------------------------------" -echo "" - -# Show all logs with pretty printing if jq is available -if command -v jq &> /dev/null; then - tail -f "$LOG_FILE" | jq --color-output '.' -else - tail -f "$LOG_FILE" -fi - From 0e7630d8029ef445d1a54fa62b9730ab288bda1a Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 23:31:21 +0100 Subject: [PATCH 12/51] deduplicate tool id logic --- index.ts | 4 +- lib/config.ts | 2 +- lib/hooks.ts | 2 + lib/state/persistence.ts | 4 +- lib/strategies/deduplication.ts | 110 +++++++++++++++++- .../{pruning-tool.ts => prune-tool.ts} | 4 +- 6 files changed, 118 insertions(+), 8 deletions(-) rename lib/strategies/{pruning-tool.ts => prune-tool.ts} (98%) diff --git a/index.ts b/index.ts index 4a93d95..b7e96ed 100644 --- a/index.ts +++ b/index.ts @@ -2,7 +2,7 @@ import type { Plugin } from "@opencode-ai/plugin" import { getConfig } from "./lib/config" import { Logger } from "./lib/logger" import { createSessionState } from "./lib/state" -import { createPruningTool } from "./lib/strategies/pruning-tool" +import { createPruneTool } from "./lib/strategies/prune-tool" import { createChatMessageTransformHandler } from "./lib/hooks" const plugin: Plugin = (async (ctx) => { @@ -34,7 +34,7 @@ const plugin: Plugin = (async (ctx) => { config ), tool: config.strategies.pruneTool.enabled ? { - prune: createPruningTool({ + prune: createPruneTool({ client: ctx.client, state, logger, diff --git a/lib/config.ts b/lib/config.ts index 7487e47..6352076 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -493,4 +493,4 @@ export function getConfig(ctx: PluginInput): PluginConfig { } return config - +} diff --git a/lib/hooks.ts b/lib/hooks.ts index 0f39303..4b80b34 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -18,5 +18,7 @@ export function createChatMessageTransformHandler( syncToolCache(state, logger, output.messages); deduplicate(state, logger, config, output.messages) + + prune(state, logger, config, output.messages) } } diff --git a/lib/state/persistence.ts b/lib/state/persistence.ts index f9b1e21..d656bae 100644 --- a/lib/state/persistence.ts +++ b/lib/state/persistence.ts @@ -63,7 +63,7 @@ export async function saveSessionState( logger.info("persist", "Saved session state to disk", { sessionId: sessionState.sessionId.slice(0, 8), - totalTokensSaved: state.stats.totalTokensSaved, + totalTokensSaved: state.stats.totalPruneTokens }); } catch (error: any) { logger.error("persist", "Failed to save session state", { @@ -100,7 +100,7 @@ export async function loadSessionState( logger.info("persist", "Loaded session state from disk", { sessionId: sessionId.slice(0, 8), - totalTokensSaved: state.stats.totalTokensSaved, + totalTokensSaved: state.stats.totalPruneTokens }); return state; diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts index cdcd83b..b362da1 100644 --- a/lib/strategies/deduplication.ts +++ b/lib/strategies/deduplication.ts @@ -2,11 +2,119 @@ import { PluginConfig } from "../config" import { Logger } from "../logger" import type { SessionState, WithParts } from "../state" +/** + * Deduplication strategy - prunes older tool calls that 
have identical + * tool name and parameters, keeping only the most recent occurrence. + * Modifies the session state in place to add pruned tool call IDs. + */ export const deduplicate = ( state: SessionState, logger: Logger, config: PluginConfig, messages: WithParts[] -) => { +): void => { + if (!config.strategies.deduplication.enabled) { + return + } + // Build list of all tool call IDs from messages (chronological order) + const allToolIds = buildToolIdList(messages) + if (allToolIds.length === 0) { + return + } + + // Filter out IDs already pruned + const alreadyPruned = new Set(state.prune.toolIds) + const unprunedIds = allToolIds.filter(id => !alreadyPruned.has(id)) + + if (unprunedIds.length === 0) { + return + } + + const protectedTools = config.strategies.deduplication.protectedTools + + // Group by signature (tool name + normalized parameters) + const signatureMap = new Map() + + for (const id of unprunedIds) { + const metadata = state.toolParameters.get(id) + if (!metadata) { + logger.warn("deduplication", `Missing metadata for tool call ID: ${id}`) + continue + } + + // Skip protected tools + if (protectedTools.includes(metadata.tool)) { + continue + } + + const signature = createToolSignature(metadata.tool, metadata.parameters) + if (!signatureMap.has(signature)) { + signatureMap.set(signature, []) + } + signatureMap.get(signature)!.push(id) + } + + // Find duplicates - keep only the most recent (last) in each group + const newPruneIds: string[] = [] + + for (const [, ids] of signatureMap.entries()) { + if (ids.length > 1) { + // All except last (most recent) should be pruned + const idsToRemove = ids.slice(0, -1) + newPruneIds.push(...idsToRemove) + } + } + + if (newPruneIds.length > 0) { + state.prune.toolIds.push(...newPruneIds) + logger.debug("dedulication", `Marked ${newPruneIds.length} duplicate tool calls for pruning`) + } +} + +function buildToolIdList(messages: WithParts[]): string[] { + const toolIds: string[] = [] + for (const msg of messages) { + if (msg.parts) { + for (const part of msg.parts) { + if (part.type === 'tool' && part.callID && part.tool) { + toolIds.push(part.callID) + } + } + } + } + return toolIds +} + +function createToolSignature(tool: string, parameters?: any): string { + if (!parameters) { + return tool + } + const normalized = normalizeParameters(parameters) + const sorted = sortObjectKeys(normalized) + return `${tool}::${JSON.stringify(sorted)}` +} + +function normalizeParameters(params: any): any { + if (typeof params !== 'object' || params === null) return params + if (Array.isArray(params)) return params + + const normalized: any = {} + for (const [key, value] of Object.entries(params)) { + if (value !== undefined && value !== null) { + normalized[key] = value + } + } + return normalized +} + +function sortObjectKeys(obj: any): any { + if (typeof obj !== 'object' || obj === null) return obj + if (Array.isArray(obj)) return obj.map(sortObjectKeys) + + const sorted: any = {} + for (const key of Object.keys(obj).sort()) { + sorted[key] = sortObjectKeys(obj[key]) + } + return sorted } diff --git a/lib/strategies/pruning-tool.ts b/lib/strategies/prune-tool.ts similarity index 98% rename from lib/strategies/pruning-tool.ts rename to lib/strategies/prune-tool.ts index 80509bd..e98bf7d 100644 --- a/lib/strategies/pruning-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -1,5 +1,5 @@ import { tool } from "@opencode-ai/plugin" -import type { SessionState, ToolParameterEntry} from "../state" +import type { SessionState, ToolParameterEntry } from 
"../state" import type { PluginConfig } from "../config" import { findCurrentAgent, buildToolIdList, getPruneToolIds } from "../utils" import { PruneReason, sendUnifiedNotification } from "../ui/notification" @@ -25,7 +25,7 @@ export interface PruneToolContext { * Creates the prune tool definition. * Accepts numeric IDs from the list and prunes those tool outputs. */ -export function createPruningTool( +export function createPruneTool( ctx: PruneToolContext, ): ReturnType { return tool({ From 8c4876138bf754064b0a5f9db94bfe0efd096786 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Fri, 12 Dec 2025 23:40:03 +0100 Subject: [PATCH 13/51] refactor --- lib/constants.ts | 1 + lib/hooks.ts | 1 + lib/prune.ts | 21 +++++++++++++++++++++ 3 files changed, 23 insertions(+) create mode 100644 lib/constants.ts create mode 100644 lib/prune.ts diff --git a/lib/constants.ts b/lib/constants.ts new file mode 100644 index 0000000..b73e523 --- /dev/null +++ b/lib/constants.ts @@ -0,0 +1 @@ +export const PRUNED_CONTENT_MESSAGE = '[Output removed to save context - information superseded or no longer needed]' diff --git a/lib/hooks.ts b/lib/hooks.ts index 4b80b34..7475a9f 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -3,6 +3,7 @@ import type { Logger } from "./logger" import type { PluginConfig } from "./config" import { syncToolCache } from "./state/tool-cache" import { deduplicate } from "./strategies" +import { prune } from "./prune" export function createChatMessageTransformHandler( diff --git a/lib/prune.ts b/lib/prune.ts new file mode 100644 index 0000000..aeff527 --- /dev/null +++ b/lib/prune.ts @@ -0,0 +1,21 @@ +import type { SessionState, WithParts } from "./state" +import type { Logger } from "./logger" +import type { PluginConfig } from "./config" + +export const prune = ( + state: SessionState, + logger: Logger, + config: PluginConfig, + messages: WithParts[] +): void => { + pruneToolOutputs(state, logger, config, messages) +} + +const pruneToolOutputs = ( + state: SessionState, + logger: Logger, + config: PluginConfig, + messages: WithParts[] +): void => { + +} From 1a2fc6c7d5e45b7ce47608bedebc36e0e74e1db6 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 18:01:40 +0100 Subject: [PATCH 14/51] wip --- lib/constants.ts | 1 - lib/hooks.ts | 4 +- lib/messages/index.ts | 1 + lib/messages/prune.ts | 103 ++++++++++++++++++++++++++++++++ lib/messages/utils.ts | 83 +++++++++++++++++++++++++ lib/prune.ts | 21 ------- lib/state/state.ts | 26 ++------ lib/strategies/deduplication.ts | 2 +- lib/strategies/prune-tool.ts | 5 +- lib/utils.ts | 5 +- 10 files changed, 204 insertions(+), 47 deletions(-) delete mode 100644 lib/constants.ts create mode 100644 lib/messages/index.ts create mode 100644 lib/messages/prune.ts create mode 100644 lib/messages/utils.ts delete mode 100644 lib/prune.ts diff --git a/lib/constants.ts b/lib/constants.ts deleted file mode 100644 index b73e523..0000000 --- a/lib/constants.ts +++ /dev/null @@ -1 +0,0 @@ -export const PRUNED_CONTENT_MESSAGE = '[Output removed to save context - information superseded or no longer needed]' diff --git a/lib/hooks.ts b/lib/hooks.ts index 7475a9f..9c4a80d 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -3,7 +3,7 @@ import type { Logger } from "./logger" import type { PluginConfig } from "./config" import { syncToolCache } from "./state/tool-cache" import { deduplicate } from "./strategies" -import { prune } from "./prune" +import { prune, insertPruneToolContext } from "./messages" export function createChatMessageTransformHandler( 
@@ -21,5 +21,7 @@ export function createChatMessageTransformHandler( deduplicate(state, logger, config, output.messages) prune(state, logger, config, output.messages) + + insertPruneToolContext(state, config, logger, output.messages) } } diff --git a/lib/messages/index.ts b/lib/messages/index.ts new file mode 100644 index 0000000..e854003 --- /dev/null +++ b/lib/messages/index.ts @@ -0,0 +1 @@ +export { prune, insertPruneToolContext } from "./prune" diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts new file mode 100644 index 0000000..7f007ce --- /dev/null +++ b/lib/messages/prune.ts @@ -0,0 +1,103 @@ +import type { SessionState, WithParts } from "../state" +import type { Logger } from "../logger" +import type { PluginConfig } from "../config" +import { buildToolIdList } from "../utils" +import { getLastUserMessage, extractParameterKey } from "./utils" + +const PRUNED_TOOL_OUTPUT_REPLACEMENT = '[Output removed to save context - information superseded or no longer needed]' + +const buildPrunableToolsList = ( + state: SessionState, + config: PluginConfig, + logger: Logger, + messages: WithParts[], +): string => { + const lines: string[] = [] + const toolIdList: string[] = buildToolIdList(messages) + + state.toolParameters.forEach((toolParameterEntry, toolCallId) => { + if (state.prune.toolIds.includes(toolCallId)) { + return + } + if (config.strategies.pruneTool.protectedTools.includes(toolParameterEntry.tool)) { + return + } + const numericId = toolIdList.indexOf(toolCallId) + const paramKey = extractParameterKey(toolParameterEntry.tool, toolParameterEntry.parameters) + const description = paramKey ? `${toolParameterEntry.tool}, ${paramKey}` : toolParameterEntry.tool + lines.push(`${numericId}: ${description}`) + }) + + return `\nThe following tools have been invoked and are available for pruning. This list does not mandate immediate action. Consider your current goals and the resources you need before discarding valuable tool outputs. 
Keep the context free of noise.\n${lines.join('\n')}\n` +} + +export const insertPruneToolContext = ( + state: SessionState, + config: PluginConfig, + logger: Logger, + messages: WithParts[] +): void => { + const lastUserMessage = getLastUserMessage(messages) + if (!lastUserMessage || lastUserMessage.info.role !== 'user') { + return + } + + const prunableToolsList = buildPrunableToolsList(state, config, logger, messages) + + const userMessage: WithParts = { + info: { + id: "msg_01234567890123456789012345", + sessionID: lastUserMessage.info.sessionID, + role: "user", + time: { created: Date.now() }, + agent: lastUserMessage.info.agent || "build", + model: { + providerID: lastUserMessage.info.model.providerID, + modelID: lastUserMessage.info.model.modelID + } + }, + parts: [ + { + id: "prt_01234567890123456789012345", + sessionID: lastUserMessage.info.sessionID, + messageID: "msg_01234567890123456789012345", + type: "text", + text: prunableToolsList, + } + ] + } + + messages.push(userMessage) +} + +export const prune = ( + state: SessionState, + logger: Logger, + config: PluginConfig, + messages: WithParts[] +): void => { + pruneToolOutputs(state, logger, messages) +} + +const pruneToolOutputs = ( + state: SessionState, + logger: Logger, + messages: WithParts[] +): void => { + for (const msg of messages) { + for (const part of msg.parts) { + if (part.type !== 'tool') { + continue + } + if (!state.prune.toolIds.includes(part.id)) { + continue + } + if (part.state.status === 'completed') { + part.state.output = PRUNED_TOOL_OUTPUT_REPLACEMENT + } + // if (part.state.status === 'error') { + // part.state.error = PRUNED_TOOL_OUTPUT_REPLACEMENT + // } + } + } +} diff --git a/lib/messages/utils.ts b/lib/messages/utils.ts new file mode 100644 index 0000000..47b6d44 --- /dev/null +++ b/lib/messages/utils.ts @@ -0,0 +1,83 @@ +import type { WithParts } from "../state" + +/** + * Extracts a human-readable key from tool metadata for display purposes. + * Used by both deduplication and AI analysis to show what was pruned. + */ +export const extractParameterKey = (tool: string, parameters: any): string => { + if (tool === "read" && parameters.filePath) { + return parameters.filePath + } + if (tool === "write" && parameters.filePath) { + return parameters.filePath + } + if (tool === "edit" && parameters.filePath) { + return parameters.filePath + } + + if (tool === "list") { + return parameters.path || '(current directory)' + } + if (tool === "glob") { + if (parameters.pattern) { + const pathInfo = parameters.path ? ` in ${parameters.path}` : "" + return `"${parameters.pattern}"${pathInfo}` + } + return '(unknown pattern)' + } + if (tool === "grep") { + if (parameters.pattern) { + const pathInfo = parameters.path ? ` in ${parameters.path}` : "" + return `"${parameters.pattern}"${pathInfo}` + } + return '(unknown pattern)' + } + + if (tool === "bash") { + if (parameters.description) return parameters.description + if (parameters.command) { + return parameters.command.length > 50 + ? parameters.command.substring(0, 50) + "..." 
+ : parameters.command + } + } + + if (tool === "webfetch" && parameters.url) { + return parameters.url + } + if (tool === "websearch" && parameters.query) { + return `"${parameters.query}"` + } + if (tool === "codesearch" && parameters.query) { + return `"${parameters.query}"` + } + + if (tool === "todowrite") { + return `${parameters.todos?.length || 0} todos` + } + if (tool === "todoread") { + return "read todo list" + } + + if (tool === "task" && parameters.description) { + return parameters.description + } + + const paramStr = JSON.stringify(parameters) + if (paramStr === '{}' || paramStr === '[]' || paramStr === 'null') { + return '' + } + return paramStr.substring(0, 50) +} + +export const getLastUserMessage = ( + messages: WithParts[] +): WithParts | null => { + for (let i = messages.length - 1; i >= 0; i--) { + const msg = messages[i] + if (msg.info.role === 'user') { + return msg + } + } + return null +} diff --git a/lib/prune.ts b/lib/prune.ts deleted file mode 100644 index aeff527..0000000 --- a/lib/prune.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { SessionState, WithParts } from "./state" -import type { Logger } from "./logger" -import type { PluginConfig } from "./config" - -export const prune = ( - state: SessionState, - logger: Logger, - config: PluginConfig, - messages: WithParts[] -): void => { - pruneToolOutputs(state, logger, config, messages) -} - -const pruneToolOutputs = ( - state: SessionState, - logger: Logger, - config: PluginConfig, - messages: WithParts[] -): void => { - -} diff --git a/lib/state/state.ts b/lib/state/state.ts index fa0368e..a67d93a 100644 --- a/lib/state/state.ts +++ b/lib/state/state.ts @@ -9,14 +9,8 @@ export function createSessionState(): SessionState { toolIds: [] }, stats: { - totalToolsPruned: 0, - totalTokensSaved: 0, - totalGCTokens: 0, - totalGCTools: 0 - }, - gcPending: { - tokensCollected: 0, - toolsDeduped: 0 + pruneTokenCounter: 0, + totalPruneTokens: 0, }, toolParameters: new Map() } @@ -28,14 +22,8 @@ export function resetSessionState(state: SessionState): void { toolIds: [] } state.stats = { - totalToolsPruned: 0, - totalTokensSaved: 0, - totalGCTokens: 0, - totalGCTools: 0 - } - state.gcPending = { - tokensCollected: 0, - toolsDeduped: 0 + pruneTokenCounter: 0, + totalPruneTokens: 0, } state.toolParameters.clear() } @@ -64,9 +52,7 @@ export async function ensureSessionInitialized( toolIds: persisted.prune.toolIds || [] } state.stats = { - totalToolsPruned: persisted.stats.totalToolsPruned || 0, - totalTokensSaved: persisted.stats.totalTokensSaved || 0, - totalGCTokens: persisted.stats.totalGCTokens || 0, - totalGCTools: persisted.stats.totalGCTools || 0 + pruneTokenCounter: persisted.stats?.pruneTokenCounter || 0, + totalPruneTokens: persisted.stats?.totalPruneTokens || 0, } } diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts index b362da1..841d552 100644 --- a/lib/strategies/deduplication.ts +++ b/lib/strategies/deduplication.ts @@ -68,7 +68,7 @@ export const deduplicate = ( if (newPruneIds.length > 0) { state.prune.toolIds.push(...newPruneIds) - logger.debug("dedulication", `Marked ${newPruneIds.length} duplicate tool calls for pruning`) + logger.debug("deduplication", `Marked ${newPruneIds.length} duplicate tool calls for pruning`) } } diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index e98bf7d..b60d53c 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -1,5 +1,5 @@ import { tool } from "@opencode-ai/plugin" -import type { SessionState, 
ToolParameterEntry } from "../state" +import type { SessionState, ToolParameterEntry, WithParts } from "../state" import type { PluginConfig } from "../config" import { findCurrentAgent, buildToolIdList, getPruneToolIds } from "../utils" import { PruneReason, sendUnifiedNotification } from "../ui/notification" @@ -64,9 +64,10 @@ export function createPruneTool( await ensureSessionInitialized(state, sessionId, logger) // Fetch messages to calculate tokens and find current agent - const messages = await client.session.messages({ + const messagesResponse = await client.session.messages({ path: { id: sessionId } }) + const messages = messagesResponse.data || messagesResponse const currentAgent: string | undefined = findCurrentAgent(messages) const toolIdList: string[] = buildToolIdList(messages) diff --git a/lib/utils.ts b/lib/utils.ts index 5dec9e4..4daec82 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -1,3 +1,6 @@ +import { Logger } from "./logger" +import { WithParts } from "./state" + /** * Checks if a session is a subagent session by looking for a parentID. */ @@ -27,7 +30,7 @@ export function findCurrentAgent(messages: any[]): string | undefined { /** * Builds a list of tool call IDs from messages. */ -export function buildToolIdList(messages: any[]): string[] { +export function buildToolIdList(messages: WithParts[]): string[] { const toolIds: string[] = [] for (const msg of messages) { if (msg.parts) { From 3143b87ce89f252a1efeb70edc221459ab736e91 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 20:11:28 +0100 Subject: [PATCH 15/51] deduplication and prune tool works. MUCH nicer code --- index.ts | 2 +- lib/hooks.ts | 2 ++ lib/logger.ts | 35 +++++++++++++++--- lib/messages/prune.ts | 3 +- lib/model-selector.ts | 6 ++-- lib/state/persistence.ts | 20 +++++------ lib/state/state.ts | 31 +++++++++++++++- lib/state/tool-cache.ts | 7 ++-- lib/strategies/deduplication.ts | 7 ++-- lib/strategies/prune-tool.ts | 63 +++++---------------------------- lib/tokenizer.ts | 15 -------- lib/ui/display-utils.ts | 13 ++++--- lib/ui/notification.ts | 23 +++++------- lib/utils.ts | 58 +++++++++++++++++++++++++++++- 14 files changed, 166 insertions(+), 119 deletions(-) delete mode 100644 lib/tokenizer.ts diff --git a/index.ts b/index.ts index b7e96ed..0b32e31 100644 --- a/index.ts +++ b/index.ts @@ -22,7 +22,7 @@ const plugin: Plugin = (async (ctx) => { const state = createSessionState() // Log initialization - logger.info("plugin", "DCP initialized", { + logger.info("DCP initialized", { strategies: config.strategies, }) diff --git a/lib/hooks.ts b/lib/hooks.ts index 9c4a80d..92abeb2 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -4,6 +4,7 @@ import type { PluginConfig } from "./config" import { syncToolCache } from "./state/tool-cache" import { deduplicate } from "./strategies" import { prune, insertPruneToolContext } from "./messages" +import { checkSession } from "./state" export function createChatMessageTransformHandler( @@ -16,6 +17,7 @@ export function createChatMessageTransformHandler( input: {}, output: { messages: WithParts[] } ) => { + checkSession(state, logger, output.messages); syncToolCache(state, logger, output.messages); deduplicate(state, logger, config, output.messages) diff --git a/lib/logger.ts b/lib/logger.ts index d51e888..6ccbfa4 100644 --- a/lib/logger.ts +++ b/lib/logger.ts @@ -45,6 +45,29 @@ export class Logger { return parts.join(" ") } + private getCallerFile(skipFrames: number = 3): string { + const originalPrepareStackTrace = Error.prepareStackTrace + 
try { + const err = new Error() + Error.prepareStackTrace = (_, stack) => stack + const stack = err.stack as unknown as NodeJS.CallSite[] + Error.prepareStackTrace = originalPrepareStackTrace + + // Skip specified number of frames to get to actual caller + for (let i = skipFrames; i < stack.length; i++) { + const filename = stack[i]?.getFileName() + if (filename && !filename.includes('/logger.')) { + // Extract just the filename without path and extension + const match = filename.match(/([^/\\]+)\.[tj]s$/) + return match ? match[1] : filename + } + } + return 'unknown' + } catch { + return 'unknown' + } + } + private async write(level: string, component: string, message: string, data?: any) { if (!this.enabled) return @@ -67,19 +90,23 @@ export class Logger { } } - info(component: string, message: string, data?: any) { + info(message: string, data?: any) { + const component = this.getCallerFile(2) return this.write("INFO", component, message, data) } - debug(component: string, message: string, data?: any) { + debug(message: string, data?: any) { + const component = this.getCallerFile(2) return this.write("DEBUG", component, message, data) } - warn(component: string, message: string, data?: any) { + warn(message: string, data?: any) { + const component = this.getCallerFile(2) return this.write("WARN", component, message, data) } - error(component: string, message: string, data?: any) { + error(message: string, data?: any) { + const component = this.getCallerFile(2) return this.write("ERROR", component, message, data) } diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index 7f007ce..e4a7fe9 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -26,6 +26,7 @@ const buildPrunableToolsList = ( const paramKey = extractParameterKey(toolParameterEntry.tool, toolParameterEntry.parameters) const description = paramKey ? `${toolParameterEntry.tool}, ${paramKey}` : toolParameterEntry.tool lines.push(`${numericId}: ${description}`) + logger.debug(`Prunable tool found - ID: ${numericId}, Tool: ${toolParameterEntry.tool}, Call ID: ${toolCallId}`) }) return `\nThe following tools have been invoked and are available for pruning. This list does not mandate immediate action. Consider your current goals and the resources you need before discarding valuable tool outputs. 
Keep the context free of noise.\n${lines.join('\n')}\n` @@ -89,7 +90,7 @@ const pruneToolOutputs = ( if (part.type !== 'tool') { continue } - if (!state.prune.toolIds.includes(part.id)) { + if (!state.prune.toolIds.includes(part.callID)) { continue } if (part.state.status === 'completed') { diff --git a/lib/model-selector.ts b/lib/model-selector.ts index e0e9895..d1499eb 100644 --- a/lib/model-selector.ts +++ b/lib/model-selector.ts @@ -55,7 +55,7 @@ async function importOpencodeAI(logger?: Logger, maxRetries: number = 3, delayMs lastError = error; if (error.message?.includes('before initialization')) { - logger?.debug('model-selector', `Import attempt ${attempt}/${maxRetries} failed, will retry`, { + logger?.debug(`Import attempt ${attempt}/${maxRetries} failed, will retry`, { error: error.message }); @@ -85,7 +85,7 @@ export async function selectModel( if (configModel) { const parts = configModel.split('/'); if (parts.length !== 2) { - logger?.warn('model-selector', 'Invalid config model format', { configModel }); + logger?.warn('Invalid config model format', { configModel }); } else { const [providerID, modelID] = parts; @@ -98,7 +98,7 @@ export async function selectModel( reason: 'Using model specified in dcp.jsonc config' }; } catch (error: any) { - logger?.warn('model-selector', `Config model failed: ${providerID}/${modelID}`, { + logger?.warn(`Config model failed: ${providerID}/${modelID}`, { error: error.message }); failedModelInfo = { providerID, modelID }; diff --git a/lib/state/persistence.ts b/lib/state/persistence.ts index d656bae..21f0092 100644 --- a/lib/state/persistence.ts +++ b/lib/state/persistence.ts @@ -61,13 +61,13 @@ export async function saveSessionState( const content = JSON.stringify(state, null, 2); await fs.writeFile(filePath, content, "utf-8"); - logger.info("persist", "Saved session state to disk", { - sessionId: sessionState.sessionId.slice(0, 8), + logger.info("Saved session state to disk", { + sessionId: sessionState.sessionId, totalTokensSaved: state.stats.totalPruneTokens }); } catch (error: any) { - logger.error("persist", "Failed to save session state", { - sessionId: sessionState.sessionId?.slice(0, 8), + logger.error("Failed to save session state", { + sessionId: sessionState.sessionId, error: error?.message, }); } @@ -92,21 +92,21 @@ export async function loadSessionState( !Array.isArray(state.prune.toolIds) || !state.stats ) { - logger.warn("persist", "Invalid session state file, ignoring", { - sessionId: sessionId.slice(0, 8), + logger.warn("Invalid session state file, ignoring", { + sessionId: sessionId, }); return null; } - logger.info("persist", "Loaded session state from disk", { - sessionId: sessionId.slice(0, 8), + logger.info("Loaded session state from disk", { + sessionId: sessionId, totalTokensSaved: state.stats.totalPruneTokens }); return state; } catch (error: any) { - logger.warn("persist", "Failed to load session state", { - sessionId: sessionId.slice(0, 8), + logger.warn("Failed to load session state", { + sessionId: sessionId, error: error?.message, }); return null; diff --git a/lib/state/state.ts b/lib/state/state.ts index a67d93a..3e727a0 100644 --- a/lib/state/state.ts +++ b/lib/state/state.ts @@ -1,6 +1,32 @@ -import type { SessionState, ToolParameterEntry } from "./types" +import type { SessionState, ToolParameterEntry, WithParts } from "./types" import type { Logger } from "../logger" import { loadSessionState } from "./persistence" +import { getLastUserMessage } from "../messages/utils" + +export const checkSession = ( + 
state: SessionState, + logger: Logger, + messages: WithParts[] +) => { + + const lastUserMessage = getLastUserMessage(messages) + if (!lastUserMessage) { + return + } + + const lastSessionId = lastUserMessage.info.sessionID + + if (state.sessionId === null || state.sessionId !== lastSessionId) { + logger.info(`Session changed: ${state.sessionId} -> ${lastSessionId}`) + ensureSessionInitialized( + state, + lastSessionId, + logger + ).catch((err) => { + logger.error("Failed to initialize session state", { error: err.message }) + } ) + } +} export function createSessionState(): SessionState { return { @@ -37,6 +63,9 @@ export async function ensureSessionInitialized( return; } + logger.info("session ID = " + sessionId) + logger.info("Initializing session state", { sessionId: sessionId }) + // Clear previous session data resetSessionState(state) state.sessionId = sessionId diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index f75f619..215f9a5 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -14,11 +14,8 @@ export async function syncToolCache( messages: WithParts[], ): Promise { try { + logger.info("Syncing tool parameters from OpenCode messages") for (const msg of messages) { - if (!msg.parts) { - continue - } - for (const part of msg.parts) { if (part.type !== "tool" || !part.callID || state.toolParameters.has(part.callID)) { continue @@ -38,7 +35,7 @@ export async function syncToolCache( trimToolParametersCache(state) } catch (error) { - logger.warn("tool-cache", "Failed to sync tool parameters from OpenCode", { + logger.warn("Failed to sync tool parameters from OpenCode", { error: error instanceof Error ? error.message : String(error) }) } diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts index 841d552..f58a13a 100644 --- a/lib/strategies/deduplication.ts +++ b/lib/strategies/deduplication.ts @@ -1,6 +1,7 @@ import { PluginConfig } from "../config" import { Logger } from "../logger" import type { SessionState, WithParts } from "../state" +import { calculateTokensSaved } from "../utils" /** * Deduplication strategy - prunes older tool calls that have identical @@ -39,7 +40,7 @@ export const deduplicate = ( for (const id of unprunedIds) { const metadata = state.toolParameters.get(id) if (!metadata) { - logger.warn("deduplication", `Missing metadata for tool call ID: ${id}`) + logger.warn(`Missing metadata for tool call ID: ${id}`) continue } @@ -66,9 +67,11 @@ export const deduplicate = ( } } + state.stats.totalPruneTokens += calculateTokensSaved(messages, newPruneIds) + if (newPruneIds.length > 0) { state.prune.toolIds.push(...newPruneIds) - logger.debug("deduplication", `Marked ${newPruneIds.length} duplicate tool calls for pruning`) + logger.debug(`Marked ${newPruneIds.length} duplicate tool calls for pruning`) } } diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index b60d53c..c48af54 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -1,13 +1,12 @@ import { tool } from "@opencode-ai/plugin" import type { SessionState, ToolParameterEntry, WithParts } from "../state" import type { PluginConfig } from "../config" -import { findCurrentAgent, buildToolIdList, getPruneToolIds } from "../utils" +import { findCurrentAgent, buildToolIdList, getPruneToolIds, calculateTokensSaved } from "../utils" import { PruneReason, sendUnifiedNotification } from "../ui/notification" import { formatPruningResultForTool } from "../ui/display-utils" import { ensureSessionInitialized } from 
"../state" import { saveSessionState } from "../state/persistence" import type { Logger } from "../logger" -import { estimateTokensBatch } from "../tokenizer" import { loadPrompt } from "../prompt" /** Tool description loaded from prompts/tool.txt */ @@ -67,18 +66,15 @@ export function createPruneTool( const messagesResponse = await client.session.messages({ path: { id: sessionId } }) - const messages = messagesResponse.data || messagesResponse + const messages: WithParts[] = messagesResponse.data || messagesResponse const currentAgent: string | undefined = findCurrentAgent(messages) const toolIdList: string[] = buildToolIdList(messages) const pruneToolIds: string[] = getPruneToolIds(numericToolIds, toolIdList) - const tokensSaved = await calculateTokensSavedFromMessages(messages, pruneToolIds) - - state.stats.pruneTokenCounter += tokensSaved state.prune.toolIds.push(...pruneToolIds) saveSessionState(state, logger) - .catch(err => logger.error("prune-tool", "Failed to persist state", { error: err.message })) + .catch(err => logger.error("Failed to persist state", { error: err.message })) const toolMetadata = new Map() for (const id of pruneToolIds) { @@ -86,10 +82,12 @@ export function createPruneTool( if (toolParameters) { toolMetadata.set(id, toolParameters) } else { - logger.debug("prune-tool", "No metadata found for ID", { id }) + logger.debug("No metadata found for ID", { id }) } } + state.stats.pruneTokenCounter += calculateTokensSaved(messages, pruneToolIds) + await sendUnifiedNotification( client, logger, @@ -102,6 +100,8 @@ export function createPruneTool( currentAgent, workingDirectory ) + state.stats.totalPruneTokens += state.stats.pruneTokenCounter + state.stats.pruneTokenCounter = 0 return formatPruningResultForTool( pruneToolIds, @@ -112,50 +112,3 @@ export function createPruneTool( }) } -/** - * Calculates approximate tokens saved by pruning the given tool call IDs. - * Uses pre-fetched messages to avoid duplicate API calls. - */ -async function calculateTokensSavedFromMessages( - messages: any[], - prunedIds: string[] -): Promise { - try { - const toolOutputs = new Map() - for (const msg of messages) { - if (msg.role === 'tool' && msg.tool_call_id) { - const content = typeof msg.content === 'string' - ? msg.content - : JSON.stringify(msg.content) - toolOutputs.set(msg.tool_call_id.toLowerCase(), content) - } - if (msg.role === 'user' && Array.isArray(msg.content)) { - for (const part of msg.content) { - if (part.type === 'tool_result' && part.tool_use_id) { - const content = typeof part.content === 'string' - ? 
part.content - : JSON.stringify(part.content) - toolOutputs.set(part.tool_use_id.toLowerCase(), content) - } - } - } - } - - const contents: string[] = [] - for (const id of prunedIds) { - const content = toolOutputs.get(id.toLowerCase()) - if (content) { - contents.push(content) - } - } - - if (contents.length === 0) { - return prunedIds.length * 500 - } - - const tokenCounts = await estimateTokensBatch(contents) - return tokenCounts.reduce((sum, count) => sum + count, 0) - } catch (error: any) { - return prunedIds.length * 500 - } -} diff --git a/lib/tokenizer.ts b/lib/tokenizer.ts deleted file mode 100644 index 711a449..0000000 --- a/lib/tokenizer.ts +++ /dev/null @@ -1,15 +0,0 @@ -export async function estimateTokensBatch(texts: string[]): Promise { - try { - const { encode } = await import('gpt-tokenizer') - return texts.map(text => encode(text).length) - } catch { - return texts.map(text => Math.round(text.length / 4)) - } -} - -export function formatTokenCount(tokens: number): string { - if (tokens >= 1000) { - return `${(tokens / 1000).toFixed(1)}K`.replace('.0K', 'K') + ' tokens' - } - return tokens.toString() + ' tokens' -} diff --git a/lib/ui/display-utils.ts b/lib/ui/display-utils.ts index aedea27..2edc44f 100644 --- a/lib/ui/display-utils.ts +++ b/lib/ui/display-utils.ts @@ -108,15 +108,14 @@ function shortenSinglePath(path: string, workingDirectory?: string): string { * Formats a list of pruned items in the style: "→ tool: parameter" */ export function formatPrunedItemsList( - prunedIds: string[], + pruneToolIds: string[], toolMetadata: Map, workingDirectory?: string ): string[] { const lines: string[] = [] - for (const prunedId of prunedIds) { - const normalizedId = prunedId.toLowerCase() - const metadata = toolMetadata.get(normalizedId) + for (const id of pruneToolIds) { + const metadata = toolMetadata.get(id) if (metadata) { const paramKey = extractParameterKey(metadata) @@ -130,10 +129,10 @@ export function formatPrunedItemsList( } } - const knownCount = prunedIds.filter(id => - toolMetadata.has(id.toLowerCase()) + const knownCount = pruneToolIds.filter(id => + toolMetadata.has(id) ).length - const unknownCount = prunedIds.length - knownCount + const unknownCount = pruneToolIds.length - knownCount if (unknownCount > 0) { lines.push(`→ (${unknownCount} tool${unknownCount > 1 ? 
's' : ''} with unknown metadata)`) diff --git a/lib/ui/notification.ts b/lib/ui/notification.ts index a84d283..06b370e 100644 --- a/lib/ui/notification.ts +++ b/lib/ui/notification.ts @@ -1,6 +1,6 @@ import type { Logger } from "../logger" import type { SessionState } from "../state" -import { formatTokenCount } from "../tokenizer" +import { formatTokenCount } from "../utils" import { formatPrunedItemsList } from "./display-utils" import { ToolParameterEntry } from "../state" import { PluginConfig } from "../config" @@ -16,14 +16,9 @@ function formatStatsHeader( totalTokensSaved: number, pruneTokenCounter: number ): string { - const totalTokensSavedStr = `~${formatTokenCount(totalTokensSaved)}` - const pruneTokenCounterStr = `~${formatTokenCount(pruneTokenCounter)}` - - const maxTokenLen = Math.max(pruneTokenCounterStr.length, pruneTokenCounterStr.length) - const totalTokensPadded = totalTokensSavedStr.padStart(maxTokenLen) - + const totalTokensSavedStr = `~${formatTokenCount(totalTokensSaved + pruneTokenCounter)}` return [ - `▣ DCP | ${totalTokensPadded} saved total`, + `▣ DCP | ${totalTokensSavedStr} saved total`, ].join('\n') } @@ -41,18 +36,18 @@ function buildMinimalMessage( function buildDetailedMessage( state: SessionState, reason: PruneReason | undefined, - prunedIds: string[], + pruneToolIds: string[], toolMetadata: Map, workingDirectory?: string ): string { let message = formatStatsHeader(state.stats.totalPruneTokens, state.stats.pruneTokenCounter) - if (prunedIds.length > 0) { - const justNowTokensStr = `~${formatTokenCount(state.stats.pruneTokenCounter)}` + if (pruneToolIds.length > 0) { + const pruneTokenCounterStr = `~${formatTokenCount(state.stats.pruneTokenCounter)}` const reasonLabel = reason ? ` — ${PRUNE_REASON_LABELS[reason]}` : '' - message += `\n\n▣ Pruned tools (${justNowTokensStr})${reasonLabel}` + message += `\n\n▣ Pruned tools (${pruneTokenCounterStr})${reasonLabel}` - const itemLines = formatPrunedItemsList(prunedIds, toolMetadata, workingDirectory) + const itemLines = formatPrunedItemsList(pruneToolIds, toolMetadata, workingDirectory) message += '\n' + itemLines.join('\n') } @@ -109,7 +104,7 @@ export async function sendIgnoredMessage( } }) } catch (error: any) { - logger.error("notification", "Failed to send notification", { error: error.message }) + logger.error("Failed to send notification", { error: error.message }) } } diff --git a/lib/utils.ts b/lib/utils.ts index 4daec82..844edfb 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -1,5 +1,61 @@ -import { Logger } from "./logger" import { WithParts } from "./state" +import { encode } from 'gpt-tokenizer' + +/** + * Estimates token counts for a batch of texts using gpt-tokenizer. + * TODO: ensure we aren't falling back to catch branch + */ +function estimateTokensBatch(texts: string[]): number[] { + try { + return texts.map(text => encode(text).length) + } catch { + return texts.map(text => Math.round(text.length / 4)) + } +} + +/** + * Calculates approximate tokens saved by pruning the given tool call IDs. + * Uses pre-fetched messages to avoid duplicate API calls. + * TODO: Make it count message content that are not tool outputs. 
Currently it ONLY covers tool outputs and errors + */ +export const calculateTokensSaved = ( + messages: WithParts[], + pruneToolIds: string[] +): number => { + try { + const contents: string[] = [] + for (const msg of messages) { + for (const part of msg.parts) { + if (part.type !== 'tool' || !pruneToolIds.includes(part.callID)) { + continue + } + if (part.state.status === "completed") { + const content = typeof part.state.output === 'string' + ? part.state.output + : JSON.stringify(part.state.output) + contents.push(content) + } + if (part.state.status === "error") { + const content = typeof part.state.error === 'string' + ? part.state.error + : JSON.stringify(part.state.error) + contents.push(content) + } + } + } + const tokenCounts: number[] = estimateTokensBatch(contents) + return tokenCounts.reduce((sum, count) => sum + count, 0) + } catch (error: any) { + return 0 + } +} + +export function formatTokenCount(tokens: number): string { + if (tokens >= 1000) { + return `${(tokens / 1000).toFixed(1)}K`.replace('.0K', 'K') + ' tokens' + } + return tokens.toString() + ' tokens' +} /** * Checks if a session is a subagent session by looking for a parentID. From 7730dc43f1b430d49803e098b7d00098211eced7 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 20:43:51 +0100 Subject: [PATCH 16/51] readme --- README.md | 83 ++++++++++++++++++++++++++++++++++++------------------- 1 file changed, 55 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 6992da2..99fba49 100644 --- a/README.md +++ b/README.md @@ -23,17 +23,15 @@ Restart OpenCode. The plugin will automatically start optimizing your sessions. ## How Pruning Works -DCP uses two complementary techniques: +DCP uses multiple strategies to reduce context size: -**Automatic Deduplication** — Silently identifies repeated tool calls (e.g., reading the same file multiple times) and keeps only the most recent output. Runs on every request with zero LLM cost. +**Deduplication** — Identifies repeated tool calls (e.g., reading the same file multiple times) and keeps only the most recent output. Runs automatically on every request with zero LLM cost. -**AI Analysis** — Uses a language model to semantically analyze conversation context and identify tool outputs that are no longer relevant to the current task. +**Prune Thinking Blocks** — Removes LLM thinking/reasoning blocks from the conversation history. -## Context Pruning Tool +**On Idle Analysis** — Uses a language model to semantically analyze conversation context during idle periods and identify tool outputs that are no longer relevant. -When `strategies.onTool` is enabled, DCP exposes a `prune` tool to Opencode that the AI can call to trigger pruning on demand. - -Adjust `nudgeFreq` to control how aggressively the AI is prompted to prune — lower values trigger reminders sooner and more often. +**Prune Tool** — Exposes a `prune` tool that the AI can call to manually trigger pruning when it determines context cleanup is needed. ## How It Works @@ -49,35 +47,64 @@ LLM providers like Anthropic and OpenAI cache prompts based on exact prefix matc DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.jsonc`), created automatically on first run. 
-### Options - -| Option | Default | Description | -|--------|---------|-------------| -| `enabled` | `true` | Enable/disable the plugin | -| `debug` | `false` | Log to `~/.config/opencode/logs/dcp/` | -| `model` | (session) | Model for analysis (e.g., `"anthropic/claude-haiku-4-5"`) | -| `showModelErrorToasts` | `true` | Show notifications on model fallback | -| `showUpdateToasts` | `true` | Show notifications when a new version is available | -| `strictModelSelection` | `false` | Only run AI analysis with session or configured model (disables fallback models) | -| `pruningSummary` | `"detailed"` | `"off"`, `"minimal"`, or `"detailed"` | -| `nudgeFreq` | `10` | How often to remind AI to prune (lower = more frequent) | -| `protectedTools` | `["task", "todowrite", "todoread", "prune", "batch", "write", "edit"]` | Tools that are never pruned | -| `strategies.onIdle` | `["ai-analysis"]` | Strategies for automatic pruning | -| `strategies.onTool` | `["ai-analysis"]` | Strategies when AI calls `prune` | - -**Strategies:** `"ai-analysis"` uses LLM to identify prunable outputs. Empty array disables that trigger. Deduplication runs automatically on every request. +
+Default Configuration (click to expand) ```jsonc { + // Enable or disable the plugin "enabled": true, + // Enable debug logging to ~/.config/opencode/logs/dcp/ + "debug": false, + // Show toast notifications when a new version is available + "showUpdateToasts": true, + // Summary display: "off", "minimal", or "detailed" + "pruningSummary": "detailed", + // Strategies for pruning tokens from chat history "strategies": { - "onIdle": ["ai-analysis"], - "onTool": ["ai-analysis"] - }, - "protectedTools": ["task", "todowrite", "todoread", "prune", "batch", "write", "edit"] + // Remove duplicate tool calls (same tool with same arguments) + "deduplication": { + "enabled": true, + // Additional tools to protect from pruning + "protectedTools": [] + }, + // Remove thinking/reasoning LLM blocks + "pruneThinkingBlocks": { + "enabled": true + }, + // Run an LLM to analyze what tool calls are no longer relevant on idle + "onIdle": { + "enabled": true, + // Override model for analysis (format: "provider/model") + // "model": "anthropic/claude-haiku-4-5", + // Show toast notifications when model selection fails + "showModelErrorToasts": true, + // When true, fallback models are not permitted + "strictModelSelection": false, + // Additional tools to protect from pruning + "protectedTools": [] + }, + // Exposes a prune tool to your LLM to call when it determines pruning is necessary + "pruneTool": { + "enabled": false, + // Additional tools to protect from pruning + "protectedTools": [], + // How often to nudge the AI to prune (every N tool results, 0 = disabled) + "nudgeFrequency": 10 + } + } } ``` +
+ +### Protected Tools + +By default, these tools are always protected from pruning across all strategies: +`task`, `todowrite`, `todoread`, `prune`, `batch`, `write`, `edit` + +The `protectedTools` arrays in each strategy add to this default list. + ### Config Precedence Settings are merged in order: **Defaults** → **Global** (`~/.config/opencode/dcp.jsonc`) → **Project** (`.opencode/dcp.jsonc`). Each level overrides the previous, so project settings take priority over global, which takes priority over defaults. From 0914d82ef23cf418f8009261473a4ca6ac0ce52d Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 20:45:44 +0100 Subject: [PATCH 17/51] reamde --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 99fba49..56eaee3 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,6 @@ DCP uses multiple strategies to reduce context size: **Prune Tool** — Exposes a `prune` tool that the AI can call to manually trigger pruning when it determines context cleanup is needed. -## How It Works - Your session history is never modified. DCP replaces pruned outputs with a placeholder before sending requests to your LLM. ## Impact on Prompt Caching From 7c900345c817910340f68dac7029d5410dd10bf2 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 21:02:51 +0100 Subject: [PATCH 18/51] sync --- lib/hooks.ts | 3 ++- lib/messages/prune.ts | 10 +++++++++- lib/state/state.ts | 4 +++- lib/state/tool-cache.ts | 6 ++++++ lib/state/types.ts | 1 + lib/strategies/prune-tool.ts | 1 + 6 files changed, 22 insertions(+), 3 deletions(-) diff --git a/lib/hooks.ts b/lib/hooks.ts index 92abeb2..b3dc9da 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -18,7 +18,8 @@ export function createChatMessageTransformHandler( output: { messages: WithParts[] } ) => { checkSession(state, logger, output.messages); - syncToolCache(state, logger, output.messages); + syncToolCache(state, config, logger, output.messages); + deduplicate(state, logger, config, output.messages) diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index e4a7fe9..34a78d3 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -3,8 +3,10 @@ import type { Logger } from "../logger" import type { PluginConfig } from "../config" import { buildToolIdList } from "../utils" import { getLastUserMessage, extractParameterKey } from "./utils" +import { loadPrompt } from "../prompt" const PRUNED_TOOL_OUTPUT_REPLACEMENT = '[Output removed to save context - information superseded or no longer needed]' +const NUDGE_STRING = loadPrompt("nudge") const buildPrunableToolsList = ( state: SessionState, @@ -45,6 +47,12 @@ export const insertPruneToolContext = ( const prunableToolsList = buildPrunableToolsList(state, config, logger, messages) + let nudgeString = "" + if (state.nudgeCounter >= config.strategies.pruneTool.nudgeFrequency) { + logger.info("Inserting prune nudge message") + nudgeString = "\n" + NUDGE_STRING + } + const userMessage: WithParts = { info: { id: "msg_01234567890123456789012345", @@ -63,7 +71,7 @@ export const insertPruneToolContext = ( sessionID: lastUserMessage.info.sessionID, messageID: "msg_01234567890123456789012345", type: "text", - text: prunableToolsList, + text: prunableToolsList + nudgeString, } ] } diff --git a/lib/state/state.ts b/lib/state/state.ts index 3e727a0..6682d6f 100644 --- a/lib/state/state.ts +++ b/lib/state/state.ts @@ -38,7 +38,8 @@ export function createSessionState(): SessionState { pruneTokenCounter: 0, totalPruneTokens: 0, }, - toolParameters: new 
Map() + toolParameters: new Map(), + nudgeCounter: 0 } } @@ -52,6 +53,7 @@ export function resetSessionState(state: SessionState): void { totalPruneTokens: 0, } state.toolParameters.clear() + state.nudgeCounter = 0 } export async function ensureSessionInitialized( diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index 215f9a5..d86005f 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -1,5 +1,6 @@ import type { SessionState, ToolStatus, WithParts } from "./index" import type { Logger } from "../logger" +import { PluginConfig } from "../config" const MAX_TOOL_CACHE_SIZE = 500 @@ -10,6 +11,7 @@ const MAX_TOOL_CACHE_SIZE = 500 */ export async function syncToolCache( state: SessionState, + config: PluginConfig, logger: Logger, messages: WithParts[], ): Promise { @@ -30,6 +32,10 @@ export async function syncToolCache( error: part.state.status === "error" ? part.state.error : undefined, } ) + + if (!config.strategies.pruneTool.protectedTools.includes(part.tool)) { + state.nudgeCounter++ + } } } diff --git a/lib/state/types.ts b/lib/state/types.ts index b0b6b69..750ca38 100644 --- a/lib/state/types.ts +++ b/lib/state/types.ts @@ -28,4 +28,5 @@ export interface SessionState { prune: Prune stats: SessionStats toolParameters: Map + nudgeCounter: number } diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index c48af54..08e680c 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -102,6 +102,7 @@ export function createPruneTool( ) state.stats.totalPruneTokens += state.stats.pruneTokenCounter state.stats.pruneTokenCounter = 0 + state.nudgeCounter = 0 return formatPruningResultForTool( pruneToolIds, From 8227d64143d8e52e0aa40e4696b619583126220b Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 21:13:02 +0100 Subject: [PATCH 19/51] ai nudge implementation --- README.md | 7 +++++-- lib/config.ts | 40 +++++++++++++++++++++++++++++++--------- lib/messages/prune.ts | 2 +- 3 files changed, 37 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 56eaee3..c2d63bb 100644 --- a/README.md +++ b/README.md @@ -87,8 +87,11 @@ DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.j "enabled": false, // Additional tools to protect from pruning "protectedTools": [], - // How often to nudge the AI to prune (every N tool results, 0 = disabled) - "nudgeFrequency": 10 + // Nudge the LLM to use the prune tool (every tool results) + "nudge": { + "enabled": true, + "frequency": 10 + } } } } diff --git a/lib/config.ts b/lib/config.ts index 6352076..0a05235 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -21,10 +21,15 @@ export interface OnIdle { protectedTools: string[] } +export interface PruneToolNudge { + enabled: boolean + frequency: number +} + export interface PruneTool { enabled: boolean protectedTools: string[] - nudgeFrequency: number + nudge: PruneToolNudge } export interface PluginConfig { @@ -68,7 +73,9 @@ export const VALID_CONFIG_KEYS = new Set([ 'strategies.pruneTool', 'strategies.pruneTool.enabled', 'strategies.pruneTool.protectedTools', - 'strategies.pruneTool.nudgeFrequency', + 'strategies.pruneTool.nudge', + 'strategies.pruneTool.nudge.enabled', + 'strategies.pruneTool.nudge.frequency', ]) // Extract all key paths from a config object for validation @@ -160,8 +167,13 @@ function validateConfigTypes(config: Record): ValidationError[] { if (strategies.pruneTool.protectedTools !== undefined && !Array.isArray(strategies.pruneTool.protectedTools)) { errors.push({ 
key: 'strategies.pruneTool.protectedTools', expected: 'string[]', actual: typeof strategies.pruneTool.protectedTools }) } - if (strategies.pruneTool.nudgeFrequency !== undefined && typeof strategies.pruneTool.nudgeFrequency !== 'number') { - errors.push({ key: 'strategies.pruneTool.nudgeFrequency', expected: 'number', actual: typeof strategies.pruneTool.nudgeFrequency }) + if (strategies.pruneTool.nudge) { + if (strategies.pruneTool.nudge.enabled !== undefined && typeof strategies.pruneTool.nudge.enabled !== 'boolean') { + errors.push({ key: 'strategies.pruneTool.nudge.enabled', expected: 'boolean', actual: typeof strategies.pruneTool.nudge.enabled }) + } + if (strategies.pruneTool.nudge.frequency !== undefined && typeof strategies.pruneTool.nudge.frequency !== 'number') { + errors.push({ key: 'strategies.pruneTool.nudge.frequency', expected: 'number', actual: typeof strategies.pruneTool.nudge.frequency }) + } } } } @@ -237,7 +249,10 @@ const defaultConfig: PluginConfig = { pruneTool: { enabled: false, protectedTools: [...DEFAULT_PROTECTED_TOOLS], - nudgeFrequency: 10 + nudge: { + enabled: true, + frequency: 10 + } } } } @@ -328,8 +343,11 @@ function createDefaultConfig(): void { "enabled": false, // Additional tools to protect from pruning "protectedTools": [], - // How often to nudge the AI to prune (every N tool results, 0 = disabled) - "nudgeFrequency": 10 + // Nudge the LLM to use the prune tool (every tool results) + "nudge": { + "enabled": true, + "frequency": 10 + } } } } @@ -401,7 +419,10 @@ function mergeStrategies( ...(override.pruneTool?.protectedTools ?? []) ]) ], - nudgeFrequency: override.pruneTool?.nudgeFrequency ?? base.pruneTool.nudgeFrequency + nudge: { + enabled: override.pruneTool?.nudge?.enabled ?? base.pruneTool.nudge.enabled, + frequency: override.pruneTool?.nudge?.frequency ?? 
base.pruneTool.nudge.frequency + } } } } @@ -421,7 +442,8 @@ function deepCloneConfig(config: PluginConfig): PluginConfig { }, pruneTool: { ...config.strategies.pruneTool, - protectedTools: [...config.strategies.pruneTool.protectedTools] + protectedTools: [...config.strategies.pruneTool.protectedTools], + nudge: { ...config.strategies.pruneTool.nudge } } } } diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index 34a78d3..cca70d1 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -48,7 +48,7 @@ export const insertPruneToolContext = ( const prunableToolsList = buildPrunableToolsList(state, config, logger, messages) let nudgeString = "" - if (state.nudgeCounter >= config.strategies.pruneTool.nudgeFrequency) { + if (config.strategies.pruneTool.nudge.enabled && state.nudgeCounter >= config.strategies.pruneTool.nudge.frequency) { logger.info("Inserting prune nudge message") nudgeString = "\n" + NUDGE_STRING } From 75539fde9876fd5365213bef043d7fd530f6a3a9 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 21:16:46 +0100 Subject: [PATCH 20/51] set onIdle and pruneThinkingBlock strategies to disabled by default --- README.md | 4 ++-- lib/config.ts | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index c2d63bb..cef186c 100644 --- a/README.md +++ b/README.md @@ -68,11 +68,11 @@ DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.j }, // Remove thinking/reasoning LLM blocks "pruneThinkingBlocks": { - "enabled": true + "enabled": false }, // Run an LLM to analyze what tool calls are no longer relevant on idle "onIdle": { - "enabled": true, + "enabled": false, // Override model for analysis (format: "provider/model") // "model": "anthropic/claude-haiku-4-5", // Show toast notifications when model selection fails diff --git a/lib/config.ts b/lib/config.ts index 0a05235..b6c20a2 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -238,10 +238,10 @@ const defaultConfig: PluginConfig = { protectedTools: [...DEFAULT_PROTECTED_TOOLS] }, pruneThinkingBlocks: { - enabled: true + enabled: false }, onIdle: { - enabled: true, + enabled: false, showModelErrorToasts: true, strictModelSelection: false, protectedTools: [...DEFAULT_PROTECTED_TOOLS] @@ -324,11 +324,11 @@ function createDefaultConfig(): void { }, // Remove thinking/reasoning LLM blocks "pruneThinkingBlocks": { - "enabled": true + "enabled": false }, // Run an LLM to analyze what tool calls are no longer relevant on idle "onIdle": { - "enabled": true, + "enabled": false, // Override model for analysis (format: "provider/model") // "model": "anthropic/claude-haiku-4-5", // Show toast notifications when model selection fails From 5b9493da97d394384ee45d759bd29dbbc2ee875a Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 21:22:13 +0100 Subject: [PATCH 21/51] say onIdle is legacy --- README.md | 2 +- lib/config.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index cef186c..965f5f2 100644 --- a/README.md +++ b/README.md @@ -70,7 +70,7 @@ DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.j "pruneThinkingBlocks": { "enabled": false }, - // Run an LLM to analyze what tool calls are no longer relevant on idle + // (Legacy) Run an LLM to analyze what tool calls are no longer relevant on idle "onIdle": { "enabled": false, // Override model for analysis (format: "provider/model") diff --git a/lib/config.ts b/lib/config.ts index b6c20a2..3dfdba2 100644 --- 
a/lib/config.ts +++ b/lib/config.ts @@ -326,7 +326,7 @@ function createDefaultConfig(): void { "pruneThinkingBlocks": { "enabled": false }, - // Run an LLM to analyze what tool calls are no longer relevant on idle + // (Legacy) Run an LLM to analyze what tool calls are no longer relevant on idle "onIdle": { "enabled": false, // Override model for analysis (format: "provider/model") From 07fa3c4784c6636db3a04d8530832a17d1f8a2a7 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 21:55:17 +0100 Subject: [PATCH 22/51] workflow fix --- .github/workflows/pr-checks.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/pr-checks.yml b/.github/workflows/pr-checks.yml index f9c7804..e665cfc 100644 --- a/.github/workflows/pr-checks.yml +++ b/.github/workflows/pr-checks.yml @@ -2,9 +2,7 @@ name: PR Checks on: pull_request: - branches: [main, master] - push: - branches: [main, master] + branches: [master, dev] jobs: validate: From 7c702454b1c7078725ad060c4fe8a70ae43f8244 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 22:24:06 +0100 Subject: [PATCH 23/51] disallow subagents to use plugin --- .claude/settings.local.json | 15 --------------- index.ts | 12 ++++++++++++ lib/hooks.ts | 8 ++++++-- lib/state/state.ts | 10 ++++++++++ lib/state/types.ts | 1 + lib/strategies/deduplication.ts | 1 + lib/strategies/on-idle.ts | 0 lib/strategies/prune-tool.ts | 2 +- lib/utils.ts | 2 +- 9 files changed, 32 insertions(+), 19 deletions(-) delete mode 100644 .claude/settings.local.json create mode 100644 lib/strategies/on-idle.ts diff --git a/.claude/settings.local.json b/.claude/settings.local.json deleted file mode 100644 index 6fccb8f..0000000 --- a/.claude/settings.local.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "permissions": { - "allow": [ - "Bash(cat:*)", - "Bash(for f in ~/.local/share/opencode/storage/part/*/*)", - "Bash(do grep -l \"\"type\"\":\"\"reasoning\"\" $f)", - "Bash(done)", - "WebSearch", - "WebFetch(domain:ai-sdk.dev)", - "Bash(npm run typecheck:*)" - ], - "deny": [], - "ask": [] - } -} diff --git a/index.ts b/index.ts index 0b32e31..f30b041 100644 --- a/index.ts +++ b/index.ts @@ -42,6 +42,18 @@ const plugin: Plugin = (async (ctx) => { workingDirectory: ctx.directory }), } : undefined, + config: async (opencodeConfig) => { + // Add prune to primary_tools by mutating the opencode config + // This works because config is cached and passed by reference + if (config.strategies.pruneTool.enabled) { + const existingPrimaryTools = opencodeConfig.experimental?.primary_tools ?? 
[] + opencodeConfig.experimental = { + ...opencodeConfig.experimental, + primary_tools: [...existingPrimaryTools, "prune"], + } + logger.info("Added 'prune' to experimental.primary_tools via config mutation") + } + }, } }) satisfies Plugin diff --git a/lib/hooks.ts b/lib/hooks.ts index b3dc9da..88acd88 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -17,11 +17,15 @@ export function createChatMessageTransformHandler( input: {}, output: { messages: WithParts[] } ) => { - checkSession(state, logger, output.messages); + checkSession(client, state, logger, output.messages); + if (state.isSubAgent) { + return + } + syncToolCache(state, config, logger, output.messages); - deduplicate(state, logger, config, output.messages) + deduplicate(client, state, logger, config, output.messages) prune(state, logger, config, output.messages) diff --git a/lib/state/state.ts b/lib/state/state.ts index 6682d6f..8edbf52 100644 --- a/lib/state/state.ts +++ b/lib/state/state.ts @@ -2,8 +2,10 @@ import type { SessionState, ToolParameterEntry, WithParts } from "./types" import type { Logger } from "../logger" import { loadSessionState } from "./persistence" import { getLastUserMessage } from "../messages/utils" +import { isSubAgentSession } from "../utils" export const checkSession = ( + client: any, state: SessionState, logger: Logger, messages: WithParts[] @@ -19,6 +21,7 @@ export const checkSession = ( if (state.sessionId === null || state.sessionId !== lastSessionId) { logger.info(`Session changed: ${state.sessionId} -> ${lastSessionId}`) ensureSessionInitialized( + client, state, lastSessionId, logger @@ -31,6 +34,7 @@ export const checkSession = ( export function createSessionState(): SessionState { return { sessionId: null, + isSubAgent: false, prune: { toolIds: [] }, @@ -45,6 +49,7 @@ export function createSessionState(): SessionState { export function resetSessionState(state: SessionState): void { state.sessionId = null + state.isSubAgent = false state.prune = { toolIds: [] } @@ -57,6 +62,7 @@ export function resetSessionState(state: SessionState): void { } export async function ensureSessionInitialized( + client: any, state: SessionState, sessionId: string, logger: Logger @@ -72,6 +78,10 @@ export async function ensureSessionInitialized( resetSessionState(state) state.sessionId = sessionId + const isSubAgent = await isSubAgentSession(client, sessionId) + state.isSubAgent = isSubAgent + logger.info("isSubAgent = " + isSubAgent) + // Load session data from storage const persisted = await loadSessionState(sessionId, logger) if (persisted === null) { diff --git a/lib/state/types.ts b/lib/state/types.ts index 750ca38..f7353e0 100644 --- a/lib/state/types.ts +++ b/lib/state/types.ts @@ -25,6 +25,7 @@ export interface Prune { export interface SessionState { sessionId: string | null + isSubAgent: boolean prune: Prune stats: SessionStats toolParameters: Map diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts index f58a13a..f0887c2 100644 --- a/lib/strategies/deduplication.ts +++ b/lib/strategies/deduplication.ts @@ -9,6 +9,7 @@ import { calculateTokensSaved } from "../utils" * Modifies the session state in place to add pruned tool call IDs. 
*/ export const deduplicate = ( + client: any, state: SessionState, logger: Logger, config: PluginConfig, diff --git a/lib/strategies/on-idle.ts b/lib/strategies/on-idle.ts new file mode 100644 index 0000000..e69de29 diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index 08e680c..83d4ff5 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -60,7 +60,7 @@ export function createPruneTool( return "No numeric IDs provided. Format: [reason, id1, id2, ...] where reason is 'completion', 'noise', or 'consolidation'." } - await ensureSessionInitialized(state, sessionId, logger) + await ensureSessionInitialized(ctx.client, state, sessionId, logger) // Fetch messages to calculate tokens and find current agent const messagesResponse = await client.session.messages({ diff --git a/lib/utils.ts b/lib/utils.ts index 844edfb..471729c 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -60,7 +60,7 @@ export function formatTokenCount(tokens: number): string { /** * Checks if a session is a subagent session by looking for a parentID. */ -export async function isSubagentSession(client: any, sessionID: string): Promise { +export async function isSubAgentSession(client: any, sessionID: string): Promise { try { const result = await client.session.get({ path: { id: sessionID } }) return !!result.data?.parentID From 2d4ae040a44e3a217ff590f9c548be47c844ffd2 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 22:26:23 +0100 Subject: [PATCH 24/51] .gitignore --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 6fcd193..c4c6365 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,6 @@ Thumbs.db # OpenCode .opencode/ -AGENTS.md # Tests (local development only) tests/ From bd48b4ecd4cdbca2491e4bbfc7e2e70b3ac1bd88 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 22:28:14 +0100 Subject: [PATCH 25/51] cleanup --- lib/hooks.ts | 3 +-- lib/strategies/deduplication.ts | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/hooks.ts b/lib/hooks.ts index 88acd88..c3895df 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -24,8 +24,7 @@ export function createChatMessageTransformHandler( syncToolCache(state, config, logger, output.messages); - - deduplicate(client, state, logger, config, output.messages) + deduplicate(state, logger, config, output.messages) prune(state, logger, config, output.messages) diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts index f0887c2..f58a13a 100644 --- a/lib/strategies/deduplication.ts +++ b/lib/strategies/deduplication.ts @@ -9,7 +9,6 @@ import { calculateTokensSaved } from "../utils" * Modifies the session state in place to add pruned tool call IDs. 
*/ export const deduplicate = ( - client: any, state: SessionState, logger: Logger, config: PluginConfig, From ebf81518bda7630d3511e1be8d66434bd7504168 Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 22:54:06 +0100 Subject: [PATCH 26/51] aids --- index.ts | 3 ++- lib/hooks.ts | 21 +++++++++++++++++++++ lib/state/state.ts | 4 +++- lib/state/tool-cache.ts | 4 ++++ lib/state/types.ts | 1 + 5 files changed, 31 insertions(+), 2 deletions(-) diff --git a/index.ts b/index.ts index f30b041..dde0431 100644 --- a/index.ts +++ b/index.ts @@ -3,7 +3,7 @@ import { getConfig } from "./lib/config" import { Logger } from "./lib/logger" import { createSessionState } from "./lib/state" import { createPruneTool } from "./lib/strategies/prune-tool" -import { createChatMessageTransformHandler } from "./lib/hooks" +import { createChatMessageTransformHandler, createEventHandler } from "./lib/hooks" const plugin: Plugin = (async (ctx) => { const config = getConfig(ctx) @@ -54,6 +54,7 @@ const plugin: Plugin = (async (ctx) => { logger.info("Added 'prune' to experimental.primary_tools via config mutation") } }, + event: createEventHandler(ctx.client, config, state, logger), } }) satisfies Plugin diff --git a/lib/hooks.ts b/lib/hooks.ts index c3895df..9e94f44 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -31,3 +31,24 @@ export function createChatMessageTransformHandler( insertPruneToolContext(state, config, logger, output.messages) } } + +export function createEventHandler( + client: any, + config: PluginConfig, + state: SessionState, + logger: Logger +) { + return async ( + { event }: { event: any } + ) => { + if (state.sessionId === null || state.isSubAgent) { + return + } + + if (event.type === "session.status" && event.properties.status.type === "idle") { + if (!config.strategies.onIdle.enabled) { + return + } + } + } +} diff --git a/lib/state/state.ts b/lib/state/state.ts index 8edbf52..fc69883 100644 --- a/lib/state/state.ts +++ b/lib/state/state.ts @@ -43,7 +43,8 @@ export function createSessionState(): SessionState { totalPruneTokens: 0, }, toolParameters: new Map(), - nudgeCounter: 0 + nudgeCounter: 0, + lastToolPrune: false } } @@ -59,6 +60,7 @@ export function resetSessionState(state: SessionState): void { } state.toolParameters.clear() state.nudgeCounter = 0 + state.lastToolPrune = false } export async function ensureSessionInitialized( diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index d86005f..854aaaa 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -17,6 +17,7 @@ export async function syncToolCache( ): Promise { try { logger.info("Syncing tool parameters from OpenCode messages") + for (const msg of messages) { for (const part of msg.parts) { if (part.type !== "tool" || !part.callID || state.toolParameters.has(part.callID)) { @@ -36,6 +37,9 @@ export async function syncToolCache( if (!config.strategies.pruneTool.protectedTools.includes(part.tool)) { state.nudgeCounter++ } + + state.lastToolPrune = part.tool === "prune" + logger.info("lastToolPrune=" + String(state.lastToolPrune)) } } diff --git a/lib/state/types.ts b/lib/state/types.ts index f7353e0..89fc8e7 100644 --- a/lib/state/types.ts +++ b/lib/state/types.ts @@ -30,4 +30,5 @@ export interface SessionState { stats: SessionStats toolParameters: Map nudgeCounter: number + lastToolPrune: boolean } From 9d64c42d3645465b2b2c9f7f2075ec1ff45a81bd Mon Sep 17 00:00:00 2001 From: Jorgen Henriksen Date: Sat, 13 Dec 2025 23:28:35 +0100 Subject: [PATCH 27/51] on-idle re-implemented --- index.ts | 
4 +- lib/hooks.ts | 20 ++- lib/messages/prune.ts | 6 +- lib/strategies/index.ts | 3 +- lib/strategies/on-idle.ts | 317 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 345 insertions(+), 5 deletions(-) diff --git a/index.ts b/index.ts index dde0431..9a34d60 100644 --- a/index.ts +++ b/index.ts @@ -2,7 +2,7 @@ import type { Plugin } from "@opencode-ai/plugin" import { getConfig } from "./lib/config" import { Logger } from "./lib/logger" import { createSessionState } from "./lib/state" -import { createPruneTool } from "./lib/strategies/prune-tool" +import { createPruneTool } from "./lib/strategies" import { createChatMessageTransformHandler, createEventHandler } from "./lib/hooks" const plugin: Plugin = (async (ctx) => { @@ -54,7 +54,7 @@ const plugin: Plugin = (async (ctx) => { logger.info("Added 'prune' to experimental.primary_tools via config mutation") } }, - event: createEventHandler(ctx.client, config, state, logger), + event: createEventHandler(ctx.client, config, state, logger, ctx.directory), } }) satisfies Plugin diff --git a/lib/hooks.ts b/lib/hooks.ts index 9e94f44..a48183e 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -5,6 +5,7 @@ import { syncToolCache } from "./state/tool-cache" import { deduplicate } from "./strategies" import { prune, insertPruneToolContext } from "./messages" import { checkSession } from "./state" +import { runOnIdle } from "./strategies/on-idle" export function createChatMessageTransformHandler( @@ -36,7 +37,8 @@ export function createEventHandler( client: any, config: PluginConfig, state: SessionState, - logger: Logger + logger: Logger, + workingDirectory?: string ) { return async ( { event }: { event: any } @@ -49,6 +51,22 @@ export function createEventHandler( if (!config.strategies.onIdle.enabled) { return } + if (state.lastToolPrune) { + logger.info("Skipping OnIdle pruning - last tool was prune") + return + } + + try { + await runOnIdle( + client, + state, + logger, + config, + workingDirectory + ) + } catch (err: any) { + logger.error("OnIdle pruning failed", { error: err.message }) + } } } } diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index cca70d1..cb0ba18 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -40,6 +40,10 @@ export const insertPruneToolContext = ( logger: Logger, messages: WithParts[] ): void => { + if (!config.strategies.pruneTool.enabled) { + return + } + const lastUserMessage = getLastUserMessage(messages) if (!lastUserMessage || lastUserMessage.info.role !== 'user') { return @@ -48,7 +52,7 @@ export const insertPruneToolContext = ( const prunableToolsList = buildPrunableToolsList(state, config, logger, messages) let nudgeString = "" - if (config.strategies.pruneTool.nudge.enabled && state.nudgeCounter >= config.strategies.pruneTool.nudge.frequency) { + if (state.nudgeCounter >= config.strategies.pruneTool.nudge.frequency) { logger.info("Inserting prune nudge message") nudgeString = "\n" + NUDGE_STRING } diff --git a/lib/strategies/index.ts b/lib/strategies/index.ts index 0bd83ff..105d9c8 100644 --- a/lib/strategies/index.ts +++ b/lib/strategies/index.ts @@ -1,2 +1,3 @@ export { deduplicate } from "./deduplication" - +export { runOnIdle } from "./on-idle" +export { createPruneTool } from "./prune-tool" diff --git a/lib/strategies/on-idle.ts b/lib/strategies/on-idle.ts index e69de29..a0f07d0 100644 --- a/lib/strategies/on-idle.ts +++ b/lib/strategies/on-idle.ts @@ -0,0 +1,317 @@ +import { z } from "zod" +import type { SessionState, WithParts, ToolParameterEntry } from "../state" +import type { 
Logger } from "../logger" +import type { PluginConfig } from "../config" +import { buildAnalysisPrompt } from "../prompt" +import { selectModel, extractModelFromSession, ModelInfo } from "../model-selector" +import { calculateTokensSaved, findCurrentAgent } from "../utils" +import { saveSessionState } from "../state/persistence" +import { sendUnifiedNotification } from "../ui/notification" + +export interface OnIdleResult { + prunedCount: number + tokensSaved: number + prunedIds: string[] +} + +/** + * Parse messages to extract tool information. + */ +function parseMessages( + messages: WithParts[], + toolParametersCache: Map +): { + toolCallIds: string[] + toolMetadata: Map +} { + const toolCallIds: string[] = [] + const toolMetadata = new Map() + + for (const msg of messages) { + if (msg.parts) { + for (const part of msg.parts) { + if (part.type === "tool" && part.callID) { + toolCallIds.push(part.callID) + + const cachedData = toolParametersCache.get(part.callID) + const parameters = cachedData?.parameters ?? part.state?.input ?? {} + + toolMetadata.set(part.callID, { + tool: part.tool, + parameters: parameters, + status: part.state?.status, + error: part.state?.status === "error" ? part.state.error : undefined + }) + } + } + } + } + + return { toolCallIds, toolMetadata } +} + +/** + * Replace pruned tool outputs in messages for LLM analysis. + */ +function replacePrunedToolOutputs(messages: WithParts[], prunedIds: string[]): WithParts[] { + if (prunedIds.length === 0) return messages + + const prunedIdsSet = new Set(prunedIds) + + return messages.map(msg => { + if (!msg.parts) return msg + + return { + ...msg, + parts: msg.parts.map((part: any) => { + if (part.type === 'tool' && + part.callID && + prunedIdsSet.has(part.callID) && + part.state?.output) { + return { + ...part, + state: { + ...part.state, + output: '[Output removed to save context - information superseded or no longer needed]' + } + } + } + return part + }) + } + }) as WithParts[] +} + +/** + * Run LLM analysis to determine which tool calls can be pruned. 
+ */ +async function runLlmAnalysis( + client: any, + state: SessionState, + logger: Logger, + config: PluginConfig, + messages: WithParts[], + unprunedToolCallIds: string[], + alreadyPrunedIds: string[], + toolMetadata: Map, + workingDirectory?: string +): Promise { + const protectedToolCallIds: string[] = [] + const prunableToolCallIds = unprunedToolCallIds.filter(id => { + const metadata = toolMetadata.get(id) + if (metadata && config.strategies.onIdle.protectedTools.includes(metadata.tool)) { + protectedToolCallIds.push(id) + return false + } + return true + }) + + if (prunableToolCallIds.length === 0) { + return [] + } + + // Get model info from messages + let validModelInfo: ModelInfo | undefined = undefined + if (messages.length > 0) { + const lastMessage = messages[messages.length - 1] + const model = (lastMessage.info as any)?.model + if (model?.providerID && model?.modelID) { + validModelInfo = { + providerID: model.providerID, + modelID: model.modelID + } + } + } + + const modelSelection = await selectModel( + validModelInfo, + logger, + config.strategies.onIdle.model, + workingDirectory + ) + + logger.info(`OnIdle Model: ${modelSelection.modelInfo.providerID}/${modelSelection.modelInfo.modelID}`, { + source: modelSelection.source + }) + + if (modelSelection.failedModel && config.strategies.onIdle.showModelErrorToasts) { + const skipAi = modelSelection.source === 'fallback' && config.strategies.onIdle.strictModelSelection + try { + await client.tui.showToast({ + body: { + title: skipAi ? "DCP: AI analysis skipped" : "DCP: Model fallback", + message: skipAi + ? `${modelSelection.failedModel.providerID}/${modelSelection.failedModel.modelID} failed\nAI analysis skipped (strictModelSelection enabled)` + : `${modelSelection.failedModel.providerID}/${modelSelection.failedModel.modelID} failed\nUsing ${modelSelection.modelInfo.providerID}/${modelSelection.modelInfo.modelID}`, + variant: "info", + duration: 5000 + } + }) + } catch { + // Ignore toast errors + } + } + + if (modelSelection.source === 'fallback' && config.strategies.onIdle.strictModelSelection) { + logger.info("Skipping AI analysis (fallback model, strictModelSelection enabled)") + return [] + } + + const { generateObject } = await import('ai') + + const sanitizedMessages = replacePrunedToolOutputs(messages, alreadyPrunedIds) + + const analysisPrompt = buildAnalysisPrompt( + prunableToolCallIds, + sanitizedMessages, + alreadyPrunedIds, + protectedToolCallIds + ) + + const result = await generateObject({ + model: modelSelection.model, + schema: z.object({ + pruned_tool_call_ids: z.array(z.string()), + reasoning: z.string(), + }), + prompt: analysisPrompt + }) + + const rawLlmPrunedIds = result.object.pruned_tool_call_ids + const llmPrunedIds = rawLlmPrunedIds.filter(id => + prunableToolCallIds.includes(id) + ) + + // Always log LLM output as debug + const reasoning = result.object.reasoning.replace(/\n+/g, ' ').replace(/\s+/g, ' ').trim() + logger.debug(`OnIdle LLM output`, { + pruned_tool_call_ids: rawLlmPrunedIds, + reasoning: reasoning + }) + + return llmPrunedIds +} + +/** + * Run the onIdle pruning strategy. + * This is called when the session transitions to idle state. 
+ */ +export async function runOnIdle( + client: any, + state: SessionState, + logger: Logger, + config: PluginConfig, + workingDirectory?: string +): Promise { + try { + if (!state.sessionId) { + return null + } + + const sessionId = state.sessionId + + // Fetch session info and messages + const [sessionInfoResponse, messagesResponse] = await Promise.all([ + client.session.get({ path: { id: sessionId } }), + client.session.messages({ path: { id: sessionId }}) + ]) + + const sessionInfo = sessionInfoResponse.data + const messages: WithParts[] = messagesResponse.data || messagesResponse + + if (!messages || messages.length < 3) { + return null + } + + const currentAgent = findCurrentAgent(messages) + const { toolCallIds, toolMetadata } = parseMessages(messages, state.toolParameters) + + const alreadyPrunedIds = state.prune.toolIds + const unprunedToolCallIds = toolCallIds.filter(id => !alreadyPrunedIds.includes(id)) + + if (unprunedToolCallIds.length === 0) { + return null + } + + // Count prunable tools (excluding protected) + const candidateCount = unprunedToolCallIds.filter(id => { + const metadata = toolMetadata.get(id) + return !metadata || !config.strategies.onIdle.protectedTools.includes(metadata.tool) + }).length + + if (candidateCount === 0) { + return null + } + + // Run LLM analysis + const llmPrunedIds = await runLlmAnalysis( + client, + state, + logger, + config, + messages, + unprunedToolCallIds, + alreadyPrunedIds, + toolMetadata, + workingDirectory + ) + + const newlyPrunedIds = llmPrunedIds.filter(id => !alreadyPrunedIds.includes(id)) + + if (newlyPrunedIds.length === 0) { + return null + } + + // Log the tool IDs being pruned with their tool names + for (const id of newlyPrunedIds) { + const metadata = toolMetadata.get(id) + const toolName = metadata?.tool || 'unknown' + logger.info(`OnIdle pruning tool: ${toolName}`, { callID: id }) + } + + // Update state + const allPrunedIds = [...new Set([...alreadyPrunedIds, ...newlyPrunedIds])] + state.prune.toolIds = allPrunedIds + + state.stats.pruneTokenCounter += calculateTokensSaved(messages, newlyPrunedIds) + + // Build tool metadata map for notification + const prunedToolMetadata = new Map() + for (const id of newlyPrunedIds) { + const metadata = toolMetadata.get(id) + if (metadata) { + prunedToolMetadata.set(id, metadata) + } + } + + // Send notification + await sendUnifiedNotification( + client, + logger, + config, + state, + sessionId, + newlyPrunedIds, + prunedToolMetadata, + undefined, // reason + currentAgent, + workingDirectory || "" + ) + + state.stats.totalPruneTokens += state.stats.pruneTokenCounter + state.stats.pruneTokenCounter = 0 + state.nudgeCounter = 0 + state.lastToolPrune = true + + // Persist state + const sessionName = sessionInfo?.title + saveSessionState(state, logger, sessionName).catch(err => { + logger.error("Failed to persist state", { error: err.message }) + }) + + logger.info(`OnIdle: Pruned ${newlyPrunedIds.length}/${candidateCount} tools`) + } catch (error: any) { + logger.error("OnIdle analysis failed", { error: error.message }) + return null + } +} From cc6cc68ec35eecc9c1518479646636bca30743c2 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sat, 13 Dec 2025 17:49:00 -0500 Subject: [PATCH 28/51] refactor: use shared buildToolIdList from utils --- lib/strategies/deduplication.ts | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts index f58a13a..50b30fa 100644 --- a/lib/strategies/deduplication.ts 
+++ b/lib/strategies/deduplication.ts @@ -1,7 +1,7 @@ import { PluginConfig } from "../config" import { Logger } from "../logger" import type { SessionState, WithParts } from "../state" -import { calculateTokensSaved } from "../utils" +import { buildToolIdList, calculateTokensSaved } from "../utils" /** * Deduplication strategy - prunes older tool calls that have identical @@ -75,20 +75,6 @@ export const deduplicate = ( } } -function buildToolIdList(messages: WithParts[]): string[] { - const toolIds: string[] = [] - for (const msg of messages) { - if (msg.parts) { - for (const part of msg.parts) { - if (part.type === 'tool' && part.callID && part.tool) { - toolIds.push(part.callID) - } - } - } - } - return toolIds -} - function createToolSignature(tool: string, parameters?: any): string { if (!parameters) { return tool From 8a951016e9fee27270b8ec657c3fd093328836ec Mon Sep 17 00:00:00 2001 From: spoons-and-mirrors <212802214+spoons-and-mirrors@users.noreply.github.com> Date: Sun, 14 Dec 2025 00:01:50 +0100 Subject: [PATCH 29/51] Prevents compacted tools from appearing in the prunable tools list --- lib/messages/prune.ts | 3 +++ lib/state/tool-cache.ts | 1 + lib/state/types.ts | 1 + 3 files changed, 5 insertions(+) diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index cb0ba18..fc97aee 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -24,6 +24,9 @@ const buildPrunableToolsList = ( if (config.strategies.pruneTool.protectedTools.includes(toolParameterEntry.tool)) { return } + if (toolParameterEntry.compacted) { + return + } const numericId = toolIdList.indexOf(toolCallId) const paramKey = extractParameterKey(toolParameterEntry.tool, toolParameterEntry.parameters) const description = paramKey ? `${toolParameterEntry.tool}, ${paramKey}` : toolParameterEntry.tool diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index 854aaaa..3325367 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -31,6 +31,7 @@ export async function syncToolCache( parameters: part.state?.input ?? {}, status: part.state.status as ToolStatus | undefined, error: part.state.status === "error" ? part.state.error : undefined, + compacted: part.state.status === "completed" && !!part.state.time.compacted, } ) diff --git a/lib/state/types.ts b/lib/state/types.ts index 89fc8e7..e1b92a7 100644 --- a/lib/state/types.ts +++ b/lib/state/types.ts @@ -12,6 +12,7 @@ export interface ToolParameterEntry { parameters: any status?: ToolStatus error?: string + compacted?: boolean } export interface SessionStats { From c8a5bbce00d223cc2d0d7895acce676a64feb2d5 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sat, 13 Dec 2025 18:16:52 -0500 Subject: [PATCH 30/51] refactor: consolidate duplicate extractParameterKey function --- lib/messages/utils.ts | 2 ++ lib/ui/display-utils.ts | 77 ++--------------------------------------- 2 files changed, 4 insertions(+), 75 deletions(-) diff --git a/lib/messages/utils.ts b/lib/messages/utils.ts index 47b6d44..1a9dbf6 100644 --- a/lib/messages/utils.ts +++ b/lib/messages/utils.ts @@ -5,6 +5,8 @@ import type { WithParts } from "../state" * Used by both deduplication and AI analysis to show what was pruned. 
*/ export const extractParameterKey = (tool: string, parameters: any): string => { + if (!parameters) return '' + if (tool === "read" && parameters.filePath) { return parameters.filePath } diff --git a/lib/ui/display-utils.ts b/lib/ui/display-utils.ts index 2edc44f..deb23a3 100644 --- a/lib/ui/display-utils.ts +++ b/lib/ui/display-utils.ts @@ -1,78 +1,5 @@ import { ToolParameterEntry } from "../state" - -/** - * Extracts a human-readable key from tool metadata for display purposes. - * Used by both deduplication and AI analysis to show what was pruned. - */ -export function extractParameterKey(metadata: { tool: string, parameters?: any }): string { - if (!metadata.parameters) return '' - - const { tool, parameters } = metadata - - if (tool === "read" && parameters.filePath) { - return parameters.filePath - } - if (tool === "write" && parameters.filePath) { - return parameters.filePath - } - if (tool === "edit" && parameters.filePath) { - return parameters.filePath - } - - if (tool === "list") { - return parameters.path || '(current directory)' - } - if (tool === "glob") { - if (parameters.pattern) { - const pathInfo = parameters.path ? ` in ${parameters.path}` : "" - return `"${parameters.pattern}"${pathInfo}` - } - return '(unknown pattern)' - } - if (tool === "grep") { - if (parameters.pattern) { - const pathInfo = parameters.path ? ` in ${parameters.path}` : "" - return `"${parameters.pattern}"${pathInfo}` - } - return '(unknown pattern)' - } - - if (tool === "bash") { - if (parameters.description) return parameters.description - if (parameters.command) { - return parameters.command.length > 50 - ? parameters.command.substring(0, 50) + "..." - : parameters.command - } - } - - if (tool === "webfetch" && parameters.url) { - return parameters.url - } - if (tool === "websearch" && parameters.query) { - return `"${parameters.query}"` - } - if (tool === "codesearch" && parameters.query) { - return `"${parameters.query}"` - } - - if (tool === "todowrite") { - return `${parameters.todos?.length || 0} todos` - } - if (tool === "todoread") { - return "read todo list" - } - - if (tool === "task" && parameters.description) { - return parameters.description - } - - const paramStr = JSON.stringify(parameters) - if (paramStr === '{}' || paramStr === '[]' || paramStr === 'null') { - return '' - } - return paramStr.substring(0, 50) -} +import { extractParameterKey } from "../messages/utils" export function truncate(str: string, maxLen: number = 60): string { if (str.length <= maxLen) return str @@ -118,7 +45,7 @@ export function formatPrunedItemsList( const metadata = toolMetadata.get(id) if (metadata) { - const paramKey = extractParameterKey(metadata) + const paramKey = extractParameterKey(metadata.tool, metadata.parameters) if (paramKey) { // Use 60 char limit to match notification style const displayKey = truncate(shortenPath(paramKey, workingDirectory), 60) From 583d3431564a08da84c3cab297676ad9414ae82b Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sat, 13 Dec 2025 18:21:25 -0500 Subject: [PATCH 31/51] enable pruneTool by default and reorder above onIdle --- README.md | 22 +++++++++++----------- lib/config.ts | 36 ++++++++++++++++++------------------ 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index 965f5f2..a39d818 100644 --- a/README.md +++ b/README.md @@ -70,6 +70,17 @@ DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.j "pruneThinkingBlocks": { "enabled": false }, + // Exposes a prune tool to your LLM to call when it 
determines pruning is necessary + "pruneTool": { + "enabled": true, + // Additional tools to protect from pruning + "protectedTools": [], + // Nudge the LLM to use the prune tool (every tool results) + "nudge": { + "enabled": true, + "frequency": 10 + } + }, // (Legacy) Run an LLM to analyze what tool calls are no longer relevant on idle "onIdle": { "enabled": false, @@ -81,17 +92,6 @@ DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.j "strictModelSelection": false, // Additional tools to protect from pruning "protectedTools": [] - }, - // Exposes a prune tool to your LLM to call when it determines pruning is necessary - "pruneTool": { - "enabled": false, - // Additional tools to protect from pruning - "protectedTools": [], - // Nudge the LLM to use the prune tool (every tool results) - "nudge": { - "enabled": true, - "frequency": 10 - } } } } diff --git a/lib/config.ts b/lib/config.ts index 3dfdba2..83be6c9 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -240,19 +240,19 @@ const defaultConfig: PluginConfig = { pruneThinkingBlocks: { enabled: false }, - onIdle: { - enabled: false, - showModelErrorToasts: true, - strictModelSelection: false, - protectedTools: [...DEFAULT_PROTECTED_TOOLS] - }, pruneTool: { - enabled: false, + enabled: true, protectedTools: [...DEFAULT_PROTECTED_TOOLS], nudge: { enabled: true, frequency: 10 } + }, + onIdle: { + enabled: false, + showModelErrorToasts: true, + strictModelSelection: false, + protectedTools: [...DEFAULT_PROTECTED_TOOLS] } } } @@ -326,6 +326,17 @@ function createDefaultConfig(): void { "pruneThinkingBlocks": { "enabled": false }, + // Exposes a prune tool to your LLM to call when it determines pruning is necessary + "pruneTool": { + "enabled": true, + // Additional tools to protect from pruning + "protectedTools": [], + // Nudge the LLM to use the prune tool (every tool results) + "nudge": { + "enabled": true, + "frequency": 10 + } + }, // (Legacy) Run an LLM to analyze what tool calls are no longer relevant on idle "onIdle": { "enabled": false, @@ -337,17 +348,6 @@ function createDefaultConfig(): void { "strictModelSelection": false, // Additional tools to protect from pruning "protectedTools": [] - }, - // Exposes a prune tool to your LLM to call when it determines pruning is necessary - "pruneTool": { - "enabled": false, - // Additional tools to protect from pruning - "protectedTools": [], - // Nudge the LLM to use the prune tool (every tool results) - "nudge": { - "enabled": true, - "frequency": 10 - } } } } From 17c22a91f3c13af95069a2a0057019b382e85d8b Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sat, 13 Dec 2025 18:28:23 -0500 Subject: [PATCH 32/51] refactor: remove dead code from Logger class --- lib/logger.ts | 178 -------------------------------------------------- 1 file changed, 178 deletions(-) diff --git a/lib/logger.ts b/lib/logger.ts index 6ccbfa4..0081db1 100644 --- a/lib/logger.ts +++ b/lib/logger.ts @@ -6,7 +6,6 @@ import { homedir } from "os" export class Logger { private logDir: string public enabled: boolean - private fileCounter: number = 0 constructor(enabled: boolean) { this.enabled = enabled @@ -109,181 +108,4 @@ export class Logger { const component = this.getCallerFile(2) return this.write("ERROR", component, message, data) } - - private parseJanitorPrompt(prompt: string): { - instructions: string - availableToolCallIds: string[] - sessionHistory: any[] - responseSchema: any - } | null { - try { - const idsMatch = prompt.match(/Available tool call IDs for analysis:\s*([^\n]+)/) - 
const availableToolCallIds = idsMatch - ? idsMatch[1].split(',').map(id => id.trim()) - : [] - - const historyMatch = prompt.match(/Session history[^\n]*:\s*\n([\s\S]*?)\n\nYou MUST respond/) - let sessionHistory: any[] = [] - - if (historyMatch) { - const historyText = historyMatch[1] - - const fixedJson = this.escapeNewlinesInJson(historyText) - sessionHistory = JSON.parse(fixedJson) - } - - const instructionsMatch = prompt.match(/([\s\S]*?)\n\nIMPORTANT: Available tool call IDs/) - const instructions = instructionsMatch - ? instructionsMatch[1].trim() - : '' - - const schemaMatch = prompt.match(/matching this exact schema:\s*\n(\{[\s\S]*?\})\s*$/) - const responseSchema = schemaMatch - ? schemaMatch[1] - : null - - return { - instructions, - availableToolCallIds, - sessionHistory, - responseSchema - } - } catch (error) { - return null - } - } - - private escapeNewlinesInJson(jsonText: string): string { - let result = '' - let inString = false - - for (let i = 0; i < jsonText.length; i++) { - const char = jsonText[i] - const prevChar = i > 0 ? jsonText[i - 1] : '' - - if (char === '"' && prevChar !== '\\') { - inString = !inString - result += char - } else if (char === '\n' && inString) { - result += '\\n' - } else { - result += char - } - } - - return result - } - - private extractReasoningBlocks(sessionMessages: any[]): any[] { - const reasoningBlocks: any[] = [] - - for (const msg of sessionMessages) { - if (!msg.parts) continue - - for (const part of msg.parts) { - if (part.type === "reasoning") { - // Calculate encrypted content size for different providers - let encryptedContentLength = 0 - if (part.metadata?.openai?.reasoningEncryptedContent) { - encryptedContentLength = part.metadata.openai.reasoningEncryptedContent.length - } else if (part.metadata?.anthropic?.signature) { - encryptedContentLength = part.metadata.anthropic.signature.length - } else if (part.metadata?.google?.thoughtSignature) { - encryptedContentLength = part.metadata.google.thoughtSignature.length - } - - reasoningBlocks.push({ - messageId: msg.id, - messageRole: msg.role, - text: part.text, - textLength: part.text?.length || 0, - encryptedContentLength, - time: part.time, - hasMetadata: !!part.metadata, - metadataKeys: part.metadata ? 
Object.keys(part.metadata) : [] - }) - } - } - } - - return reasoningBlocks - } - - async saveWrappedContext(sessionID: string, messages: any[], metadata: any, sessionMessages?: any[]) { - if (!this.enabled) return - - try { - await this.ensureLogDir() - - const aiContextDir = join(this.logDir, "ai-context") - if (!existsSync(aiContextDir)) { - await mkdir(aiContextDir, { recursive: true }) - } - - const timestamp = new Date().toISOString().replace(/:/g, '-').replace(/\./g, '-') - const counter = (this.fileCounter++).toString().padStart(3, '0') - const filename = `${timestamp}_${counter}_${sessionID.substring(0, 15)}.json` - const filepath = join(aiContextDir, filename) - - const isJanitorShadow = sessionID === "janitor-shadow" && - messages.length === 1 && - messages[0]?.role === 'user' && - typeof messages[0]?.content === 'string' - - let content: any - - if (isJanitorShadow) { - const parsed = this.parseJanitorPrompt(messages[0].content) - - if (parsed) { - content = { - timestamp: new Date().toISOString(), - sessionID, - metadata, - janitorAnalysis: { - instructions: parsed.instructions, - availableToolCallIds: parsed.availableToolCallIds, - protectedTools: ["task", "todowrite", "todoread"], - sessionHistory: parsed.sessionHistory, - responseSchema: parsed.responseSchema - }, - rawPrompt: messages[0].content - } - } else { - content = { - timestamp: new Date().toISOString(), - sessionID, - metadata, - messages, - note: "Failed to parse janitor prompt structure" - } - } - } else { - // Extract reasoning blocks from session messages if available - const reasoningBlocks = sessionMessages - ? this.extractReasoningBlocks(sessionMessages) - : [] - - content = { - timestamp: new Date().toISOString(), - sessionID, - metadata, - messages, - ...(reasoningBlocks.length > 0 && { - reasoning: { - count: reasoningBlocks.length, - totalTextCharacters: reasoningBlocks.reduce((sum, b) => sum + b.textLength, 0), - totalEncryptedCharacters: reasoningBlocks.reduce((sum, b) => sum + b.encryptedContentLength, 0), - blocks: reasoningBlocks - } - }) - } - } - - const jsonString = JSON.stringify(content, null, 2) - - await writeFile(filepath, jsonString) - } catch (error) { - } - } } From 188f5ee82892ceeffb4be47846d7db76a731e06e Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sat, 13 Dec 2025 18:46:39 -0500 Subject: [PATCH 33/51] refactor: remove unused version-checker module --- lib/version-checker.ts | 84 ------------------------------------------ 1 file changed, 84 deletions(-) delete mode 100644 lib/version-checker.ts diff --git a/lib/version-checker.ts b/lib/version-checker.ts deleted file mode 100644 index 5aed6b1..0000000 --- a/lib/version-checker.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { readFileSync } from 'fs' -import { join, dirname } from 'path' -import { fileURLToPath } from 'url' - -export const PACKAGE_NAME = '@tarquinen/opencode-dcp' -export const NPM_REGISTRY_URL = `https://registry.npmjs.org/${PACKAGE_NAME}/latest` - -const __filename = fileURLToPath(import.meta.url) -const __dirname = dirname(__filename) - -export function getLocalVersion(): string { - try { - const pkgPath = join(__dirname, '../../package.json') - const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8')) - return pkg.version - } catch { - return '0.0.0' - } -} - -export async function getNpmVersion(): Promise { - try { - const controller = new AbortController() - const timeout = setTimeout(() => controller.abort(), 5000) - - const res = await fetch(NPM_REGISTRY_URL, { - signal: controller.signal, - headers: { 'Accept': 
'application/json' } - }) - clearTimeout(timeout) - - if (!res.ok) return null - const data = await res.json() as { version?: string } - return data.version ?? null - } catch { - return null - } -} - -export function isOutdated(local: string, remote: string): boolean { - const parseVersion = (v: string) => v.split('.').map(n => parseInt(n, 10) || 0) - const [localParts, remoteParts] = [parseVersion(local), parseVersion(remote)] - - for (let i = 0; i < Math.max(localParts.length, remoteParts.length); i++) { - const l = localParts[i] ?? 0 - const r = remoteParts[i] ?? 0 - if (r > l) return true - if (l > r) return false - } - return false -} - -export async function checkForUpdates(client: any, logger?: { info: (component: string, message: string, data?: any) => void }, showToast: boolean = true): Promise { - try { - const local = getLocalVersion() - const npm = await getNpmVersion() - - if (!npm) { - logger?.info("version", "Version check skipped", { reason: "npm fetch failed" }) - return - } - - if (!isOutdated(local, npm)) { - logger?.info("version", "Up to date", { local, npm }) - return - } - - logger?.info("version", "Update available", { local, npm }) - - if (!showToast) { - return - } - - await client.tui.showToast({ - body: { - title: "DCP: Update available", - message: `v${local} → v${npm}\nUse ${PACKAGE_NAME}@latest to auto-update`, - variant: "info", - duration: 6000 - } - }) - } catch { - } -} From acb254384ddcbed927e23b6b2e021a62c955ea56 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sat, 13 Dec 2025 23:55:01 -0500 Subject: [PATCH 34/51] fix: skip pruned tools when counting toward nudge threshold --- lib/state/tool-cache.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index 3325367..1c500f0 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -6,8 +6,6 @@ const MAX_TOOL_CACHE_SIZE = 500 /** * Sync tool parameters from OpenCode's session.messages() API. - * This is the single source of truth for tool parameters, replacing - * format-specific parsing from LLM API requests. 
*/ export async function syncToolCache( state: SessionState, @@ -24,6 +22,8 @@ export async function syncToolCache( continue } + const alreadyPruned = state.prune.toolIds.includes(part.callID) + state.toolParameters.set( part.callID, { @@ -35,7 +35,7 @@ export async function syncToolCache( } ) - if (!config.strategies.pruneTool.protectedTools.includes(part.tool)) { + if (!alreadyPruned && !config.strategies.pruneTool.protectedTools.includes(part.tool)) { state.nudgeCounter++ } From 8b5037ef4819c6135273eaa4e9a45d49b3d01670 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sat, 13 Dec 2025 23:55:05 -0500 Subject: [PATCH 35/51] fix: await session state load before syncing tool cache --- lib/hooks.ts | 5 +++-- lib/state/state.ts | 15 ++++++--------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/lib/hooks.ts b/lib/hooks.ts index a48183e..72fda69 100644 --- a/lib/hooks.ts +++ b/lib/hooks.ts @@ -14,11 +14,12 @@ export function createChatMessageTransformHandler( logger: Logger, config: PluginConfig ) { - return async( + return async ( input: {}, output: { messages: WithParts[] } ) => { - checkSession(client, state, logger, output.messages); + await checkSession(client, state, logger, output.messages) + if (state.isSubAgent) { return } diff --git a/lib/state/state.ts b/lib/state/state.ts index fc69883..91e3f92 100644 --- a/lib/state/state.ts +++ b/lib/state/state.ts @@ -4,12 +4,12 @@ import { loadSessionState } from "./persistence" import { getLastUserMessage } from "../messages/utils" import { isSubAgentSession } from "../utils" -export const checkSession = ( +export const checkSession = async ( client: any, state: SessionState, logger: Logger, messages: WithParts[] -) => { +): Promise => { const lastUserMessage = getLastUserMessage(messages) if (!lastUserMessage) { @@ -20,14 +20,11 @@ export const checkSession = ( if (state.sessionId === null || state.sessionId !== lastSessionId) { logger.info(`Session changed: ${state.sessionId} -> ${lastSessionId}`) - ensureSessionInitialized( - client, - state, - lastSessionId, - logger - ).catch((err) => { + try { + await ensureSessionInitialized(client, state, lastSessionId, logger) + } catch (err: any) { logger.error("Failed to initialize session state", { error: err.message }) - } ) + } } } From b5daf99ef17b326172c6040d83e93da674ffe85b Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 2025 00:08:04 -0500 Subject: [PATCH 36/51] fix: save session state after updating token stats --- lib/strategies/prune-tool.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index 83d4ff5..1fccb44 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -73,9 +73,6 @@ export function createPruneTool( const pruneToolIds: string[] = getPruneToolIds(numericToolIds, toolIdList) state.prune.toolIds.push(...pruneToolIds) - saveSessionState(state, logger) - .catch(err => logger.error("Failed to persist state", { error: err.message })) - const toolMetadata = new Map() for (const id of pruneToolIds) { const toolParameters = state.toolParameters.get(id) @@ -104,6 +101,9 @@ export function createPruneTool( state.stats.pruneTokenCounter = 0 state.nudgeCounter = 0 + saveSessionState(state, logger) + .catch(err => logger.error("Failed to persist state", { error: err.message })) + return formatPruningResultForTool( pruneToolIds, toolMetadata, From 31cd1d9a21185abef085ca5a46fe838bc4ee13ad Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 
2025 01:25:19 -0500 Subject: [PATCH 37/51] refactor: consolidate message utilities and eliminate duplicate iteration --- lib/messages/prune.ts | 3 +-- lib/messages/utils.ts | 39 +++++++++++++++++++++++++++++ lib/strategies/deduplication.ts | 3 ++- lib/strategies/on-idle.ts | 3 ++- lib/strategies/prune-tool.ts | 3 ++- lib/utils.ts | 44 --------------------------------- 6 files changed, 46 insertions(+), 49 deletions(-) diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index fc97aee..e0cec3b 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -1,8 +1,7 @@ import type { SessionState, WithParts } from "../state" import type { Logger } from "../logger" import type { PluginConfig } from "../config" -import { buildToolIdList } from "../utils" -import { getLastUserMessage, extractParameterKey } from "./utils" +import { getLastUserMessage, extractParameterKey, buildToolIdList } from "./utils" import { loadPrompt } from "../prompt" const PRUNED_TOOL_OUTPUT_REPLACEMENT = '[Output removed to save context - information superseded or no longer needed]' diff --git a/lib/messages/utils.ts b/lib/messages/utils.ts index 1a9dbf6..92405d2 100644 --- a/lib/messages/utils.ts +++ b/lib/messages/utils.ts @@ -83,3 +83,42 @@ export const getLastUserMessage = ( } return null } + +/** + * Finds the current agent from messages by scanning backward for user messages. + */ +export function findCurrentAgent(messages: WithParts[]): string | undefined { + const userMsg = getLastUserMessage(messages) + if (!userMsg) return undefined + return (userMsg.info as any).agent || 'build' +} + +/** + * Builds a list of tool call IDs from messages. + */ +export function buildToolIdList(messages: WithParts[]): string[] { + const toolIds: string[] = [] + for (const msg of messages) { + if (msg.parts) { + for (const part of msg.parts) { + if (part.type === 'tool' && part.callID && part.tool) { + toolIds.push(part.callID) + } + } + } + } + return toolIds +} + +/** + * Prunes numeric tool IDs to valid tool call IDs based on the provided tool ID list. 
+ */ +export function getPruneToolIds(numericToolIds: number[], toolIdList: string[]): string[] { + const pruneToolIds: string[] = [] + for (const index of numericToolIds) { + if (!isNaN(index) && index >= 0 && index < toolIdList.length) { + pruneToolIds.push(toolIdList[index]) + } + } + return pruneToolIds +} diff --git a/lib/strategies/deduplication.ts b/lib/strategies/deduplication.ts index 50b30fa..61cc484 100644 --- a/lib/strategies/deduplication.ts +++ b/lib/strategies/deduplication.ts @@ -1,7 +1,8 @@ import { PluginConfig } from "../config" import { Logger } from "../logger" import type { SessionState, WithParts } from "../state" -import { buildToolIdList, calculateTokensSaved } from "../utils" +import { calculateTokensSaved } from "../utils" +import { buildToolIdList } from "../messages/utils" /** * Deduplication strategy - prunes older tool calls that have identical diff --git a/lib/strategies/on-idle.ts b/lib/strategies/on-idle.ts index a0f07d0..49887d3 100644 --- a/lib/strategies/on-idle.ts +++ b/lib/strategies/on-idle.ts @@ -4,7 +4,8 @@ import type { Logger } from "../logger" import type { PluginConfig } from "../config" import { buildAnalysisPrompt } from "../prompt" import { selectModel, extractModelFromSession, ModelInfo } from "../model-selector" -import { calculateTokensSaved, findCurrentAgent } from "../utils" +import { calculateTokensSaved } from "../utils" +import { findCurrentAgent } from "../messages/utils" import { saveSessionState } from "../state/persistence" import { sendUnifiedNotification } from "../ui/notification" diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index 1fccb44..d0a07ab 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -1,7 +1,8 @@ import { tool } from "@opencode-ai/plugin" import type { SessionState, ToolParameterEntry, WithParts } from "../state" import type { PluginConfig } from "../config" -import { findCurrentAgent, buildToolIdList, getPruneToolIds, calculateTokensSaved } from "../utils" +import { findCurrentAgent, buildToolIdList, getPruneToolIds } from "../messages/utils" +import { calculateTokensSaved } from "../utils" import { PruneReason, sendUnifiedNotification } from "../ui/notification" import { formatPruningResultForTool } from "../ui/display-utils" import { ensureSessionInitialized } from "../state" diff --git a/lib/utils.ts b/lib/utils.ts index 471729c..da1821b 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -68,47 +68,3 @@ export async function isSubAgentSession(client: any, sessionID: string): Promise return false } } - -/** - * Finds the current agent from messages by scanning backward for user messages. - */ -export function findCurrentAgent(messages: any[]): string | undefined { - for (let i = messages.length - 1; i >= 0; i--) { - const msg = messages[i] - const info = msg.info - if (info?.role === 'user') { - return info.agent || 'build' - } - } - return undefined -} - -/** - * Builds a list of tool call IDs from messages. - */ -export function buildToolIdList(messages: WithParts[]): string[] { - const toolIds: string[] = [] - for (const msg of messages) { - if (msg.parts) { - for (const part of msg.parts) { - if (part.type === 'tool' && part.callID && part.tool) { - toolIds.push(part.callID) - } - } - } - } - return toolIds -} - -/** - * Prunes numeric tool IDs to valid tool call IDs based on the provided tool ID list. 
- */ -export function getPruneToolIds(numericToolIds: number[], toolIdList: string[]): string[] { - const pruneToolIds: string[] = [] - for (const index of numericToolIds) { - if (!isNaN(index) && index >= 0 && index < toolIdList.length) { - pruneToolIds.push(toolIdList[index]) - } - } - return pruneToolIds -} From 1637f1b18362fcd9835886e9b0f50ee1615609e9 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 2025 01:28:46 -0500 Subject: [PATCH 38/51] cleanup --- lib/messages/utils.ts | 10 ---------- lib/utils.ts | 5 ----- 2 files changed, 15 deletions(-) diff --git a/lib/messages/utils.ts b/lib/messages/utils.ts index 92405d2..54abbd8 100644 --- a/lib/messages/utils.ts +++ b/lib/messages/utils.ts @@ -2,7 +2,6 @@ import type { WithParts } from "../state" /** * Extracts a human-readable key from tool metadata for display purposes. - * Used by both deduplication and AI analysis to show what was pruned. */ export const extractParameterKey = (tool: string, parameters: any): string => { if (!parameters) return '' @@ -84,18 +83,12 @@ export const getLastUserMessage = ( return null } -/** - * Finds the current agent from messages by scanning backward for user messages. - */ export function findCurrentAgent(messages: WithParts[]): string | undefined { const userMsg = getLastUserMessage(messages) if (!userMsg) return undefined return (userMsg.info as any).agent || 'build' } -/** - * Builds a list of tool call IDs from messages. - */ export function buildToolIdList(messages: WithParts[]): string[] { const toolIds: string[] = [] for (const msg of messages) { @@ -110,9 +103,6 @@ export function buildToolIdList(messages: WithParts[]): string[] { return toolIds } -/** - * Prunes numeric tool IDs to valid tool call IDs based on the provided tool ID list. - */ export function getPruneToolIds(numericToolIds: number[], toolIdList: string[]): string[] { const pruneToolIds: string[] = [] for (const index of numericToolIds) { diff --git a/lib/utils.ts b/lib/utils.ts index da1821b..842b964 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -3,7 +3,6 @@ import { encode } from 'gpt-tokenizer' /** * Estimates token counts for a batch of texts using gpt-tokenizer. - * TODO: ensure we aren't falling back to catch branch */ function estimateTokensBatch(texts: string[]): number[] { try { @@ -15,7 +14,6 @@ function estimateTokensBatch(texts: string[]): number[] { /** * Calculates approximate tokens saved by pruning the given tool call IDs. - * Uses pre-fetched messages to avoid duplicate API calls. * TODO: Make it count message content that are not tool outputs. Currently it ONLY covers tool outputs and errors */ export const calculateTokensSaved = ( @@ -57,9 +55,6 @@ export function formatTokenCount(tokens: number): string { return tokens.toString() + ' tokens' } -/** - * Checks if a session is a subagent session by looking for a parentID. 
- */ export async function isSubAgentSession(client: any, sessionID: string): Promise { try { const result = await client.session.get({ path: { id: sessionID } }) From 51f4826ed9a83f4813b05df10ec4e97299771136 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 2025 01:59:38 -0500 Subject: [PATCH 39/51] refactor: remove unused reason parameter from buildAnalysisPrompt --- lib/prompt.ts | 8 +------- lib/prompts/pruning.txt | 2 +- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/lib/prompt.ts b/lib/prompt.ts index e210284..76cc94f 100644 --- a/lib/prompt.ts +++ b/lib/prompt.ts @@ -122,18 +122,12 @@ export function buildAnalysisPrompt( unprunedToolCallIds: string[], messages: any[], alreadyPrunedIds?: string[], - protectedToolCallIds?: string[], - reason?: string + protectedToolCallIds?: string[] ): string { const minimizedMessages = minimizeMessages(messages, alreadyPrunedIds, protectedToolCallIds) const messagesJson = JSON.stringify(minimizedMessages, null, 2).replace(/\\n/g, '\n') - const reasonContext = reason - ? `\nContext: The AI has requested pruning with the following reason: "${reason}"\nUse this context to inform your decisions about what is most relevant to keep.` - : '' - return loadPrompt("pruning", { - reason_context: reasonContext, available_tool_call_ids: unprunedToolCallIds.join(", "), session_history: messagesJson }) diff --git a/lib/prompts/pruning.txt b/lib/prompts/pruning.txt index 49e1e82..62045c3 100644 --- a/lib/prompts/pruning.txt +++ b/lib/prompts/pruning.txt @@ -1,5 +1,5 @@ You are a conversation analyzer that identifies obsolete tool outputs in a coding session. -{{reason_context}} + Your task: Analyze the session history and identify tool call IDs whose outputs are NO LONGER RELEVANT to the current conversation context. Guidelines for identifying obsolete tool calls: From f9e47dcfdd93296a2fbedd3ba7ef8458c778c1fb Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 2025 09:16:07 -0500 Subject: [PATCH 40/51] fix: simplify prune tool schema for OAuth plugin compatibility --- lib/prompts/tool.txt | 6 +++--- lib/strategies/prune-tool.ts | 11 +++++------ 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/lib/prompts/tool.txt b/lib/prompts/tool.txt index d727a11..87149f4 100644 --- a/lib/prompts/tool.txt +++ b/lib/prompts/tool.txt @@ -36,7 +36,7 @@ You must use this tool in three specific scenarios. The rules for distillation ( Assistant: [Reads 'wrong_file.ts'] This file isn't relevant to the auth system. I'll remove it to clear the context. -[Uses prune with ids: ["noise", 5]] +[Uses prune with ids: ["noise", "5"]] @@ -46,11 +46,11 @@ I have analyzed the configuration. Here is the distillation: - 'db.ts' connects to mongo:27017. - The other 3 files were defaults. I have preserved the signals above, so I am now pruning the raw reads. -[Uses prune with ids: ["consolidation", 10, 11, 12, 13, 14]] +[Uses prune with ids: ["consolidation", "10", "11", "12", "13", "14"]] Assistant: [Runs tests, they pass] The tests passed. The feature is verified. 
-[Uses prune with ids: ["completion", 20, 21]] +[Uses prune with ids: ["completion", "20", "21"]] diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index d0a07ab..c162e08 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -32,12 +32,9 @@ export function createPruneTool( description: TOOL_DESCRIPTION, args: { ids: tool.schema.array( - tool.schema.union([ - tool.schema.enum(["completion", "noise", "consolidation"]), - tool.schema.number() - ]) + tool.schema.string() ).describe( - "First element is the reason ('completion', 'noise', 'consolidation'), followed by numeric IDs to prune" + "First element is the reason ('completion', 'noise', 'consolidation'), followed by numeric IDs as strings to prune" ), }, async execute(args, toolCtx) { @@ -56,7 +53,9 @@ export function createPruneTool( return "No valid pruning reason found. Use 'completion', 'noise', or 'consolidation' as the first element." } - const numericToolIds: number[] = args.ids.slice(1).filter((id): id is number => typeof id === "number") + const numericToolIds: number[] = args.ids.slice(1) + .map(id => parseInt(id, 10)) + .filter((n): n is number => !isNaN(n)) if (numericToolIds.length === 0) { return "No numeric IDs provided. Format: [reason, id1, id2, ...] where reason is 'completion', 'noise', or 'consolidation'." } From 968d6569ff755032b89a68db78a2d7b427151e14 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 2025 12:27:55 -0500 Subject: [PATCH 41/51] 1.0.0-beta.1 --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index a6c3dcd..8223a09 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@tarquinen/opencode-dcp", - "version": "0.4.17", + "version": "1.0.0-beta.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@tarquinen/opencode-dcp", - "version": "0.4.17", + "version": "1.0.0-beta.1", "license": "MIT", "dependencies": { "@ai-sdk/openai-compatible": "^1.0.28", diff --git a/package.json b/package.json index ffb7bb1..8ef2493 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@tarquinen/opencode-dcp", - "version": "0.4.17", + "version": "1.0.0-beta.1", "type": "module", "description": "OpenCode plugin that optimizes token usage by pruning obsolete tool outputs from conversation context", "main": "./dist/index.js", From b323912958a924666191db5d581de203bd9b212c Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 2025 19:50:10 -0500 Subject: [PATCH 42/51] fix: properly reset nudge counter after prune tool is used The nudge counter now correctly resets to zero when prune is used and only counts tools invoked since the last prune. Previously, the counter could become inaccurate across session reloads. Also increased tool cache size to 1000. --- lib/state/tool-cache.ts | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/lib/state/tool-cache.ts b/lib/state/tool-cache.ts index 1c500f0..a6140c7 100644 --- a/lib/state/tool-cache.ts +++ b/lib/state/tool-cache.ts @@ -2,7 +2,7 @@ import type { SessionState, ToolStatus, WithParts } from "./index" import type { Logger } from "../logger" import { PluginConfig } from "../config" -const MAX_TOOL_CACHE_SIZE = 500 +const MAX_TOOL_CACHE_SIZE = 1000 /** * Sync tool parameters from OpenCode's session.messages() API. 
@@ -16,13 +16,24 @@ export async function syncToolCache( try { logger.info("Syncing tool parameters from OpenCode messages") + state.nudgeCounter = 0 + for (const msg of messages) { for (const part of msg.parts) { - if (part.type !== "tool" || !part.callID || state.toolParameters.has(part.callID)) { + if (part.type !== "tool" || !part.callID) { continue } - const alreadyPruned = state.prune.toolIds.includes(part.callID) + if (part.tool === "prune") { + state.nudgeCounter = 0 + } else if (!config.strategies.pruneTool.protectedTools.includes(part.tool)) { + state.nudgeCounter++ + } + state.lastToolPrune = part.tool === "prune" + + if (state.toolParameters.has(part.callID)) { + continue + } state.toolParameters.set( part.callID, @@ -34,16 +45,11 @@ export async function syncToolCache( compacted: part.state.status === "completed" && !!part.state.time.compacted, } ) - - if (!alreadyPruned && !config.strategies.pruneTool.protectedTools.includes(part.tool)) { - state.nudgeCounter++ - } - - state.lastToolPrune = part.tool === "prune" - logger.info("lastToolPrune=" + String(state.lastToolPrune)) } } + // logger.info(`nudgeCounter=${state.nudgeCounter}, lastToolPrune=${state.lastToolPrune}`) + trimToolParametersCache(state) } catch (error) { logger.warn("Failed to sync tool parameters from OpenCode", { From c21a974243e354153b0e1ee81a52fa6441dfc912 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Sun, 14 Dec 2025 20:01:30 -0500 Subject: [PATCH 43/51] v1.0.0-beta.2 - Bump version --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 8223a09..d56198b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@tarquinen/opencode-dcp", - "version": "1.0.0-beta.1", + "version": "1.0.0-beta.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@tarquinen/opencode-dcp", - "version": "1.0.0-beta.1", + "version": "1.0.0-beta.2", "license": "MIT", "dependencies": { "@ai-sdk/openai-compatible": "^1.0.28", diff --git a/package.json b/package.json index 8ef2493..3eb8a82 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@tarquinen/opencode-dcp", - "version": "1.0.0-beta.1", + "version": "1.0.0-beta.2", "type": "module", "description": "OpenCode plugin that optimizes token usage by pruning obsolete tool outputs from conversation context", "main": "./dist/index.js", From f4ba0c1baf910655b4737db10b72a2b17a33f14c Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Mon, 15 Dec 2025 10:30:24 -0500 Subject: [PATCH 44/51] fix: prevent protected tools from being pruned --- lib/messages/utils.ts | 14 ++++++++++++-- lib/prompts/tool.txt | 2 +- lib/strategies/prune-tool.ts | 7 ++++++- 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/lib/messages/utils.ts b/lib/messages/utils.ts index 54abbd8..33f950c 100644 --- a/lib/messages/utils.ts +++ b/lib/messages/utils.ts @@ -103,11 +103,21 @@ export function buildToolIdList(messages: WithParts[]): string[] { return toolIds } -export function getPruneToolIds(numericToolIds: number[], toolIdList: string[]): string[] { +export function getPruneToolIds( + numericToolIds: number[], + toolIdList: string[], + toolParameters: Map, + protectedTools: string[] +): string[] { const pruneToolIds: string[] = [] for (const index of numericToolIds) { if (!isNaN(index) && index >= 0 && index < toolIdList.length) { - pruneToolIds.push(toolIdList[index]) + const id 
= toolIdList[index] + const metadata = toolParameters.get(id) + if (metadata && protectedTools.includes(metadata.tool)) { + continue + } + pruneToolIds.push(id) } } return pruneToolIds diff --git a/lib/prompts/tool.txt b/lib/prompts/tool.txt index 87149f4..2eda4e8 100644 --- a/lib/prompts/tool.txt +++ b/lib/prompts/tool.txt @@ -1,7 +1,7 @@ Prunes tool outputs from context to manage conversation size and reduce noise. ## IMPORTANT: The Prunable List -A `` list is injected into user messages showing available tool outputs you can prune. Each line has the format `ID: tool, parameter` (e.g., `20: read, /path/to/file.ts`). Use these numeric IDs to select which tools to prune. +A `` list is injected into user messages showing available tool outputs you can prune. Each line has the format `ID: tool, parameter` (e.g., `20: read, /path/to/file.ts`). You MUST only use numeric IDs that appear in this list to select which tools to prune. ## CRITICAL: When and How to Prune diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index c162e08..694e485 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -70,7 +70,12 @@ export function createPruneTool( const currentAgent: string | undefined = findCurrentAgent(messages) const toolIdList: string[] = buildToolIdList(messages) - const pruneToolIds: string[] = getPruneToolIds(numericToolIds, toolIdList) + const pruneToolIds: string[] = getPruneToolIds( + numericToolIds, + toolIdList, + state.toolParameters, + config.strategies.pruneTool.protectedTools + ) state.prune.toolIds.push(...pruneToolIds) const toolMetadata = new Map() From e1ec087ecd77072cefd84efbc7009b5aeaab13ec Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Mon, 15 Dec 2025 10:59:33 -0500 Subject: [PATCH 45/51] refactor: move prune tool ID validation to prune-tool.ts --- lib/messages/utils.ts | 22 +--------------------- lib/strategies/prune-tool.ts | 25 ++++++++++++++++++------- 2 files changed, 19 insertions(+), 28 deletions(-) diff --git a/lib/messages/utils.ts b/lib/messages/utils.ts index 33f950c..26b2c60 100644 --- a/lib/messages/utils.ts +++ b/lib/messages/utils.ts @@ -101,24 +101,4 @@ export function buildToolIdList(messages: WithParts[]): string[] { } } return toolIds -} - -export function getPruneToolIds( - numericToolIds: number[], - toolIdList: string[], - toolParameters: Map, - protectedTools: string[] -): string[] { - const pruneToolIds: string[] = [] - for (const index of numericToolIds) { - if (!isNaN(index) && index >= 0 && index < toolIdList.length) { - const id = toolIdList[index] - const metadata = toolParameters.get(id) - if (metadata && protectedTools.includes(metadata.tool)) { - continue - } - pruneToolIds.push(id) - } - } - return pruneToolIds -} +} \ No newline at end of file diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index 694e485..cf4cf1b 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -1,7 +1,7 @@ import { tool } from "@opencode-ai/plugin" import type { SessionState, ToolParameterEntry, WithParts } from "../state" import type { PluginConfig } from "../config" -import { findCurrentAgent, buildToolIdList, getPruneToolIds } from "../messages/utils" +import { findCurrentAgent, buildToolIdList } from "../messages/utils" import { calculateTokensSaved } from "../utils" import { PruneReason, sendUnifiedNotification } from "../ui/notification" import { formatPruningResultForTool } from "../ui/display-utils" @@ -70,12 +70,23 @@ export function createPruneTool( const 
currentAgent: string | undefined = findCurrentAgent(messages) const toolIdList: string[] = buildToolIdList(messages) - const pruneToolIds: string[] = getPruneToolIds( - numericToolIds, - toolIdList, - state.toolParameters, - config.strategies.pruneTool.protectedTools - ) + + // Validate that all numeric IDs are within bounds + const invalidIds = numericToolIds.filter(id => id < 0 || id >= toolIdList.length) + if (invalidIds.length > 0) { + return "Invalid IDs provided. Only use numeric IDs from the list." + } + + // Check for protected tools (model hallucinated an ID not in the prunable list) + for (const index of numericToolIds) { + const id = toolIdList[index] + const metadata = state.toolParameters.get(id) + if (metadata && config.strategies.pruneTool.protectedTools.includes(metadata.tool)) { + return "Invalid IDs provided. Only use numeric IDs from the list." + } + } + + const pruneToolIds: string[] = numericToolIds.map(index => toolIdList[index]) state.prune.toolIds.push(...pruneToolIds) const toolMetadata = new Map() From 551f0d88aa1ebd92225f3cf32f7909cc783ba8e8 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Mon, 15 Dec 2025 11:19:58 -0500 Subject: [PATCH 46/51] refactor: simplify bounds check with .some() --- lib/strategies/prune-tool.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/strategies/prune-tool.ts b/lib/strategies/prune-tool.ts index cf4cf1b..c546363 100644 --- a/lib/strategies/prune-tool.ts +++ b/lib/strategies/prune-tool.ts @@ -72,8 +72,7 @@ export function createPruneTool( const toolIdList: string[] = buildToolIdList(messages) // Validate that all numeric IDs are within bounds - const invalidIds = numericToolIds.filter(id => id < 0 || id >= toolIdList.length) - if (invalidIds.length > 0) { + if (numericToolIds.some(id => id < 0 || id >= toolIdList.length)) { return "Invalid IDs provided. Only use numeric IDs from the list." 
} From ca1f35f6f55c34e0c39acd18fd64128c59ed8bbe Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Mon, 15 Dec 2025 12:21:43 -0500 Subject: [PATCH 47/51] Add system prompt transformation hook for synthetic prompt injection --- index.ts | 5 +++++ lib/messages/prune.ts | 1 + 2 files changed, 6 insertions(+) diff --git a/index.ts b/index.ts index 9a34d60..6b617c4 100644 --- a/index.ts +++ b/index.ts @@ -1,6 +1,7 @@ import type { Plugin } from "@opencode-ai/plugin" import { getConfig } from "./lib/config" import { Logger } from "./lib/logger" +import { loadPrompt } from "./lib/prompt" import { createSessionState } from "./lib/state" import { createPruneTool } from "./lib/strategies" import { createChatMessageTransformHandler, createEventHandler } from "./lib/hooks" @@ -27,6 +28,10 @@ const plugin: Plugin = (async (ctx) => { }) return { + "experimental.chat.system.transform": async (_input: unknown, output: { system: string[] }) => { + const syntheticPrompt = loadPrompt("synthetic") + output.system.push(syntheticPrompt) + }, "experimental.chat.messages.transform": createChatMessageTransformHandler( ctx.client, state, diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index e0cec3b..ce022f3 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -92,6 +92,7 @@ export const prune = ( messages: WithParts[] ): void => { pruneToolOutputs(state, logger, messages) + // more prune methods coming here } const pruneToolOutputs = ( From 9565fc72207b49ae7ce99c5b7e78cbd400d396d4 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Mon, 15 Dec 2025 12:25:45 -0500 Subject: [PATCH 48/51] Bump version to 1.0.0-beta.3 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3eb8a82..b4c2d13 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@tarquinen/opencode-dcp", - "version": "1.0.0-beta.2", + "version": "1.0.0-beta.3", "type": "module", "description": "OpenCode plugin that optimizes token usage by pruning obsolete tool outputs from conversation context", "main": "./dist/index.js", From ff959c3110c38a0b772477fcde04631a64f4d368 Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Mon, 15 Dec 2025 12:59:00 -0500 Subject: [PATCH 49/51] fix: skip inserting prunable tools list when empty --- lib/messages/prune.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/messages/prune.ts b/lib/messages/prune.ts index ce022f3..7361b74 100644 --- a/lib/messages/prune.ts +++ b/lib/messages/prune.ts @@ -33,6 +33,10 @@ const buildPrunableToolsList = ( logger.debug(`Prunable tool found - ID: ${numericId}, Tool: ${toolParameterEntry.tool}, Call ID: ${toolCallId}`) }) + if (lines.length === 0) { + return "" + } + return `\nThe following tools have been invoked and are available for pruning. This list does not mandate immediate action. Consider your current goals and the resources you need before discarding valuable tool outputs. 
Keep the context free of noise.\n${lines.join('\n')}\n` } @@ -52,6 +56,9 @@ export const insertPruneToolContext = ( } const prunableToolsList = buildPrunableToolsList(state, config, logger, messages) + if (!prunableToolsList) { + return + } let nudgeString = "" if (state.nudgeCounter >= config.strategies.pruneTool.nudge.frequency) { From a0ee808522deef5c6a835c616320eafdbe138a6b Mon Sep 17 00:00:00 2001 From: Daniel Smolsky Date: Mon, 15 Dec 2025 13:10:27 -0500 Subject: [PATCH 50/51] chore: remove pruneThinkingBlocks (not yet implemented) --- README.md | 8 ++------ lib/config.ts | 24 ------------------------ 2 files changed, 2 insertions(+), 30 deletions(-) diff --git a/README.md b/README.md index a39d818..67fdd1b 100644 --- a/README.md +++ b/README.md @@ -27,12 +27,12 @@ DCP uses multiple strategies to reduce context size: **Deduplication** — Identifies repeated tool calls (e.g., reading the same file multiple times) and keeps only the most recent output. Runs automatically on every request with zero LLM cost. -**Prune Thinking Blocks** — Removes LLM thinking/reasoning blocks from the conversation history. - **On Idle Analysis** — Uses a language model to semantically analyze conversation context during idle periods and identify tool outputs that are no longer relevant. **Prune Tool** — Exposes a `prune` tool that the AI can call to manually trigger pruning when it determines context cleanup is needed. +*More strategies coming soon.* + Your session history is never modified. DCP replaces pruned outputs with a placeholder before sending requests to your LLM. ## Impact on Prompt Caching @@ -66,10 +66,6 @@ DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.j // Additional tools to protect from pruning "protectedTools": [] }, - // Remove thinking/reasoning LLM blocks - "pruneThinkingBlocks": { - "enabled": false - }, // Exposes a prune tool to your LLM to call when it determines pruning is necessary "pruneTool": { "enabled": true, diff --git a/lib/config.ts b/lib/config.ts index 83be6c9..2c14144 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -9,10 +9,6 @@ export interface Deduplication { protectedTools: string[] } -export interface PruneThinkingBlocks { - enabled: boolean -} - export interface OnIdle { enabled: boolean model?: string @@ -39,7 +35,6 @@ export interface PluginConfig { pruningSummary: "off" | "minimal" | "detailed" strategies: { deduplication: Deduplication - pruneThinkingBlocks: PruneThinkingBlocks onIdle: OnIdle pruneTool: PruneTool } @@ -59,9 +54,6 @@ export const VALID_CONFIG_KEYS = new Set([ 'strategies.deduplication', 'strategies.deduplication.enabled', 'strategies.deduplication.protectedTools', - // strategies.pruneThinkingBlocks - 'strategies.pruneThinkingBlocks', - 'strategies.pruneThinkingBlocks.enabled', // strategies.onIdle 'strategies.onIdle', 'strategies.onIdle.enabled', @@ -135,11 +127,6 @@ function validateConfigTypes(config: Record): ValidationError[] { errors.push({ key: 'strategies.deduplication.protectedTools', expected: 'string[]', actual: typeof strategies.deduplication.protectedTools }) } - // pruneThinkingBlocks - if (strategies.pruneThinkingBlocks?.enabled !== undefined && typeof strategies.pruneThinkingBlocks.enabled !== 'boolean') { - errors.push({ key: 'strategies.pruneThinkingBlocks.enabled', expected: 'boolean', actual: typeof strategies.pruneThinkingBlocks.enabled }) - } - // onIdle if (strategies.onIdle) { if (strategies.onIdle.enabled !== undefined && typeof strategies.onIdle.enabled !== 'boolean') { @@ -237,9 +224,6 
@@ const defaultConfig: PluginConfig = { enabled: true, protectedTools: [...DEFAULT_PROTECTED_TOOLS] }, - pruneThinkingBlocks: { - enabled: false - }, pruneTool: { enabled: true, protectedTools: [...DEFAULT_PROTECTED_TOOLS], @@ -322,10 +306,6 @@ function createDefaultConfig(): void { // Additional tools to protect from pruning "protectedTools": [] }, - // Remove thinking/reasoning LLM blocks - "pruneThinkingBlocks": { - "enabled": false - }, // Exposes a prune tool to your LLM to call when it determines pruning is necessary "pruneTool": { "enabled": true, @@ -396,9 +376,6 @@ function mergeStrategies( ]) ] }, - pruneThinkingBlocks: { - enabled: override.pruneThinkingBlocks?.enabled ?? base.pruneThinkingBlocks.enabled - }, onIdle: { enabled: override.onIdle?.enabled ?? base.onIdle.enabled, model: override.onIdle?.model ?? base.onIdle.model, @@ -435,7 +412,6 @@ function deepCloneConfig(config: PluginConfig): PluginConfig { ...config.strategies.deduplication, protectedTools: [...config.strategies.deduplication.protectedTools] }, - pruneThinkingBlocks: { ...config.strategies.pruneThinkingBlocks }, onIdle: { ...config.strategies.onIdle, protectedTools: [...config.strategies.onIdle.protectedTools] From e1eb62ea7fac0a9cff34515f7bfa45149d790941 Mon Sep 17 00:00:00 2001 From: Didac Oliveira Date: Thu, 11 Dec 2025 11:00:18 +0100 Subject: [PATCH 51/51] feat: support DCP config in OPENCODE_CONFIG_DIR --- README.md | 10 ++++++++-- lib/config.ts | 52 ++++++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 57 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 67fdd1b..59b222a 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,11 @@ LLM providers like Anthropic and OpenAI cache prompts based on exact prefix matc ## Configuration -DCP uses its own config file (`~/.config/opencode/dcp.jsonc` or `.opencode/dcp.jsonc`), created automatically on first run. +DCP uses its own config file: + +- Global: `~/.config/opencode/dcp.jsonc` (or `dcp.json`), created automatically on first run +- Custom config directory: `$OPENCODE_CONFIG_DIR/dcp.jsonc` (or `dcp.json`), if `OPENCODE_CONFIG_DIR` is set +- Project: `.opencode/dcp.jsonc` (or `dcp.json`) in your project’s `.opencode` directory
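+As a rough illustration of this lookup order, resolution mirrors `getConfigPaths` in `lib/config.ts` (sketch only; the helper name and return shape are simplified here):
+
+```ts
+// Simplified sketch of the dcp.jsonc / dcp.json lookup order described above.
+import { join } from "node:path"
+import { existsSync } from "node:fs"
+import { homedir } from "node:os"
+
+function resolveDcpConfigCandidates(projectOpencodeDir: string): string[] {
+  const dirs = [
+    join(homedir(), ".config", "opencode"), // global
+    process.env.OPENCODE_CONFIG_DIR,        // custom config directory, if set
+    projectOpencodeDir,                     // project .opencode directory
+  ]
+  const found: string[] = []
+  for (const dir of dirs) {
+    if (!dir) continue
+    for (const name of ["dcp.jsonc", "dcp.json"]) {
+      const candidate = join(dir, name)
+      if (existsSync(candidate)) {
+        found.push(candidate) // .jsonc is preferred over .json within a directory
+        break
+      }
+    }
+  }
+  return found // later entries override earlier ones when settings are merged
+}
+```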
Default Configuration (click to expand) @@ -104,7 +108,9 @@ The `protectedTools` arrays in each strategy add to this default list. ### Config Precedence -Settings are merged in order: **Defaults** → **Global** (`~/.config/opencode/dcp.jsonc`) → **Project** (`.opencode/dcp.jsonc`). Each level overrides the previous, so project settings take priority over global, which takes priority over defaults. +Settings are merged in order: +Defaults → Global (`~/.config/opencode/dcp.jsonc`) → Config Dir (`$OPENCODE_CONFIG_DIR/dcp.jsonc`) → Project (`.opencode/dcp.jsonc`). +Each level overrides the previous, so project settings take priority over config-dir and global, which take priority over defaults. Restart OpenCode after making config changes. diff --git a/lib/config.ts b/lib/config.ts index 2c14144..eb90adc 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -259,14 +259,30 @@ function findOpencodeDir(startDir: string): string | null { return null } -function getConfigPaths(ctx?: PluginInput): { global: string | null, project: string | null } { +function getConfigPaths(ctx?: PluginInput): { global: string | null, configDir: string | null, project: string | null} { + + // Global: ~/.config/opencode/dcp.jsonc|json let globalPath: string | null = null if (existsSync(GLOBAL_CONFIG_PATH_JSONC)) { globalPath = GLOBAL_CONFIG_PATH_JSONC } else if (existsSync(GLOBAL_CONFIG_PATH_JSON)) { globalPath = GLOBAL_CONFIG_PATH_JSON } - + + // Custom config directory: $OPENCODE_CONFIG_DIR/dcp.jsonc|json + let configDirPath: string | null = null + const opencodeConfigDir = process.env.OPENCODE_CONFIG_DIR + if (opencodeConfigDir) { + const configJsonc = join(opencodeConfigDir, 'dcp.jsonc') + const configJson = join(opencodeConfigDir, 'dcp.json') + if (existsSync(configJsonc)) { + configDirPath = configJsonc + } else if (existsSync(configJson)) { + configDirPath = configJson + } + } + + // Project: /.opencode/dcp.jsonc|json let projectPath: string | null = null if (ctx?.directory) { const opencodeDir = findOpencodeDir(ctx.directory) @@ -281,7 +297,7 @@ function getConfigPaths(ctx?: PluginInput): { global: string | null, project: st } } - return { global: globalPath, project: projectPath } + return { global: globalPath, configDir: configDirPath, project: projectPath } } function createDefaultConfig(): void { @@ -425,6 +441,7 @@ function deepCloneConfig(config: PluginConfig): PluginConfig { } } + export function getConfig(ctx: PluginInput): PluginConfig { let config = deepCloneConfig(defaultConfig) const configPaths = getConfigPaths(ctx) @@ -461,6 +478,35 @@ export function getConfig(ctx: PluginInput): PluginConfig { createDefaultConfig() } + // Load and merge $OPENCODE_CONFIG_DIR/dcp.jsonc|json (overrides global) + if (configPaths.configDir) { + const result = loadConfigFile(configPaths.configDir) + if (result.parseError) { + setTimeout(async () => { + try { + ctx.client.tui.showToast({ + body: { + title: "DCP: Invalid configDir config", + message: `${configPaths.configDir}\n${result.parseError}\nUsing global/default values`, + variant: "warning", + duration: 7000 + } + }) + } catch {} + }, 7000) + } else if (result.data) { + // Validate config keys and types + showConfigValidationWarnings(ctx, configPaths.configDir, result.data, true) + config = { + enabled: result.data.enabled ?? config.enabled, + debug: result.data.debug ?? config.debug, + showUpdateToasts: result.data.showUpdateToasts ?? config.showUpdateToasts, + pruningSummary: result.data.pruningSummary ?? 
config.pruningSummary, + strategies: mergeStrategies(config.strategies, result.data.strategies as any) + } + } + } + // Load and merge project config (overrides global) if (configPaths.project) { const result = loadConfigFile(configPaths.project)