11 changes: 7 additions & 4 deletions src/agent/agent.ts
@@ -245,13 +245,16 @@ export class Agent {
       return query;
     }

-    const userMessages = inMemoryChatHistory.getUserMessages();
-    if (userMessages.length === 0) {
+    const messages = inMemoryChatHistory.getMessages();
+    if (messages.length === 0) {
       return query;
     }

-    const historyContext = userMessages.map((msg, i) => `${i + 1}. ${msg}`).join('\n');
-    return `Current query to answer: ${query}\n\nPrevious user queries for context:\n${historyContext}`;
+    // Include both queries and summaries for proper context
+    const historyContext = messages
+      .map((msg, i) => `${i + 1}. User: ${msg.query}\n Assistant: ${msg.summary}`)
+      .join('\n');
+    return `Current query to answer: ${query}\n\nPrevious conversation for context:\n${historyContext}`;
   }

   /**
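For orientation, a minimal sketch of the prompt this change produces, using made-up history entries and assuming only the `query`/`summary` fields that the `Message` type in `in-memory-chat-history.ts` provides:

```ts
// Illustrative only: mirrors the formatting logic in the hunk above with sample data.
type HistoryMessage = { query: string; summary: string };

const messages: HistoryMessage[] = [
  { query: 'How do I enable verbose logging?', summary: 'Explained the verbose setting and where logs are written.' },
  { query: 'Where is the config file stored?', summary: 'Pointed to the project-level configuration directory.' },
];

const query = 'Can I change that location?';
const historyContext = messages
  .map((msg, i) => `${i + 1}. User: ${msg.query}\n Assistant: ${msg.summary}`)
  .join('\n');

console.log(`Current query to answer: ${query}\n\nPrevious conversation for context:\n${historyContext}`);
```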
10 changes: 9 additions & 1 deletion src/hooks/useModelSelection.ts
@@ -1,4 +1,4 @@
-import { useState, useCallback, useRef } from 'react';
+import { useState, useCallback, useRef, useEffect } from 'react';
 import { getSetting, setSetting } from '../utils/config.js';
 import { getProviderDisplayName, checkApiKeyExistsForProvider, saveApiKeyForProvider } from '../utils/env.js';
 import { getModelsForProvider, getDefaultModelForProvider } from '../components/ModelSelector.js';
@@ -73,6 +73,14 @@ export function useModelSelection(
   // Message history ref - shared with agent runner
   const inMemoryChatHistoryRef = useRef<InMemoryChatHistory>(new InMemoryChatHistory(model));

+  // Load conversation history on mount to resume previous session
+  useEffect(() => {
+    inMemoryChatHistoryRef.current.load().catch((err) => {
+      // Non-critical: app works without history, but log for debugging
+      console.warn('Failed to load conversation history:', err);
+    });
+  }, []);
+
   // Helper to complete a model switch (DRY pattern)
   const completeModelSwitch = useCallback((newProvider: string, newModelId: string) => {
     setProvider(newProvider);
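A small illustrative sketch of the behaviour this effect relies on (the model name is a placeholder): `load()` reads the persisted context if it exists, tolerates a missing or unreadable file by starting empty, and is guarded by its `loaded` flag so repeated calls after the first completes are no-ops.

```ts
import { InMemoryChatHistory } from '../utils/in-memory-chat-history.js';

async function demo(): Promise<void> {
  const history = new InMemoryChatHistory('placeholder-model');

  // Reads .dexter/context/conversation.json if present; a missing or
  // corrupt file simply leaves the history empty.
  await history.load();

  // Safe to call again defensively: the `loaded` flag short-circuits.
  await history.load();

  console.log(`Resumed ${history.getMessages().length} prior turns`);
}

demo().catch(console.error);
```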
73 changes: 69 additions & 4 deletions src/utils/in-memory-chat-history.ts
@@ -1,7 +1,14 @@
 import { createHash } from 'crypto';
+import { readFile, writeFile, mkdir } from 'fs/promises';
+import { existsSync } from 'fs';
+import { join, dirname } from 'path';
 import { callLlm, DEFAULT_MODEL } from '../model/llm.js';
 import { z } from 'zod';

+const DEXTER_DIR = '.dexter';
+const CONTEXT_DIR = 'context';
+const CONTEXT_FILE = 'conversation.json';
+
 /**
  * Represents a single conversation turn (query + answer + summary)
  */
@@ -31,17 +38,71 @@ Keep summaries to 1-2 sentences that capture the key information.`;
 const MESSAGE_SELECTION_SYSTEM_PROMPT = `You are a relevance evaluator. Select which previous conversation messages are relevant to the current query.
 Return only message IDs that contain information directly useful for answering the current query.`;

+interface ContextFile {
+  messages: Message[];
+  model: string;
+  savedAt: string;
+}
+
 /**
- * Manages in-memory conversation history for multi-turn conversations.
+ * Manages conversation history for multi-turn conversations.
+ * Persists to disk for session resume on restart.
  * Stores user queries, final answers, and LLM-generated summaries.
  */
 export class InMemoryChatHistory {
   private messages: Message[] = [];
   private model: string;
   private relevantMessagesByQuery: Map<string, Message[]> = new Map();
+  private filePath: string;
+  private loaded = false;

-  constructor(model: string = DEFAULT_MODEL) {
+  constructor(model: string = DEFAULT_MODEL, baseDir: string = process.cwd()) {
     this.model = model;
+    this.filePath = join(baseDir, DEXTER_DIR, CONTEXT_DIR, CONTEXT_FILE);
+  }
+
+  /**
+   * Loads conversation context from disk.
+   * Call this on startup to resume previous session.
+   */
+  async load(): Promise<void> {
+    if (this.loaded) return;
+
+    try {
+      if (existsSync(this.filePath)) {
+        const content = await readFile(this.filePath, 'utf-8');
+        const data: ContextFile = JSON.parse(content);
+        this.messages = data.messages || [];
+        // Restore IDs to ensure consistency
+        this.messages.forEach((msg, idx) => {
+          msg.id = idx;
+        });
+      }
+    } catch {
+      // If there's any error reading/parsing, start fresh
+      this.messages = [];
+    }
+
+    this.loaded = true;
+  }
+
+  /**
+   * Saves conversation context to disk.
+   * Called automatically after adding messages.
+   */
+  private async save(): Promise<void> {
+    const dir = dirname(this.filePath);
+
+    if (!existsSync(dir)) {
+      await mkdir(dir, { recursive: true });
+    }
+
+    const data: ContextFile = {
+      messages: this.messages,
+      model: this.model,
+      savedAt: new Date().toISOString(),
+    };
+    await writeFile(this.filePath, JSON.stringify(data, null, 2), 'utf-8');
   }

   /**
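For reference, roughly what the persisted file contains after one turn. This is an illustrative sketch only: the local types mirror the non-exported `ContextFile` interface above, `Message` is assumed to carry just `id`/`query`/`answer`/`summary` (the fields this diff touches), and all values are made up.

```ts
type Message = { id: number; query: string; answer: string; summary: string };
type ContextFile = { messages: Message[]; model: string; savedAt: string };

const example: ContextFile = {
  messages: [
    {
      id: 0,
      query: 'What did the last report conclude?',
      answer: 'Full answer text exactly as it was shown to the user...',
      summary: 'Summarised the main conclusion of the last report.',
    },
  ],
  model: 'placeholder-model',
  savedAt: '2026-01-01T00:00:00.000Z',
};

// save() serializes with JSON.stringify(data, null, 2), so the file is pretty-printed.
console.log(JSON.stringify(example, null, 2));
```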
@@ -95,6 +156,9 @@ Generate a brief 1-2 sentence summary of this answer.`;
       answer,
       summary,
     });
+
+    // Persist to disk
+    await this.save();
   }

   /**
@@ -197,10 +261,11 @@ Select which previous messages are relevant to understanding or answering the current query.
   }

   /**
-   * Clears all messages and cache
+   * Clears all messages and cache, and removes persisted file
    */
-  clear(): void {
+  async clear(): Promise<void> {
     this.messages = [];
     this.relevantMessagesByQuery.clear();
+    await this.save();
   }
 }
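Taken together, a minimal end-to-end sketch of the resume flow these changes enable. Illustrative only: it uses the constructor, `load()`, `clear()`, and `getMessages()` (which `agent.ts` already calls); `addMessage` is elided because its full signature is not visible in this diff.

```ts
import { InMemoryChatHistory } from './src/utils/in-memory-chat-history.js';

async function main(): Promise<void> {
  // Defaults match the constructor above: state lives at
  // <cwd>/.dexter/context/conversation.json.
  const history = new InMemoryChatHistory('placeholder-model');

  await history.load();
  console.log(`Resumed ${history.getMessages().length} turns from the previous session`);

  // ...during a session, addMessage(...) now persists each turn via save()...

  // clear() is now async: it empties the in-memory state and rewrites the
  // file with an empty message list (rather than deleting it).
  await history.clear();
}

main().catch(console.error);
```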