Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,7 @@ SOLANA_TESTNET_RPC_URL=https://api.testnet.solana.com
OPENAI_API_KEY=your-openai-api-key
ANTHROPIC_API_KEY=your-anthropic-api-key
COHERE_API_KEY=your-cohere-api-key
OPENROUTER_API_KEY=your-openrouter-api-key

# ChainLink Data Feeds
CHAINLINK_API_KEY=your_chainlink_key_here
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@ That's it! This will build and start JuliaOS in Docker containers. The CLI will
- `COHERE_API_KEY`: For Cohere integration
- `MISTRAL_API_KEY`: For Mistral integration
- `GOOGLE_API_KEY`: For Gemini integration
- `OPENROUTER_API_KEY`: For OpenRouter integration (access to multiple LLM providers through a unified API)

Without these keys, certain features will fall back to mock implementations or operate with limited capabilities.

Expand Down
1 change: 1 addition & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion packages/agent-manager/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"build": "tsc -p tsconfig.build.json",
"clean": "rimraf dist",
"dev": "tsc -w",
"test": "jest",
Expand Down
2 changes: 1 addition & 1 deletion packages/agent-manager/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ export interface NetworkConfig {
* LLM configuration interface
*/
export interface LLMConfig {
provider: 'openai' | 'anthropic' | 'google' | 'aws' | 'huggingface';
provider: 'openai' | 'anthropic' | 'google' | 'aws' | 'huggingface' | 'openrouter';
model: string;
apiKey?: string;
temperature?: number;
Expand Down
28 changes: 28 additions & 0 deletions packages/agent-manager/tsconfig.build.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"skipLibCheck": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"noImplicitAny": false,
"skipDefaultLibCheck": true,
"strict": false,
"baseUrl": ".",
"paths": {
"@j3os/*": ["../*/dist"]
}
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules",
"dist",
"**/*.test.ts",
"**/*.spec.ts",
"**/*.d.ts",
"src/examples/**/*"
]
}
3 changes: 2 additions & 1 deletion packages/core/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"build": "tsc -p tsconfig.build.json",
"build:ignore": "tsc -p tsconfig.build.ignore.json",
"test": "jest",
"lint": "eslint src/**/*.ts",
"test:swap": "ts-node src/scripts/testSolanaSwap.ts"
Expand Down
34 changes: 23 additions & 11 deletions packages/core/src/agent/BaseAgent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@ export abstract class BaseAgent extends EventEmitter {
protected skills: Skill[];
protected parameters: Record<string, any>;
protected isRunning: boolean;
protected llmProvider?: LLMProvider;
protected llmProvider: LLMProvider | null = null;
protected memory: Map<string, any>;
protected memoryConfig: Required<AgentConfig['memoryConfig']>;
protected errorConfig: Required<AgentConfig['errorConfig']>;
protected memoryConfig: Required<NonNullable<AgentConfig['memoryConfig']>>;
protected errorConfig: Required<NonNullable<AgentConfig['errorConfig']>>;
protected retryCount: Map<string, number>;

constructor(config: AgentConfig) {
Expand All @@ -48,20 +48,27 @@ export abstract class BaseAgent extends EventEmitter {

// Initialize memory configuration
this.memoryConfig = {
maxSize: config.memoryConfig?.maxSize || 1000,
cleanupInterval: config.memoryConfig?.cleanupInterval || 3600000, // 1 hour
retentionPolicy: config.memoryConfig?.retentionPolicy || 'lru'
maxSize: config.memoryConfig?.maxSize ?? 1000,
cleanupInterval: config.memoryConfig?.cleanupInterval ?? 3600000, // 1 hour
retentionPolicy: config.memoryConfig?.retentionPolicy ?? 'lru'
};

// Initialize error configuration
this.errorConfig = {
maxRetries: config.errorConfig?.maxRetries || 3,
backoffStrategy: config.errorConfig?.backoffStrategy || 'exponential',
errorHandlers: config.errorConfig?.errorHandlers || {}
maxRetries: config.errorConfig?.maxRetries ?? 3,
backoffStrategy: config.errorConfig?.backoffStrategy ?? 'exponential',
errorHandlers: config.errorConfig?.errorHandlers ?? {}
};

// Set up memory cleanup interval
setInterval(() => this.cleanupMemory(), this.memoryConfig.cleanupInterval);

// Initialize LLM if config is provided
if (config.llmConfig) {
this.initializeLLM(config.llmConfig).catch(error => {
this.emit('error', error);
});
}
}

abstract initialize(): Promise<void>;
Expand Down Expand Up @@ -112,8 +119,13 @@ Respond with the result in a structured format.`;

protected async initializeLLM(config: LLMConfig): Promise<void> {
try {
this.llmProvider = new OpenAIProvider();
await this.llmProvider.initialize(config);
// Use factory method to create the appropriate provider
const provider = config.provider?.toLowerCase() === 'openai'
? new OpenAIProvider()
: new OpenAIProvider(); // Default to OpenAI for now

await provider.initialize(config);
this.llmProvider = provider;
} catch (error) {
this.emit('error', error);
throw error;
Expand Down
Loading