-
Notifications
You must be signed in to change notification settings - Fork 8
Expand file tree
/
Copy path: env.example
More file actions
97 lines (87 loc) · 5.06 KB
/
env.example
File metadata and controls
97 lines (87 loc) · 5.06 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
# =============================================================================
# STEM Agent — Environment Configuration
# Copy this file to `.env` and adjust values for your environment.
# =============================================================================
# -----------------------------------------------------------------------------
# LLM Provider
# -----------------------------------------------------------------------------
# Default provider is Amazon Bedrock. To use the direct Anthropic API instead,
# set LLM_PROVIDER=anthropic and provide ANTHROPIC_API_KEY. LLM_PROVIDER=openai
# is also accepted (see the option list below); supply the matching API key.
# -----------------------------------------------------------------------------
LLM_PROVIDER=amazon_bedrock # amazon_bedrock | anthropic | openai
# -----------------------------------------------------------------------------
# Amazon Bedrock Credentials
# -----------------------------------------------------------------------------
# The AWS SDK resolves credentials in this order:
# 1. Explicit keys — AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY
# 2. Named profile — AWS_PROFILE pointing to ~/.aws/credentials
# 3. IAM role — EC2 instance profile, ECS task role, Lambda role, etc.
#
# On EC2/ECS/Lambda with an attached IAM role, only AWS_REGION is needed.
# For local development, provide explicit keys OR configure a named profile.
# -----------------------------------------------------------------------------
AWS_REGION=us-east-1
# AWS_ACCESS_KEY_ID=
# AWS_SECRET_ACCESS_KEY=
# AWS_SESSION_TOKEN= # Required only for temporary credentials (STS)
# AWS_PROFILE= # Named profile from ~/.aws/credentials
# -----------------------------------------------------------------------------
# Anthropic API (alternative to Bedrock)
# -----------------------------------------------------------------------------
# ANTHROPIC_API_KEY= # Required only when LLM_PROVIDER=anthropic
# -----------------------------------------------------------------------------
# Model IDs — per-phase tiering (Amazon Bedrock model IDs shown)
# Override any of these to use different models or providers.
# -----------------------------------------------------------------------------
# LLM_MODEL_DEFAULT=us.anthropic.claude-sonnet-4-5-20250929-v1
# LLM_MODEL_PERCEPTION=us.anthropic.claude-haiku-4-5-20250929-v1
# LLM_MODEL_REASONING=us.anthropic.claude-opus-4-6-v1
# LLM_MODEL_PLANNING=us.anthropic.claude-opus-4-6-v1
# LLM_MODEL_FORMATTING=us.anthropic.claude-haiku-4-5-20250929-v1
# LLM_MODEL_EVALUATION=us.anthropic.claude-opus-4-6-v1
# LLM_TEMPERATURE=0.7
# LLM_MAX_TOKENS=4096
# -----------------------------------------------------------------------------
# Embedding
# -----------------------------------------------------------------------------
# Used by the memory system for similarity search.
# openai — requires OPENAI_API_KEY (or EMBEDDING_API_KEY)
# bedrock — uses AWS credentials above (not yet implemented)
# local — no-op zero vectors, for development/testing
# -----------------------------------------------------------------------------
# EMBEDDING_PROVIDER=openai # openai | bedrock | local
# EMBEDDING_MODEL=text-embedding-3-small
# OPENAI_API_KEY= # Required when EMBEDDING_PROVIDER=openai
# EMBEDDING_API_KEY= # Alternative to OPENAI_API_KEY
# -----------------------------------------------------------------------------
# Infrastructure
# -----------------------------------------------------------------------------
DATABASE_URL=postgresql://stem:stemdev@localhost:5432/stem_agent
REDIS_URL=redis://localhost:6379
# -----------------------------------------------------------------------------
# Server
# -----------------------------------------------------------------------------
HOST=127.0.0.1 # Bind address
PORT=8000 # HTTP port
LOG_LEVEL=info # debug | info | warn | error
NODE_ENV=development # development | production
# MAX_CONCURRENT_TASKS=10
# -----------------------------------------------------------------------------
# Agent Identity
# -----------------------------------------------------------------------------
# AGENT_ID=stem-agent-001
# AGENT_NAME=STEM Adaptive Agent
# AGENT_VERSION=0.1.0
# AGENT_DESCRIPTION=Self-adaptive general-purpose agent
# -----------------------------------------------------------------------------
# Cost Guardrails
# -----------------------------------------------------------------------------
# COST_MAX_LLM_CALLS=20 # Max LLM calls per interaction
# COST_MAX_PER_INTERACTION_USD=0.50 # Dollar limit per request
# COST_MAX_PER_USER_DAILY_USD=10.00 # Dollar limit per user per day
# COST_MONTHLY_BUDGET_USD=2000.00 # Monthly total cap
# -----------------------------------------------------------------------------
# CLI Client (optional)
# -----------------------------------------------------------------------------
# STEM_AGENT_URL=http://localhost:8000 # Agent endpoint for the CLI
# STEM_CALLER_ID=my-user-id # Default caller identity