"""
Soroban prompt action routes for Calliope IDE.
Addresses issue #54.

Endpoints:
    GET  /api/prompts/soroban               — list all available prompt templates
    GET  /api/prompts/soroban/<prompt_id>   — get a single template's metadata
    POST /api/prompts/soroban/execute       — build + execute a prompt via Gemini
    POST /api/prompts/soroban/build         — build prompt text only (no AI call)
"""

import os
import logging
from flask import Blueprint, request, jsonify
from server.utils.auth_utils import token_required
from server.utils.monitoring import capture_exception
from server.utils.soroban_prompts import (
    list_prompt_templates,
    get_prompt_template,
    build_soroban_prompt,
    PROMPT_TEMPLATES,
)

# Chat-history persistence is optional: when the models/db helpers cannot be
# imported (e.g. in isolated unit tests), prompt execution still works but
# nothing is persisted.
try:
    from server.models import Session, ChatHistory
    from server.utils.db_utils import add_chat_message
except Exception:
    Session = None  # type: ignore
    ChatHistory = None  # type: ignore
    add_chat_message = None  # type: ignore

logger = logging.getLogger(__name__)

soroban_prompts_bp = Blueprint(
    "soroban_prompts", __name__, url_prefix="/api/prompts/soroban"
)

# Hard caps on user-supplied text so built prompts stay within model limits.
_MAX_CODE_LEN = 50_000  # characters
_MAX_DESC_LEN = 2_000


def _parse_prompt_payload(data: dict):
    """Validate and normalize the prompt fields shared by /build and /execute.

    Args:
        data: Parsed JSON request body.

    Returns:
        A 5-tuple ``(template, prompt_id, description, context_code, error)``.
        On success ``error`` is None; on validation failure ``template`` is
        None and ``error`` is a ``(json_payload, status_code)`` pair ready to
        be returned from the route.
    """
    prompt_id = (data.get("prompt_id") or "").strip()
    description = (data.get("description") or "").strip()[:_MAX_DESC_LEN]
    context_code = (data.get("context_code") or "").strip()[:_MAX_CODE_LEN]

    if not prompt_id:
        return None, prompt_id, description, context_code, (
            {"success": False, "error": "prompt_id is required"},
            400,
        )

    if prompt_id not in PROMPT_TEMPLATES:
        return None, prompt_id, description, context_code, (
            {
                "success": False,
                "error": f"Unknown prompt '{prompt_id}'",
                "available": list(PROMPT_TEMPLATES.keys()),
            },
            404,
        )

    template = PROMPT_TEMPLATES[prompt_id]
    # Code-centric prompts need *something* to work with: either pasted
    # contract code or at least a description of the focus area.
    if template.requires_code and not context_code and not description:
        return None, prompt_id, description, context_code, (
            {
                "success": False,
                "error": f"Prompt '{prompt_id}' requires either contract code or a description",
            },
            400,
        )

    return template, prompt_id, description, context_code, None


# ── Routes ──────────────────────────────────────────────────────────────────

@soroban_prompts_bp.route("/", methods=["GET"])
@soroban_prompts_bp.route("", methods=["GET"])
def list_prompts():
    """
    List all available Soroban prompt templates.

    Response JSON:
        success (bool)
        prompts (list[dict]) — id, name, description, category, requires_code, placeholder
        total (int)
    """
    try:
        prompts = list_prompt_templates()
        return jsonify({"success": True, "prompts": prompts, "total": len(prompts)}), 200
    except Exception:
        logger.exception("List prompts error")
        return jsonify({"success": False, "error": "Failed to list prompts"}), 500


# FIX: the rule must declare the <prompt_id> URL variable; a bare "/" has no
# component to bind the view's prompt_id parameter and collides with the
# list_prompts registration above.
@soroban_prompts_bp.route("/<prompt_id>", methods=["GET"])
def get_prompt(prompt_id: str):
    """
    Get metadata for a single prompt template.

    Response JSON:
        success (bool)
        prompt (dict)
    """
    try:
        prompt = get_prompt_template(prompt_id)
        if not prompt:
            return jsonify({
                "success": False,
                "error": f"Prompt '{prompt_id}' not found",
                "available": [p["id"] for p in list_prompt_templates()],
            }), 404
        return jsonify({"success": True, "prompt": prompt}), 200
    except Exception:
        logger.exception("Get prompt error")
        return jsonify({"success": False, "error": "Failed to get prompt"}), 500


@soroban_prompts_bp.route("/build", methods=["POST"])
@token_required
def build_prompt_text(current_user):
    """
    Build the full prompt text without executing it.
    Useful for the frontend to preview the prompt before sending.

    Request JSON:
        prompt_id (str)    — one of: generate_contract, explain_contract, generate_tests, security_review
        description (str)  — user's task description
        context_code (str) — optional: contract source code

    Response JSON:
        success (bool)
        prompt_id (str)
        prompt_text (str)  — the full prompt string
        char_count (int)
    """
    try:
        data = request.get_json(silent=True, force=True)
        if not data:
            return jsonify({"success": False, "error": "No data provided"}), 400

        template, prompt_id, description, context_code, error = _parse_prompt_payload(data)
        if error:
            payload, status = error
            return jsonify(payload), status

        prompt_text = build_soroban_prompt(prompt_id, description, context_code)

        return jsonify({
            "success": True,
            "prompt_id": prompt_id,
            "prompt_text": prompt_text,
            "char_count": len(prompt_text),
        }), 200

    except ValueError as e:
        return jsonify({"success": False, "error": str(e)}), 400
    except Exception as e:
        logger.exception("Build prompt error")
        capture_exception(e, {"route": "soroban_prompts.build_prompt_text", "user_id": current_user.id})
        return jsonify({"success": False, "error": "Failed to build prompt"}), 500


@soroban_prompts_bp.route("/execute", methods=["POST"])
@token_required
def execute_prompt(current_user):
    """
    Build and execute a Soroban prompt via Gemini, returning the AI response.

    Request JSON:
        session_id (int)   — active session ID (for chat history)
        prompt_id (str)    — one of: generate_contract, explain_contract, generate_tests, security_review
        description (str)  — user's task description
        context_code (str) — optional: contract source code

    Response JSON:
        success (bool)
        prompt_id (str)
        result (str)       — AI-generated response
        char_count (int)
    """
    try:
        data = request.get_json(silent=True, force=True)
        if not data:
            return jsonify({"success": False, "error": "No data provided"}), 400

        session_id = data.get("session_id")
        if not session_id:
            return jsonify({"success": False, "error": "session_id is required"}), 400

        template, prompt_id, description, context_code, error = _parse_prompt_payload(data)
        if error:
            payload, status = error
            return jsonify(payload), status

        # Verify the session belongs to the caller (skipped when the DB layer
        # is unavailable, e.g. in isolated unit tests).
        if Session:
            session = Session.query.filter_by(
                id=session_id, user_id=current_user.id, is_active=True
            ).first()
            if not session:
                return jsonify({"success": False, "error": "Session not found or access denied"}), 404

        # Build prompt
        prompt_text = build_soroban_prompt(prompt_id, description, context_code)

        # Call Gemini
        try:
            import google.generativeai as genai
            api_key = os.environ.get("GEMINI_API_KEY")
            if not api_key:
                return jsonify({"success": False, "error": "GEMINI_API_KEY not configured"}), 500

            genai.configure(api_key=api_key)
            model = genai.GenerativeModel(
                model_name="gemini-2.0-flash",
                generation_config={
                    "temperature": 0.2,
                    "top_p": 0.95,
                    "max_output_tokens": 8192,
                },
            )
            response = model.generate_content(prompt_text)
            # FIX: response.text raises when generation was blocked or produced
            # no candidates — surface that as an upstream failure (502) instead
            # of letting it fall through to a generic 500.
            try:
                result = response.text
            except Exception:
                result = ""
            if not result:
                return jsonify({"success": False, "error": "AI model returned an empty response"}), 502
        except ImportError:
            return jsonify({"success": False, "error": "Gemini SDK not installed"}), 500

        # Persist to chat history (best-effort: failures are logged, not fatal)
        if add_chat_message and session_id:
            try:
                add_chat_message(
                    session_id=session_id,
                    role="user",
                    content=f"[{template.name}] {description or '(no description)'}",
                    message_type="soroban_prompt",
                )
                add_chat_message(
                    session_id=session_id,
                    role="assistant",
                    content=result,
                    message_type="soroban_prompt_response",
                )
            except Exception as e:
                logger.warning("Failed to persist prompt result: %s", e)

        return jsonify({
            "success": True,
            "prompt_id": prompt_id,
            "result": result,
            "char_count": len(result),
        }), 200

    except ValueError as e:
        return jsonify({"success": False, "error": str(e)}), 400
    except Exception as e:
        logger.exception("Execute prompt error")
        capture_exception(e, {"route": "soroban_prompts.execute_prompt", "user_id": current_user.id})
        return jsonify({"success": False, "error": "An error occurred while executing the prompt"}), 500
"""Tests for server/utils/soroban_prompts.py and /api/prompts/soroban routes"""

import functools
import sys
from unittest.mock import MagicMock, patch


def _passthrough(f):
    """Stand-in for @token_required: injects a fake authenticated user."""
    @functools.wraps(f)
    def inner(*args, **kwargs):
        user = MagicMock()
        user.id = 1
        user.username = "testuser"
        return f(user, *args, **kwargs)
    return inner


# Stub project-internal dependencies BEFORE importing the modules under test,
# then remove the stubs so other test modules see a clean sys.modules.
_auth_stub = MagicMock()
_auth_stub.token_required = _passthrough
_STUBBED = {
    "server.utils.auth_utils": _auth_stub,
    "server.models": MagicMock(),
    "server.utils.monitoring": MagicMock(),
    "server.utils.db_utils": MagicMock(),
}
sys.modules.update(_STUBBED)

import server.utils.soroban_prompts as sp
import server.routes.soroban_prompt_routes as r

prompts_bp = r.soroban_prompts_bp

for _name in _STUBBED:
    sys.modules.pop(_name, None)

import pytest
from flask import Flask


@pytest.fixture
def app():
    flask_app = Flask(__name__)
    flask_app.config["TESTING"] = True
    flask_app.register_blueprint(prompts_bp)
    return flask_app


@pytest.fixture
def client(app):
    return app.test_client()


def yes_session():
    """Session model stub whose query finds an active session for user 1."""
    found = MagicMock()
    found.id = 1
    found.user_id = 1
    found.is_active = True
    model = MagicMock()
    model.query.filter_by.return_value.first.return_value = found
    return model


def no_session():
    """Session model stub whose query finds nothing."""
    model = MagicMock()
    model.query.filter_by.return_value.first.return_value = None
    return model


# ── list_prompt_templates ─────────────────────────────────────────────────────

class TestListPromptTemplates:
    def test_returns_4_templates(self):
        assert len(sp.list_prompt_templates()) == 4

    def test_all_have_required_fields(self):
        expected_keys = ("id", "name", "description", "category", "requires_code", "placeholder")
        for template in sp.list_prompt_templates():
            for key in expected_keys:
                assert key in template

    def test_includes_all_prompt_ids(self):
        listed_ids = [template["id"] for template in sp.list_prompt_templates()]
        for expected in ("generate_contract", "explain_contract", "generate_tests", "security_review"):
            assert expected in listed_ids


# ── get_prompt_template ───────────────────────────────────────────────────────

class TestGetPromptTemplate:
    def test_returns_template_for_valid_id(self):
        template = sp.get_prompt_template("generate_contract")
        assert template is not None
        assert template["id"] == "generate_contract"

    def test_returns_none_for_invalid_id(self):
        assert sp.get_prompt_template("nonexistent") is None

    def test_generate_contract_does_not_require_code(self):
        assert sp.get_prompt_template("generate_contract")["requires_code"] is False

    def test_explain_contract_requires_code(self):
        assert sp.get_prompt_template("explain_contract")["requires_code"] is True

    def test_security_review_requires_code(self):
        assert sp.get_prompt_template("security_review")["requires_code"] is True


# ── build_soroban_prompt ──────────────────────────────────────────────────────

class TestBuildSorobanPrompt:
    def test_generate_contract_contains_description(self):
        built = sp.build_soroban_prompt("generate_contract", "A vesting contract")
        assert "A vesting contract" in built

    def test_generate_contract_mentions_soroban(self):
        built = sp.build_soroban_prompt("generate_contract", "token contract")
        assert "soroban" in built.lower()

    def test_explain_contract_includes_code(self):
        snippet = "pub struct MyContract;"
        built = sp.build_soroban_prompt("explain_contract", "", snippet)
        assert snippet in built

    def test_generate_tests_mentions_test_suite(self):
        built = sp.build_soroban_prompt("generate_tests", "my contract", "// code")
        assert "test" in built.lower()

    def test_security_review_mentions_audit(self):
        built = sp.build_soroban_prompt("security_review", "", "// code")
        lowered = built.lower()
        assert "security" in lowered or "audit" in lowered

    def test_raises_for_unknown_prompt_id(self):
        with pytest.raises(ValueError, match="Unknown prompt"):
            sp.build_soroban_prompt("nonexistent", "desc")

    def test_prompt_is_non_empty(self):
        all_ids = ["generate_contract", "explain_contract", "generate_tests", "security_review"]
        for pid in all_ids:
            built = sp.build_soroban_prompt(pid, "test description", "// code")
            assert len(built) > 100


# ── GET /api/prompts/soroban ──────────────────────────────────────────────────

class TestListPromptsRoute:
    def test_returns_200(self, client):
        assert client.get("/api/prompts/soroban").status_code == 200

    def test_returns_4_prompts(self, client):
        payload = client.get("/api/prompts/soroban").get_json()
        assert payload["success"] is True
        assert payload["total"] == 4

    def test_prompt_has_required_fields(self, client):
        payload = client.get("/api/prompts/soroban").get_json()
        for entry in payload["prompts"]:
            assert "id" in entry
            assert "name" in entry
            assert "category" in entry


# ── GET /api/prompts/soroban/<prompt_id> ──────────────────────────────────────

class TestGetPromptRoute:
    def test_returns_200_for_valid_id(self, client):
        assert client.get("/api/prompts/soroban/generate_contract").status_code == 200

    def test_returns_404_for_unknown_id(self, client):
        assert client.get("/api/prompts/soroban/nonexistent").status_code == 404

    def test_returns_available_list_on_404(self, client):
        payload = client.get("/api/prompts/soroban/nonexistent").get_json()
        assert "available" in payload


# ── POST /api/prompts/soroban/build ───────────────────────────────────────────

class TestBuildPromptRoute:
    def test_missing_prompt_id(self, client):
        response = client.post("/api/prompts/soroban/build", json={"description": "hi"})
        assert response.status_code == 400

    def test_unknown_prompt_id(self, client):
        response = client.post("/api/prompts/soroban/build", json={"prompt_id": "bad"})
        assert response.status_code == 404

    def test_requires_code_without_code_or_desc(self, client):
        response = client.post("/api/prompts/soroban/build", json={
            "prompt_id": "explain_contract"
        })
        assert response.status_code == 400

    def test_builds_prompt_successfully(self, client):
        response = client.post("/api/prompts/soroban/build", json={
            "prompt_id": "generate_contract",
            "description": "A simple counter contract",
        })
        assert response.status_code == 200
        payload = response.get_json()
        assert payload["success"] is True
        assert "prompt_text" in payload
        assert "A simple counter contract" in payload["prompt_text"]
        assert "char_count" in payload

    def test_includes_context_code_in_prompt(self, client):
        response = client.post("/api/prompts/soroban/build", json={
            "prompt_id": "explain_contract",
            "description": "explain this",
            "context_code": "pub struct MyContract;",
        })
        payload = response.get_json()
        assert "pub struct MyContract;" in payload["prompt_text"]


# ── POST /api/prompts/soroban/execute ─────────────────────────────────────────

class TestExecutePromptRoute:
    def test_missing_session_id(self, client):
        response = client.post("/api/prompts/soroban/execute", json={
            "prompt_id": "generate_contract", "description": "hi"
        })
        assert response.status_code == 400

    def test_missing_prompt_id(self, client):
        response = client.post("/api/prompts/soroban/execute", json={
            "session_id": 1, "description": "hi"
        })
        assert response.status_code == 400

    def test_session_not_found(self, client):
        r.Session = no_session()
        response = client.post("/api/prompts/soroban/execute", json={
            "session_id": 99, "prompt_id": "generate_contract", "description": "hi"
        })
        assert response.status_code == 404

    def test_unknown_prompt_returns_404(self, client):
        r.Session = yes_session()
        response = client.post("/api/prompts/soroban/execute", json={
            "session_id": 1, "prompt_id": "nonexistent", "description": "hi"
        })
        assert response.status_code == 404

    def test_successful_execution(self, client):
        r.Session = yes_session()
        r.add_chat_message = MagicMock()

        mock_response = MagicMock()
        mock_response.text = "Generated contract code here"
        mock_model = MagicMock()
        mock_model.generate_content.return_value = mock_response
        genai_stub = MagicMock(
            GenerativeModel=MagicMock(return_value=mock_model),
            configure=MagicMock(),
        )

        with patch.dict("sys.modules", {"google.generativeai": genai_stub}), \
                patch.dict("os.environ", {"GEMINI_API_KEY": "test-key"}):
            response = client.post("/api/prompts/soroban/execute", json={
                "session_id": 1,
                "prompt_id": "generate_contract",
                "description": "A simple counter contract",
            })

        assert response.status_code == 200
        payload = response.get_json()
        assert payload["success"] is True
        assert payload["result"] == "Generated contract code here"
        assert payload["prompt_id"] == "generate_contract"
"""
Soroban-specific AI prompt actions for Calliope IDE.
Addresses issue #54.

Provides 4 prebuilt prompt templates:
    - generate_contract : generate a Soroban smart contract from a description
    - explain_contract  : explain an existing contract in plain language
    - generate_tests    : generate a Rust test suite for a contract
    - security_review   : perform a security audit of a contract

Each prompt is designed to produce focused, actionable AI output
that can be inserted directly into the editor.
"""

from __future__ import annotations
from dataclasses import dataclass
from typing import Callable

# ── Prompt registry ───────────────────────────────────────────────────────────

@dataclass(frozen=True)
class SorobanPromptTemplate:
    """Metadata describing one prebuilt prompt action."""
    id: str
    name: str
    description: str
    category: str
    requires_code: bool  # True if user must supply contract code as context
    placeholder: str     # Hint shown in the input field


def _generate_contract(description: str, context_code: str = "") -> str:
    """Build the 'generate a new contract' prompt; code context is optional."""
    ctx = f"\n\nExisting context:\n```rust\n{context_code}\n```" if context_code else ""
    return f"""You are an expert Soroban smart contract developer for the Stellar blockchain.

Generate a complete, production-ready Soroban smart contract for the following requirement:
{description}{ctx}

Requirements:
- Use #![no_std] and soroban_sdk
- Include proper #[contract] and #[contractimpl] annotations
- Add #[contracttype] for all custom data structures
- Use persistent storage with typed DataKey enum
- Include proper authorization with require_auth() where needed
- Add inline comments explaining key logic
- Include a complete #[cfg(test)] module with at least 3 meaningful tests
- Follow Soroban best practices (no panics in production paths, proper error handling)

Output ONLY the complete Rust source code, ready to save as src/lib.rs.
Do not include any explanation outside the code."""


def _explain_contract(description: str, context_code: str = "") -> str:
    """Build the 'explain this contract' prompt; description narrows the focus."""
    code_section = f"\n\nContract code:\n```rust\n{context_code}\n```" if context_code else ""
    extra = f"\nFocus on: {description}" if description else ""
    return f"""You are an expert Soroban smart contract auditor and educator.

Explain the following Soroban smart contract in clear, plain language.{code_section}{extra}

Your explanation must cover:
1. **Purpose** — What does this contract do? What problem does it solve?
2. **Storage** — What data does it store and how is it organized?
3. **Functions** — Explain each public function: inputs, outputs, side effects
4. **Authorization** — Who can call each function and how is access controlled?
5. **Events** — What events are emitted and when?
6. **Limitations** — Any edge cases, assumptions, or known constraints?

Write for a developer who understands Rust but is new to Soroban.
Use clear headings and bullet points."""


def _generate_tests(description: str, context_code: str = "") -> str:
    """Build the 'generate a test suite' prompt; description adds test focus."""
    code_section = f"\n\nContract code:\n```rust\n{context_code}\n```" if context_code else ""
    focus = f"\nTest focus: {description}" if description else ""
    return f"""You are an expert Soroban smart contract tester.

Generate a comprehensive Rust test suite for the following Soroban contract.{code_section}{focus}

Requirements:
- Use soroban_sdk::testutils::Address as _ for Address::generate
- Use Env::default() and mock_all_auths() where appropriate
- Cover all public functions with at least one test each
- Include happy path tests AND edge case / failure tests
- Use #[should_panic(expected = "...")] for expected failures
- Add a setup() helper function to reduce boilerplate
- Group tests into logical test classes with descriptive names
- Each test should have a clear docstring explaining what it verifies

Output ONLY the complete Rust test module code (the #[cfg(test)] block).
Do not include any explanation outside the code."""


def _security_review(description: str, context_code: str = "") -> str:
    """Build the 'security audit' prompt; description narrows the review scope."""
    code_section = f"\n\nContract code:\n```rust\n{context_code}\n```" if context_code else ""
    scope = f"\nReview scope: {description}" if description else ""
    return f"""You are an expert Soroban smart contract security auditor.

Perform a thorough security review of the following Soroban smart contract.{code_section}{scope}

Review checklist:
1. **Access Control** — Are all sensitive functions properly protected with require_auth()?
   Are there any functions that should require admin-only access?
2. **Input Validation** — Are all inputs validated? Could any cause panics or unexpected behavior?
3. **Integer Overflow/Underflow** — Are arithmetic operations safe? (Soroban uses overflow-checks=true but review anyway)
4. **Storage Manipulation** — Can unauthorized callers read or write sensitive storage keys?
5. **Initialization** — Is there a risk of re-initialization or missing initialization?
6. **Reentrancy** — Are there any cross-contract call patterns that could be exploited?
7. **Event Emission** — Are sensitive operations properly logged via events?
8. **Denial of Service** — Are there unbounded loops or storage operations that could be abused?
9. **Logic Errors** — Any business logic flaws or incorrect assumptions?
10. **Best Practices** — Any deviations from Soroban / Stellar security best practices?

For each issue found, provide:
- **Severity**: Critical / High / Medium / Low / Informational
- **Location**: Function name and line description
- **Description**: What the issue is and why it matters
- **Recommendation**: How to fix it with a code example if applicable

End with an overall risk rating and a summary of the most important fixes."""


# ── Registry ──────────────────────────────────────────────────────────────────

PROMPT_TEMPLATES: dict[str, SorobanPromptTemplate] = {
    "generate_contract": SorobanPromptTemplate(
        id="generate_contract",
        name="Generate Contract",
        description="Generate a complete Soroban smart contract from a description",
        category="generation",
        requires_code=False,
        placeholder="Describe the contract you want to build (e.g. 'A token vesting contract that releases tokens linearly over 12 months')",
    ),
    "explain_contract": SorobanPromptTemplate(
        id="explain_contract",
        name="Explain Contract",
        description="Explain an existing Soroban contract in plain language",
        category="education",
        requires_code=True,
        placeholder="Paste your contract code in the context field, or describe what aspect to focus on",
    ),
    "generate_tests": SorobanPromptTemplate(
        id="generate_tests",
        name="Generate Tests",
        description="Generate a Rust test suite for a Soroban contract",
        category="testing",
        requires_code=True,
        placeholder="Paste your contract code in the context field, or describe specific test scenarios to cover",
    ),
    "security_review": SorobanPromptTemplate(
        id="security_review",
        name="Security Review",
        description="Perform a security audit of a Soroban contract",
        category="security",
        requires_code=True,
        placeholder="Paste your contract code in the context field, or specify the review scope",
    ),
}

# Maps prompt_id → builder(description, context_code); kept in lockstep with
# PROMPT_TEMPLATES.
_BUILDERS: dict[str, Callable[[str, str], str]] = {
    "generate_contract": _generate_contract,
    "explain_contract": _explain_contract,
    "generate_tests": _generate_tests,
    "security_review": _security_review,
}


def _template_to_dict(t: SorobanPromptTemplate) -> dict:
    """Serialize a template to the JSON-friendly shape exposed by the API.

    Shared by list_prompt_templates and get_prompt_template so the two
    endpoints can never drift apart (previously the dict literal was
    duplicated in both functions).
    """
    return {
        "id": t.id,
        "name": t.name,
        "description": t.description,
        "category": t.category,
        "requires_code": t.requires_code,
        "placeholder": t.placeholder,
    }


def list_prompt_templates() -> list[dict]:
    """Return metadata for all available prompt templates."""
    return [_template_to_dict(t) for t in PROMPT_TEMPLATES.values()]


def get_prompt_template(prompt_id: str) -> dict | None:
    """Return metadata for a single template, or None if not found."""
    t = PROMPT_TEMPLATES.get(prompt_id)
    return _template_to_dict(t) if t else None


def build_soroban_prompt(
    prompt_id: str,
    user_description: str,
    context_code: str = "",
) -> str:
    """
    Build the full prompt string for a given prompt template.

    Args:
        prompt_id: One of the keys in PROMPT_TEMPLATES.
        user_description: User's task description or focus area.
        context_code: Optional contract source code to include.

    Returns:
        The complete prompt string ready to send to the AI model.

    Raises:
        ValueError: If prompt_id is not recognized.
    """
    if prompt_id not in _BUILDERS:
        raise ValueError(
            f"Unknown prompt '{prompt_id}'. "
            f"Available: {', '.join(_BUILDERS.keys())}"
        )
    return _BUILDERS[prompt_id](user_description, context_code)