diff --git a/core/http/endpoints/anthropic/messages.go b/core/http/endpoints/anthropic/messages.go index b82ed3f3b6d1..389d60466591 100644 --- a/core/http/endpoints/anthropic/messages.go +++ b/core/http/endpoints/anthropic/messages.go @@ -11,6 +11,7 @@ import ( "github.com/mudler/LocalAI/core/http/middleware" "github.com/mudler/LocalAI/core/schema" "github.com/mudler/LocalAI/core/templates" + "github.com/mudler/LocalAI/pkg/functions" "github.com/mudler/LocalAI/pkg/model" "github.com/mudler/xlog" ) @@ -44,6 +45,9 @@ func MessagesEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evalu // Convert Anthropic messages to OpenAI format for internal processing openAIMessages := convertAnthropicToOpenAIMessages(input) + // Convert Anthropic tools to internal Functions format + funcs, shouldUseFn := convertAnthropicTools(input, cfg) + // Create an OpenAI-compatible request for internal processing openAIReq := &schema.OpenAIRequest{ PredictionOptions: schema.PredictionOptions{ @@ -79,19 +83,19 @@ func MessagesEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evalu cfg.StopWords = append(cfg.StopWords, input.StopSequences...) 
} - // Template the prompt - predInput := evaluator.TemplateMessages(*openAIReq, openAIReq.Messages, cfg, nil, false) + // Template the prompt with tools if available + predInput := evaluator.TemplateMessages(*openAIReq, openAIReq.Messages, cfg, funcs, shouldUseFn) xlog.Debug("Anthropic Messages - Prompt (after templating)", "prompt", predInput) if input.Stream { - return handleAnthropicStream(c, id, input, cfg, ml, predInput) + return handleAnthropicStream(c, id, input, cfg, ml, predInput, openAIReq, funcs, shouldUseFn) } - return handleAnthropicNonStream(c, id, input, cfg, ml, predInput, openAIReq) + return handleAnthropicNonStream(c, id, input, cfg, ml, predInput, openAIReq, funcs, shouldUseFn) } } -func handleAnthropicNonStream(c echo.Context, id string, input *schema.AnthropicRequest, cfg *config.ModelConfig, ml *model.ModelLoader, predInput string, openAIReq *schema.OpenAIRequest) error { +func handleAnthropicNonStream(c echo.Context, id string, input *schema.AnthropicRequest, cfg *config.ModelConfig, ml *model.ModelLoader, predInput string, openAIReq *schema.OpenAIRequest, funcs functions.Functions, shouldUseFn bool) error { images := []string{} for _, m := range openAIReq.Messages { images = append(images, m.StringImages...) 
@@ -111,7 +115,45 @@ func handleAnthropicNonStream(c echo.Context, id string, input *schema.Anthropic } result := backend.Finetune(*cfg, predInput, prediction.Response) - stopReason := "end_turn" + + // Check if the result contains tool calls + toolCalls := functions.ParseFunctionCall(result, cfg.FunctionsConfig) + + var contentBlocks []schema.AnthropicContentBlock + var stopReason string + + if shouldUseFn && len(toolCalls) > 0 { + // Model wants to use tools + stopReason = "tool_use" + for _, tc := range toolCalls { + // Parse arguments as JSON + var inputArgs map[string]interface{} + if err := json.Unmarshal([]byte(tc.Arguments), &inputArgs); err != nil { + xlog.Warn("Failed to parse tool call arguments as JSON", "error", err, "args", tc.Arguments) + inputArgs = map[string]interface{}{"raw": tc.Arguments} + } + + contentBlocks = append(contentBlocks, schema.AnthropicContentBlock{ + Type: "tool_use", + ID: fmt.Sprintf("toolu_%s_%d", id, len(contentBlocks)), + Name: tc.Name, + Input: inputArgs, + }) + } + + // Add any text content before the tool calls + textContent := functions.ParseTextContent(result, cfg.FunctionsConfig) + if textContent != "" { + // Prepend text block + contentBlocks = append([]schema.AnthropicContentBlock{{Type: "text", Text: textContent}}, contentBlocks...) 
+ } + } else { + // Normal text response + stopReason = "end_turn" + contentBlocks = []schema.AnthropicContentBlock{ + {Type: "text", Text: result}, + } + } resp := &schema.AnthropicResponse{ ID: fmt.Sprintf("msg_%s", id), @@ -119,9 +161,7 @@ func handleAnthropicNonStream(c echo.Context, id string, input *schema.Anthropic Role: "assistant", Model: input.Model, StopReason: &stopReason, - Content: []schema.AnthropicContentBlock{ - {Type: "text", Text: result}, - }, + Content: contentBlocks, Usage: schema.AnthropicUsage{ InputTokens: prediction.Usage.Prompt, OutputTokens: prediction.Usage.Completion, @@ -135,13 +175,13 @@ func handleAnthropicNonStream(c echo.Context, id string, input *schema.Anthropic return c.JSON(200, resp) } -func handleAnthropicStream(c echo.Context, id string, input *schema.AnthropicRequest, cfg *config.ModelConfig, ml *model.ModelLoader, predInput string) error { +func handleAnthropicStream(c echo.Context, id string, input *schema.AnthropicRequest, cfg *config.ModelConfig, ml *model.ModelLoader, predInput string, openAIReq *schema.OpenAIRequest, funcs functions.Functions, shouldUseFn bool) error { c.Response().Header().Set("Content-Type", "text/event-stream") c.Response().Header().Set("Cache-Control", "no-cache") c.Response().Header().Set("Connection", "keep-alive") // Create OpenAI messages for inference - openAIMessages := convertAnthropicToOpenAIMessages(input) + openAIMessages := openAIReq.Messages images := []string{} for _, m := range openAIMessages { @@ -162,25 +202,93 @@ func handleAnthropicStream(c echo.Context, id string, input *schema.AnthropicReq } sendAnthropicSSE(c, messageStart) - // Send content_block_start event + // Track accumulated content for tool call detection + accumulatedContent := "" + currentBlockIndex := 0 + inToolCall := false + toolCallsEmitted := 0 + + // Send initial content_block_start event contentBlockStart := schema.AnthropicStreamEvent{ Type: "content_block_start", - Index: 0, + Index: currentBlockIndex, 
ContentBlock: &schema.AnthropicContentBlock{Type: "text", Text: ""}, } sendAnthropicSSE(c, contentBlockStart) // Stream content deltas tokenCallback := func(token string, usage backend.TokenUsage) bool { - delta := schema.AnthropicStreamEvent{ - Type: "content_block_delta", - Index: 0, - Delta: &schema.AnthropicStreamDelta{ - Type: "text_delta", - Text: token, - }, + accumulatedContent += token + + // If we're using functions, try to detect tool calls incrementally + if shouldUseFn { + cleanedResult := functions.CleanupLLMResult(accumulatedContent, cfg.FunctionsConfig) + + // Try parsing for tool calls + toolCalls := functions.ParseFunctionCall(cleanedResult, cfg.FunctionsConfig) + + // If we detected new tool calls and haven't emitted them yet + if len(toolCalls) > toolCallsEmitted { + // Stop the current text block if we were in one + if !inToolCall && currentBlockIndex == 0 { + sendAnthropicSSE(c, schema.AnthropicStreamEvent{ + Type: "content_block_stop", + Index: currentBlockIndex, + }) + currentBlockIndex++ + inToolCall = true + } + + // Emit new tool calls + for i := toolCallsEmitted; i < len(toolCalls); i++ { + tc := toolCalls[i] + + // Send content_block_start for tool_use + sendAnthropicSSE(c, schema.AnthropicStreamEvent{ + Type: "content_block_start", + Index: currentBlockIndex, + ContentBlock: &schema.AnthropicContentBlock{ + Type: "tool_use", + ID: fmt.Sprintf("toolu_%s_%d", id, i), + Name: tc.Name, + }, + }) + + // Send input_json_delta with the arguments + sendAnthropicSSE(c, schema.AnthropicStreamEvent{ + Type: "content_block_delta", + Index: currentBlockIndex, + Delta: &schema.AnthropicStreamDelta{ + Type: "input_json_delta", + PartialJSON: tc.Arguments, + }, + }) + + // Send content_block_stop + sendAnthropicSSE(c, schema.AnthropicStreamEvent{ + Type: "content_block_stop", + Index: currentBlockIndex, + }) + + currentBlockIndex++ + } + toolCallsEmitted = len(toolCalls) + return true + } + } + + // Send regular text delta if not in tool call mode + 
if !inToolCall { + delta := schema.AnthropicStreamEvent{ + Type: "content_block_delta", + Index: 0, + Delta: &schema.AnthropicStreamDelta{ + Type: "text_delta", + Text: token, + }, + } + sendAnthropicSSE(c, delta) } - sendAnthropicSSE(c, delta) return true } @@ -197,15 +305,22 @@ func handleAnthropicStream(c echo.Context, id string, input *schema.AnthropicReq return sendAnthropicError(c, 500, "api_error", fmt.Sprintf("prediction failed: %v", err)) } - // Send content_block_stop event - contentBlockStop := schema.AnthropicStreamEvent{ - Type: "content_block_stop", - Index: 0, + // Send content_block_stop event for last block if we didn't close it yet + if !inToolCall { + contentBlockStop := schema.AnthropicStreamEvent{ + Type: "content_block_stop", + Index: 0, + } + sendAnthropicSSE(c, contentBlockStop) } - sendAnthropicSSE(c, contentBlockStop) - // Send message_delta event with stop_reason + // Determine stop reason stopReason := "end_turn" + if toolCallsEmitted > 0 { + stopReason = "tool_use" + } + + // Send message_delta event with stop_reason messageDelta := schema.AnthropicStreamEvent{ Type: "message_delta", Delta: &schema.AnthropicStreamDelta{ @@ -274,6 +389,8 @@ func convertAnthropicToOpenAIMessages(input *schema.AnthropicRequest) []schema.M // Handle array of content blocks var textContent string var stringImages []string + var toolCalls []schema.ToolCall + toolCallIndex := 0 for _, block := range content { if blockMap, ok := block.(map[string]interface{}); ok { @@ -295,12 +412,79 @@ func convertAnthropicToOpenAIMessages(input *schema.AnthropicRequest) []schema.M } } } + case "tool_use": + // Convert tool_use to ToolCall format + toolID, _ := blockMap["id"].(string) + toolName, _ := blockMap["name"].(string) + toolInput := blockMap["input"] + + // Serialize input to JSON string + inputJSON, err := json.Marshal(toolInput) + if err != nil { + xlog.Warn("Failed to marshal tool input", "error", err) + inputJSON = []byte("{}") + } + + toolCalls = 
append(toolCalls, schema.ToolCall{ + Index: toolCallIndex, + ID: toolID, + Type: "function", + FunctionCall: schema.FunctionCall{ + Name: toolName, + Arguments: string(inputJSON), + }, + }) + toolCallIndex++ + case "tool_result": + // Convert tool_result to a message with role "tool" + // This is handled by creating a separate message after this block + // For now, we'll add it as text content + toolUseID, _ := blockMap["tool_use_id"].(string) + isError := false + if isErr, ok := blockMap["is_error"].(bool); ok { + isError = isErr + } + + var resultText string + if resultContent, ok := blockMap["content"]; ok { + switch rc := resultContent.(type) { + case string: + resultText = rc + case []interface{}: + // Array of content blocks + for _, cb := range rc { + if cbMap, ok := cb.(map[string]interface{}); ok { + if cbMap["type"] == "text" { + if text, ok := cbMap["text"].(string); ok { + resultText += text + } + } + } + } + } + } + + // Add tool result as a tool role message + // We need to handle this differently - create a new message + if msg.Role == "user" { + // Store tool result info for creating separate message + prefix := "" + if isError { + prefix = "Error: " + } + textContent += fmt.Sprintf("\n[Tool Result for %s]: %s%s", toolUseID, prefix, resultText) + } + } + } + } openAIMsg.StringContent = textContent openAIMsg.Content = textContent openAIMsg.StringImages = stringImages + + // Add tool calls if present + if len(toolCalls) > 0 { + openAIMsg.ToolCalls = toolCalls + } } messages = append(messages, openAIMsg) @@ -308,3 +492,46 @@ func convertAnthropicToOpenAIMessages(input *schema.AnthropicRequest) []schema.M return messages } + +// convertAnthropicTools converts Anthropic tools to internal Functions format +func convertAnthropicTools(input *schema.AnthropicRequest, cfg *config.ModelConfig) (functions.Functions, bool) { + if len(input.Tools) == 0 { + return nil, false + } + + var funcs functions.Functions + for _, tool := range 
input.Tools { + f := functions.Function{ + Name: tool.Name, + Description: tool.Description, + Parameters: tool.InputSchema, + } + funcs = append(funcs, f) + } + + // Handle tool_choice - Anthropic sends an object {"type": "auto"|"any"|"none"|"tool", ...} + if input.ToolChoice != nil { + choiceType := "" + choiceName := "" + switch tc := input.ToolChoice.(type) { + case string: + choiceType = tc + case map[string]interface{}: + choiceType, _ = tc["type"].(string) + choiceName, _ = tc["name"].(string) + } + switch choiceType { + case "any": + // Force the model to use one of the tools + cfg.SetFunctionCallString("required") + case "none": + // Don't use tools + return nil, false + case "tool": + // Force specific tool; "auto" is the default - let model decide + cfg.SetFunctionCallString(choiceName) + } + } + + return funcs, len(funcs) > 0 && cfg.ShouldUseFunctions() +} diff --git a/core/schema/anthropic.go b/core/schema/anthropic.go index 02398faa4af9..d6c17ba79e4d 100644 --- a/core/schema/anthropic.go +++ b/core/schema/anthropic.go @@ -18,6 +18,8 @@ type AnthropicRequest struct { Temperature *float64 `json:"temperature,omitempty"` TopK *int `json:"top_k,omitempty"` TopP *float64 `json:"top_p,omitempty"` + Tools []AnthropicTool `json:"tools,omitempty"` + ToolChoice interface{} `json:"tool_choice,omitempty"` // Internal fields for request handling Context context.Context `json:"-"` @@ -32,6 +34,13 @@ func (ar *AnthropicRequest) ModelName(s *string) string { return ar.Model } +// AnthropicTool represents a tool definition in the Anthropic format +type AnthropicTool struct { + Name string `json:"name"` + Description string `json:"description,omitempty"` + InputSchema map[string]interface{} `json:"input_schema"` +} + // AnthropicMessage represents a message in the Anthropic format type AnthropicMessage struct { Role string `json:"role"` @@ -40,12 +49,15 @@ type AnthropicMessage struct { // AnthropicContentBlock represents a content block in an Anthropic message type 
AnthropicContentBlock struct { - Type string `json:"type"` - Text string `json:"text,omitempty"` - Source *AnthropicImageSource `json:"source,omitempty"` - ID string `json:"id,omitempty"` - Name string `json:"name,omitempty"` - Input map[string]interface{} `json:"input,omitempty"` + Type string `json:"type"` + Text string `json:"text,omitempty"` + Source *AnthropicImageSource `json:"source,omitempty"` + ID string `json:"id,omitempty"` + Name string `json:"name,omitempty"` + Input map[string]interface{} `json:"input,omitempty"` + ToolUseID string `json:"tool_use_id,omitempty"` + Content interface{} `json:"content,omitempty"` + IsError *bool `json:"is_error,omitempty"` } // AnthropicImageSource represents an image source in Anthropic format @@ -87,6 +99,7 @@ type AnthropicStreamEvent struct { type AnthropicStreamDelta struct { Type string `json:"type,omitempty"` Text string `json:"text,omitempty"` + PartialJSON string `json:"partial_json,omitempty"` StopReason *string `json:"stop_reason,omitempty"` StopSequence *string `json:"stop_sequence,omitempty"` } diff --git a/core/schema/anthropic_test.go b/core/schema/anthropic_test.go index f4dbde16df77..56f7bc5ddb65 100644 --- a/core/schema/anthropic_test.go +++ b/core/schema/anthropic_test.go @@ -31,6 +31,37 @@ var _ = Describe("Anthropic Schema", func() { Expect(*req.Temperature).To(Equal(0.7)) }) + It("should unmarshal a request with tools", func() { + jsonData := `{ + "model": "claude-3-sonnet-20240229", + "max_tokens": 1024, + "messages": [ + {"role": "user", "content": "What's the weather?"} + ], + "tools": [ + { + "name": "get_weather", + "description": "Get the current weather", + "input_schema": { + "type": "object", + "properties": { + "location": {"type": "string"} + } + } + } + ], + "tool_choice": {"type": "tool", "name": "get_weather"} + }` + + var req schema.AnthropicRequest + err := json.Unmarshal([]byte(jsonData), &req) + Expect(err).ToNot(HaveOccurred()) + Expect(len(req.Tools)).To(Equal(1)) + 
Expect(req.Tools[0].Name).To(Equal("get_weather")) + Expect(req.Tools[0].Description).To(Equal("Get the current weather")) + Expect(req.ToolChoice).ToNot(BeNil()) + }) + It("should implement LocalAIRequest interface", func() { req := &schema.AnthropicRequest{Model: "test-model"} Expect(req.ModelName(nil)).To(Equal("test-model")) @@ -117,6 +148,46 @@ var _ = Describe("Anthropic Schema", func() { Expect(result["role"]).To(Equal("assistant")) Expect(result["stop_reason"]).To(Equal("end_turn")) }) + + It("should marshal a response with tool use", func() { + stopReason := "tool_use" + resp := schema.AnthropicResponse{ + ID: "msg_123", + Type: "message", + Role: "assistant", + Model: "claude-3-sonnet-20240229", + StopReason: &stopReason, + Content: []schema.AnthropicContentBlock{ + { + Type: "tool_use", + ID: "toolu_123", + Name: "get_weather", + Input: map[string]interface{}{ + "location": "San Francisco", + }, + }, + }, + Usage: schema.AnthropicUsage{ + InputTokens: 10, + OutputTokens: 5, + }, + } + + data, err := json.Marshal(resp) + Expect(err).ToNot(HaveOccurred()) + + var result map[string]interface{} + err = json.Unmarshal(data, &result) + Expect(err).ToNot(HaveOccurred()) + + Expect(result["stop_reason"]).To(Equal("tool_use")) + content := result["content"].([]interface{}) + Expect(len(content)).To(Equal(1)) + toolUse := content[0].(map[string]interface{}) + Expect(toolUse["type"]).To(Equal("tool_use")) + Expect(toolUse["id"]).To(Equal("toolu_123")) + Expect(toolUse["name"]).To(Equal("get_weather")) + }) }) Describe("AnthropicErrorResponse", func() { diff --git a/tests/e2e/e2e_anthropic_test.go b/tests/e2e/e2e_anthropic_test.go index 8980ec355784..c4646cf14661 100644 --- a/tests/e2e/e2e_anthropic_test.go +++ b/tests/e2e/e2e_anthropic_test.go @@ -146,5 +146,230 @@ var _ = Describe("Anthropic API E2E test", func() { Expect(message.Content).ToNot(BeEmpty()) }) }) + + Context("Tool calling", func() { + It("handles tool calls in non-streaming mode", func() { + 
message, err := client.Messages.New(context.TODO(), anthropic.MessageNewParams{ + Model: "gpt-4", + MaxTokens: 1024, + Messages: []anthropic.MessageParam{ + anthropic.NewUserMessage(anthropic.NewTextBlock("What's the weather like in San Francisco?")), + }, + Tools: []anthropic.ToolParam{ + { + Name: "get_weather", + Description: anthropic.F("Get the current weather in a given location"), + InputSchema: anthropic.F(map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "location": map[string]interface{}{ + "type": "string", + "description": "The city and state, e.g. San Francisco, CA", + }, + }, + "required": []string{"location"}, + }), + }, + }, + }) + + Expect(err).ToNot(HaveOccurred()) + Expect(message.Content).ToNot(BeEmpty()) + + // The model must use tools - find the tool use in the response + hasToolUse := false + for _, block := range message.Content { + if block.Type == anthropic.ContentBlockTypeToolUse { + hasToolUse = true + Expect(block.Name).To(Equal("get_weather")) + Expect(block.ID).ToNot(BeEmpty()) + // Verify that input contains location + inputMap, ok := block.Input.(map[string]interface{}) + Expect(ok).To(BeTrue()) + _, hasLocation := inputMap["location"] + Expect(hasLocation).To(BeTrue()) + } + } + + // Model must have called the tool + Expect(hasToolUse).To(BeTrue(), "Model should have called the get_weather tool") + Expect(message.StopReason).To(Equal(anthropic.MessageStopReasonToolUse)) + }) + + It("handles tool_choice parameter", func() { + message, err := client.Messages.New(context.TODO(), anthropic.MessageNewParams{ + Model: "gpt-4", + MaxTokens: 1024, + Messages: []anthropic.MessageParam{ + anthropic.NewUserMessage(anthropic.NewTextBlock("Tell me about the weather")), + }, + Tools: []anthropic.ToolParam{ + { + Name: "get_weather", + Description: anthropic.F("Get the current weather"), + InputSchema: anthropic.F(map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "location": 
map[string]interface{}{ + "type": "string", + }, + }, + }), + }, + }, + ToolChoice: anthropic.F[anthropic.ToolChoiceUnionParam]( + anthropic.ToolChoiceAutoParam{ + Type: anthropic.F(anthropic.ToolChoiceAutoTypeAuto), + }, + ), + }) + + Expect(err).ToNot(HaveOccurred()) + Expect(message.Content).ToNot(BeEmpty()) + }) + + It("handles tool results in messages", func() { + // First, make a request that should trigger a tool call + firstMessage, err := client.Messages.New(context.TODO(), anthropic.MessageNewParams{ + Model: "gpt-4", + MaxTokens: 1024, + Messages: []anthropic.MessageParam{ + anthropic.NewUserMessage(anthropic.NewTextBlock("What's the weather in SF?")), + }, + Tools: []anthropic.ToolParam{ + { + Name: "get_weather", + Description: anthropic.F("Get weather"), + InputSchema: anthropic.F(map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "location": map[string]interface{}{"type": "string"}, + }, + }), + }, + }, + }) + + Expect(err).ToNot(HaveOccurred()) + + // Find the tool use block - model must call the tool + var toolUseID string + var toolUseName string + for _, block := range firstMessage.Content { + if block.Type == anthropic.ContentBlockTypeToolUse { + toolUseID = block.ID + toolUseName = block.Name + break + } + } + + // Model must have called the tool + Expect(toolUseID).ToNot(BeEmpty(), "Model should have called the get_weather tool") + + // Send back a tool result and verify it's handled correctly + secondMessage, err := client.Messages.New(context.TODO(), anthropic.MessageNewParams{ + Model: "gpt-4", + MaxTokens: 1024, + Messages: []anthropic.MessageParam{ + anthropic.NewUserMessage(anthropic.NewTextBlock("What's the weather in SF?")), + anthropic.NewAssistantMessage(firstMessage.Content...), + anthropic.NewUserMessage( + anthropic.NewToolResultBlock(toolUseID, "Sunny, 72°F", false), + ), + }, + Tools: []anthropic.ToolParam{ + { + Name: toolUseName, + Description: anthropic.F("Get weather"), + InputSchema: 
anthropic.F(map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "location": map[string]interface{}{"type": "string"}, + }, + }), + }, + }, + }) + + Expect(err).ToNot(HaveOccurred()) + Expect(secondMessage.Content).ToNot(BeEmpty()) + }) + + It("handles tool calls in streaming mode", func() { + stream := client.Messages.NewStreaming(context.TODO(), anthropic.MessageNewParams{ + Model: "gpt-4", + MaxTokens: 1024, + Messages: []anthropic.MessageParam{ + anthropic.NewUserMessage(anthropic.NewTextBlock("What's the weather like in San Francisco?")), + }, + Tools: []anthropic.ToolParam{ + { + Name: "get_weather", + Description: anthropic.F("Get the current weather in a given location"), + InputSchema: anthropic.F(map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "location": map[string]interface{}{ + "type": "string", + "description": "The city and state, e.g. San Francisco, CA", + }, + }, + "required": []string{"location"}, + }), + }, + }, + }) + + message := anthropic.Message{} + eventCount := 0 + hasToolUseBlock := false + hasContentBlockStart := false + hasContentBlockDelta := false + hasContentBlockStop := false + + for stream.Next() { + event := stream.Current() + err := message.Accumulate(event) + Expect(err).ToNot(HaveOccurred()) + eventCount++ + + // Check for different event types related to tool use + switch e := event.AsAny().(type) { + case anthropic.ContentBlockStartEvent: + hasContentBlockStart = true + if e.ContentBlock.Type == anthropic.ContentBlockTypeToolUse { + hasToolUseBlock = true + } + case anthropic.ContentBlockDeltaEvent: + hasContentBlockDelta = true + case anthropic.ContentBlockStopEvent: + hasContentBlockStop = true + } + } + + Expect(stream.Err()).ToNot(HaveOccurred()) + Expect(eventCount).To(BeNumerically(">", 0)) + + // Verify streaming events were emitted + Expect(hasContentBlockStart).To(BeTrue(), "Should have content_block_start event") + 
Expect(hasContentBlockDelta).To(BeTrue(), "Should have content_block_delta event") + Expect(hasContentBlockStop).To(BeTrue(), "Should have content_block_stop event") + + // Check accumulated message has tool use + Expect(message.Content).ToNot(BeEmpty()) + + // Model must have called the tool + foundToolUse := false + for _, block := range message.Content { + if block.Type == anthropic.ContentBlockTypeToolUse { + foundToolUse = true + Expect(block.Name).To(Equal("get_weather")) + Expect(block.ID).ToNot(BeEmpty()) + } + } + Expect(foundToolUse).To(BeTrue(), "Model should have called the get_weather tool in streaming mode") + Expect(message.StopReason).To(Equal(anthropic.MessageStopReasonToolUse)) + }) + }) }) })