diff --git a/engine/baml-runtime/src/cli/serve/error.rs b/engine/baml-runtime/src/cli/serve/error.rs
index 24b0966027..8a6196a5bd 100644
--- a/engine/baml-runtime/src/cli/serve/error.rs
+++ b/engine/baml-runtime/src/cli/serve/error.rs
@@ -92,6 +92,9 @@ impl BamlError {
             message: format!("Unexpected error from BAML: {err:?}"),
         },
         LLMResponse::LLMFailure(failed) => match &failed.code {
+            crate::internal::llm_client::ErrorCode::FailedToConnect => Self::ClientError {
+                message: format!("Failed to connect to the LLM provider: {err:?}"),
+            },
             crate::internal::llm_client::ErrorCode::Other(2) => Self::InternalError {
                 message: format!("Something went wrong with the LLM client: {err:?}"),
             },
diff --git a/engine/baml-runtime/src/internal/llm_client/mod.rs b/engine/baml-runtime/src/internal/llm_client/mod.rs
index 942985606a..ebf64e9a1b 100644
--- a/engine/baml-runtime/src/internal/llm_client/mod.rs
+++ b/engine/baml-runtime/src/internal/llm_client/mod.rs
@@ -211,6 +211,12 @@ pub struct LLMErrorResponse {
 
 #[derive(Debug, Clone, Serialize, PartialEq)]
 pub enum ErrorCode {
+    // Failed to establish a connection.
+    // Tends to happen when either (1) the user enters the wrong connection details
+    // (e.g. the wrong AWS region and therefore URL) or (2) it's been chugging along
+    // in prod and then the provider goes hard down.
+    FailedToConnect,
+
     InvalidAuthentication, // 401
     NotSupported, // 403
     RateLimited, // 429
@@ -227,6 +233,7 @@ pub enum ErrorCode {
 impl std::fmt::Display for ErrorCode {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
+            ErrorCode::FailedToConnect => f.write_str("Failed while establishing connection"),
             ErrorCode::InvalidAuthentication => f.write_str("InvalidAuthentication (401)"),
             ErrorCode::NotSupported => f.write_str("NotSupported (403)"),
             ErrorCode::RateLimited => f.write_str("RateLimited (429)"),
@@ -263,6 +270,10 @@ impl ErrorCode {
 
     pub fn to_u16(&self) -> u16 {
         match self {
+            // FailedToConnect maps to 2 because the internal callsites usually used ErrorCode::Other(2)
+            // for connection errors. It's unclear if this actually makes its way out to users in any
+            // meaningful way.
+            ErrorCode::FailedToConnect => 2,
             ErrorCode::InvalidAuthentication => 401,
             ErrorCode::NotSupported => 403,
             ErrorCode::RateLimited => 429,
diff --git a/engine/baml-runtime/src/internal/llm_client/primitive/aws/aws_client.rs b/engine/baml-runtime/src/internal/llm_client/primitive/aws/aws_client.rs
index fa566f97fa..e3bef79bcb 100644
--- a/engine/baml-runtime/src/internal/llm_client/primitive/aws/aws_client.rs
+++ b/engine/baml-runtime/src/internal/llm_client/primitive/aws/aws_client.rs
@@ -1245,29 +1245,13 @@ impl WithChat for AwsClient {
                 latency: instant_start.elapsed(),
                 message: format!("{e:#?}"),
                 code: match e {
-                    SdkError::ConstructionFailure(_) => ErrorCode::Other(2),
+                    SdkError::ConstructionFailure(_) => ErrorCode::FailedToConnect,
                     SdkError::TimeoutError(_) => ErrorCode::Other(2),
-                    SdkError::DispatchFailure(_) => ErrorCode::Other(2),
+                    SdkError::DispatchFailure(_) => ErrorCode::FailedToConnect,
                     SdkError::ResponseError(e) => {
                         ErrorCode::UnsupportedResponse(e.raw().status().as_u16())
                     }
-                    SdkError::ServiceError(e) => {
-                        let status = e.raw().status();
-                        match status.as_u16() {
-                            400 => ErrorCode::InvalidAuthentication,
-                            403 => ErrorCode::NotSupported,
-                            429 => ErrorCode::RateLimited,
-                            500 => ErrorCode::ServerError,
-                            503 => ErrorCode::ServiceUnavailable,
-                            _ => {
-                                if status.is_server_error() {
-                                    ErrorCode::ServerError
-                                } else {
-                                    ErrorCode::Other(status.as_u16())
-                                }
-                            }
-                        }
-                    }
+                    SdkError::ServiceError(e) => ErrorCode::from_u16(e.raw().status().as_u16()),
                     _ => ErrorCode::Other(2),
                 },
             });
diff --git a/engine/baml-runtime/src/internal/llm_client/primitive/openai/openai_client.rs b/engine/baml-runtime/src/internal/llm_client/primitive/openai/openai_client.rs
index d8d75143b6..51387a88f1 100644
--- a/engine/baml-runtime/src/internal/llm_client/primitive/openai/openai_client.rs
+++ b/engine/baml-runtime/src/internal/llm_client/primitive/openai/openai_client.rs
@@ -488,7 +488,12 @@ impl WithStreamChat for OpenAIClient {
 }
 
 macro_rules! make_openai_client {
-    ($client:ident, $properties:ident, $provider:expr, dynamic) => {
+    ($client:ident, $properties:ident, $provider:expr, dynamic) => {{
+        let resolve_pdf_urls = if $provider == "openai-responses" {
+            ResolveMediaUrls::Never
+        } else {
+            ResolveMediaUrls::Always
+        };
         Ok(Self {
             name: $client.name.clone(),
             provider: $provider.into(),
@@ -506,7 +511,7 @@ macro_rules! make_openai_client {
                 max_one_system_prompt: false,
                 resolve_audio_urls: ResolveMediaUrls::Always,
                 resolve_image_urls: ResolveMediaUrls::Never,
-                resolve_pdf_urls: ResolveMediaUrls::Never,
+                resolve_pdf_urls,
                 resolve_video_urls: ResolveMediaUrls::Never,
                 allowed_metadata: $properties.allowed_metadata.clone(),
             },
@@ -514,8 +519,13 @@ macro_rules! make_openai_client {
             retry_policy: $client.retry_policy.clone(),
             client: create_client()?,
         })
-    };
-    ($client:ident, $properties:ident, $provider:expr) => {
+    }};
+    ($client:ident, $properties:ident, $provider:expr) => {{
+        let resolve_pdf_urls = if $provider == "openai-responses" {
+            ResolveMediaUrls::Never
+        } else {
+            ResolveMediaUrls::Always
+        };
         Ok(Self {
             name: $client.name().into(),
             provider: $provider.into(),
@@ -533,7 +543,7 @@ macro_rules! make_openai_client {
                 max_one_system_prompt: false,
                 resolve_audio_urls: ResolveMediaUrls::Always,
                 resolve_image_urls: ResolveMediaUrls::Never,
-                resolve_pdf_urls: ResolveMediaUrls::Never,
+                resolve_pdf_urls,
                 resolve_video_urls: ResolveMediaUrls::Never,
                 allowed_metadata: $properties.allowed_metadata.clone(),
             },
@@ -545,7 +555,7 @@ macro_rules! make_openai_client {
             .map(|s| s.to_string()),
         client: create_client()?,
     })
-    };
+    }};
 }
 
 impl OpenAIClient {
@@ -714,8 +724,12 @@ impl ToProviderMessage for OpenAIClient {
                 match &media.content {
                     BamlMediaContent::Url(url_content) => {
                         // For URLs, we need to resolve them to base64 first
-                        anyhow::bail!(
-                            "BAML internal error (openai): Pdf URL are not supported by OpenAI use base64."
+                        content.insert(
+                            payload_key.into(),
+                            json!({
+                                "type": "input_file",
+                                "file_url": url_content.url
+                            }),
                         );
                     }
                     BamlMediaContent::Base64(b64_media) => {
diff --git a/engine/baml-runtime/src/internal/llm_client/primitive/request.rs b/engine/baml-runtime/src/internal/llm_client/primitive/request.rs
index 8b8f27cbb6..ba5510b15a 100644
--- a/engine/baml-runtime/src/internal/llm_client/primitive/request.rs
+++ b/engine/baml-runtime/src/internal/llm_client/primitive/request.rs
@@ -182,7 +182,11 @@ pub(crate) async fn build_and_log_outbound_request(
                 request_options: client.request_options().clone(),
                 latency: instant_now.elapsed(),
                 message: format!("Failed to build request: {e:#?}"),
-                code: ErrorCode::Other(2),
+                code: if e.is_connect() {
+                    ErrorCode::FailedToConnect
+                } else {
+                    ErrorCode::Other(2)
+                },
             }));
         }
     };
@@ -253,9 +257,12 @@ pub async fn execute_request(
                     )
                 }
             },
-            code: e
-                .status()
-                .map_or(ErrorCode::Other(2), ErrorCode::from_status),
+            code: if e.is_connect() {
+                ErrorCode::FailedToConnect
+            } else {
+                e.status()
+                    .map_or(ErrorCode::Other(2), ErrorCode::from_status)
+            },
         }));
         }
     };
diff --git a/engine/baml-runtime/src/types/response.rs b/engine/baml-runtime/src/types/response.rs
index ebdded866d..c659df8916 100644
--- a/engine/baml-runtime/src/types/response.rs
+++ b/engine/baml-runtime/src/types/response.rs
@@ -123,9 +123,26 @@ impl FunctionResult {
     }
 
     fn format_err(&self, err: &anyhow::Error) -> ExposedError {
+        // panic!("format_err {:?}", err);
         if let Some(exposed_error) = err.downcast_ref::<ExposedError>() {
             return exposed_error.clone();
         }
+
+        if let LLMResponse::LLMFailure(err) = self.llm_response() {
+            if err.code == ErrorCode::FailedToConnect {
+                let actual_error = err.message.clone();
+                return ExposedError::ClientHttpError {
+                    client_name: match self.llm_response() {
+                        LLMResponse::Success(resp) => resp.client.clone(),
+                        LLMResponse::LLMFailure(err) => err.client.clone(),
+                        _ => "unknown".to_string(),
+                    },
+                    message: actual_error,
+                    status_code: ErrorCode::FailedToConnect,
+                };
+            }
+        }
+
         // Capture the actual error to preserve its details
         let actual_error = err.to_string();
         // TODO: HACK! Figure out why now connection errors dont get converted into ExposedError. Instead of converting to a validation error, check for connection errors here. We probably are missing a lot of other connection failures that should NOT be validation errors.
@@ -137,9 +154,10 @@ impl FunctionResult {
                     _ => "unknown".to_string(),
                 },
                 message: actual_error,
-                status_code: ErrorCode::ServiceUnavailable,
+                status_code: ErrorCode::FailedToConnect,
             };
         }
+
         ExposedError::ValidationError {
             prompt: match self.llm_response() {
                 LLMResponse::Success(resp) => resp.prompt.to_string(),
diff --git a/engine/language_client_python/src/errors.rs b/engine/language_client_python/src/errors.rs
index 9c143a5eb2..1fcb109ba0 100644
--- a/engine/language_client_python/src/errors.rs
+++ b/engine/language_client_python/src/errors.rs
@@ -131,6 +131,7 @@ impl BamlError {
                 ))
             }
             baml_runtime::internal::llm_client::ErrorCode::Other(_)
+            | baml_runtime::internal::llm_client::ErrorCode::FailedToConnect
            | baml_runtime::internal::llm_client::ErrorCode::InvalidAuthentication
            | baml_runtime::internal::llm_client::ErrorCode::NotSupported
            | baml_runtime::internal::llm_client::ErrorCode::RateLimited
diff --git a/engine/language_client_typescript/src/errors.rs b/engine/language_client_typescript/src/errors.rs
index abe41c13a3..bd8e2f6e96 100644
--- a/engine/language_client_typescript/src/errors.rs
+++ b/engine/language_client_typescript/src/errors.rs
@@ -57,6 +57,7 @@ pub fn from_anyhow_error(err: anyhow::Error) -> napi::Error {
                 ),
             ),
             baml_runtime::internal::llm_client::ErrorCode::Other(_)
+            | baml_runtime::internal::llm_client::ErrorCode::FailedToConnect
            | baml_runtime::internal::llm_client::ErrorCode::InvalidAuthentication
            | baml_runtime::internal::llm_client::ErrorCode::NotSupported
            | baml_runtime::internal::llm_client::ErrorCode::RateLimited
diff --git a/integ-tests/baml_src/test-files/abort-handlers/abort-handlers.baml b/integ-tests/baml_src/test-files/abort-handlers/abort-handlers.baml
index e344b86438..32dd23c472 100644
--- a/integ-tests/baml_src/test-files/abort-handlers/abort-handlers.baml
+++ b/integ-tests/baml_src/test-files/abort-handlers/abort-handlers.baml
@@ -73,8 +73,8 @@ function FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> str
 function TestAbortFallbackChain(input: string) -> string {
   client AbortTestFallback
   prompt #"
-    This is a test for fallback chain cancellation.
-    Please fail so we test the fallback behavior.
+    Tell me a 200-word story about tigers.
+ Input: {{ input }} "# } diff --git a/integ-tests/go/baml_client/baml_source_map.go b/integ-tests/go/baml_client/baml_source_map.go index 0183705f60..13405e83bb 100644 --- a/integ-tests/go/baml_client/baml_source_map.go +++ b/integ-tests/go/baml_client/baml_source_map.go @@ -27,7 +27,7 @@ var file_map = map[string]string{ "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", "formatter/test-comments.baml": "class FormatterTest0 {\n lorem string // trailing comments should be preserved\n ipsum string\n}\n\nclass FormatterTest1 {\n lorem string\n ipsum string\n // dolor string\n}\n\nclass FormatterTest2 {\n // \"lorem\" is a latin word\n lorem string\n // \"ipsum\" is a latin word\n ipsum string\n}\n\nclass FormatterTest3 {\n lorem string\n ipsum string\n // Lorem ipsum dolor sit amet\n // Consectetur adipiscing elit\n // Sed do eiusmod tempor incididunt\n // Ut labore et dolore magna aliqua\n // Ut enim ad minim veniam\n}", "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.206.0\"\n}\n\ngenerator lang_python_v1 {\n output_type python/pydantic/v1\n output_dir \"../python-v1\"\n version \"0.206.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.206.0\"\n}\n \n\ngenerator lang_typescript_esm {\n output_type typescript\n output_dir \"../typescript-esm\"\n version \"0.206.0\"\n module_format esm\n}\n\n\ngenerator lang_typescript_react {\n output_type typescript/react\n output_dir \"../react\"\n version \"0.206.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.206.0\"\n}\n\ngenerator openapi {\n output_type rest/openapi\n output_dir \"../openapi\"\n version \"0.206.0\"\n on_generate \"rm .gitignore\"\n}\n\ngenerator lang_go {\n output_type go\n output_dir \"../go\"\n version \"0.206.0\"\n client_package_name \"example.com/integ-tests\"\n on_generate \"gofmt -w . && goimports -w . 
&& go mod tidy\"\n}\n", - "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n This is a test for fallback chain cancellation.\n Please fail so we test the fallback behavior.\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", + "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client 
AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n Tell me a 200-word story about tigers.\n\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(#\"\n User is confused\n \"#)\n E @description(#\"\n User is excited\n \"#)\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions 
[FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/python-v1/baml_client/inlinedbaml.py b/integ-tests/python-v1/baml_client/inlinedbaml.py index fa483f401e..96a83fad57 100644 --- a/integ-tests/python-v1/baml_client/inlinedbaml.py +++ b/integ-tests/python-v1/baml_client/inlinedbaml.py @@ -24,7 +24,7 @@ "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", "formatter/test-comments.baml": "class FormatterTest0 {\n lorem string // trailing comments should be preserved\n ipsum string\n}\n\nclass FormatterTest1 {\n lorem string\n ipsum string\n // dolor string\n}\n\nclass FormatterTest2 {\n // \"lorem\" is a latin word\n lorem string\n // \"ipsum\" is a latin word\n ipsum string\n}\n\nclass FormatterTest3 {\n lorem string\n ipsum string\n // Lorem ipsum dolor sit amet\n // Consectetur adipiscing elit\n // Sed do eiusmod tempor incididunt\n // Ut labore et dolore magna aliqua\n // Ut enim ad minim veniam\n}", "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.206.0\"\n}\n\ngenerator lang_python_v1 {\n output_type python/pydantic/v1\n output_dir \"../python-v1\"\n version \"0.206.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.206.0\"\n}\n \n\ngenerator lang_typescript_esm {\n output_type typescript\n output_dir \"../typescript-esm\"\n version \"0.206.0\"\n module_format esm\n}\n\n\ngenerator lang_typescript_react {\n output_type typescript/react\n output_dir \"../react\"\n version \"0.206.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.206.0\"\n}\n\ngenerator openapi {\n output_type rest/openapi\n output_dir \"../openapi\"\n version \"0.206.0\"\n on_generate \"rm .gitignore\"\n}\n\ngenerator lang_go {\n output_type go\n output_dir \"../go\"\n version \"0.206.0\"\n client_package_name \"example.com/integ-tests\"\n on_generate \"gofmt -w . && goimports -w . 
&& go mod tidy\"\n}\n", - "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n This is a test for fallback chain cancellation.\n Please fail so we test the fallback behavior.\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", + "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client 
AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n Tell me a 200-word story about tigers.\n\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(#\"\n User is confused\n \"#)\n E @description(#\"\n User is excited\n \"#)\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions 
[FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/python/baml_client/inlinedbaml.py b/integ-tests/python/baml_client/inlinedbaml.py index fa483f401e..96a83fad57 100644 --- a/integ-tests/python/baml_client/inlinedbaml.py +++ b/integ-tests/python/baml_client/inlinedbaml.py @@ -24,7 +24,7 @@ "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", "formatter/test-comments.baml": "class FormatterTest0 {\n lorem string // trailing comments should be preserved\n ipsum string\n}\n\nclass FormatterTest1 {\n lorem string\n ipsum string\n // dolor string\n}\n\nclass FormatterTest2 {\n // \"lorem\" is a latin word\n lorem string\n // \"ipsum\" is a latin word\n ipsum string\n}\n\nclass FormatterTest3 {\n lorem string\n ipsum string\n // Lorem ipsum dolor sit amet\n // Consectetur adipiscing elit\n // Sed do eiusmod tempor incididunt\n // Ut labore et dolore magna aliqua\n // Ut enim ad minim veniam\n}", "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.206.0\"\n}\n\ngenerator lang_python_v1 {\n output_type python/pydantic/v1\n output_dir \"../python-v1\"\n version \"0.206.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.206.0\"\n}\n \n\ngenerator lang_typescript_esm {\n output_type typescript\n output_dir \"../typescript-esm\"\n version \"0.206.0\"\n module_format esm\n}\n\n\ngenerator lang_typescript_react {\n output_type typescript/react\n output_dir \"../react\"\n version \"0.206.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.206.0\"\n}\n\ngenerator openapi {\n output_type rest/openapi\n output_dir \"../openapi\"\n version \"0.206.0\"\n on_generate \"rm .gitignore\"\n}\n\ngenerator lang_go {\n output_type go\n output_dir \"../go\"\n version \"0.206.0\"\n client_package_name \"example.com/integ-tests\"\n on_generate \"gofmt -w . && goimports -w . 
&& go mod tidy\"\n}\n", - "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n This is a test for fallback chain cancellation.\n Please fail so we test the fallback behavior.\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", + "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client 
AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n Tell me a 200-word story about tigers.\n\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(#\"\n User is confused\n \"#)\n E @description(#\"\n User is excited\n \"#)\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions 
[FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/react/baml_client/inlinedbaml.ts b/integ-tests/react/baml_client/inlinedbaml.ts index 158b31165f..86372fe414 100644 --- a/integ-tests/react/baml_client/inlinedbaml.ts +++ b/integ-tests/react/baml_client/inlinedbaml.ts @@ -32,7 +32,7 @@ const fileMap = { "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", "formatter/test-comments.baml": "class FormatterTest0 {\n lorem string // trailing comments should be preserved\n ipsum string\n}\n\nclass FormatterTest1 {\n lorem string\n ipsum string\n // dolor string\n}\n\nclass FormatterTest2 {\n // \"lorem\" is a latin word\n lorem string\n // \"ipsum\" is a latin word\n ipsum string\n}\n\nclass FormatterTest3 {\n lorem string\n ipsum string\n // Lorem ipsum dolor sit amet\n // Consectetur adipiscing elit\n // Sed do eiusmod tempor incididunt\n // Ut labore et dolore magna aliqua\n // Ut enim ad minim veniam\n}", "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.206.0\"\n}\n\ngenerator lang_python_v1 {\n output_type python/pydantic/v1\n output_dir \"../python-v1\"\n version \"0.206.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.206.0\"\n}\n \n\ngenerator lang_typescript_esm {\n output_type typescript\n output_dir \"../typescript-esm\"\n version \"0.206.0\"\n module_format esm\n}\n\n\ngenerator lang_typescript_react {\n output_type typescript/react\n output_dir \"../react\"\n version \"0.206.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.206.0\"\n}\n\ngenerator openapi {\n output_type rest/openapi\n output_dir \"../openapi\"\n version \"0.206.0\"\n on_generate \"rm .gitignore\"\n}\n\ngenerator lang_go {\n output_type go\n output_dir \"../go\"\n version \"0.206.0\"\n client_package_name \"example.com/integ-tests\"\n on_generate \"gofmt -w . && goimports -w . 
&& go mod tidy\"\n}\n", - "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n This is a test for fallback chain cancellation.\n Please fail so we test the fallback behavior.\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", + "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client 
AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n Tell me a 200-word story about tigers.\n\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(#\"\n User is confused\n \"#)\n E @description(#\"\n User is excited\n \"#)\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions 
[FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/typescript-esm/baml_client/inlinedbaml.ts b/integ-tests/typescript-esm/baml_client/inlinedbaml.ts index 158b31165f..86372fe414 100644 --- a/integ-tests/typescript-esm/baml_client/inlinedbaml.ts +++ b/integ-tests/typescript-esm/baml_client/inlinedbaml.ts @@ -32,7 +32,7 @@ const fileMap = { "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", "formatter/test-comments.baml": "class FormatterTest0 {\n lorem string // trailing comments should be preserved\n ipsum string\n}\n\nclass FormatterTest1 {\n lorem string\n ipsum string\n // dolor string\n}\n\nclass FormatterTest2 {\n // \"lorem\" is a latin word\n lorem string\n // \"ipsum\" is a latin word\n ipsum string\n}\n\nclass FormatterTest3 {\n lorem string\n ipsum string\n // Lorem ipsum dolor sit amet\n // Consectetur adipiscing elit\n // Sed do eiusmod tempor incididunt\n // Ut labore et dolore magna aliqua\n // Ut enim ad minim veniam\n}", "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.206.0\"\n}\n\ngenerator lang_python_v1 {\n output_type python/pydantic/v1\n output_dir \"../python-v1\"\n version \"0.206.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.206.0\"\n}\n \n\ngenerator lang_typescript_esm {\n output_type typescript\n output_dir \"../typescript-esm\"\n version \"0.206.0\"\n module_format esm\n}\n\n\ngenerator lang_typescript_react {\n output_type typescript/react\n output_dir \"../react\"\n version \"0.206.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.206.0\"\n}\n\ngenerator openapi {\n output_type rest/openapi\n output_dir \"../openapi\"\n version \"0.206.0\"\n on_generate \"rm .gitignore\"\n}\n\ngenerator lang_go {\n output_type go\n output_dir \"../go\"\n version \"0.206.0\"\n client_package_name \"example.com/integ-tests\"\n on_generate \"gofmt -w . && goimports -w . 
&& go mod tidy\"\n}\n", - "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n This is a test for fallback chain cancellation.\n Please fail so we test the fallback behavior.\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", + "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client 
AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n Tell me a 200-word story about tigers.\n\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(#\"\n User is confused\n \"#)\n E @description(#\"\n User is excited\n \"#)\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions 
[FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/typescript/baml_client/inlinedbaml.ts b/integ-tests/typescript/baml_client/inlinedbaml.ts index 158b31165f..86372fe414 100644 --- a/integ-tests/typescript/baml_client/inlinedbaml.ts +++ b/integ-tests/typescript/baml_client/inlinedbaml.ts @@ -32,7 +32,7 @@ const fileMap = { "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", "formatter/test-comments.baml": "class FormatterTest0 {\n lorem string // trailing comments should be preserved\n ipsum string\n}\n\nclass FormatterTest1 {\n lorem string\n ipsum string\n // dolor string\n}\n\nclass FormatterTest2 {\n // \"lorem\" is a latin word\n lorem string\n // \"ipsum\" is a latin word\n ipsum string\n}\n\nclass FormatterTest3 {\n lorem string\n ipsum string\n // Lorem ipsum dolor sit amet\n // Consectetur adipiscing elit\n // Sed do eiusmod tempor incididunt\n // Ut labore et dolore magna aliqua\n // Ut enim ad minim veniam\n}", "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.206.0\"\n}\n\ngenerator lang_python_v1 {\n output_type python/pydantic/v1\n output_dir \"../python-v1\"\n version \"0.206.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.206.0\"\n}\n \n\ngenerator lang_typescript_esm {\n output_type typescript\n output_dir \"../typescript-esm\"\n version \"0.206.0\"\n module_format esm\n}\n\n\ngenerator lang_typescript_react {\n output_type typescript/react\n output_dir \"../react\"\n version \"0.206.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.206.0\"\n}\n\ngenerator openapi {\n output_type rest/openapi\n output_dir \"../openapi\"\n version \"0.206.0\"\n on_generate \"rm .gitignore\"\n}\n\ngenerator lang_go {\n output_type go\n output_dir \"../go\"\n version \"0.206.0\"\n client_package_name \"example.com/integ-tests\"\n on_generate \"gofmt -w . && goimports -w . 
&& go mod tidy\"\n}\n", - "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n This is a test for fallback chain cancellation.\n Please fail so we test the fallback behavior.\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", + "test-files/abort-handlers/abort-handlers.baml": "// Test functions for abort handler functionality\n// These functions are designed to fail and retry to test cancellation\n\nretry_policy AbortTestConstantRetry {\n max_retries 5\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nretry_policy AbortTestExponentialRetry {\n max_retries 5\n strategy {\n type exponential_backoff\n initial_delay_ms 100\n multiplier 2\n max_delay_ms 1000\n }\n}\n\nclient AbortTestRetryConstant {\n provider openai\n retry_policy AbortTestConstantRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestRetryExponential {\n provider openai\n retry_policy AbortTestExponentialRetry\n options {\n model \"gpt-4o-mini\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient AbortTestFallback {\n provider fallback\n options {\n strategy [\n AbortTestRetryConstant\n AbortTestRetryExponential\n ]\n }\n}\n\n// Force failure by asking for something impossible\nfunction FnFailRetryConstantDelay(retries: int, delay_ms: int) -> string {\n client AbortTestRetryConstant\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n DELAY: {{ delay_ms }}\n \"#\n}\n\nfunction FnFailRetryExponentialDelay(retries: int, initial_delay_ms: int) -> string {\n client 
AbortTestRetryExponential\n prompt #\"\n This is a test that should always fail.\n Please return an error by throwing an exception.\n DO NOT return any valid response.\n RETRIES: {{ retries }}\n INITIAL_DELAY: {{ initial_delay_ms }}\n \"#\n}\n\nfunction TestAbortFallbackChain(input: string) -> string {\n client AbortTestFallback\n prompt #\"\n Tell me a 200-word story about tigers.\n\n Input: {{ input }}\n \"#\n}\n\n// A simple function that should succeed for testing normal operation\nfunction ExtractName(text: string) -> string {\n client openai/gpt-4o-mini\n prompt #\"\n Extract the person's name from this text: {{ text }}\n Return only the name, nothing else.\n \"#\n}", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(#\"\n User is confused\n \"#)\n E @description(#\"\n User is excited\n \"#)\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions 
[FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/typescript/tests/abort-handlers.test.ts b/integ-tests/typescript/tests/abort-handlers.test.ts index 779588f215..d7f0aef840 100644 --- a/integ-tests/typescript/tests/abort-handlers.test.ts +++ b/integ-tests/typescript/tests/abort-handlers.test.ts @@ -30,7 +30,7 @@ describe("Abort Handlers", () => { let aborted = false; try { for await (const value of stream) { - values.push(value); + values.push({ timestamp: Date.now(), value }); } const _ = await stream.getFinalResponse(); } catch (e) { @@ -40,7 +40,9 @@ describe("Abort Handlers", () => { // Should have stopped early due to cancellation expect(aborted).toBe(true); - expect(values.length).toBeLessThan(10); + const latestTimestamp = values[values.length - 1].timestamp; + const firstTimestamp = values[0].timestamp; + expect(latestTimestamp - firstTimestamp).toBeLessThan(1000); }); it("timeout using AbortSignal.timeout", async () => { diff --git a/integ-tests/typescript/tests/prompt_renderer.test.ts b/integ-tests/typescript/tests/prompt_renderer.test.ts index 48329a630e..f9c7c2d1e4 100644 --- a/integ-tests/typescript/tests/prompt_renderer.test.ts +++ b/integ-tests/typescript/tests/prompt_renderer.test.ts @@ -24,19 +24,17 @@ describe("Prompt Renderer Tests", () => { expect(res2).toContain("interesting-key"); }); - // TODO: Enum aliases are not supported it("should use aliases when serializing input objects - enums", async () => { const res = await b.AliasedInputEnum(AliasedEnum.KEY_ONE); - expect(res.toLowerCase()).not.toContain("tiger"); + expect(res.toLowerCase()).toContain("tiger"); }); - // TODO: enum aliases are not supported it("should use aliases when serializing input objects - lists", async () => { const res = await b.AliasedInputList([ AliasedEnum.KEY_ONE, AliasedEnum.KEY_TWO, ]); - expect(res.toLowerCase()).not.toContain("tiger"); + expect(res.toLowerCase()).toContain("tiger"); }); it("maintain field order", async () => { diff --git a/integ-tests/typescript/tests/providers/aws.test.ts b/integ-tests/typescript/tests/providers/aws.test.ts index 383802bf36..b4aed0c0c1 100644 --- a/integ-tests/typescript/tests/providers/aws.test.ts +++ b/integ-tests/typescript/tests/providers/aws.test.ts @@ -12,7 +12,7 @@ describe("AWS Provider", () => { await expect(async () => { await b.TestAwsInvalidRegion("Write a nice short story about Dr. Pepper"); }).rejects.toMatchObject({ - code: "GenericFailure", + name: "BamlClientHttpError", }); }); @@ -279,7 +279,7 @@ describe("AWS Provider", () => { await expect(async () => { await b.TestAws("Dr. Pepper", { clientRegistry: cr }); }).rejects.toMatchObject({ - code: "GenericFailure", + name: "BamlClientHttpError", }); }); @@ -298,7 +298,7 @@ describe("AWS Provider", () => { await b.TestAws("Dr. 
Pepper", { clientRegistry: cr }); }).rejects.toMatchObject({ name: "BamlClientHttpError", - status_code: 401, + status_code: 400, client_name: "DynamicAWSClient", message: expect.stringContaining( "BamlError: BamlClientError: BamlClientHttpError:", diff --git a/tools/build b/tools/build index 0c841f12ab..84bf19dc36 100755 --- a/tools/build +++ b/tools/build @@ -207,10 +207,10 @@ case "$_path" in command="${command} && uv run baml-cli generate --from ${_repo_root}/integ-tests/baml_src" if [ "$_test_mode" -eq 1 ]; then #command="${command} && BAML_LOG=trace infisical run --env=test -- uv run python baml_example_tracing.py" - # command="${command} && BAML_LOG=debug,baml_runtime=trace infisical run --env=test -- uv run pytest -s tests/test_functions.py::test_streaming" + command="${command} && BAML_LOG=debug,baml_runtime=trace infisical run --env=dev -- uv run pytest -s tests/test_functions.py::test_client_response_type" # command="${command} && BAML_LOG=debug,baml_runtime=trace uv run pytest -s tests/test_functions.py::test_should_work_with_vertex_claude" - # command="${command} && BAML_LOG=info infisical run --env=test -- uv run pytest" - command="${command} && BAML_LOG=info uv run pytest -s tests/test_dynamic_enum_request.py" + # command="${command} && BAML_LOG=info infisical run --env=dev -- uv run pytest" + # command="${command} && BAML_LOG=info uv run pytest -s tests/test_dynamic_enum_request.py" fi if [ "$_watch_mode" -eq 1 ]; then npx nodemon \ @@ -233,7 +233,8 @@ case "$_path" in command="(cd ${_repo_root}/engine/language_client_typescript && pnpm build:debug)" command="${command} && pnpm baml-cli generate --from ${_repo_root}/integ-tests/baml_src" if [ "$_test_mode" -eq 1 ]; then - command="${command} && infisical run -- pnpm integ-tests" + # command="${command} && infisical run --env=dev -- pnpm integ-tests" + command="${command} && infisical run --env=dev -- pnpm integ-tests tests/providers/aws.test.ts" # command="${command} && BAML_LOG=${BAML_LOG} infisical run -- pnpm test tests/integ-tests.test.ts" # command="${command} && BAML_LOG=${BAML_LOG} pnpm test tests/dynamic-enum-request.test.ts" fi @@ -277,7 +278,7 @@ case "$_path" in command="(cd ${_repo_root}/engine/language_client_cffi && cargo make)" if [ "$_test_mode" -eq 1 ]; then - command="${command} && BAML_LIBRARY_PATH=${_repo_root}/engine/target/debug/libbaml_cffi.dylib infisical run --env=test -- go test" + command="${command} && BAML_LIBRARY_PATH=${_repo_root}/engine/target/debug/libbaml_cffi.dylib infisical run --env=test -- go test -timeout 0" fi if [ "$_watch_mode" -eq 1 ]; then