Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions integ-tests/baml_src/clients.baml
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,23 @@ client<llm> GPT35LegacyProvider {
}
}

// openai-generic client whose base_url is read from the environment, so the
// concurrency integration test can point it at the local mock server
// (presumably integ-tests/common/concurrent_server.js -- confirm harness wiring).
client<llm> OpenAIConcurrencyTestClientEnvBaseUrl {
provider openai-generic
options {
// Set by the test harness at runtime; expected to include the /v1/ path prefix.
base_url env.OPENAI_CONCURRENCY_TEST_BASE_URL
model "concurrency-test"
api_key env.OPENAI_API_KEY
}
}

// openai-generic client with a fixed local base_url for the concurrency test.
// NOTE(review): "Hardocoded" is a typo for "Hardcoded"; renaming requires
// updating TestOpenAIConcurrencyClientHardocodedBaseUrl as well.
// NOTE(review): port 9876 is hardcoded here while the mock server defaults to
// 8001 -- presumably the harness starts it with --port 9876; confirm.
client<llm> OpenAIConcurrencyTestClientHardocodedBaseUrl {
provider openai-generic
options {
base_url "http://127.0.0.1:9876/v1/"
model "concurrency-test"
api_key env.OPENAI_API_KEY
}
}

client<llm> Ollama {
provider ollama
Expand Down
20 changes: 20 additions & 0 deletions integ-tests/baml_src/test-files/providers/openai.baml
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,26 @@ function TestOpenAI(input: string) -> string {
"#
}

// Concurrency smoke test against the env-configured client. The mock server
// returns a canned reply regardless of the prompt; the prompt text mirrors the
// other haiku tests for consistency.
function TestOpenAIConcurrencyClientEnvBaseUrl(input: string) -> string {
client OpenAIConcurrencyTestClientEnvBaseUrl
prompt #"
{{ _.role("user") }}
Write a nice haiku, given the user input. Make sure to reference the input in the haiku. Make it 50 paragraphs

Input: {{ input }}
"#
}

// Concurrency smoke test against the hardcoded-base_url client (see
// OpenAIConcurrencyTestClientHardocodedBaseUrl; name inherits its typo).
function TestOpenAIConcurrencyClientHardocodedBaseUrl(input: string) -> string {
client OpenAIConcurrencyTestClientHardocodedBaseUrl
prompt #"
{{ _.role("user") }}
Write a nice haiku, given the user input. Make sure to reference the input in the haiku. Make it 50 paragraphs

Input: {{ input }}
"#
}

// Test O1 model without max_tokens (should not add default)
function TestOpenAIO1NoMaxTokens(input: string) -> string {
client OpenAIO1
Expand Down
166 changes: 166 additions & 0 deletions integ-tests/common/concurrent_server.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
// Used to test connection pool concurrency

const http = require("http");
const { URL } = require("url");

// Get host and port. CLI flags win over environment variables, which win over
// the defaults. getArg is a hoisted function declaration, so calling it here
// before its definition below is safe.
const HOST = getArg("--host") || process.env.HOST || "127.0.0.1";
const PORT = Number(getArg("--port") || process.env.PORT || 8001);

// Latency in milliseconds.
// Injected before each mock completion response to make requests overlap.
const LATENCY = Number(getArg("--latency") || process.env.LATENCY || 50);

/**
 * Return the value following a CLI flag (e.g. `--port 8001` -> "8001").
 * Yields undefined when the flag is absent or is the last argument.
 */
function getArg(flag) {
    const args = process.argv;
    const at = args.indexOf(flag);
    if (at === -1) {
        return undefined;
    }
    return args[at + 1];
}

/** Resolve after `ms` milliseconds; used to simulate upstream latency. */
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}

/**
 * Serialize `bodyObj` and finish the response with the given status code.
 * Sends JSON/no-store/keep-alive headers plus permissive CORS headers.
 */
function json(res, status, bodyObj) {
    const payload = JSON.stringify(bodyObj);
    const headers = {
        "Content-Type": "application/json",
        "Content-Length": Buffer.byteLength(payload),
        "Cache-Control": "no-store",
        "Connection": "keep-alive",
        // CORS (harmless if you curl)
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Headers": "Content-Type, Authorization",
    };
    res.writeHead(status, headers);
    res.end(payload);
}

/**
 * Route one incoming HTTP request.
 *
 * GET /health answers immediately. The two POST endpoints mimic the OpenAI
 * and Anthropic chat APIs: buffer the body, reject malformed JSON with 400,
 * wait LATENCY ms to simulate upstream latency, then return a canned
 * completion payload. Everything else gets a 404.
 */
async function handleRequest(req, res) {
    const url = new URL(req.url, `http://${req.headers.host}`);

    // Health probe: no latency, no body handling.
    if (req.method === "GET" && url.pathname === "/health") {
        return json(res, 200, { ok: true });
    }

    // Random lowercase-alphanumeric suffix of up to `len` chars for fake ids.
    const rand = (len) => Math.random().toString(36).slice(2, 2 + len);

    // Shared flow for both mock POST endpoints: buffer the body, validate it
    // as JSON when non-empty (the payload itself is otherwise unused), sleep
    // LATENCY ms, then reply 200 with the payload built by `build(nowSeconds)`.
    function delayedJsonReply(build) {
        let raw = "";
        req.on("data", (chunk) => { raw += chunk; });
        req.on("end", async () => {
            if (raw.length) {
                try {
                    JSON.parse(raw);
                } catch {
                    return json(res, 400, { error: { message: "Invalid JSON" } });
                }
            }
            await sleep(LATENCY);
            const now = Math.floor(Date.now() / 1000);
            return json(res, 200, build(now));
        });
    }

    // OpenAI-compatible chat completions mock.
    if (req.method === "POST" && url.pathname === "/v1/chat/completions") {
        return delayedJsonReply((now) => ({
            id: `cmpl-${now}-${rand(6)}`,
            object: "chat.completion",
            created: now,
            model: "concurrency-test",
            choices: [
                {
                    index: 0,
                    message: { role: "assistant", content: "OpenAI" },
                    finish_reason: "stop",
                },
            ],
            usage: { prompt_tokens: 0, completion_tokens: 1, total_tokens: 1 },
        }));
    }

    // Anthropic-compatible messages mock.
    if (req.method === "POST" && url.pathname === "/v1/messages") {
        return delayedJsonReply((now) => ({
            id: `msg_${rand(8)}`,
            type: "message",
            role: "assistant",
            model: "concurrency-test",
            content: [
                { type: "text", text: "Anthropic" }
            ],
            stop_reason: "end_turn",
            stop_sequence: null,
            usage: { input_tokens: 0, output_tokens: 1 },
            created_at: now,
        }));
    }

    // Not found
    json(res, 404, { error: { message: "Not found" } });
}

// Strip CR/LF from a value before logging. req.method and req.url are
// attacker-controlled; without this a crafted request could forge extra log
// lines (CodeQL: js/log-injection).
function sanitizeForLog(value) {
    return String(value).replace(/[\r\n]/g, "");
}

// One server for both mock endpoints. Each request is logged (sanitized),
// then dispatched to handleRequest; unexpected errors become a 500 JSON body.
const server = http.createServer(async (req, res) => {
    console.log(`${sanitizeForLog(req.method)} ${sanitizeForLog(req.url)}`);

    try {
        await handleRequest(req, res);
    } catch (e) {
        json(res, 500, { error: { message: e?.message || "Internal error" } });
    }
});

// Start accepting connections and announce readiness on stdout (the test
// harness presumably waits for this line -- confirm).
// NOTE(review): `reuseAddress` is not a documented net.Server.listen option
// (Node documents `reusePort`, not `reuseAddress`), so it is likely ignored;
// SO_REUSEADDR is already the default on non-Windows. Verify intent.
server.listen({ host: HOST, port: PORT, reuseAddress: true }, () => {
process.stdout.write(`Concurrency test server listening on http://${HOST}:${PORT}\n`);
});

// Track live sockets so shutdown() can destroy lingering keep-alive
// connections that would otherwise keep server.close() from completing.
const sockets = new Set();

server.on("connection", (socket) => {
sockets.add(socket);
socket.on("close", () => sockets.delete(socket));
});


/**
 * Graceful stop: close the listener (exiting once it fully closes) and
 * destroy any open sockets so keep-alive connections cannot stall close().
 */
function shutdown() {
    server.close(() => process.exit(0));
    sockets.forEach((socket) => {
        try {
            socket.destroy();
        } catch {
            // A socket may already be gone; ignore.
        }
    });
}

for (const signal of ["SIGINT", "SIGTERM"]) {
    process.on(signal, shutdown);
}
4 changes: 2 additions & 2 deletions integ-tests/go/baml_client/baml_source_map.go

Large diffs are not rendered by default.

132 changes: 132 additions & 0 deletions integ-tests/go/baml_client/functions.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading