diff --git a/bun.lock b/bun.lock
index 462734b..1e6ee3b 100644
--- a/bun.lock
+++ b/bun.lock
@@ -1,5 +1,6 @@
{
"lockfileVersion": 1,
+ "configVersion": 0,
"workspaces": {
"": {
"dependencies": {
@@ -269,7 +270,7 @@
"@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="],
- "@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="],
+ "@types/bun": ["@types/bun@1.3.12", "", { "dependencies": { "bun-types": "1.3.12" } }, "sha512-DBv81elK+/VSwXHDlnH3Qduw+KxkTIWi7TXkAeh24zpi5l0B2kUg9Ga3tb4nJaPcOFswflgi/yAvMVBPrxMB+A=="],
"@types/codemirror": ["@types/codemirror@5.60.8", "", { "dependencies": { "@types/tern": "*" } }, "sha512-VjFgDF/eB+Aklcy15TtOTLQeMjTo07k7KAjql8OK5Dirr7a6sJY4T1uVBDuTVG9VEmn1uUsohOpYnVfgC6/jyw=="],
@@ -299,8 +300,6 @@
"@types/range-parser": ["@types/range-parser@1.2.7", "", {}, "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ=="],
- "@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
-
"@types/response-time": ["@types/response-time@2.3.8", "", { "dependencies": { "@types/express": "*", "@types/node": "*" } }, "sha512-7qGaNYvdxc0zRab8oHpYx7AW17qj+G0xuag1eCrw3M2VWPJQ/HyKaaghWygiaOUl0y9x7QGQwppDpqLJ5V9pzw=="],
"@types/semver": ["@types/semver@7.5.8", "", {}, "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ=="],
@@ -403,7 +402,7 @@
"buffer-crc32": ["buffer-crc32@1.0.0", "", {}, "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w=="],
- "bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="],
+ "bun-types": ["bun-types@1.3.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-HqOLj5PoFajAQciOMRiIZGNoKxDJSr6qigAttOX40vJuSp6DN/CxWp9s3C1Xwm4oH7ybueITwiaOcWXoYVoRkA=="],
"bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
@@ -455,8 +454,6 @@
"cssesc": ["cssesc@3.0.0", "", { "bin": { "cssesc": "bin/cssesc" } }, "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="],
- "csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
-
"data-view-buffer": ["data-view-buffer@1.0.2", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "is-data-view": "^1.0.2" } }, "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ=="],
"data-view-byte-length": ["data-view-byte-length@1.0.2", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "is-data-view": "^1.0.2" } }, "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ=="],
@@ -861,7 +858,7 @@
"object.assign": ["object.assign@4.1.7", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0", "has-symbols": "^1.1.0", "object-keys": "^1.1.1" } }, "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw=="],
- "obsidian": ["obsidian@1.8.7", "", { "dependencies": { "@types/codemirror": "5.60.8", "moment": "2.29.4" }, "peerDependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.0.0" } }, "sha512-h4bWwNFAGRXlMlMAzdEiIM2ppTGlrh7uGOJS6w4gClrsjc+ei/3YAtU2VdFUlCiPuTHpY4aBpFJJW75S1Tl/JA=="],
+ "obsidian": ["obsidian@1.12.3", "", { "dependencies": { "@types/codemirror": "5.60.8", "moment": "2.29.4" }, "peerDependencies": { "@codemirror/state": "6.5.0", "@codemirror/view": "6.38.6" } }, "sha512-HxWqe763dOqzXjnNiHmAJTRERN8KILBSqxDSEqbeSr7W8R8Jxezzbca+nz1LiiqXnMpM8lV2jzAezw3CZ4xNUw=="],
"obsidian-calendar-ui": ["obsidian-calendar-ui@0.3.12", "", { "dependencies": { "obsidian-daily-notes-interface": "0.8.4", "svelte": "3.35.0", "tslib": "2.1.0" } }, "sha512-hdoRqCPnukfRgCARgArXaqMQZ+Iai0eY7f0ZsFHHfywpv4gKg3Tx5p47UsLvRO5DD+4knlbrL7Gel57MkfcLTw=="],
diff --git a/bunfig.toml b/bunfig.toml
new file mode 100644
index 0000000..05310ec
--- /dev/null
+++ b/bunfig.toml
@@ -0,0 +1,5 @@
+[test]
+coverage = true
+coverageThreshold = { lines = 1.0, functions = 1.0, statements = 1.0 }
+coverageSkipTestFiles = true
+coverageReporter = ["text", "lcov"]
\ No newline at end of file
diff --git a/manifest.json b/manifest.json
index e7dd496..137717c 100644
--- a/manifest.json
+++ b/manifest.json
@@ -1,7 +1,7 @@
{
"id": "mcp-tools",
"name": "MCP Tools",
- "version": "0.2.27",
+ "version": "0.2.30",
"minAppVersion": "0.15.0",
"description": "Securely connect Claude Desktop to your vault with semantic search, templates, and file management capabilities.",
"author": "Jack Steam",
diff --git a/package.json b/package.json
index 5ad4010..5c1ba34 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "mcp-tools-for-obsidian",
- "version": "0.2.27",
+ "version": "0.2.30",
"private": true,
"description": "Securely connect Claude Desktop to your Obsidian vault with semantic search, templates, and file management capabilities.",
"tags": [
diff --git a/packages/mcp-server/src/features/fetch/services/markdown.test.ts b/packages/mcp-server/src/features/fetch/services/markdown.test.ts
index bb5a94f..eb8144c 100644
--- a/packages/mcp-server/src/features/fetch/services/markdown.test.ts
+++ b/packages/mcp-server/src/features/fetch/services/markdown.test.ts
@@ -34,6 +34,23 @@ describe("convertHtmlToMarkdown", () => {
expect(result).toBe("[Link](https://other.com/page)");
});
+ test("resolves bare relative URLs against base URL", () => {
+ /**
+ * Given HTML containing a link with a bare relative path (no leading '/')
+ * When convertHtmlToMarkdown is called
+ * Then the URL is resolved relative to the base URL's directory
+ */
+
+ // Given: a link with a bare relative path
+ const html = '<a href="other-post">Other</a>';
+
+ // When: converted to markdown
+ const result = convertHtmlToMarkdown(html, baseUrl);
+
+ // Then: the relative path is resolved against the base URL
+ expect(result).toBe("[Other](https://example.com/other-post)");
+ });
+
test("extracts article content when present", () => {
const html = `
diff --git a/packages/mcp-server/src/features/local-rest-api/buildPatchHeaders.test.ts b/packages/mcp-server/src/features/local-rest-api/buildPatchHeaders.test.ts
new file mode 100644
index 0000000..f02c20b
--- /dev/null
+++ b/packages/mcp-server/src/features/local-rest-api/buildPatchHeaders.test.ts
@@ -0,0 +1,423 @@
+/**
+ * BDD specs for buildPatchHeaders.
+ *
+ * Covers: TestBuildPatchHeadersRequiredFields,
+ * TestBuildPatchHeadersOptionalFields,
+ * TestBuildPatchHeadersCreateTargetIfMissing
+ */
+
+// Public API surface (from src/features/local-rest-api/buildPatchHeaders.ts):
+// buildPatchHeaders(args: ApiPatchParameters) -> Record<string, string>
+// Input: parsed ApiPatchParameters (operation, targetType, target, and optionals)
+// Output: Record<string, string> suitable for HTTP headers
+
+import { describe, expect, test } from "bun:test";
+import { buildPatchHeaders } from "./buildPatchHeaders";
+
+/**
+ * REQUIREMENT: Required patch parameters are always mapped to HTTP headers.
+ *
+ * WHO: The local-rest-api patch handlers (patch_active_file, patch_vault_file)
+ * WHAT: The three required fields (operation, targetType, target) are always
+ * present in the returned headers under their HTTP header names
+ * WHY: The Obsidian Local REST API requires Operation, Target-Type, and Target
+ * headers on every PATCH request; omitting any causes a silent failure
+ *
+ * MOCK BOUNDARY:
+ * Mock: nothing — this function is pure computation
+ * Real: buildPatchHeaders
+ * Never: construct headers directly — always obtain via buildPatchHeaders()
+ */
+describe("buildPatchHeaders — required fields", () => {
+ test("maps operation, targetType, and target to HTTP headers", () => {
+ /**
+ * Given required fields for a patch operation
+ * When buildPatchHeaders is called with those fields
+ * Then the returned headers contain Operation, Target-Type, and Target
+ */
+
+ // Given: a minimal set of required patch parameters
+ const args = {
+ operation: "append" as const,
+ targetType: "heading" as const,
+ target: "Section Title",
+ content: "New content paragraph",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: all three required headers are present with correct values
+ // Operation header should map directly from args.operation
+ expect(headers["Operation"]).toBe("append");
+ // Target-Type header should map directly from args.targetType
+ expect(headers["Target-Type"]).toBe("heading");
+ // Target header should be URL-encoded from args.target
+ expect(headers["Target"]).toBe("Section%20Title");
+ });
+
+ test("does not include optional headers when optional fields are omitted", () => {
+ /**
+ * Given only required fields are provided
+ * When buildPatchHeaders is called
+ * Then optional headers (Target-Delimiter, Trim-Target-Whitespace,
+ * Content-Type, Create-Target-If-Missing) are absent
+ */
+
+ // Given: only required fields
+ const args = {
+ operation: "replace" as const,
+ targetType: "block" as const,
+ target: "block-ref-id",
+ content: "Replacement text",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: no optional headers are present
+ // Target-Delimiter should be absent when targetDelimiter is not provided
+ expect(headers).not.toHaveProperty("Target-Delimiter");
+ // Trim-Target-Whitespace should be absent when trimTargetWhitespace is not provided
+ expect(headers).not.toHaveProperty("Trim-Target-Whitespace");
+ // Content-Type should be absent when contentType is not provided
+ expect(headers).not.toHaveProperty("Content-Type");
+ // Create-Target-If-Missing should be absent when createTargetIfMissing is not provided
+ expect(headers).not.toHaveProperty("Create-Target-If-Missing");
+ });
+});
+
+/**
+ * REQUIREMENT: Optional patch parameters are conditionally mapped to HTTP headers.
+ *
+ * WHO: The local-rest-api patch handlers
+ * WHAT: When targetDelimiter, trimTargetWhitespace, or contentType are provided,
+ * they appear in the headers under the correct HTTP header names;
+ * boolean values are serialized as strings
+ * WHY: The Obsidian Local REST API uses custom HTTP headers for patch options;
+ * incorrect or missing mapping causes the API to use wrong defaults
+ *
+ * MOCK BOUNDARY:
+ * Mock: nothing — this function is pure computation
+ * Real: buildPatchHeaders
+ * Never: construct headers directly — always obtain via buildPatchHeaders()
+ */
+describe("buildPatchHeaders — optional fields", () => {
+ test("includes Target-Delimiter header when targetDelimiter is provided", () => {
+ /**
+ * Given a targetDelimiter value is specified
+ * When buildPatchHeaders is called
+ * Then the Target-Delimiter header is present with that value
+ */
+
+ // Given: args with a custom delimiter
+ const args = {
+ operation: "append" as const,
+ targetType: "heading" as const,
+ target: "Parent > Child",
+ content: "Content under child heading",
+ targetDelimiter: ">",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: Target-Delimiter is set to the provided value
+ expect(headers["Target-Delimiter"]).toBe(">");
+ // Then: Target segments are encoded but the custom delimiter is preserved
+ expect(headers["Target"]).toBe("Parent%20>%20Child");
+ });
+
+ test("includes Trim-Target-Whitespace header as string when trimTargetWhitespace is true", () => {
+ /**
+ * Given trimTargetWhitespace is set to true
+ * When buildPatchHeaders is called
+ * Then the Trim-Target-Whitespace header is "true" (string, not boolean)
+ */
+
+ // Given: args with trimTargetWhitespace enabled
+ const args = {
+ operation: "replace" as const,
+ targetType: "heading" as const,
+ target: " Heading With Spaces ",
+ content: "Trimmed content",
+ trimTargetWhitespace: true,
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: Trim-Target-Whitespace is the string "true", not boolean true
+ expect(headers["Trim-Target-Whitespace"]).toBe("true");
+ });
+
+ test("includes Trim-Target-Whitespace header as string when trimTargetWhitespace is false", () => {
+ /**
+ * Given trimTargetWhitespace is explicitly set to false
+ * When buildPatchHeaders is called
+ * Then the Trim-Target-Whitespace header is "false" (string)
+ */
+
+ // Given: args with trimTargetWhitespace explicitly disabled
+ const args = {
+ operation: "replace" as const,
+ targetType: "heading" as const,
+ target: "Heading",
+ content: "Content",
+ trimTargetWhitespace: false,
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: Trim-Target-Whitespace is the string "false", not omitted
+ expect(headers["Trim-Target-Whitespace"]).toBe("false");
+ });
+
+ test("includes Content-Type header when contentType is provided", () => {
+ /**
+ * Given a contentType value is specified
+ * When buildPatchHeaders is called
+ * Then the Content-Type header is present with that value
+ */
+
+ // Given: args with JSON content type
+ const args = {
+ operation: "replace" as const,
+ targetType: "frontmatter" as const,
+ target: "tags",
+ content: '["tag1", "tag2"]',
+ contentType: "application/json" as const,
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: Content-Type is set to the provided value
+ expect(headers["Content-Type"]).toBe("application/json");
+ });
+});
+
+/**
+ * REQUIREMENT: createTargetIfMissing is conditionally mapped to the
+ * Create-Target-If-Missing HTTP header.
+ *
+ * WHO: The local-rest-api patch handlers
+ * WHAT: When createTargetIfMissing is true, the header is "true";
+ * when false, the header is "false";
+ * when omitted, the header is absent entirely
+ * WHY: Previously hardcoded to "true", which created targets unconditionally.
+ * Agents must explicitly opt in to target creation to avoid unintended
+ * modifications to vault structure.
+ *
+ * MOCK BOUNDARY:
+ * Mock: nothing — this function is pure computation
+ * Real: buildPatchHeaders
+ * Never: construct headers directly — always obtain via buildPatchHeaders()
+ */
+describe("buildPatchHeaders — createTargetIfMissing", () => {
+ test("includes Create-Target-If-Missing as 'true' when set to true", () => {
+ /**
+ * Given createTargetIfMissing is true
+ * When buildPatchHeaders is called
+ * Then the Create-Target-If-Missing header is the string "true"
+ */
+
+ // Given: args opting in to target creation
+ const args = {
+ operation: "append" as const,
+ targetType: "heading" as const,
+ target: "New Section",
+ content: "Content for new section",
+ createTargetIfMissing: true,
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: Create-Target-If-Missing is "true"
+ expect(headers["Create-Target-If-Missing"]).toBe("true");
+ });
+
+ test("includes Create-Target-If-Missing as 'false' when explicitly set to false", () => {
+ /**
+ * Given createTargetIfMissing is explicitly false
+ * When buildPatchHeaders is called
+ * Then the Create-Target-If-Missing header is the string "false"
+ */
+
+ // Given: args explicitly opting out of target creation
+ const args = {
+ operation: "append" as const,
+ targetType: "heading" as const,
+ target: "Existing Section",
+ content: "Appended content",
+ createTargetIfMissing: false,
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: Create-Target-If-Missing is "false"
+ expect(headers["Create-Target-If-Missing"]).toBe("false");
+ });
+
+ test("omits Create-Target-If-Missing header when createTargetIfMissing is not provided", () => {
+ /**
+ * Given createTargetIfMissing is omitted from args
+ * When buildPatchHeaders is called
+ * Then the Create-Target-If-Missing header is absent
+ */
+
+ // Given: args without createTargetIfMissing
+ const args = {
+ operation: "prepend" as const,
+ targetType: "heading" as const,
+ target: "Existing Section",
+ content: "Prepended content",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: Create-Target-If-Missing is not in the headers — the old hardcoded 'true' behavior is the bug this fixes
+ expect(headers).not.toHaveProperty("Create-Target-If-Missing");
+ });
+});
+
+/**
+ * REQUIREMENT: Non-ASCII and special characters in the Target header are
+ * URL-encoded per-segment to produce valid HTTP headers.
+ *
+ * WHO: Users with non-ASCII headings (Polish diacritics, CJK, emoji, etc.)
+ * and headings containing API-special characters ('/', '#')
+ * WHAT: 1. Non-ASCII characters in heading targets are URL-encoded
+ * 2. API-special characters ('/', '#') within heading names are encoded
+ * 3. The heading path delimiter ('::' by default) is preserved unencoded
+ * 4. Plain ASCII targets without special characters are encoded (spaces → %20)
+ * 5. A custom delimiter is preserved unencoded while segments are encoded
+ * WHY: HTTP/1.1 headers must contain only visible ASCII characters plus SP/HTAB;
+ * non-ASCII bytes cause transport-level failures. The Obsidian Local REST API
+ * requires '/' and '#' to be URL-encoded to avoid ambiguity in target parsing.
+ *
+ * MOCK BOUNDARY:
+ * Mock: nothing — this function is pure computation
+ * Real: buildPatchHeaders
+ * Never: construct headers directly — always obtain via buildPatchHeaders()
+ */
+describe("buildPatchHeaders — target URL-encoding", () => {
+ test("encodes non-ASCII characters in heading targets", () => {
+ /**
+ * Given a heading target containing Polish diacritics
+ * When buildPatchHeaders is called
+ * Then the Target header contains URL-encoded non-ASCII characters
+ */
+
+ // Given: a heading with Polish diacritics
+ const args = {
+ operation: "append" as const,
+ targetType: "heading" as const,
+ target: "Wstęp do projektu",
+ content: "New paragraph",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: non-ASCII characters are URL-encoded
+ expect(headers["Target"]).toBe("Wst%C4%99p%20do%20projektu");
+ });
+
+ test("encodes API-special characters within heading names", () => {
+ /**
+ * Given a heading target containing '/' and '#' characters
+ * When buildPatchHeaders is called
+ * Then those characters are URL-encoded in the Target header
+ */
+
+ // Given: a heading path with special characters in segment names
+ const args = {
+ operation: "replace" as const,
+ targetType: "heading" as const,
+ target: "Root Heading::coddingtonbear/markdown-patch",
+ content: "Updated content",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: '/' is encoded but the '::' delimiter is preserved
+ expect(headers["Target"]).toBe(
+ "Root%20Heading::coddingtonbear%2Fmarkdown-patch",
+ );
+ });
+
+ test("preserves default delimiter while encoding segments", () => {
+ /**
+ * Given a multi-segment heading path with non-ASCII in each segment
+ * When buildPatchHeaders is called
+ * Then each segment is encoded individually and '::' delimiters are preserved
+ */
+
+ // Given: a multi-level heading path with Polish diacritics
+ const args = {
+ operation: "prepend" as const,
+ targetType: "heading" as const,
+ target: "Główny::Podrozdział::Szczegóły",
+ content: "Prepended text",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: each segment is encoded, '::' delimiters are preserved
+ expect(headers["Target"]).toBe(
+ "G%C5%82%C3%B3wny::Podrozdzia%C5%82::Szczeg%C3%B3%C5%82y",
+ );
+ });
+
+ test("preserves custom delimiter while encoding segments", () => {
+ /**
+ * Given a heading path using a custom delimiter with non-ASCII segments
+ * When buildPatchHeaders is called with a targetDelimiter
+ * Then segments are encoded individually and the custom delimiter is preserved
+ */
+
+ // Given: a heading path with custom '>' delimiter and non-ASCII
+ const args = {
+ operation: "append" as const,
+ targetType: "heading" as const,
+ target: "Résumé>Éducation",
+ content: "Details",
+ targetDelimiter: ">",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: segments are encoded, custom delimiter '>' is preserved
+ expect(headers["Target"]).toBe("R%C3%A9sum%C3%A9>%C3%89ducation");
+ });
+
+ test("encodes block reference targets as a single value", () => {
+ /**
+ * Given a block reference target containing no delimiter
+ * When buildPatchHeaders is called
+ * Then the entire target is encoded as one segment
+ */
+
+ // Given: a block target that is ASCII-safe (no encoding needed)
+ const args = {
+ operation: "replace" as const,
+ targetType: "block" as const,
+ target: "block-ref-123",
+ content: "Replaced block",
+ };
+
+ // When: headers are built
+ const headers = buildPatchHeaders(args);
+
+ // Then: ASCII-safe target is passed through (encodeURIComponent is a no-op for these chars)
+ expect(headers["Target"]).toBe("block-ref-123");
+ });
+});
diff --git a/packages/mcp-server/src/features/local-rest-api/buildPatchHeaders.ts b/packages/mcp-server/src/features/local-rest-api/buildPatchHeaders.ts
new file mode 100644
index 0000000..78907b7
--- /dev/null
+++ b/packages/mcp-server/src/features/local-rest-api/buildPatchHeaders.ts
@@ -0,0 +1,41 @@
+import type { LocalRestAPI } from "shared";
+
+type ApiPatchArgs = typeof LocalRestAPI.ApiPatchParameters.infer;
+
+const DEFAULT_DELIMITER = "::";
+
+/**
+ * URL-encode each segment of a heading target path individually,
+ * preserving the delimiter between segments. Non-heading targets
+ * (block refs, frontmatter fields) are encoded as a single value.
+ */
+function encodeTarget(target: string, delimiter?: string): string {
+ const delim = delimiter ?? DEFAULT_DELIMITER;
+ return target
+ .split(delim)
+ .map((segment) => encodeURIComponent(segment))
+ .join(delim);
+}
+
+export function buildPatchHeaders(args: ApiPatchArgs): Record<string, string> {
+ const headers: Record<string, string> = {
+ Operation: args.operation,
+ "Target-Type": args.targetType,
+ Target: encodeTarget(args.target, args.targetDelimiter),
+ };
+
+ if (args.createTargetIfMissing !== undefined) {
+ headers["Create-Target-If-Missing"] = String(args.createTargetIfMissing);
+ }
+ if (args.targetDelimiter) {
+ headers["Target-Delimiter"] = args.targetDelimiter;
+ }
+ if (args.trimTargetWhitespace !== undefined) {
+ headers["Trim-Target-Whitespace"] = String(args.trimTargetWhitespace);
+ }
+ if (args.contentType) {
+ headers["Content-Type"] = args.contentType;
+ }
+
+ return headers;
+}
diff --git a/packages/mcp-server/src/features/local-rest-api/index.ts b/packages/mcp-server/src/features/local-rest-api/index.ts
index 37a1424..fc58173 100644
--- a/packages/mcp-server/src/features/local-rest-api/index.ts
+++ b/packages/mcp-server/src/features/local-rest-api/index.ts
@@ -2,6 +2,7 @@ import { makeRequest, type ToolRegistry } from "$/shared";
import type { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { type } from "arktype";
import { LocalRestAPI } from "shared";
+import { buildPatchHeaders } from "./buildPatchHeaders";
export function registerLocalRestApiTools(tools: ToolRegistry, server: Server) {
// GET Status
@@ -92,25 +93,10 @@ export function registerLocalRestApiTools(tools: ToolRegistry, server: Server) {
name: '"patch_active_file"',
arguments: LocalRestAPI.ApiPatchParameters,
}).describe(
- "Insert or modify content in the currently-open note relative to a heading, block reference, or frontmatter field.",
+ "Insert or modify content in the currently-open note relative to a heading, block reference, or frontmatter field. Use for surgical edits, section updates, or targeted replacements without rewriting the whole file. Equivalent to str_replace for the active Obsidian note.",
),
async ({ arguments: args }) => {
- const headers: Record = {
- Operation: args.operation,
- "Target-Type": args.targetType,
- Target: args.target,
- "Create-Target-If-Missing": "true",
- };
-
- if (args.targetDelimiter) {
- headers["Target-Delimiter"] = args.targetDelimiter;
- }
- if (args.trimTargetWhitespace !== undefined) {
- headers["Trim-Target-Whitespace"] = String(args.trimTargetWhitespace);
- }
- if (args.contentType) {
- headers["Content-Type"] = args.contentType;
- }
+ const headers = buildPatchHeaders(args);
const response = await makeRequest(
LocalRestAPI.ApiContentResponse,
@@ -353,25 +339,10 @@ export function registerLocalRestApiTools(tools: ToolRegistry, server: Server) {
filename: "string",
}).and(LocalRestAPI.ApiPatchParameters),
}).describe(
- "Insert or modify content in a file relative to a heading, block reference, or frontmatter field.",
+ "Insert or modify content in a file relative to a heading, block reference, or frontmatter field. Use for surgical edits, section updates, or targeted replacements without rewriting the whole file. Equivalent to str_replace for Obsidian vault files.",
),
async ({ arguments: args }) => {
- const headers: HeadersInit = {
- Operation: args.operation,
- "Target-Type": args.targetType,
- Target: args.target,
- "Create-Target-If-Missing": "true",
- };
-
- if (args.targetDelimiter) {
- headers["Target-Delimiter"] = args.targetDelimiter;
- }
- if (args.trimTargetWhitespace !== undefined) {
- headers["Trim-Target-Whitespace"] = String(args.trimTargetWhitespace);
- }
- if (args.contentType) {
- headers["Content-Type"] = args.contentType;
- }
+ const headers = buildPatchHeaders(args);
const response = await makeRequest(
LocalRestAPI.ApiContentResponse,
diff --git a/packages/mcp-server/src/shared/parseTemplateParameters.test.ts b/packages/mcp-server/src/shared/parseTemplateParameters.test.ts
index 6105d30..416f168 100644
--- a/packages/mcp-server/src/shared/parseTemplateParameters.test.ts
+++ b/packages/mcp-server/src/shared/parseTemplateParameters.test.ts
@@ -11,14 +11,14 @@ describe("parseTemplateParameters", () => {
});
test("parses single parameter without description", () => {
- const content = '<% tp.user.promptArg("name") %>';
+ const content = '<% tp.mcpTools.prompt("name") %>';
const result = parseTemplateParameters(content);
PromptParameterSchema.array().assert(result);
expect(result).toEqual([{ name: "name" }]);
});
test("parses single parameter with description", () => {
- const content = '<% tp.user.promptArg("name", "Enter your name") %>';
+ const content = '<% tp.mcpTools.prompt("name", "Enter your name") %>';
const result = parseTemplateParameters(content);
PromptParameterSchema.array().assert(result);
expect(result).toEqual([{ name: "name", description: "Enter your name" }]);
@@ -26,8 +26,8 @@ describe("parseTemplateParameters", () => {
test("parses multiple parameters", () => {
const content = `
- <% tp.user.promptArg("name", "Enter your name") %>
- <% tp.user.promptArg("age", "Enter your age") %>
+ <% tp.mcpTools.prompt("name", "Enter your name") %>
+ <% tp.mcpTools.prompt("age", "Enter your age") %>
`;
const result = parseTemplateParameters(content);
PromptParameterSchema.array().assert(result);
@@ -40,10 +40,31 @@ describe("parseTemplateParameters", () => {
test("ignores invalid template syntax", () => {
const content = `
<% invalid.syntax %>
- <% tp.user.promptArg("name", "Enter your name") %>
+ <% tp.mcpTools.prompt("name", "Enter your name") %>
`;
const result = parseTemplateParameters(content);
PromptParameterSchema.array().assert(result);
expect(result).toEqual([{ name: "name", description: "Enter your name" }]);
});
+
+ test("skips template tags with unparseable code and continues parsing", () => {
+ /**
+ * Given content with a template tag containing syntax that cannot be parsed
+ * When parseTemplateParameters is called
+ * Then the malformed tag is skipped and subsequent valid tags are still parsed
+ */
+
+ // Given: a template with a syntax error followed by a valid parameter
+ const content = `
+ <% {{{invalid javascript %>
+ <% tp.mcpTools.prompt("title", "Enter title") %>
+ `;
+
+ // When: parameters are parsed
+ const result = parseTemplateParameters(content);
+
+ // Then: the malformed tag is skipped, valid parameter is still extracted
+ PromptParameterSchema.array().assert(result);
+ expect(result).toEqual([{ name: "title", description: "Enter title" }]);
+ });
});
diff --git a/packages/mcp-server/src/shared/parseTemplateParameters.ts b/packages/mcp-server/src/shared/parseTemplateParameters.ts
index 71fb832..268dbd0 100644
--- a/packages/mcp-server/src/shared/parseTemplateParameters.ts
+++ b/packages/mcp-server/src/shared/parseTemplateParameters.ts
@@ -36,7 +36,7 @@ export function parseTemplateParameters(content: string): PromptParameter[] {
* and may contain additional modifiers.
*/
const TEMPLATER_START_TAG = /<%[*-_]*/g;
- const TEMPLATER_END_TAG = /[-_]*%>/g;
+ const TEMPLATER_END_TAG = /[-_]*%>/;
// Split content by template tags
const parts = content.split(TEMPLATER_START_TAG);
diff --git a/packages/obsidian-plugin/bun.config.ts b/packages/obsidian-plugin/bun.config.ts
index 8b57921..a63f58b 100644
--- a/packages/obsidian-plugin/bun.config.ts
+++ b/packages/obsidian-plugin/bun.config.ts
@@ -79,9 +79,10 @@ const config: BuildConfig = {
"import.meta.filename": JSON.stringify("mcp-tools-for-obsidian.ts"),
// These environment variables are critical for the MCP server download functionality
// They define the base URL and version for downloading the correct server binaries
- "process.env.GITHUB_DOWNLOAD_URL": JSON.stringify(
- `https://github.com/jacksteamdev/obsidian-mcp-tools/releases/download/${version}`
- ),
+ "process.env.GITHUB_DOWNLOAD_URL": JSON.stringify(
+ process.env.GITHUB_DOWNLOAD_URL ??
+ `https://github.com/jacksteamdev/obsidian-mcp-tools/releases/download/${version}`
+ ),
"process.env.GITHUB_REF_NAME": JSON.stringify(version),
},
naming: {
diff --git a/packages/obsidian-plugin/docs/openapi.yaml b/packages/obsidian-plugin/docs/openapi.yaml
index 63d3546..e1096c0 100644
--- a/packages/obsidian-plugin/docs/openapi.yaml
+++ b/packages/obsidian-plugin/docs/openapi.yaml
@@ -375,6 +375,16 @@ paths:
- "true"
- "false"
type: "string"
+ - description: "Create the target if it does not already exist (default: false)"
+ in: "header"
+ name: "Create-Target-If-Missing"
+ required: false
+ schema:
+ default: "false"
+ enum:
+ - "true"
+ - "false"
+ type: "string"
requestBody:
content:
application/json:
@@ -819,6 +829,16 @@ paths:
- "true"
- "false"
type: "string"
+ - description: "Create the target if it does not already exist (default: false)"
+ in: "header"
+ name: "Create-Target-If-Missing"
+ required: false
+ schema:
+ default: "false"
+ enum:
+ - "true"
+ - "false"
+ type: "string"
- description: "The name of the period for which you would like to grab the current note."
in: "path"
name: "period"
@@ -1450,6 +1470,16 @@ paths:
- "true"
- "false"
type: "string"
+ - description: "Create the target if it does not already exist (default: false)"
+ in: "header"
+ name: "Create-Target-If-Missing"
+ required: false
+ schema:
+ default: "false"
+ enum:
+ - "true"
+ - "false"
+ type: "string"
- description: |
Path to the relevant file (relative to your vault root).
in: "path"
diff --git a/packages/obsidian-plugin/scripts/zip.ts b/packages/obsidian-plugin/scripts/zip.ts
index 84fae86..47aa5c3 100644
--- a/packages/obsidian-plugin/scripts/zip.ts
+++ b/packages/obsidian-plugin/scripts/zip.ts
@@ -5,23 +5,28 @@ import { join, resolve } from "path";
import { version } from "../../../package.json" with { type: "json" };
async function zipPlugin() {
- const pluginDir = resolve(import.meta.dir, "..");
+ const pluginDir = resolve(import.meta.dir, ".."); // packages/obsidian-plugin/
+ const repoRoot = resolve(import.meta.dir, "../../.."); // repo root — where main.js actually lives
const releaseDir = join(pluginDir, "releases");
fs.ensureDirSync(releaseDir);
const zipFilePath = join(releaseDir, `obsidian-plugin-${version}.zip`);
const output = createWriteStream(zipFilePath);
-
const archive = create("zip", { zlib: { level: 9 } });
archive.pipe(output);
- // Add the required files
- archive.file(join(pluginDir, "main.js"), { name: "main.js" });
- archive.file(join(pluginDir, "manifest.json"), { name: "manifest.json" });
- archive.file(join(pluginDir, "styles.css"), { name: "styles.css" });
+ archive.file(join(repoRoot, "main.js"), { name: "main.js" });
+ archive.file(join(repoRoot, "manifest.json"), { name: "manifest.json" });
+ archive.file(join(repoRoot, "styles.css"), { name: "styles.css" });
+
+ await new Promise((resolve, reject) => {
+ output.on("close", resolve);
+ output.on("error", reject);
+ archive.on("error", reject);
+ archive.finalize();
+ });
- await archive.finalize();
console.log("Plugin files zipped successfully!");
}
diff --git a/packages/shared/src/logger.platform.test.ts b/packages/shared/src/logger.platform.test.ts
new file mode 100644
index 0000000..fd04cef
--- /dev/null
+++ b/packages/shared/src/logger.platform.test.ts
@@ -0,0 +1,93 @@
+/**
+ * BDD specs for getLogFilePath — platform-specific branches.
+ *
+ * These tests use Bun's mock.module to simulate different OS platforms,
+ * covering the win32, linux, and unsupported-platform branches that are
+ * unreachable on the native test platform.
+ */
+
+import { describe, expect, mock, test } from "bun:test";
+import { homedir } from "os";
+import { resolve } from "path";
+
+/**
+ * REQUIREMENT: getLogFilePath returns the correct OS-conventional log path
+ * for each supported platform and throws for unsupported ones.
+ *
+ * WHO: Any consumer that needs to locate log files on the filesystem
+ * WHAT: 1. On win32, returns ~/AppData/Local/Logs/<appName>/<fileName>
+ *       2. On linux, returns ~/.local/share/logs/<appName>/<fileName>
+ * 3. On unsupported platforms, throws an error
+ * WHY: Log files must be written to OS-conventional locations; writing to
+ * the wrong directory makes logs unfindable for users and support tools
+ *
+ * MOCK BOUNDARY:
+ * Mock: os.platform() — process-level environment state
+ * Real: getLogFilePath, os.homedir(), path.resolve()
+ * Never: mock getLogFilePath itself or path resolution
+ */
+describe("getLogFilePath — platform-specific paths", () => {
+ test("returns Windows-style path on win32", async () => {
+ /**
+ * Given the OS platform is win32
+ * When getLogFilePath is called
+ * Then the returned path follows Windows conventions (AppData/Local/Logs)
+ */
+
+ // Given: platform is mocked to win32
+ mock.module("os", () => ({
+ platform: () => "win32",
+ homedir,
+ }));
+ const { getLogFilePath } = await import("./logger");
+
+ // When: the log file path is resolved
+ const result = getLogFilePath("test-app", "test.log");
+
+ // Then: the path follows Windows conventions
+ const expected = resolve(homedir(), "AppData", "Local", "Logs", "test-app", "test.log");
+ expect(result).toBe(expected);
+ });
+
+ test("returns Linux-style path on linux", async () => {
+ /**
+ * Given the OS platform is linux
+ * When getLogFilePath is called
+ * Then the returned path follows Linux conventions (.local/share/logs)
+ */
+
+ // Given: platform is mocked to linux
+ mock.module("os", () => ({
+ platform: () => "linux",
+ homedir,
+ }));
+ const { getLogFilePath } = await import("./logger");
+
+ // When: the log file path is resolved
+ const result = getLogFilePath("test-app", "test.log");
+
+ // Then: the path follows Linux conventions
+ const expected = resolve(homedir(), ".local", "share", "logs", "test-app", "test.log");
+ expect(result).toBe(expected);
+ });
+
+ test("throws for unsupported platforms", async () => {
+ /**
+ * Given the OS platform is an unsupported value
+ * When getLogFilePath is called
+ * Then it throws an error indicating the platform is unsupported
+ */
+
+ // Given: platform is mocked to an unsupported value
+ mock.module("os", () => ({
+ platform: () => "freebsd",
+ homedir,
+ }));
+ const { getLogFilePath } = await import("./logger");
+
+ // When/Then: calling getLogFilePath throws
+ expect(() => getLogFilePath("test-app", "test.log")).toThrow(
+ "Unsupported operating system",
+ );
+ });
+});
diff --git a/packages/shared/src/logger.test.ts b/packages/shared/src/logger.test.ts
new file mode 100644
index 0000000..f8fc296
--- /dev/null
+++ b/packages/shared/src/logger.test.ts
@@ -0,0 +1,279 @@
+/**
+ * BDD specs for the shared logger module.
+ *
+ * Covers: getLogFilePath, formatMessage (via log output), createLogger,
+ * log level filtering, flush, config getter/setter, meta setter.
+ */
+
+// Public API surface (from packages/shared/src/logger.ts):
+// getLogFilePath(appName: string, fileName: string) -> string
+// logLevelSchema — arktype schema for log levels
+// loggerConfigMorph — arktype morph from InputLoggerConfig to FullLoggerConfig
+// createLogger(inputConfig: InputLoggerConfig) -> Logger
+// Logger.debug/info/warn/error/fatal(message, meta?) -> void
+// Logger.flush() -> Promise<void>
+// Logger.config (get) -> FullLoggerConfig
+// Logger.config (set) -> void (partial InputLoggerConfig)
+// Logger.meta (set) -> void
+
+import { afterEach, describe, expect, test } from "bun:test";
+import { existsSync, readFileSync, rmSync } from "fs";
+import { createLogger, getLogFilePath } from "./logger";
+
+/**
+ * REQUIREMENT: getLogFilePath returns platform-appropriate log directory paths.
+ *
+ * WHO: Any consumer that needs to locate log files on the filesystem
+ * WHAT: 1. On macOS (darwin), returns ~/Library/Logs/<appName>/<fileName>
+ * 2. Returns a string path containing the appName and fileName
+ * WHY: Log files must be written to OS-conventional locations so users
+ * and support tools can find them without project-specific knowledge
+ *
+ * MOCK BOUNDARY:
+ * Mock: nothing — uses real os.platform() and os.homedir()
+ * Real: getLogFilePath
+ * Never: mock os.platform() — we test the current platform's branch
+ */
+describe("getLogFilePath", () => {
+ test("returns a path containing the app name and file name", () => {
+ /**
+ * Given an application name and log file name
+ * When getLogFilePath is called
+ * Then the returned path contains both the app name and file name
+ */
+
+ // Given: app name and file name
+ const appName = "test-app";
+ const fileName = "test.log";
+
+ // When: the log file path is resolved
+ const result = getLogFilePath(appName, fileName);
+
+ // Then: the path contains both identifiers
+ expect(result).toContain(appName);
+ expect(result).toContain(fileName);
+ });
+});
+
+/**
+ * REQUIREMENT: createLogger produces a logger that writes structured messages
+ * to a file, respects log level filtering, and supports runtime
+ * reconfiguration.
+ *
+ * WHO: All packages in the monorepo that import the shared logger
+ * WHAT: 1. Logger writes formatted messages to the configured file path
+ * 2. Messages below the configured level are filtered out
+ * 3. flush() waits for all queued writes to complete
+ * 4. config getter returns a copy of the current configuration
+ * 5. config setter updates the log level and re-derives the level list
+ * 6. meta setter attaches metadata to all subsequent log messages
+ * 7. All five log methods (debug, info, warn, error, fatal) write when at level
+ * WHY: Structured, level-filtered logging is essential for diagnosing issues
+ * in production without flooding disk with debug output
+ *
+ * MOCK BOUNDARY:
+ * Mock: nothing — uses real filesystem via tmp directory
+ * Real: createLogger, all log methods, flush, config, meta
+ * Never: mock fs.appendFile — use real filesystem with tmp cleanup
+ */
+describe("createLogger", () => {
+ const logDir = getLogFilePath("test-app", "placeholder").replace(/placeholder$/, "");
+
+ afterEach(() => {
+ if (existsSync(logDir)) {
+ rmSync(logDir, { recursive: true });
+ }
+ });
+
+ test("writes a formatted message to the log file", async () => {
+ /**
+ * Given a logger configured at DEBUG level
+ * When an error message is logged and flushed
+ * Then the log file contains the formatted message with timestamp and level
+ */
+
+ // Given: a logger at DEBUG level
+ const logger = createLogger({
+ appName: "test-app",
+ filename: "write-test.log",
+ level: "DEBUG",
+ });
+
+ // When: a message is logged and flushed
+ logger.error("something went wrong", { code: 42 });
+ await logger.flush();
+
+ // Then: the log file exists and contains the expected content
+ const logPath = logger.config.filename;
+ expect(existsSync(logPath)).toBe(true);
+ const content = readFileSync(logPath, "utf-8");
+ expect(content).toContain("[ERROR]");
+ expect(content).toContain("something went wrong");
+ expect(content).toContain('"code": 42');
+ });
+
+ test("filters messages below the configured log level", async () => {
+ /**
+ * Given a logger configured at WARN level
+ * When debug and info messages are logged and flushed
+ * Then those messages are not written to the log file
+ */
+
+ // Given: a logger at WARN level
+ const logger = createLogger({
+ appName: "test-app",
+ filename: "filter-test.log",
+ level: "WARN",
+ });
+
+ // When: sub-threshold messages are logged
+ logger.debug("should not appear");
+ logger.info("should not appear either");
+ logger.warn("this should appear");
+ await logger.flush();
+
+ // Then: only the WARN message is in the log
+ const logPath = logger.config.filename;
+ const content = readFileSync(logPath, "utf-8");
+ expect(content).not.toContain("[DEBUG]");
+ expect(content).not.toContain("[INFO ]");
+ expect(content).toContain("[WARN ]");
+ });
+
+ test("config getter returns a copy of the current configuration", () => {
+ /**
+ * Given a logger instance
+ * When the config getter is accessed
+ * Then it returns an object with the expected configuration properties
+ */
+
+ // Given: a logger
+ const logger = createLogger({
+ appName: "test-app",
+ filename: "config-test.log",
+ level: "INFO",
+ });
+
+ // When: config is accessed
+ const config = logger.config;
+
+ // Then: it contains the expected fields
+ expect(config.appName).toBe("test-app");
+ expect(config.level).toBe("INFO");
+ expect(config.levels).toContain("INFO");
+ expect(config.levels).toContain("ERROR");
+ expect(config.levels).not.toContain("DEBUG");
+ });
+
+ test("config setter updates the log level at runtime", async () => {
+ /**
+ * Given a logger initially configured at ERROR level
+ * When the config is updated to DEBUG level
+ * Then debug messages are subsequently written to the log file
+ */
+
+ // Given: a logger at ERROR level
+ const logger = createLogger({
+ appName: "test-app",
+ filename: "reconfig-test.log",
+ level: "ERROR",
+ });
+
+ // When: level is changed to DEBUG
+ logger.config = { level: "DEBUG" };
+ logger.debug("now visible");
+ await logger.flush();
+
+ // Then: the debug message was written
+ const logPath = logger.config.filename;
+ const content = readFileSync(logPath, "utf-8");
+ expect(content).toContain("[DEBUG]");
+ expect(content).toContain("now visible");
+ });
+
+ test("meta setter attaches metadata to all subsequent log messages", async () => {
+ /**
+ * Given a logger with meta set to include a request ID
+ * When a message is logged and flushed
+ * Then the log entry contains the metadata fields
+ */
+
+ // Given: a logger with meta
+ const logger = createLogger({
+ appName: "test-app",
+ filename: "meta-test.log",
+ level: "DEBUG",
+ });
+ logger.meta = { requestId: "abc-123" };
+
+ // When: a message is logged
+ logger.info("processing request");
+ await logger.flush();
+
+ // Then: the metadata appears in the log output
+ const logPath = logger.config.filename;
+ const content = readFileSync(logPath, "utf-8");
+ expect(content).toContain("requestId");
+ expect(content).toContain("abc-123");
+ });
+
+ test("all five log methods write when at DEBUG level", async () => {
+ /**
+ * Given a logger configured at DEBUG level (includes all levels)
+ * When debug, info, warn, error, and fatal messages are each logged
+ * Then all five messages appear in the log file
+ */
+
+ // Given: a logger at the lowest level
+ const logger = createLogger({
+ appName: "test-app",
+ filename: "all-levels-test.log",
+ level: "DEBUG",
+ });
+
+ // When: every log method is called
+ logger.debug("debug msg");
+ logger.info("info msg");
+ logger.warn("warn msg");
+ logger.error("error msg");
+ logger.fatal("fatal msg");
+ await logger.flush();
+
+ // Then: all five levels appear in the output
+ const logPath = logger.config.filename;
+ const content = readFileSync(logPath, "utf-8");
+ expect(content).toContain("[DEBUG]");
+ expect(content).toContain("[INFO ]");
+ expect(content).toContain("[WARN ]");
+ expect(content).toContain("[ERROR]");
+ expect(content).toContain("[FATAL]");
+ });
+
+ test("formats message without metadata when meta is empty", async () => {
+ /**
+ * Given a logger with no metadata set
+ * When a message is logged without per-call meta
+ * Then the log entry contains only the timestamp, level, and message
+ */
+
+ // Given: a logger with no meta, at DEBUG level
+ const logger = createLogger({
+ appName: "test-app",
+ filename: "no-meta-test.log",
+ level: "DEBUG",
+ });
+
+ // When: a message is logged without meta
+ logger.info("simple message");
+ await logger.flush();
+
+ // Then: the log line has no JSON metadata block
+ const logPath = logger.config.filename;
+ const content = readFileSync(logPath, "utf-8");
+ const lines = content.trim().split("\n");
+ // A message without meta should be a single line (no JSON block follows)
+ expect(lines.length).toBe(1);
+ expect(lines[0]).toContain("[INFO ]");
+ expect(lines[0]).toContain("simple message");
+ });
+});
diff --git a/packages/shared/src/types/plugin-local-rest-api.ts b/packages/shared/src/types/plugin-local-rest-api.ts
index 3f7a216..a76bde6 100644
--- a/packages/shared/src/types/plugin-local-rest-api.ts
+++ b/packages/shared/src/types/plugin-local-rest-api.ts
@@ -200,11 +200,12 @@ export const ApiVaultFileResponse = type({
*
* @property operation - Specifies how to modify the content: append (add after), prepend (add before), or replace existing content
* @property targetType - Identifies what to modify: a section under a heading, a referenced block, or a frontmatter field
- * @property target - The identifier - either heading path (e.g. 'Heading 1::Subheading 1:1'), block reference ID, or frontmatter field name
+ * @property target - The identifier - either a fully-qualified heading path from the document root (e.g. 'Root Heading::Subheading::Leaf'), block reference ID, or frontmatter field name. For headings, the full path from the top-level heading is required; leaf-only names (e.g. 'Leaf') will not match and will return an invalid-target error. Non-ASCII characters and special characters (e.g. '/', '#') in heading names are URL-encoded automatically; provide the target as plain text.
* @property targetDelimiter - The separator used in heading paths to indicate nesting (default '::')
* @property trimTargetWhitespace - Whether to remove whitespace from target identifier before matching (default: false)
* @property content - The actual content to insert, append, or use as replacement
* @property contentType - Format of the content - use application/json for structured data like table rows or frontmatter values
+ * @property createTargetIfMissing - Whether to create the target heading if it does not exist (default: false). Set to true only for append/prepend operations.
*/
export const ApiPatchParameters = type({
operation: type("'append' | 'prepend' | 'replace'").describe(
@@ -214,7 +215,7 @@ export const ApiPatchParameters = type({
"Identifies what to modify: a section under a heading, a referenced block, or a frontmatter field",
),
target: type("string").describe(
- "The identifier - either heading path (e.g. 'Heading 1::Subheading 1:1'), block reference ID, or frontmatter field name",
+ "The identifier - either a fully-qualified heading path from the document root (e.g. 'Root Heading::Subheading::Leaf'), block reference ID, or frontmatter field name. For headings, the full path from the top-level heading is required; leaf-only names (e.g. 'Leaf') will not match and will return an invalid-target error. Non-ASCII characters and special characters (e.g. '/', '#') are URL-encoded automatically; provide the target as plain text."
),
"targetDelimiter?": type("string").describe(
"The separator used in heading paths to indicate nesting (default '::')",
@@ -228,6 +229,9 @@ export const ApiPatchParameters = type({
"contentType?": type("'text/markdown' | 'application/json'").describe(
"Format of the content - use application/json for structured data like table rows or frontmatter values",
),
+ "createTargetIfMissing?": type("boolean").describe(
+ "Whether to create the target heading if it does not exist (default: false). Set to true only for append/prepend operations.",
+ ),
});
/**
diff --git a/packages/shared/src/types/plugin-templater.ts b/packages/shared/src/types/plugin-templater.ts
index 5857aae..861077a 100644
--- a/packages/shared/src/types/plugin-templater.ts
+++ b/packages/shared/src/types/plugin-templater.ts
@@ -1,6 +1,6 @@
-import {
+import type {
App,
- type MarkdownPostProcessorContext,
+ MarkdownPostProcessorContext,
TAbstractFile,
TFile,
TFolder,
diff --git a/packages/shared/src/types/prompts.test.ts b/packages/shared/src/types/prompts.test.ts
new file mode 100644
index 0000000..c5e3428
--- /dev/null
+++ b/packages/shared/src/types/prompts.test.ts
@@ -0,0 +1,94 @@
+/**
+ * BDD specs for buildTemplateArgumentsSchema.
+ *
+ * Covers: buildTemplateArgumentsSchema
+ */
+
+// Public API surface (from packages/shared/src/types/prompts.ts):
+// buildTemplateArgumentsSchema(args: PromptParameter[]) -> Type<Record<string, string>>
+// Input: array of PromptParameter (name, optional description, optional required)
+// Output: arktype Type that validates an object with string fields matching the parameters
+
+import { describe, expect, test } from "bun:test";
+import { buildTemplateArgumentsSchema } from "./prompts";
+
+/**
+ * REQUIREMENT: buildTemplateArgumentsSchema creates an arktype schema from
+ * prompt parameters that enforces required/optional string fields.
+ *
+ * WHO: The prompts feature, which validates user-supplied template arguments
+ * against the parameters declared in a template file
+ * WHAT: 1. Required parameters produce required string fields in the schema
+ * 2. Optional parameters produce optional string fields in the schema
+ * 3. An empty parameter list produces a schema that accepts an empty object
+ * WHY: Without schema validation, invalid or missing template arguments would
+ * cause silent failures or confusing errors at template execution time
+ *
+ * MOCK BOUNDARY:
+ * Mock: nothing — this function is pure computation
+ * Real: buildTemplateArgumentsSchema
+ * Never: construct arktype schemas directly — always use this builder
+ */
+describe("buildTemplateArgumentsSchema", () => {
+ test("creates a schema with required string fields for required parameters", () => {
+ /**
+ * Given a parameter list with a required parameter
+ * When buildTemplateArgumentsSchema is called
+ * Then the resulting schema rejects objects missing the required field
+ */
+
+ // Given: a required parameter
+ const args = [{ name: "title", required: true }];
+
+ // When: the schema is built
+ const schema = buildTemplateArgumentsSchema(args);
+
+ // Then: an object with the field validates successfully
+ const valid = schema({ title: "Hello" });
+ expect(valid).toEqual({ title: "Hello" });
+
+ // Then: an object missing the field fails validation
+ const invalid = schema({});
+ expect(invalid instanceof Error || String(invalid).includes("must")).toBe(true);
+ });
+
+ test("creates a schema with optional string fields for optional parameters", () => {
+ /**
+ * Given a parameter list with an optional parameter (required not set)
+ * When buildTemplateArgumentsSchema is called
+ * Then the resulting schema accepts objects with or without the field
+ */
+
+ // Given: an optional parameter
+ const args = [{ name: "subtitle" }];
+
+ // When: the schema is built
+ const schema = buildTemplateArgumentsSchema(args);
+
+ // Then: an object without the field validates successfully
+ const withoutField = schema({});
+ expect(withoutField).toEqual({});
+
+ // Then: an object with the field also validates successfully
+ const withField = schema({ subtitle: "World" });
+ expect(withField).toEqual({ subtitle: "World" });
+ });
+
+ test("creates a schema accepting an empty object when no parameters are given", () => {
+ /**
+ * Given an empty parameter list
+ * When buildTemplateArgumentsSchema is called
+ * Then the resulting schema accepts an empty object
+ */
+
+ // Given: no parameters
+    const args: Parameters<typeof buildTemplateArgumentsSchema>[0] = [];
+
+ // When: the schema is built
+ const schema = buildTemplateArgumentsSchema(args);
+
+ // Then: an empty object validates successfully
+ const result = schema({});
+ expect(result).toEqual({});
+ });
+});
diff --git a/versions.json b/versions.json
index 3a80e33..baf8e23 100644
--- a/versions.json
+++ b/versions.json
@@ -24,5 +24,8 @@
"0.2.24": "0.15.0",
"0.2.25": "0.15.0",
"0.2.26": "0.15.0",
- "0.2.27": "0.15.0"
+ "0.2.27": "0.15.0",
+ "0.2.28": "0.15.0",
+ "0.2.29": "0.15.0",
+ "0.2.30": "0.15.0"
}