diff --git a/.changeset/sour-ends-occur.md b/.changeset/sour-ends-occur.md new file mode 100644 index 00000000..dfc0a431 --- /dev/null +++ b/.changeset/sour-ends-occur.md @@ -0,0 +1,5 @@ +--- +'@tanstack/pacer-lite': minor +--- + +feat: add pacer-lite library diff --git a/README.md b/README.md index e064d45e..42347dcc 100644 --- a/README.md +++ b/README.md @@ -35,12 +35,51 @@ # TanStack Pacer -A lightweight timing and scheduling library for debouncing, throttling, rate limiting, and managing complex async workflows. +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. -- Debouncing, throttling & rate limiting with sync/async support -- Queuing & batching utilities with pause, resume & cancel controls -- Framework adapters (React, Solid, etc.) with convenient hooks -- Fully type‑safe with small, tree‑shakeable utilities +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. 
+- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. 
+- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. ### Read the docs → diff --git a/docs/config.json b/docs/config.json index f13ba632..df95b5cc 100644 --- a/docs/config.json +++ b/docs/config.json @@ -51,41 +51,45 @@ "label": "Guides", "children": [ { - "label": "Debouncing Guide", - "to": "guides/debouncing" + "label": "Which Utility Should I Choose?", + "to": "guides/which-pacer-utility-should-i-choose" }, { - "label": "Async Debouncing Guide", - "to": "guides/async-debouncing" + "label": "Debouncing Guide", + "to": "guides/debouncing" }, { "label": "Throttling Guide", "to": "guides/throttling" }, - { - "label": "Async Throttling Guide", - "to": "guides/async-throttling" - }, { "label": "Rate Limiting Guide", "to": "guides/rate-limiting" }, - { - "label": "Async Rate Limiting Guide", - "to": "guides/async-rate-limiting" - }, { "label": "Queuing Guide", "to": "guides/queuing" }, - { - "label": "Async Queuing Guide", - "to": "guides/async-queuing" - }, { "label": "Batching Guide", "to": "guides/batching" }, + { + "label": "Async Debouncing Guide", + "to": "guides/async-debouncing" + }, + { + "label": "Async Throttling Guide", + "to": "guides/async-throttling" + }, + { + "label": "Async Rate Limiting Guide", + "to": "guides/async-rate-limiting" + }, + { + "label": "Async Queuing Guide", + "to": "guides/async-queuing" + }, { "label": "Async Batching Guide", "to": "guides/async-batching" @@ -634,7 +638,16 @@ }, { "label": "Debouncer Examples", - "children": [], + "children": [ + { + "label": "liteDebounce", 
+ "to": "examples/vanilla/liteDebounce" + }, + { + "label": "LiteDebouncer", + "to": "examples/vanilla/LiteDebouncer" + } + ], "frameworks": [ { "label": "react", @@ -706,7 +719,16 @@ }, { "label": "Throttler Examples", - "children": [], + "children": [ + { + "label": "liteThrottle", + "to": "examples/vanilla/liteThrottle" + }, + { + "label": "LiteThrottler", + "to": "examples/vanilla/LiteThrottler" + } + ], "frameworks": [ { "label": "react", @@ -774,7 +796,16 @@ }, { "label": "Rate Limiter Examples", - "children": [], + "children": [ + { + "label": "liteRateLimit", + "to": "examples/vanilla/liteRateLimit" + }, + { + "label": "LiteRateLimiter", + "to": "examples/vanilla/LiteRateLimiter" + } + ], "frameworks": [ { "label": "react", @@ -850,7 +881,16 @@ }, { "label": "Queue Examples", - "children": [], + "children": [ + { + "label": "liteQueue", + "to": "examples/vanilla/liteQueue" + }, + { + "label": "LiteQueuer", + "to": "examples/vanilla/LiteQueuer" + } + ], "frameworks": [ { "label": "react", @@ -906,7 +946,16 @@ }, { "label": "Batcher Examples", - "children": [], + "children": [ + { + "label": "liteBatch", + "to": "examples/vanilla/liteBatch" + }, + { + "label": "LiteBatcher", + "to": "examples/vanilla/LiteBatcher" + } + ], "frameworks": [ { "label": "react", diff --git a/docs/guides/which-pacer-utility-should-i-choose.md b/docs/guides/which-pacer-utility-should-i-choose.md new file mode 100644 index 00000000..94d988fb --- /dev/null +++ b/docs/guides/which-pacer-utility-should-i-choose.md @@ -0,0 +1,110 @@ +--- +title: Which Pacer Utility Should I Choose? +id: which-pacer-utility-should-i-choose +--- + +TanStack Pacer provides 5 core utilities for controlling function execution frequency. Here is a one-sentence summary of each utility: + + - [**Debouncer**](../debouncing.md) - Executes a function after a period of inactivity. (Rejects other calls during activity) + - [**Throttler**](../throttling.md) - Executes a function at regular intervals. 
(Rejects all but one call during each interval) + - [**Rate Limiter**](../rate-limiting.md) - Prevents a function from being called too frequently. (Rejects calls when the limit is reached) + - [**Queuer**](../queuing.md) - Processes all calls to a function in order. (Only rejects calls if the queue is full) + - [**Batcher**](../batching.md) - Groups multiple function calls into a single batch. (No rejections) + +After choosing which strategy fits your needs, there are additional variations and decisions to consider. This guide provides quick clarifications on the most common decisions you'll need to make. + +## Synchronous vs Asynchronous + +You may see both a [Debouncer](../debouncing.md) and an [Async Debouncer](../async-debouncing.md) when first exploring TanStack Pacer. Which one should you use? + +Each utility comes in both synchronous and asynchronous versions. For most use cases, the simpler synchronous version is sufficient. However, if you need these utilities to handle async logic for you, the complexity of each utility increases significantly. The bundle size of the asynchronous versions of each utility is often more than double the size of the synchronous versions. If you actually need and use some of these additional APIs, the extra complexity is worth it, but don't choose the asynchronous version of a utility unless you actually end up using these features. + +> [!TIP] We recommend using the simpler synchronous version of each utility for most use cases. (Debouncer, Throttler, Rate Limiter, Queuer, Batcher) + +Luckily, switching between the synchronous and asynchronous versions of a utility is straightforward. For the most part, just replace the import whenever you decide you need to switch. + +### When to Use the Asynchronous Version + +Use the asynchronous version when you need any of these capabilities: + +- **Await Return Values**: Await and use the return value from your function, rather than just calling it for side effects. 
The synchronous version returns void, while the async version returns a Promise that resolves with your function's result. You can also await the return value to determine when to send another execution when execution order matters. + +- **Error Handling**: Built-in error handling with configurable error callbacks, control over whether errors are thrown or swallowed, and error statistics tracking. + +- **Extra Callbacks**: Instead of just an `onExecute` callback that comes with the synchronous version, the asynchronous version comes with additional callbacks such as `onSuccess`, `onError`, `onSettled`, and `onAbort`. + +- **Concurrency**: For queuing specifically, concurrency support allowing multiple items to be processed simultaneously while maintaining control over how many run at once. + +- **Retries and Aborts**: Built-in integration with `AsyncRetryer` for automatic retries of failed executions with configurable backoff strategies, jitter, and retry limits. Cancel in-flight operations using AbortController. + +## Pacer Lite vs Pacer + +Pacer Lite (`@tanstack/pacer-lite`) is a stripped-down version of the core TanStack Pacer library. It is designed to be used in libraries and npm packages that need minimal overhead and no reactivity features. The Lite version of each utility has the same core functionality as its core counterpart, but with a smaller API surface and a smaller bundle size. Pacer Lite lacks reactivity features, framework adapters, devtools support, and some of the advanced options that the core utilities have. + +If you are building an application, use the normal `@tanstack/pacer` package (or your framework adapter like `@tanstack/react-pacer` for React, `@tanstack/solid-pacer` for Solid, etc.). Only use Pacer Lite if you are building a library or npm package that needs to be as lightweight as possible and doesn't need the extra features of the core utilities. + +## Which Hook Variation Should I Use? 
+ +We will use the Debouncer utility as the main example, but the same principles apply to all the other utilities. + +If you are using a framework adapter like React, you will see that there are lots of examples with multiple hook variations. For example, for debouncing you will see: + +- [`useDebouncer`](../../framework/react/examples/useDebouncer) +- [`useDebouncedCallback`](../../framework/react/examples/useDebouncedCallback) +- [`useDebouncedState`](../../framework/react/examples/useDebouncedState) +- [`useDebouncedValue`](../../framework/react/examples/useDebouncedValue) + +You will also probably see that you can use the core `Debouncer` class directly or the core `debounce` function directly without using a hook. + +These are all variations of the same basic debouncing functionality. So, which one should you use? + +The answer is: It Depends! 🤷‍♂️ + +But also: It doesn't really matter too much. They all do essentially the same thing. It's mostly a matter of personal preference and how you want to interact with the utility. Under the hood, a `Debouncer` instance is created no matter what you choose. + +You can start with the [`useDebouncer`](../../framework/react/examples/useDebouncer) hook if you don't know which one to use. All of the others wrap the `useDebouncer` hook with different argument and return value signatures. + +```tsx +import { useDebouncer } from '@tanstack/react-pacer' +//... +const debouncer = useDebouncer(fn, options) + +debouncer.maybeExecute(args) // execute the debounced function +//... +debouncer.cancel() // use Debouncer APIs with full access to the debouncer instance +debouncer.flush() +``` + +If you only need to create a debounced function and don't need access to the debouncer instance to call methods or use its extra features, use the [`useDebouncedCallback`](../../framework/react/examples/useDebouncedCallback) hook. 
The `*Callback` versions of the hooks are actually most similar to calling the core functions directly (like `debounce`) but with the memoization setup taken care of for you. + +```tsx +import { useDebouncedCallback } from '@tanstack/react-pacer' +//... +const debouncedFn = useDebouncedCallback(fn, options) + +debouncedFn(args) // execute the debounced function +//... +``` + +The other variations are convenience hooks that wrap the `useDebouncer` hook with different argument and return value signatures. For example, the [`useDebouncedState`](../../framework/react/examples/useDebouncedState) hook is useful when you need to debounce a state value. + +```tsx +import { useDebouncedState } from '@tanstack/react-pacer' +//... +const [debouncedValue, setDebouncedValue] = useDebouncedState(value, options) + +setDebouncedValue(newValue) // set the debounced value (will be debounced state setter) +//... +``` + +The [`useDebouncedValue`](../../framework/react/examples/useDebouncedValue) hook is useful when your debounced value is derived from an instant value that changes frequently. + +```tsx +import { useDebouncedValue } from '@tanstack/react-pacer' +//... +const [instantValue, setInstantValue] = useState(0) +const [debouncedValue] = useDebouncedValue(instantValue, options) +//... +setInstantValue(newValue) // Set the instant value; the debounced value will update automatically, delayed by the wait time +//... +``` \ No newline at end of file diff --git a/docs/overview.md b/docs/overview.md index 58751590..38448879 100644 --- a/docs/overview.md +++ b/docs/overview.md @@ -6,7 +6,7 @@ id: overview TanStack Pacer is a library focused on providing high-quality utilities for controlling function execution timing in your applications. While similar utilities exist elsewhere, we aim to get all the important details right - including ***type-safety***, ***tree-shaking***, and a consistent and ***intuitive API***. 
By focusing on these fundamentals and making them available in a ***framework agnostic*** way, we hope to make these utilities and patterns more commonplace in your applications. Proper execution control is often an afterthought in application development, leading to performance issues, race conditions, and poor user experiences that could have been prevented. TanStack Pacer helps you implement these critical patterns correctly from the start! > [!IMPORTANT] -> TanStack Pacer is currently in **alpha** and its API is subject to change. +> TanStack Pacer is currently in **beta** and its API is still subject to change. > > The scope of this library may grow, but we hope to keep the bundle size of each individual utility lean and focused. @@ -43,6 +43,7 @@ Many of the ideas (and code) for TanStack Pacer are not new. In fact, many of th - **Async or Sync Variations** - Choose between synchronous and asynchronous versions of each utility - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations - **State Management** - Uses TanStack Store under the hood for state management with fine-grained reactivity - Easily integrate with your own state management library of choice @@ -66,3 +67,7 @@ Each utility is designed to be used in a specific way, and each utility has its See how each utility behaves with this interactive comparison. Move the range slider to observe the differences between debouncing, throttling, rate limiting, queuing, and batching: + +## Pacer Lite + +Pacer Lite (`@tanstack/pacer-lite`) is a stripped down version of the core TanStack Pacer library. It is designed to be used in libraries and npm packages that need minimal overhead, and no reactivity features. The Lite version of each utility has the same core functionality of its core counterpart, but is stripped down to have a slightly smaller API surface and a smaller bundle size. 
Pacer Lite lacks reactivity features, framework adapters, devtools support, and some of the advanced options that the core utilities have. If that sounds interesting to you, you can \ No newline at end of file diff --git a/examples/vanilla/LiteBatcher/index.html b/examples/vanilla/LiteBatcher/index.html new file mode 100644 index 00000000..0096c9e1 --- /dev/null +++ b/examples/vanilla/LiteBatcher/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla Batcher Examples + + +
+ + + diff --git a/examples/vanilla/LiteBatcher/package.json b/examples/vanilla/LiteBatcher/package.json new file mode 100644 index 00000000..2baf4111 --- /dev/null +++ b/examples/vanilla/LiteBatcher/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-batcher", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/LiteBatcher/public/emblem-light.svg b/examples/vanilla/LiteBatcher/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/LiteBatcher/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/LiteBatcher/src/index.ts b/examples/vanilla/LiteBatcher/src/index.ts new file mode 100644 index 00000000..39f28f4d --- /dev/null +++ b/examples/vanilla/LiteBatcher/src/index.ts @@ -0,0 +1,123 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { LiteBatcher } from '@tanstack/pacer-lite/lite-batcher' + +function createApp1() { + const container = document.createElement('div') + + let processedBatches: Array> = [] + let batchesProcessed = 0 + let totalItemsProcessed = 0 + let lastNumber = 0 + + const batcher = new LiteBatcher( + (items: Array) => { + processedBatches.push(items) + batchesProcessed += 1 + totalItemsProcessed += items.length + console.log('✅ Processing batch:', items) + updateDisplay() + }, + { + maxSize: 5, + wait: 3000, + getShouldExecute: (items) => items.includes(42), + }, + ) + + function addItem() { + lastNumber += 1 + batcher.addItem(lastNumber) + updateDisplay() + } + + function flushBatch() { + batcher.flush() + console.log('⚡ Flushed current batch') + updateDisplay() + } + + function clearBatch() { + batcher.clear() + console.log('🔄 Batch cleared') + updateDisplay() + } + + function cancelPending() { + batcher.cancel() + console.log('❌ Cancelled pending batch') + updateDisplay() + } + + function updateDisplay() { + const batchSize = batcher.size + const isEmpty = batcher.isEmpty + const isPending = batcher.isPending + const batchItems = batcher.peekAllItems() + + container.innerHTML = ` +
+

TanStack Pacer LiteBatcher Example

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Batch Size:${batchSize}
Batch Max Size:5
Batch Items:${batchItems.length > 0 ? batchItems.join(', ') : 'None'}
Is Pending:${isPending ? 'Yes' : 'No'}
Batches Processed:${batchesProcessed}
Items Processed:${totalItemsProcessed}
Processed Batches:${processedBatches.length > 0 ? processedBatches.map((b) => `[${b.join(', ')}]`).join(', ') : 'None'}
+
+ + + + +
+
+ ` + + container.querySelector('#add-item-btn')?.addEventListener('click', addItem) + container.querySelector('#flush-btn')?.addEventListener('click', flushBatch) + container.querySelector('#clear-btn')?.addEventListener('click', clearBatch) + container + .querySelector('#cancel-btn') + ?.addEventListener('click', cancelPending) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'LiteBatcher example ready! Add items and watch them batch automatically, or use flush to process immediately.', +) diff --git a/examples/vanilla/LiteBatcher/tsconfig.json b/examples/vanilla/LiteBatcher/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/LiteBatcher/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/LiteBatcher/vite.config.ts b/examples/vanilla/LiteBatcher/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/LiteBatcher/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/LiteDebouncer/index.html b/examples/vanilla/LiteDebouncer/index.html new file mode 100644 index 00000000..181e3539 --- /dev/null +++ b/examples/vanilla/LiteDebouncer/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla LiteDebouncer Examples + + +
+ + + diff --git a/examples/vanilla/LiteDebouncer/package.json b/examples/vanilla/LiteDebouncer/package.json new file mode 100644 index 00000000..730b1130 --- /dev/null +++ b/examples/vanilla/LiteDebouncer/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-debouncer", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/LiteDebouncer/public/emblem-light.svg b/examples/vanilla/LiteDebouncer/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/LiteDebouncer/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/LiteDebouncer/src/index.ts b/examples/vanilla/LiteDebouncer/src/index.ts new file mode 100644 index 00000000..d1e1b3b2 --- /dev/null +++ b/examples/vanilla/LiteDebouncer/src/index.ts @@ -0,0 +1,95 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { LiteDebouncer } from '@tanstack/pacer-lite/lite-debouncer' + +function createApp1() { + const container = document.createElement('div') + + let instantCount = 0 + let debouncedCount = 0 + + const debouncer = new LiteDebouncer( + (newCount: number) => { + debouncedCount = newCount + console.log('🔄 Debounced count updated:', newCount) + updateDisplay() + }, + { + wait: 500, + leading: false, + trailing: true, + }, + ) + + function increment() { + instantCount += 1 + debouncer.maybeExecute(instantCount) + updateDisplay() + } + + function decrement() { + instantCount -= 1 + debouncer.maybeExecute(instantCount) + updateDisplay() + } + + function flush() { + debouncer.flush() + console.log('⚡ Forced flush executed') + } + + function cancel() { + debouncer.cancel() + console.log('❌ Debouncer canceled') + updateDisplay() + } + + function updateDisplay() { + container.innerHTML = ` +
+

TanStack Pacer LiteDebouncer Example

+ + + + + + + + + + + +
Instant Count:${instantCount}
Debounced Count:${debouncedCount}
+
+ + + + +
+
+ ` + + container + .querySelector('#increment-btn') + ?.addEventListener('click', increment) + container + .querySelector('#decrement-btn') + ?.addEventListener('click', decrement) + container.querySelector('#flush-btn')?.addEventListener('click', flush) + container.querySelector('#cancel-btn')?.addEventListener('click', cancel) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'LiteDebouncer example ready! Click the buttons rapidly and watch the console for debounced executions.', +) diff --git a/examples/vanilla/LiteDebouncer/tsconfig.json b/examples/vanilla/LiteDebouncer/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/LiteDebouncer/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/LiteDebouncer/vite.config.ts b/examples/vanilla/LiteDebouncer/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/LiteDebouncer/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/LiteQueuer/index.html b/examples/vanilla/LiteQueuer/index.html new file mode 100644 index 00000000..dbb31999 --- /dev/null +++ b/examples/vanilla/LiteQueuer/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla Queuer Examples + + +
+ + + diff --git a/examples/vanilla/LiteQueuer/package.json b/examples/vanilla/LiteQueuer/package.json new file mode 100644 index 00000000..ff103a2a --- /dev/null +++ b/examples/vanilla/LiteQueuer/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-queuer", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/LiteQueuer/public/emblem-light.svg b/examples/vanilla/LiteQueuer/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/LiteQueuer/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/LiteQueuer/src/index.ts b/examples/vanilla/LiteQueuer/src/index.ts new file mode 100644 index 00000000..92b1370e --- /dev/null +++ b/examples/vanilla/LiteQueuer/src/index.ts @@ -0,0 +1,155 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. + */ +import { LiteQueuer } from '@tanstack/pacer-lite/lite-queuer' + +function createApp1() { + const container = document.createElement('div') + + let executionCount = 0 + + const queuer = new LiteQueuer( + (item: number) => { + executionCount += 1 + console.log('✅ Processing item:', item) + updateDisplay() + }, + { + initialItems: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + maxSize: 25, + started: false, + wait: 1000, + }, + ) + + function addItem() { + const nextNumber = + queuer.size > 0 + ? 
(queuer.peekAllItems()[queuer.peekAllItems().length - 1] ?? 0) + 1 + : 1 + const added = queuer.addItem(nextNumber) + if (!added) { + console.log('❌ Queue is full, item rejected') + } + updateDisplay() + } + + function executeNext() { + const item = queuer.execute() + if (item !== undefined) { + executionCount += 1 + console.log('✅ Manually processed item:', item) + } + updateDisplay() + } + + function clearQueue() { + queuer.clear() + executionCount = 0 + console.log('🔄 Queue cleared') + updateDisplay() + } + + function startProcessing() { + queuer.start() + console.log('▶️ Started processing') + updateDisplay() + } + + function stopProcessing() { + queuer.stop() + console.log('⏸️ Stopped processing') + updateDisplay() + } + + function flushQueue() { + queuer.flush() + console.log('⚡ Flushed queue') + updateDisplay() + } + + function updateDisplay() { + const queueSize = queuer.size + const isEmpty = queuer.isEmpty + const isFull = queueSize >= 25 + const isRunning = queuer.isQueueRunning + const peekNext = queuer.peekNextItem() + const allItems = queuer.peekAllItems() + + container.innerHTML = ` +
+

TanStack Pacer LiteQueuer Example

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Queue Size:${queueSize}
Queue Max Size:25
Queue Full:${isFull ? 'Yes' : 'No'}
Queue Peek:${peekNext ?? 'None'}
Queue Empty:${isEmpty ? 'Yes' : 'No'}
Queuer Status:${isRunning ? 'Running' : 'Stopped'}
Items Processed:${executionCount}
Queue Items:${allItems.length > 0 ? allItems.join(', ') : 'None'}
+
+ + + + + + +
+
+ ` + + container.querySelector('#add-item-btn')?.addEventListener('click', addItem) + container + .querySelector('#execute-btn') + ?.addEventListener('click', executeNext) + container.querySelector('#clear-btn')?.addEventListener('click', clearQueue) + container + .querySelector('#start-btn') + ?.addEventListener('click', startProcessing) + container + .querySelector('#stop-btn') + ?.addEventListener('click', stopProcessing) + container.querySelector('#flush-btn')?.addEventListener('click', flushQueue) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'LiteQueuer example ready! Use the buttons to control queue processing.', +) diff --git a/examples/vanilla/LiteQueuer/tsconfig.json b/examples/vanilla/LiteQueuer/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/LiteQueuer/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/LiteQueuer/vite.config.ts b/examples/vanilla/LiteQueuer/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/LiteQueuer/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/LiteRateLimiter/index.html b/examples/vanilla/LiteRateLimiter/index.html new file mode 100644 index 00000000..2f19b23c --- /dev/null 
+++ b/examples/vanilla/LiteRateLimiter/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla Rate Limiter Examples + + +
+ + + diff --git a/examples/vanilla/LiteRateLimiter/package.json b/examples/vanilla/LiteRateLimiter/package.json new file mode 100644 index 00000000..d869ddb9 --- /dev/null +++ b/examples/vanilla/LiteRateLimiter/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-rate-limiter", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/LiteRateLimiter/public/emblem-light.svg b/examples/vanilla/LiteRateLimiter/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/LiteRateLimiter/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/LiteRateLimiter/src/index.ts b/examples/vanilla/LiteRateLimiter/src/index.ts new file mode 100644 index 00000000..995f9b13 --- /dev/null +++ b/examples/vanilla/LiteRateLimiter/src/index.ts @@ -0,0 +1,154 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { LiteRateLimiter } from '@tanstack/pacer-lite/lite-rate-limiter' + +function createApp1() { + const container = document.createElement('div') + + let instantCount = 0 + let limitedCount = 0 + let executionCount = 0 + let rejectionCount = 0 + let windowType: 'fixed' | 'sliding' = 'fixed' + + let rateLimiter = new LiteRateLimiter( + (newCount: number) => { + limitedCount = newCount + executionCount += 1 + console.log('✅ Rate limited count updated:', newCount) + updateDisplay() + }, + { + limit: 5, + window: 5000, + windowType: windowType, + }, + ) + + function increment() { + instantCount += 1 + const executed = rateLimiter.maybeExecute(instantCount) + if (!executed) { + rejectionCount += 1 + console.log( + '❌ Rejected by rate limiter', + rateLimiter.getMsUntilNextWindow(), + 'ms until next window', + ) + } + updateDisplay() + } + + function reset() { + rateLimiter.reset() + instantCount = 0 + limitedCount = 0 + executionCount = 0 + rejectionCount = 0 + console.log('🔄 Rate limiter reset') + updateDisplay() + } + + function setWindowType(type: 'fixed' | 'sliding') { + windowType = type + instantCount = 0 + limitedCount = 0 + executionCount = 0 + rejectionCount = 0 + rateLimiter = new LiteRateLimiter( + (newCount: number) => { + limitedCount = newCount + executionCount += 1 + console.log('✅ Rate limited count updated:', newCount) + updateDisplay() + }, + { + limit: 5, + window: 5000, + windowType: windowType, + }, + ) + updateDisplay() + } + + function updateDisplay() { + const remainingInWindow = rateLimiter.getRemainingInWindow() + const msUntilNextWindow = rateLimiter.getMsUntilNextWindow() + + container.innerHTML = ` +
+

TanStack Pacer LiteRateLimiter Example

+
+ + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Execution Count:${executionCount}
Rejection Count:${rejectionCount}
Remaining in Window:${remainingInWindow}
Ms Until Next Window:${msUntilNextWindow}

Instant Count:${instantCount}
Rate Limited Count:${limitedCount}
+
+ + +
+
+ ` + + container + .querySelector('#increment-btn') + ?.addEventListener('click', increment) + container.querySelector('#reset-btn')?.addEventListener('click', reset) + container + .querySelector('input[value="fixed"]') + ?.addEventListener('change', () => setWindowType('fixed')) + container + .querySelector('input[value="sliding"]') + ?.addEventListener('change', () => setWindowType('sliding')) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'LiteRateLimiter example ready! Click increment rapidly and watch the console for rate limited executions.', +) diff --git a/examples/vanilla/LiteRateLimiter/tsconfig.json b/examples/vanilla/LiteRateLimiter/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/LiteRateLimiter/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/LiteRateLimiter/vite.config.ts b/examples/vanilla/LiteRateLimiter/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/LiteRateLimiter/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/LiteThrottler/index.html b/examples/vanilla/LiteThrottler/index.html new file mode 100644 index 00000000..ff224096 --- /dev/null +++ b/examples/vanilla/LiteThrottler/index.html 
@@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla LiteThrottler Examples + + +
+ + + diff --git a/examples/vanilla/LiteThrottler/package.json b/examples/vanilla/LiteThrottler/package.json new file mode 100644 index 00000000..38337a88 --- /dev/null +++ b/examples/vanilla/LiteThrottler/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-throttler", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/LiteThrottler/public/emblem-light.svg b/examples/vanilla/LiteThrottler/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/LiteThrottler/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/LiteThrottler/src/index.ts b/examples/vanilla/LiteThrottler/src/index.ts new file mode 100644 index 00000000..3d2147e8 --- /dev/null +++ b/examples/vanilla/LiteThrottler/src/index.ts @@ -0,0 +1,95 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { LiteThrottler } from '@tanstack/pacer-lite/lite-throttler' + +function createApp1() { + const container = document.createElement('div') + + let instantCount = 0 + let throttledCount = 0 + + const throttler = new LiteThrottler( + (newCount: number) => { + throttledCount = newCount + console.log('🔄 Throttled count updated:', newCount) + updateDisplay() + }, + { + wait: 500, + leading: true, + trailing: true, + }, + ) + + function increment() { + instantCount += 1 + throttler.maybeExecute(instantCount) + updateDisplay() + } + + function decrement() { + instantCount -= 1 + throttler.maybeExecute(instantCount) + updateDisplay() + } + + function flush() { + throttler.flush() + console.log('⚡ Forced flush executed') + } + + function cancel() { + throttler.cancel() + console.log('❌ Throttler canceled') + updateDisplay() + } + + function updateDisplay() { + container.innerHTML = ` +
+

TanStack Pacer LiteThrottler Example

+ + + + + + + + + + + +
Instant Count:${instantCount}
Throttled Count:${throttledCount}
+
+ + + + +
+
+ ` + + container + .querySelector('#increment-btn') + ?.addEventListener('click', increment) + container + .querySelector('#decrement-btn') + ?.addEventListener('click', decrement) + container.querySelector('#flush-btn')?.addEventListener('click', flush) + container.querySelector('#cancel-btn')?.addEventListener('click', cancel) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'LiteThrottler example ready! Click the buttons rapidly and watch the console for throttled executions.', +) diff --git a/examples/vanilla/LiteThrottler/tsconfig.json b/examples/vanilla/LiteThrottler/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/LiteThrottler/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/LiteThrottler/vite.config.ts b/examples/vanilla/LiteThrottler/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/LiteThrottler/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/liteBatch/index.html b/examples/vanilla/liteBatch/index.html new file mode 100644 index 00000000..dc4e0ecb --- /dev/null +++ b/examples/vanilla/liteBatch/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla Batch Examples + + +
+ + + diff --git a/examples/vanilla/liteBatch/package.json b/examples/vanilla/liteBatch/package.json new file mode 100644 index 00000000..bb946a2a --- /dev/null +++ b/examples/vanilla/liteBatch/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-batch", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/liteBatch/public/emblem-light.svg b/examples/vanilla/liteBatch/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/liteBatch/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/liteBatch/src/index.ts b/examples/vanilla/liteBatch/src/index.ts new file mode 100644 index 00000000..dabdc9e0 --- /dev/null +++ b/examples/vanilla/liteBatch/src/index.ts @@ -0,0 +1,83 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { liteBatch } from '@tanstack/pacer-lite/lite-batcher' + +function createApp1() { + const container = document.createElement('div') + + let processedBatches: Array> = [] + let batchesProcessed = 0 + let totalItemsProcessed = 0 + let lastNumber = 0 + + const batchItems = liteBatch( + (items: Array) => { + processedBatches.push(items) + batchesProcessed += 1 + totalItemsProcessed += items.length + console.log('✅ Processing batch:', items) + updateDisplay() + }, + { + maxSize: 5, + wait: 3000, + getShouldExecute: (items) => items.includes(42), + }, + ) + + function addItem() { + lastNumber += 1 + batchItems(lastNumber) + updateDisplay() + } + + function updateDisplay() { + container.innerHTML = ` +
+

TanStack Pacer liteBatch Example

+ + + + + + + + + + + + + + + + + + + +
Batch Max Size:5
Batches Processed:${batchesProcessed}
Items Processed:${totalItemsProcessed}
Processed Batches:${processedBatches.length > 0 ? processedBatches.map((b) => `[${b.join(', ')}]`).join(', ') : 'None'}
+
+ +
+
+

Note: liteBatch function automatically processes batches. Batches process when: maxSize (5) is reached, wait time (3s) elapses, or item 42 is added. Use LiteBatcher class for manual control.

+
+
+ ` + + container.querySelector('#add-item-btn')?.addEventListener('click', addItem) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'liteBatch example ready! Add items and watch them batch automatically.', +) diff --git a/examples/vanilla/liteBatch/tsconfig.json b/examples/vanilla/liteBatch/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/liteBatch/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/liteBatch/vite.config.ts b/examples/vanilla/liteBatch/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/liteBatch/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/liteDebounce/index.html b/examples/vanilla/liteDebounce/index.html new file mode 100644 index 00000000..2e51fefb --- /dev/null +++ b/examples/vanilla/liteDebounce/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla Debounce Examples + + +
+ + + diff --git a/examples/vanilla/liteDebounce/package.json b/examples/vanilla/liteDebounce/package.json new file mode 100644 index 00000000..7637e217 --- /dev/null +++ b/examples/vanilla/liteDebounce/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-debounce", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/liteDebounce/public/emblem-light.svg b/examples/vanilla/liteDebounce/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/liteDebounce/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/liteDebounce/src/index.ts b/examples/vanilla/liteDebounce/src/index.ts new file mode 100644 index 00000000..6d4ba1fd --- /dev/null +++ b/examples/vanilla/liteDebounce/src/index.ts @@ -0,0 +1,80 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { liteDebounce } from '@tanstack/pacer-lite/lite-debouncer' + +function createApp1() { + const container = document.createElement('div') + + let instantCount = 0 + let debouncedCount = 0 + + const debouncedSetCount = liteDebounce( + (newCount: number) => { + debouncedCount = newCount + console.log('🔄 Debounced count updated:', newCount) + updateDisplay() + }, + { + wait: 500, + }, + ) + + function increment() { + instantCount += 1 + debouncedSetCount(instantCount) + updateDisplay() + } + + function decrement() { + instantCount -= 1 + debouncedSetCount(instantCount) + updateDisplay() + } + + function updateDisplay() { + container.innerHTML = ` +
+

TanStack Pacer liteDebounce Example

+ + + + + + + + + + + +
Instant Count:${instantCount}
Debounced Count:${debouncedCount}
+
+ + +
+
+ ` + + const incrementBtn = container.querySelector( + '#increment-btn', + ) as HTMLButtonElement + const decrementBtn = container.querySelector( + '#decrement-btn', + ) as HTMLButtonElement + incrementBtn?.addEventListener('click', increment) + decrementBtn?.addEventListener('click', decrement) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'liteDebounce example ready! Click the buttons rapidly and watch the console for debounced executions.', +) diff --git a/examples/vanilla/liteDebounce/tsconfig.json b/examples/vanilla/liteDebounce/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/liteDebounce/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/liteDebounce/vite.config.ts b/examples/vanilla/liteDebounce/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/liteDebounce/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/liteQueue/index.html b/examples/vanilla/liteQueue/index.html new file mode 100644 index 00000000..b8e249fc --- /dev/null +++ b/examples/vanilla/liteQueue/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla Queue Examples + + +
+ + + diff --git a/examples/vanilla/liteQueue/package.json b/examples/vanilla/liteQueue/package.json new file mode 100644 index 00000000..515bd235 --- /dev/null +++ b/examples/vanilla/liteQueue/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-queue", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/liteQueue/public/emblem-light.svg b/examples/vanilla/liteQueue/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/liteQueue/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/liteQueue/src/index.ts b/examples/vanilla/liteQueue/src/index.ts new file mode 100644 index 00000000..b1c83e63 --- /dev/null +++ b/examples/vanilla/liteQueue/src/index.ts @@ -0,0 +1,90 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { liteQueue } from '@tanstack/pacer-lite/lite-queuer' + +function createApp1() { + const container = document.createElement('div') + + let executionCount = 0 + let processedItems: number[] = [] + + const processItem = liteQueue( + (item: number) => { + executionCount += 1 + processedItems.push(item) + console.log('✅ Processing item:', item) + updateDisplay() + }, + { + initialItems: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + maxSize: 25, + started: true, + wait: 1000, + }, + ) + + function addItem() { + const nextNumber = + processedItems.length > 0 ? Math.max(...processedItems) + 1 : 1 + const added = processItem(nextNumber) + if (!added) { + console.log('❌ Queue is full, item rejected') + } + updateDisplay() + } + + function clearQueue() { + processedItems = [] + executionCount = 0 + console.log('🔄 Queue cleared') + updateDisplay() + } + + function updateDisplay() { + container.innerHTML = ` +
+

TanStack Pacer liteQueue Example

+ + + + + + + + + + + + + + + +
Items Processed:${executionCount}
Processed Items:${processedItems.length > 0 ? processedItems.join(', ') : 'None'}
Queue Max Size:25
+
+ + +
+
+

Note: liteQueue function automatically processes items with 1 second delay. Queue state is not accessible - use LiteQueuer class for full control.

+
+
+ ` + + container.querySelector('#add-item-btn')?.addEventListener('click', addItem) + container.querySelector('#clear-btn')?.addEventListener('click', clearQueue) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'liteQueue example ready! Items will be processed automatically with 1 second delay between each.', +) diff --git a/examples/vanilla/liteQueue/tsconfig.json b/examples/vanilla/liteQueue/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/liteQueue/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/liteQueue/vite.config.ts b/examples/vanilla/liteQueue/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/liteQueue/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/liteRateLimit/index.html b/examples/vanilla/liteRateLimit/index.html new file mode 100644 index 00000000..06c71fcd --- /dev/null +++ b/examples/vanilla/liteRateLimit/index.html @@ -0,0 +1,13 @@ + + + + + + + TanStack Pacer - Vanilla Rate Limit Examples + + +
+ + + diff --git a/examples/vanilla/liteRateLimit/package.json b/examples/vanilla/liteRateLimit/package.json new file mode 100644 index 00000000..d7bbcb07 --- /dev/null +++ b/examples/vanilla/liteRateLimit/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-rate-limit", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/liteRateLimit/public/emblem-light.svg b/examples/vanilla/liteRateLimit/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/liteRateLimit/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/liteRateLimit/src/index.ts b/examples/vanilla/liteRateLimit/src/index.ts new file mode 100644 index 00000000..8de0dfb8 --- /dev/null +++ b/examples/vanilla/liteRateLimit/src/index.ts @@ -0,0 +1,137 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { liteRateLimit } from '@tanstack/pacer-lite/lite-rate-limiter' + +function createApp1() { + const container = document.createElement('div') + + let instantCount = 0 + let limitedCount = 0 + let executionCount = 0 + let rejectionCount = 0 + let windowType: 'fixed' | 'sliding' = 'fixed' + + let rateLimitedSetCount = liteRateLimit( + (newCount: number) => { + limitedCount = newCount + executionCount += 1 + console.log('✅ Rate limited count updated:', newCount) + updateDisplay() + }, + { + limit: 5, + window: 5000, + windowType: windowType, + }, + ) + + function increment() { + instantCount += 1 + const executed = rateLimitedSetCount(instantCount) + if (!executed) { + rejectionCount += 1 + console.log('❌ Rejected by rate limiter') + } + updateDisplay() + } + + function reset() { + instantCount = 0 + limitedCount = 0 + executionCount = 0 + rejectionCount = 0 + updateDisplay() + } + + function setWindowType(type: 'fixed' | 'sliding') { + windowType = type + instantCount = 0 + limitedCount = 0 + executionCount = 0 + rejectionCount = 0 + rateLimitedSetCount = liteRateLimit( + (newCount: number) => { + limitedCount = newCount + executionCount += 1 + console.log('✅ Rate limited count updated:', newCount) + updateDisplay() + }, + { + limit: 5, + window: 5000, + windowType: windowType, + }, + ) + updateDisplay() + } + + function updateDisplay() { + container.innerHTML = ` +
+

TanStack Pacer liteRateLimit Example

+
+ + +
+ + + + + + + + + + + + + + + + + + + + + + +
Execution Count:${executionCount}
Rejection Count:${rejectionCount}

Instant Count:${instantCount}
Rate Limited Count:${limitedCount}
+
+ + +
+
+ ` + + container + .querySelector('#increment-btn') + ?.addEventListener('click', increment) + container.querySelector('#reset-btn')?.addEventListener('click', reset) + container + .querySelector('input[value="fixed"]') + ?.addEventListener('change', () => setWindowType('fixed')) + container + .querySelector('input[value="sliding"]') + ?.addEventListener('change', () => setWindowType('sliding')) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'liteRateLimit example ready! Click increment rapidly and watch the console for rate limited executions.', +) diff --git a/examples/vanilla/liteRateLimit/tsconfig.json b/examples/vanilla/liteRateLimit/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/liteRateLimit/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/liteRateLimit/vite.config.ts b/examples/vanilla/liteRateLimit/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/liteRateLimit/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/examples/vanilla/liteThrottle/index.html b/examples/vanilla/liteThrottle/index.html new file mode 100644 index 00000000..51a534be --- /dev/null +++ b/examples/vanilla/liteThrottle/index.html @@ -0,0 +1,13 @@ 
+ + + + + + + TanStack Pacer - Vanilla Throttle Examples + + +
+ + + diff --git a/examples/vanilla/liteThrottle/package.json b/examples/vanilla/liteThrottle/package.json new file mode 100644 index 00000000..2bd6d22b --- /dev/null +++ b/examples/vanilla/liteThrottle/package.json @@ -0,0 +1,17 @@ +{ + "name": "@tanstack/pacer-example-vanilla-lite-throttle", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3005", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/pacer-lite": "0.0.1" + }, + "devDependencies": { + "vite": "^7.2.2" + } +} diff --git a/examples/vanilla/liteThrottle/public/emblem-light.svg b/examples/vanilla/liteThrottle/public/emblem-light.svg new file mode 100644 index 00000000..a58e69ad --- /dev/null +++ b/examples/vanilla/liteThrottle/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. + + + + + + + + \ No newline at end of file diff --git a/examples/vanilla/liteThrottle/src/index.ts b/examples/vanilla/liteThrottle/src/index.ts new file mode 100644 index 00000000..7a072cce --- /dev/null +++ b/examples/vanilla/liteThrottle/src/index.ts @@ -0,0 +1,80 @@ +/** + * Note: @tanstack/pacer-lite is a stripped-down alternative designed for library use. + * It does not include TanStack Store, reactivity features, framework adapters, or devtools support + * that are available in the core @tanstack/pacer package for app development. + * The core version also includes more advanced features in some utilities. 
+ */ +import { liteThrottle } from '@tanstack/pacer-lite/lite-throttler' + +function createApp1() { + const container = document.createElement('div') + + let instantCount = 0 + let throttledCount = 0 + + const throttledSetCount = liteThrottle( + (newCount: number) => { + throttledCount = newCount + console.log('🔄 Throttled count updated:', newCount) + updateDisplay() + }, + { + wait: 500, + }, + ) + + function increment() { + instantCount += 1 + throttledSetCount(instantCount) + updateDisplay() + } + + function decrement() { + instantCount -= 1 + throttledSetCount(instantCount) + updateDisplay() + } + + function updateDisplay() { + container.innerHTML = ` +
+

TanStack Pacer liteThrottle Example

+ + + + + + + + + + + +
Instant Count:${instantCount}
Throttled Count:${throttledCount}
+
+ + +
+
+ ` + + const incrementBtn = container.querySelector( + '#increment-btn', + ) as HTMLButtonElement + const decrementBtn = container.querySelector( + '#decrement-btn', + ) as HTMLButtonElement + incrementBtn?.addEventListener('click', increment) + decrementBtn?.addEventListener('click', decrement) + } + + updateDisplay() + return container +} + +const app = document.getElementById('app')! +app.appendChild(createApp1()) + +console.log( + 'liteThrottle example ready! Click the buttons rapidly and watch the console for throttled executions.', +) diff --git a/examples/vanilla/liteThrottle/tsconfig.json b/examples/vanilla/liteThrottle/tsconfig.json new file mode 100644 index 00000000..41a2fe79 --- /dev/null +++ b/examples/vanilla/liteThrottle/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/vanilla/liteThrottle/vite.config.ts b/examples/vanilla/liteThrottle/vite.config.ts new file mode 100644 index 00000000..f93d3793 --- /dev/null +++ b/examples/vanilla/liteThrottle/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' + +export default defineConfig({ + root: '.', + publicDir: 'public', + build: { + outDir: 'dist', + }, + server: { + port: 3005, + }, +}) diff --git a/package.json b/package.json index 93f76934..babcf209 100644 --- a/package.json +++ b/package.json @@ -8,15 +8,16 @@ "packageManager": "pnpm@10.17.1", "type": "module", "scripts": { - "build": "nx affected --skip-nx-cache --targets=build --exclude=examples/** && size-limit", - "build:all": "nx run-many --targets=build 
--exclude=examples/** && size-limit", - "build:core": "nx run-many --targets=build --projects=packages/pacer,packages/persister && size-limit", + "build": "nx affected --skip-nx-cache --targets=build --exclude=examples/** && size-limit && pnpm run copy:readme", + "build:all": "nx run-many --targets=build --exclude=examples/** && size-limit && pnpm run copy:readme", + "build:core": "nx run-many --targets=build --projects=packages/pacer,packages/persister && size-limit && pnpm run copy:readme", "changeset": "changeset", "changeset:publish": "changeset publish", "changeset:version": "changeset version && pnpm install --no-frozen-lockfile && pnpm prettier:write", "clean": "find . -name 'dist' -type d -prune -exec rm -rf {} +", "clean:node_modules": "find . -name 'node_modules' -type d -prune -exec rm -rf {} +", "clean:all": "pnpm run clean && pnpm run clean:node_modules", + "copy:readme": "cp README.md packages/pacer/README.md && cp README.md packages/pacer-devtools/README.md && cp README.md packages/pacer-lite/README.md && cp README.md packages/react-pacer/README.md && cp README.md packages/react-pacer-devtools/README.md && cp README.md packages/solid-pacer/README.md && cp README.md packages/solid-pacer-devtools/README.md", "dev": "pnpm run watch", "docs:generate": "node scripts/generateDocs.js", "format": "pnpm run prettier:write", @@ -79,6 +80,7 @@ }, "overrides": { "@tanstack/pacer": "workspace:*", + "@tanstack/pacer-lite": "workspace:*", "@tanstack/react-pacer": "workspace:*", "@tanstack/solid-pacer": "workspace:*", "@tanstack/pacer-devtools": "workspace:*", diff --git a/packages/pacer-devtools/README.md b/packages/pacer-devtools/README.md new file mode 100644 index 00000000..42347dcc --- /dev/null +++ b/packages/pacer-devtools/README.md @@ -0,0 +1,165 @@ +
+ +
+ +
+ + + + + +
+ +### [Become a Sponsor!](https://github.com/sponsors/tannerlinsley/) +
+ +# TanStack Pacer + +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. + +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. +- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of 
choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. +- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. + +### Read the docs → + +
+ +> [!NOTE] +> You may know **TanSack Pacer** by our adapter names, too! +> +> - [**React Pacer**](https://tanstack.com/pacer/latest/docs/framework/react/react-pacer) +> - [**Solid Pacer**](https://tanstack.com/pacer/latest/docs/framework/solid/solid-pacer) +> - Angular Pacer - needs a contributor! +> - Preact Pacer - Coming soon! (After React Pacer is more fleshed out) +> - Svelte Pacer - needs a contributor! +> - Vue Pacer - needs a contributor! + +## Get Involved + +- We welcome issues and pull requests! +- Participate in [GitHub discussions](https://github.com/TanStack/pacer/discussions) +- Chat with the community on [Discord](https://discord.com/invite/WrRKjPJ) +- See [CONTRIBUTING.md](./CONTRIBUTING.md) for setup instructions + +## Partners + + + + + + + +
+ + + + + CodeRabbit + + + + + + + + Cloudflare + + + + + + + + Unkey + + +
+ +
+Pacer & you? +

+We're looking for TanStack Pacer Partners to join our mission! Partner with us to push the boundaries of TanStack Pacer and build amazing things together. +

+LET'S CHAT +
+ + + +## Explore the TanStack Ecosystem + +- TanStack Config – Tooling for JS/TS packages +- TanStack DB – Reactive sync client store +- TanStack DevTools – Unified devtools panel +- TanStack Form – Type‑safe form state +- TanStack Query – Async state & caching +- TanStack Ranger – Range & slider primitives +- TanStack Router – Type‑safe routing, caching & URL state +- TanStack Start – Full‑stack SSR & streaming +- TanStack Store – Reactive data store +- TanStack Table – Headless datagrids +- TanStack Virtual – Virtualized rendering + +… and more at TanStack.com » + + diff --git a/packages/pacer-lite/README.md b/packages/pacer-lite/README.md new file mode 100644 index 00000000..42347dcc --- /dev/null +++ b/packages/pacer-lite/README.md @@ -0,0 +1,165 @@ +
+ +
+ +
+ + + + + +
+ +### [Become a Sponsor!](https://github.com/sponsors/tannerlinsley/) +
+ +# TanStack Pacer + +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. + +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. +- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of 
choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. +- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. + +### Read the docs → + +
+ +> [!NOTE] +> You may know **TanSack Pacer** by our adapter names, too! +> +> - [**React Pacer**](https://tanstack.com/pacer/latest/docs/framework/react/react-pacer) +> - [**Solid Pacer**](https://tanstack.com/pacer/latest/docs/framework/solid/solid-pacer) +> - Angular Pacer - needs a contributor! +> - Preact Pacer - Coming soon! (After React Pacer is more fleshed out) +> - Svelte Pacer - needs a contributor! +> - Vue Pacer - needs a contributor! + +## Get Involved + +- We welcome issues and pull requests! +- Participate in [GitHub discussions](https://github.com/TanStack/pacer/discussions) +- Chat with the community on [Discord](https://discord.com/invite/WrRKjPJ) +- See [CONTRIBUTING.md](./CONTRIBUTING.md) for setup instructions + +## Partners + + + + + + + +
+ + + + + CodeRabbit + + + + + + + + Cloudflare + + + + + + + + Unkey + + +
+ +
+Pacer & you? +

+We're looking for TanStack Pacer Partners to join our mission! Partner with us to push the boundaries of TanStack Pacer and build amazing things together. +

+LET'S CHAT +
+ + + +## Explore the TanStack Ecosystem + +- TanStack Config – Tooling for JS/TS packages +- TanStack DB – Reactive sync client store +- TanStack DevTools – Unified devtools panel +- TanStack Form – Type‑safe form state +- TanStack Query – Async state & caching +- TanStack Ranger – Range & slider primitives +- TanStack Router – Type‑safe routing, caching & URL state +- TanStack Start – Full‑stack SSR & streaming +- TanStack Store – Reactive data store +- TanStack Table – Headless datagrids +- TanStack Virtual – Virtualized rendering + +… and more at TanStack.com » + + diff --git a/packages/pacer-lite/package.json b/packages/pacer-lite/package.json new file mode 100644 index 00000000..34cacb99 --- /dev/null +++ b/packages/pacer-lite/package.json @@ -0,0 +1,113 @@ +{ + "name": "@tanstack/pacer-lite", + "version": "0.0.1", + "description": "Lightweight utilities for debouncing, throttling, and more - designed for npm packages.", + "author": "Tanner Linsley", + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/TanStack/pacer.git", + "directory": "packages/pacer-lite" + }, + "homepage": "https://tanstack.com/pacer", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "keywords": [ + "debounce", + "throttle", + "rate-limit", + "pacer", + "lightweight", + "minimal" + ], + "type": "module", + "types": "dist/esm/index.d.ts", + "main": "dist/cjs/index.cjs", + "module": "dist/esm/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + } + }, + "./lite-debouncer": { + "import": { + "types": "./dist/esm/lite-debouncer.d.ts", + "default": "./dist/esm/lite-debouncer.js" + }, + "require": { + "types": "./dist/cjs/lite-debouncer.d.cts", + "default": "./dist/cjs/lite-debouncer.cjs" + } + }, + "./lite-throttler": { + "import": { + "types": 
"./dist/esm/lite-throttler.d.ts", + "default": "./dist/esm/lite-throttler.js" + }, + "require": { + "types": "./dist/cjs/lite-throttler.d.cts", + "default": "./dist/cjs/lite-throttler.cjs" + } + }, + "./lite-rate-limiter": { + "import": { + "types": "./dist/esm/lite-rate-limiter.d.ts", + "default": "./dist/esm/lite-rate-limiter.js" + }, + "require": { + "types": "./dist/cjs/lite-rate-limiter.d.cts", + "default": "./dist/cjs/lite-rate-limiter.cjs" + } + }, + "./lite-queuer": { + "import": { + "types": "./dist/esm/lite-queuer.d.ts", + "default": "./dist/esm/lite-queuer.js" + }, + "require": { + "types": "./dist/cjs/lite-queuer.d.cts", + "default": "./dist/cjs/lite-queuer.cjs" + } + }, + "./lite-batcher": { + "import": { + "types": "./dist/esm/lite-batcher.d.ts", + "default": "./dist/esm/lite-batcher.js" + }, + "require": { + "types": "./dist/cjs/lite-batcher.d.cts", + "default": "./dist/cjs/lite-batcher.cjs" + } + }, + "./package.json": "./package.json" + }, + "sideEffects": false, + "engines": { + "node": ">=18" + }, + "files": [ + "dist/", + "src" + ], + "scripts": { + "clean": "premove ./build ./dist", + "lint:fix": "eslint ./src --fix", + "test:eslint": "eslint ./src", + "test:lib": "vitest", + "test:lib:dev": "pnpm test:lib --watch", + "test:types": "tsc", + "test:build": "publint --strict", + "build": "vite build" + }, + "devDependencies": { + "@tanstack/pacer": "workspace:*" + } +} diff --git a/packages/pacer-lite/src/index.ts b/packages/pacer-lite/src/index.ts new file mode 100644 index 00000000..dcb0c9f0 --- /dev/null +++ b/packages/pacer-lite/src/index.ts @@ -0,0 +1,5 @@ +export * from './lite-debouncer' +export * from './lite-throttler' +export * from './lite-rate-limiter' +export * from './lite-queuer' +export * from './lite-batcher' diff --git a/packages/pacer-lite/src/lite-batcher.ts b/packages/pacer-lite/src/lite-batcher.ts new file mode 100644 index 00000000..92806e41 --- /dev/null +++ b/packages/pacer-lite/src/lite-batcher.ts @@ -0,0 +1,267 @@ +/** + 
* Options for configuring a lite batcher instance + */ +export interface LiteBatcherOptions { + /** + * Custom function to determine if a batch should be processed + * Return true to process the batch immediately + */ + getShouldExecute?: ( + items: Array, + batcher: LiteBatcher, + ) => boolean + /** + * Maximum number of items in a batch + * @default Infinity + */ + maxSize?: number + /** + * Callback fired after a batch is processed + */ + onExecute?: (batch: Array, batcher: LiteBatcher) => void + /** + * Callback fired after items are added to the batcher + */ + onItemsChange?: (batcher: LiteBatcher) => void + /** + * Whether the batcher should start processing immediately + * @default true + */ + started?: boolean + /** + * Maximum time in milliseconds to wait before processing a batch. + * If the wait duration has elapsed, the batch will be processed. + * If not provided, the batch will not be triggered by a timeout. + * @default Infinity + */ + wait?: number | ((batcher: LiteBatcher) => number) +} + +/** + * A lightweight class that collects items and processes them in batches. + * + * This is an alternative to the Batcher in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core Batcher, + * this version does not use TanStack Store for state management, has no devtools integration, + * no callbacks, and provides only essential batching functionality. + * + * Batching is a technique for grouping multiple operations together to be processed as a single unit. + * This synchronous version is lighter weight and often all you need. 
+ * + * The Batcher provides a flexible way to implement batching with configurable: + * - Maximum batch size (number of items per batch) + * - Time-based batching (process after X milliseconds) + * - Custom batch processing logic via getShouldExecute + * + * Features included: + * - Core batching functionality (addItem, flush, clear, cancel) + * - Size-based batching (maxSize) + * - Time-based batching (wait timeout) + * - Custom condition batching (getShouldExecute) + * - Manual processing controls + * - Public mutable options + * - Callback support for monitoring batch execution and state changes + * + * Features NOT included (compared to core Batcher): + * - No TanStack Store state management + * - No devtools integration + * - No complex state tracking (execution counts, etc.) + * - No reactive state management + * + * @example + * ```ts + * // Basic batching + * const batcher = new LiteBatcher( + * (items) => console.log('Processing batch:', items), + * { + * maxSize: 5, + * wait: 2000, + * onExecute: (batch, batcher) => { + * console.log('Batch executed with', batch.length, 'items'); + * }, + * onItemsChange: (batcher) => { + * console.log('Batch size changed to:', batcher.size); + * } + * } + * ); + * + * batcher.addItem(1); + * batcher.addItem(2); + * // After 2 seconds or when 5 items are added, whichever comes first, + * // the batch will be processed + * ``` + * + * @example + * ```ts + * // Custom condition batching + * const batcher = new LiteBatcher( + * (items) => processTasks(items), + * { + * getShouldExecute: (items) => items.some(task => task.urgent), + * maxSize: 10, + * } + * ); + * + * batcher.addItem({ name: 'normal', urgent: false }); + * batcher.addItem({ name: 'urgent', urgent: true }); // Triggers immediate processing + * ``` + */ +export class LiteBatcher { + private items: Array = [] + private timeoutId: NodeJS.Timeout | null = null + private _isPending = false + + constructor( + public fn: (items: Array) => void, + public options: 
LiteBatcherOptions = {}, + ) { + // Set defaults + this.options.maxSize = this.options.maxSize ?? Infinity + this.options.started = this.options.started ?? true + this.options.wait = this.options.wait ?? Infinity + this.options.getShouldExecute = + this.options.getShouldExecute ?? (() => false) + } + + /** + * Number of items currently in the batch + */ + get size(): number { + return this.items.length + } + + /** + * Whether the batch has no items to process (items array is empty) + */ + get isEmpty(): boolean { + return this.items.length === 0 + } + + /** + * Whether the batcher is waiting for the timeout to trigger batch processing + */ + get isPending(): boolean { + return this._isPending + } + + private getWait(): number { + if (typeof this.options.wait === 'function') { + return this.options.wait(this) + } + return this.options.wait! + } + + /** + * Adds an item to the batcher + * If the batch size is reached, timeout occurs, or getShouldExecute returns true, the batch will be processed + */ + addItem = (item: TValue): void => { + this.items.push(item) + this._isPending = this.options.wait !== Infinity + this.options.onItemsChange?.(this) + + const shouldProcess = + this.items.length >= this.options.maxSize! || + this.options.getShouldExecute!(this.items, this) + + if (shouldProcess) { + this.execute() + } else if (this.options.wait !== Infinity) { + this.clearTimeout() // clear any pending timeout to replace it with a new one + this.timeoutId = setTimeout(() => this.execute(), this.getWait()) + } + } + + /** + * Processes the current batch of items. + * This method will automatically be triggered if the batcher is running and any of these conditions are met: + * - The number of items reaches maxSize + * - The wait duration has elapsed + * - The getShouldExecute function returns true upon adding an item + * + * You can also call this method manually to process the current batch at any time. 
+ */ + private execute = (): void => { + if (this.items.length === 0) { + return + } + + const batch = this.peekAllItems() // copy of the items to be processed (to prevent race conditions) + this.clear() // Clear items before processing to prevent race conditions + + this.fn(batch) // EXECUTE + this.options.onExecute?.(batch, this) + } + + /** + * Processes the current batch of items immediately + */ + flush = (): void => { + this.clearTimeout() // clear any pending timeout + this.execute() // execute immediately + } + + /** + * Returns a copy of all items in the batcher + */ + peekAllItems = (): Array => { + return [...this.items] + } + + private clearTimeout = (): void => { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + this.timeoutId = null + } + } + + /** + * Removes all items from the batcher + */ + clear = (): void => { + const hadItems = this.items.length > 0 + this.items = [] + this._isPending = false + if (hadItems) { + this.options.onItemsChange?.(this) + } + } + + /** + * Cancels any pending execution that was scheduled. + * Does NOT clear out the items. + */ + cancel = (): void => { + this.clearTimeout() + this._isPending = false + } +} + +/** + * Creates a batcher that processes items in batches. + * + * This is an alternative to the batch function in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core version, + * this function creates a batcher with no external dependencies, devtools integration, or reactive state. 
+ * + * @example + * ```ts + * const batchItems = liteBatch( + * (items) => console.log('Processing:', items), + * { + * maxSize: 3, + * } + * ); + * + * batchItems(1); + * batchItems(2); + * batchItems(3); // Triggers batch processing + * ``` + */ +export function liteBatch( + fn: (items: Array) => void, + options: LiteBatcherOptions = {}, +): (item: TValue) => void { + const batcher = new LiteBatcher(fn, options) + return batcher.addItem +} diff --git a/packages/pacer-lite/src/lite-debouncer.ts b/packages/pacer-lite/src/lite-debouncer.ts new file mode 100644 index 00000000..2c3adf79 --- /dev/null +++ b/packages/pacer-lite/src/lite-debouncer.ts @@ -0,0 +1,184 @@ +import type { AnyFunction } from '@tanstack/pacer/types' + +/** + * Options for configuring a lite debounced function + */ +export interface LiteDebouncerOptions { + /** + * Whether to execute on the leading edge of the timeout. + * The first call will execute immediately and the rest will wait the delay. + * Defaults to false. + */ + leading?: boolean + /** + * Callback function that is called after the function is executed + */ + onExecute?: (args: Parameters, debouncer: LiteDebouncer) => void + /** + * Whether to execute on the trailing edge of the timeout. + * Defaults to true. + */ + trailing?: boolean + /** + * Delay in milliseconds before executing the function. + */ + wait: number +} + +/** + * A lightweight class that creates a debounced function. + * + * This is an alternative to the Debouncer in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core Debouncer, + * this version does not use TanStack Store for state management, has no devtools integration, + * and provides only essential debouncing functionality. + * + * Debouncing ensures that a function is only executed after a certain amount of time has passed + * since its last invocation. 
This is useful for handling frequent events like window resizing, + * scroll events, or input changes where you want to limit the rate of execution. + * + * The debounced function can be configured to execute either at the start of the delay period + * (leading edge) or at the end (trailing edge, default). Each new call during the wait period + * will reset the timer. + * + * Features: + * - Zero dependencies - no external libraries required + * - Minimal API surface - only essential methods (maybeExecute, flush, cancel) + * - Simple state management - uses basic private properties instead of reactive stores + * - Callback support for monitoring execution events + * - Lightweight - designed for use in npm packages where bundle size matters + * + * @example + * ```ts + * const debouncer = new LiteDebouncer((value: string) => { + * saveToDatabase(value); + * }, { + * wait: 500, + * onExecute: (args, debouncer) => { + * console.log('Saved value:', args[0]); + * } + * }); + * + * // Will only save after 500ms of no new input + * inputElement.addEventListener('input', () => { + * debouncer.maybeExecute(inputElement.value); + * }); + * ``` + */ +export class LiteDebouncer { + private timeoutId: NodeJS.Timeout | undefined + private lastArgs: Parameters | undefined + private canLeadingExecute = true + + constructor( + public fn: TFn, + public options: LiteDebouncerOptions, + ) { + // Default trailing to true if neither leading nor trailing is specified + if ( + this.options.leading === undefined && + this.options.trailing === undefined + ) { + this.options.trailing = true + } + } + + /** + * Attempts to execute the debounced function. + * If leading is true and this is the first call, executes immediately. + * Otherwise, queues the execution for after the wait time. + * Each new call resets the timer. 
+ */ + maybeExecute = (...args: Parameters): void => { + let didLeadingExecute = false + + if (this.options.leading && this.canLeadingExecute) { + this.canLeadingExecute = false + didLeadingExecute = true + this.fn(...args) + this.options.onExecute?.(args, this) + } + + this.lastArgs = args + + if (this.timeoutId) { + clearTimeout(this.timeoutId) + } + + this.timeoutId = setTimeout(() => { + this.canLeadingExecute = true + if (this.options.trailing && !didLeadingExecute && this.lastArgs) { + this.fn(...this.lastArgs) + this.options.onExecute?.(this.lastArgs, this) + } + this.lastArgs = undefined + }, this.options.wait) + } + + /** + * Processes the current pending execution immediately. + * If there's a pending execution, it will be executed right away + * and the timeout will be cleared. + */ + flush = (): void => { + if (this.timeoutId && this.lastArgs) { + clearTimeout(this.timeoutId) + this.timeoutId = undefined + const args = this.lastArgs + this.fn(...args) + this.options.onExecute?.(args, this) + this.lastArgs = undefined + this.canLeadingExecute = true + } + } + + /** + * Cancels any pending execution. + * Clears the timeout and resets the internal state. + */ + cancel = (): void => { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + this.timeoutId = undefined + } + this.lastArgs = undefined + this.canLeadingExecute = true + } +} + +/** + * Creates a lightweight debounced function that delays invoking the provided function until after a specified wait time. + * Multiple calls during the wait period will cancel previous pending invocations and reset the timer. + * + * This is an alternative to the debounce function in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core version, + * this function creates a debouncer with no external dependencies, devtools integration, or reactive state. 
+ * + * If leading option is true, the function will execute immediately on the first call, then wait the delay + * before allowing another execution. + * + * @example + * ```ts + * const debouncedSave = liteDebounce(() => { + * saveChanges(); + * }, { wait: 1000 }); + * + * // Called repeatedly but executes at most once per second + * inputElement.addEventListener('input', debouncedSave); + * ``` + * + * @example + * ```ts + * // Leading edge execution - fires immediately then waits + * const debouncedSearch = liteDebounce((query: string) => { + * performSearch(query); + * }, { wait: 300, leading: true }); + * ``` + */ +export function liteDebounce( + fn: TFn, + options: LiteDebouncerOptions, +): (...args: Parameters) => void { + const debouncer = new LiteDebouncer(fn, options) + return debouncer.maybeExecute +} diff --git a/packages/pacer-lite/src/lite-queuer.ts b/packages/pacer-lite/src/lite-queuer.ts new file mode 100644 index 00000000..5010eaee --- /dev/null +++ b/packages/pacer-lite/src/lite-queuer.ts @@ -0,0 +1,434 @@ +/** + * Position type for addItem and getNextItem operations. 
+ * + * - 'front': Operate on the front of the queue (FIFO for getNextItem) + * - 'back': Operate on the back of the queue (LIFO for getNextItem) + */ +export type QueuePosition = 'front' | 'back' + +/** + * Options for configuring a lite queuer instance + */ +export interface LiteQueuerOptions { + /** + * Default position to add items to the queue + * @default 'back' + */ + addItemsTo?: QueuePosition + /** + * Default position to get items from during processing + * @default 'front' + */ + getItemsFrom?: QueuePosition + /** + * Function to determine priority of items in the queue + * Higher priority items will be processed first + * Return undefined for items that should use positional ordering + */ + getPriority?: (item: TValue) => number | undefined + /** + * Initial items to populate the queue with + */ + initialItems?: Array + /** + * Maximum number of items allowed in the queue + */ + maxSize?: number + /** + * Whether the queuer should start processing items immediately + * @default true + */ + started?: boolean + /** + * Time in milliseconds to wait between processing items + * @default 0 + */ + wait?: number +} + +/** + * A lightweight class that creates a queue for processing items. + * + * This is an alternative to the Queuer in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core Queuer, + * this version does not use TanStack Store for state management, has no devtools integration, + * no callbacks, and provides only essential queueing functionality. + * + * The queuer supports FIFO (First In First Out), LIFO (Last In First Out), and priority-based + * processing of items. Items can be processed automatically with configurable wait times + * between executions, or processed manually using the execute methods. 
+ * + * Features included: + * - Automatic or manual processing of items + * - FIFO, LIFO, and priority-based ordering + * - Queue size limits with item rejection + * - Configurable wait times between processing + * - Batch processing capabilities + * - Start/stop processing control + * - Callback support for monitoring execution, rejection, and state change events + * + * Features NOT included (compared to core Queuer): + * - No TanStack Store state management + * - No devtools integration + * - No item expiration functionality (no onExpire callback) + * - No dynamic options updates (setOptions) + * - No detailed state tracking (execution counts, etc.) + * + * Queue behavior: + * - Default: FIFO (add to back, process from front) + * - LIFO: Configure addItemsTo: 'back', getItemsFrom: 'back' + * - Priority: Provide getPriority function; higher values processed first + * + * @example + * ```ts + * // Basic FIFO queue + * const queue = new LiteQueuer((item: string) => { + * console.log('Processing:', item); + * }, { wait: 100 }); + * + * queue.addItem('task1'); + * queue.addItem('task2'); + * // Processes: task1, then task2 after 100ms delay + * ``` + * + * @example + * ```ts + * // Priority queue + * const priorityQueue = new LiteQueuer((item: Task) => { + * processTask(item); + * }, { + * getPriority: task => task.priority, + * wait: 500 + * }); + * + * priorityQueue.addItem({ name: 'low', priority: 1 }); + * priorityQueue.addItem({ name: 'high', priority: 10 }); + * // Processes high priority task first + * ``` + */ +export class LiteQueuer { + private items: Array = [] + private timeoutId: NodeJS.Timeout | null = null + private isRunning = true + private pendingTick = false + + constructor( + public fn: (item: TValue) => void, + public options: LiteQueuerOptions = {}, + ) { + // Set defaults + this.options.addItemsTo = this.options.addItemsTo ?? 'back' + this.options.getItemsFrom = this.options.getItemsFrom ?? 
'front' + this.options.maxSize = this.options.maxSize ?? Infinity + this.options.started = this.options.started ?? true + this.options.wait = this.options.wait ?? 0 + + this.isRunning = this.options.started + + // Add initial items if provided + if (this.options.initialItems) { + for (const item of this.options.initialItems) { + this.addItem(item, this.options.addItemsTo, false) + } + } + + // Start processing if enabled and has items + if (this.isRunning && this.items.length > 0) { + this.tick() + } + } + + /** + * Number of items currently in the queue + */ + get size(): number { + return this.items.length + } + + /** + * Whether the queue is empty + */ + get isEmpty(): boolean { + return this.items.length === 0 + } + + /** + * Whether the queue is currently running (auto-processing items) + */ + get isQueueRunning(): boolean { + return this.isRunning + } + + /** + * Adds an item to the queue. If the queue is full, the item is rejected. + * Items can be inserted at the front or back, and priority ordering is applied if getPriority is configured. + * + * Returns true if the item was added, false if the queue is full. + * + * @example + * ```ts + * queue.addItem('task1'); // Add to default position (back) + * queue.addItem('task2', 'front'); // Add to front + * ``` + */ + addItem = ( + item: TValue, + position: QueuePosition = this.options.addItemsTo!, + startProcessing: boolean = true, + ): boolean => { + // Check size limit + if (this.items.length >= this.options.maxSize!) { + return false + } + + // Handle priority insertion + if (this.options.getPriority) { + const priority = this.options.getPriority(item) + if (priority !== undefined) { + // Find insertion point for priority + const insertIndex = this.items.findIndex((existing) => { + const existingPriority = this.options.getPriority!(existing) + // Treat undefined priority as negative infinity for comparison + const effectivePriority = existingPriority ?? 
-Infinity + return effectivePriority < priority + }) + + if (insertIndex === -1) { + this.items.push(item) + } else { + this.items.splice(insertIndex, 0, item) + } + } else { + // No priority, use position + this.insertAtPosition(item, position) + } + } else { + // No priority function, use position + this.insertAtPosition(item, position) + } + + // Start processing if running and not already processing + if (startProcessing && this.isRunning && !this.pendingTick) { + this.tick() + } + + return true + } + + private insertAtPosition = (item: TValue, position: QueuePosition): void => { + if (position === 'front') { + this.items.unshift(item) + } else { + this.items.push(item) + } + } + + /** + * Removes and returns the next item from the queue without executing the function. + * Use for manual queue management. Normally, use execute() to process items. + * + * @example + * ```ts + * const nextItem = queue.getNextItem(); // Get from default position (front) + * const lastItem = queue.getNextItem('back'); // Get from back (LIFO) + * ``` + */ + getNextItem = ( + position: QueuePosition = this.options.getItemsFrom!, + ): TValue | undefined => { + if (this.items.length === 0) { + return undefined + } + + let item: TValue | undefined + + // When priority function is provided, always get from front (highest priority) + if (this.options.getPriority || position === 'front') { + item = this.items.shift() + } else { + item = this.items.pop() + } + + return item + } + + /** + * Removes and returns the next item from the queue and processes it using the provided function. 
+ * + * @example + * ```ts + * queue.execute(); // Execute from default position + * queue.execute('back'); // Execute from back (LIFO) + * ``` + */ + execute = (position?: QueuePosition): TValue | undefined => { + const item = this.getNextItem(position) + if (item !== undefined) { + this.fn(item) + } + return item + } + + /** + * Internal method that processes items in the queue with wait intervals + */ + private tick = (): void => { + if (!this.isRunning) { + this.pendingTick = false + return + } + + this.pendingTick = true + + // Process items while queue is not empty + while (this.items.length > 0) { + const item = this.execute(this.options.getItemsFrom) + if (item === undefined) { + break + } + + const wait = this.options.wait! + if (wait > 0) { + // Schedule next processing after wait time + this.timeoutId = setTimeout(() => this.tick(), wait) + return + } + + // No wait time, continue processing immediately + } + + this.pendingTick = false + } + + /** + * Starts processing items in the queue. If already running, does nothing. + */ + start = (): void => { + this.isRunning = true + if (!this.pendingTick && this.items.length > 0) { + this.tick() + } + } + + /** + * Stops processing items in the queue. Does not clear the queue. + */ + stop = (): void => { + this.clearTimeout() + this.isRunning = false + this.pendingTick = false + } + + /** + * Clears any pending timeout + */ + private clearTimeout = (): void => { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + this.timeoutId = null + } + } + + /** + * Returns the next item in the queue without removing it. 
+ * + * @example + * ```ts + * const next = queue.peekNextItem(); // Peek at front + * const last = queue.peekNextItem('back'); // Peek at back + * ``` + */ + peekNextItem = (position: QueuePosition = 'front'): TValue | undefined => { + if (this.items.length === 0) { + return undefined + } + + if (this.options.getPriority || position === 'front') { + return this.items[0] + } else { + return this.items[this.items.length - 1] + } + } + + /** + * Returns a copy of all items in the queue. + */ + peekAllItems = (): Array => { + return [...this.items] + } + + /** + * Processes a specified number of items immediately with no wait time. + * If no numberOfItems is provided, all items will be processed. + * + * @example + * ```ts + * queue.flush(); // Process all items immediately + * queue.flush(3); // Process next 3 items immediately + * ``` + */ + flush = ( + numberOfItems: number = this.items.length, + position?: QueuePosition, + ): void => { + this.clearTimeout() // Clear any pending timeout + for (let i = 0; i < numberOfItems && this.items.length > 0; i++) { + this.execute(position) + } + // Restart normal processing if still running and has items + if (this.isRunning && this.items.length > 0 && !this.pendingTick) { + this.tick() + } + } + + /** + * Processes all items in the queue as a batch using the provided function. + * The queue is cleared after processing. + * + * @example + * ```ts + * queue.flushAsBatch((items) => { + * console.log('Processing batch:', items); + * // Process all items together + * }); + * ``` + */ + flushAsBatch = (batchFunction: (items: Array) => void): void => { + const items = this.peekAllItems() + this.clear() + batchFunction(items) + } + + /** + * Removes all items from the queue. Does not affect items being processed. + */ + clear = (): void => { + this.items = [] + } +} + +/** + * Creates a lightweight queue that processes items using the provided function. 
+ * + * This is an alternative to the queue function in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core version, + * this function creates a queuer with no external dependencies, devtools integration, or reactive state. + * + * @example + * ```ts + * const processItem = liteQueue((item: string) => { + * console.log('Processing:', item); + * }, { wait: 1000 }); + * + * processItem('task1'); + * processItem('task2'); + * // Processes each item with 1 second delay between them + * ``` + */ +export function liteQueue( + fn: (item: TValue) => void, + options: LiteQueuerOptions = {}, +): (item: TValue) => boolean { + const queuer = new LiteQueuer(fn, options) + return (item: TValue) => queuer.addItem(item) +} diff --git a/packages/pacer-lite/src/lite-rate-limiter.ts b/packages/pacer-lite/src/lite-rate-limiter.ts new file mode 100644 index 00000000..e35d6e70 --- /dev/null +++ b/packages/pacer-lite/src/lite-rate-limiter.ts @@ -0,0 +1,246 @@ +import type { AnyFunction } from '@tanstack/pacer/types' + +/** + * Options for configuring a lite rate-limited function + */ +export interface LiteRateLimiterOptions { + /** + * Maximum number of executions allowed within the time window. + */ + limit: number + /** + * Callback function that is called after the function is executed + */ + onExecute?: (args: Parameters, rateLimiter: LiteRateLimiter) => void + /** + * Optional callback function that is called when an execution is rejected due to rate limiting + */ + onReject?: (rateLimiter: LiteRateLimiter) => void + /** + * Time window in milliseconds within which the limit applies. 
+ */ + window: number + /** + * Type of window to use for rate limiting + * - 'fixed': Uses a fixed window that resets after the window period + * - 'sliding': Uses a sliding window that allows executions as old ones expire + * Defaults to 'fixed' + */ + windowType?: 'fixed' | 'sliding' +} + +/** + * A lightweight class that creates a rate-limited function. + * + * This is an alternative to the RateLimiter in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core RateLimiter, + * this version does not use TanStack Store for state management, has no devtools integration, + * and provides only essential rate limiting functionality. + * + * Rate limiting allows a function to execute up to a limit within a time window, + * then blocks all subsequent calls until the window passes. This can lead to "bursty" behavior where + * all executions happen immediately, followed by a complete block. + * + * The rate limiter supports two types of windows: + * - 'fixed': A strict window that resets after the window period. All executions within the window count + * towards the limit, and the window resets completely after the period. + * - 'sliding': A rolling window that allows executions as old ones expire. This provides a more + * consistent rate of execution over time. 
+ * + * Features: + * - Zero dependencies - no external libraries required + * - Minimal API surface - only essential methods (maybeExecute, getRemainingInWindow, getMsUntilNextWindow, reset) + * - Simple state management - uses basic private properties instead of reactive stores + * - Lightweight - designed for use in npm packages where bundle size matters + * + * @example + * ```ts + * const rateLimiter = new LiteRateLimiter((id: string) => { + * api.getData(id); + * }, { limit: 5, window: 1000 }); + * + * // First 5 calls will execute, then block until window resets + * if (rateLimiter.maybeExecute('123')) { + * console.log('API call made'); + * } else { + * console.log('Rate limited - try again in', rateLimiter.getMsUntilNextWindow(), 'ms'); + * } + * ``` + */ +export class LiteRateLimiter { + private executionTimes: Array = [] + private timeoutIds: Set = new Set() + + constructor( + public fn: TFn, + public options: LiteRateLimiterOptions, + ) { + // Default windowType to 'fixed' if not specified + if (this.options.windowType === undefined) { + this.options.windowType = 'fixed' + } + } + + /** + * Attempts to execute the rate-limited function if within the configured limits. + * Returns true if executed, false if rejected due to rate limiting. 
+ * + * @example + * ```ts + * const rateLimiter = new LiteRateLimiter(fn, { limit: 5, window: 1000 }); + * + * // First 5 calls return true + * rateLimiter.maybeExecute('arg1', 'arg2'); // true + * + * // Additional calls within the window return false + * rateLimiter.maybeExecute('arg1', 'arg2'); // false + * ``` + */ + maybeExecute = (...args: Parameters): boolean => { + this.cleanupOldExecutions() + + const relevantExecutionTimes = this.getExecutionTimesInWindow() + + if (relevantExecutionTimes.length < this.options.limit) { + this.execute(...args) + return true + } + + this.options.onReject?.(this) + return false + } + + private execute = (...args: Parameters): void => { + const now = Date.now() + this.fn(...args) + this.options.onExecute?.(args, this) + this.executionTimes.push(now) + this.setCleanupTimeout(now) + } + + private getExecutionTimesInWindow = (): Array => { + if (this.options.windowType === 'sliding') { + // For sliding window, return all executions within the current window + return this.executionTimes.filter( + (time) => time > Date.now() - this.options.window, + ) + } else { + // For fixed window, return all executions in the current window + if (this.executionTimes.length === 0) { + return [] + } + const oldestExecution = Math.min(...this.executionTimes) + const windowStart = oldestExecution + const windowEnd = windowStart + this.options.window + const now = Date.now() + + // If the window has expired, return empty array + if (now > windowEnd) { + return [] + } + + // Otherwise, return all executions in the current window + return this.executionTimes.filter( + (time) => time >= windowStart && time <= windowEnd, + ) + } + } + + private setCleanupTimeout = (executionTime: number): void => { + if ( + this.options.windowType === 'sliding' || + this.timeoutIds.size === 0 // new fixed window + ) { + const now = Date.now() + const timeUntilExpiration = executionTime - now + this.options.window + 1 + const timeoutId = setTimeout(() => { + 
this.cleanupOldExecutions() + this.clearTimeout(timeoutId) + }, timeUntilExpiration) + this.timeoutIds.add(timeoutId) + } + } + + private clearTimeout = (timeoutId: NodeJS.Timeout): void => { + clearTimeout(timeoutId) + this.timeoutIds.delete(timeoutId) + } + + private clearTimeouts = (): void => { + this.timeoutIds.forEach((timeoutId) => clearTimeout(timeoutId)) + this.timeoutIds.clear() + } + + private cleanupOldExecutions = (): void => { + this.executionTimes = this.getExecutionTimesInWindow() + } + + /** + * Returns the number of remaining executions allowed in the current window. + */ + getRemainingInWindow = (): number => { + const relevantExecutionTimes = this.getExecutionTimesInWindow() + return Math.max(0, this.options.limit - relevantExecutionTimes.length) + } + + /** + * Returns the number of milliseconds until the next execution will be possible. + * Returns 0 if executions are currently allowed. + */ + getMsUntilNextWindow = (): number => { + if (this.getRemainingInWindow() > 0) { + return 0 + } + const oldestExecution = this.executionTimes[0] ?? Infinity + return oldestExecution + this.options.window - Date.now() + } + + /** + * Resets the rate limiter state, clearing all execution history. + */ + reset = (): void => { + this.executionTimes = [] + this.clearTimeouts() + } +} + +/** + * Creates a lightweight rate-limited function that will execute the provided function up to a maximum number of times within a time window. + * + * This is an alternative to the rateLimit function in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core version, + * this function creates a rate limiter with no external dependencies, devtools integration, or reactive state. + * + * Rate limiting allows all executions until the limit is reached, then blocks all subsequent calls until the window resets. 
+ * This differs from throttling (which ensures even spacing) and debouncing (which waits for pauses). + * + * @example + * ```ts + * const rateLimitedApi = liteRateLimit(makeApiCall, { + * limit: 5, + * window: 60000, // 1 minute + * windowType: 'sliding' + * }); + * + * // First 5 calls execute immediately + * // Additional calls are rejected until window allows + * rateLimitedApi(); + * ``` + * + * @example + * ```ts + * // Fixed window - all 10 calls happen in first second, then 10 second wait + * const rateLimitedFixed = liteRateLimit(logEvent, { + * limit: 10, + * window: 10000, + * windowType: 'fixed' + * }); + * ``` + */ +export function liteRateLimit( + fn: TFn, + options: LiteRateLimiterOptions, +): (...args: Parameters) => boolean { + const rateLimiter = new LiteRateLimiter(fn, options) + return rateLimiter.maybeExecute +} diff --git a/packages/pacer-lite/src/lite-throttler.ts b/packages/pacer-lite/src/lite-throttler.ts new file mode 100644 index 00000000..48360b5f --- /dev/null +++ b/packages/pacer-lite/src/lite-throttler.ts @@ -0,0 +1,195 @@ +import type { AnyFunction } from '@tanstack/pacer/types' + +/** + * Options for configuring a lite throttled function + */ +export interface LiteThrottlerOptions { + /** + * Whether to execute on the leading edge of the timeout. + * Defaults to true. + */ + leading?: boolean + /** + * Callback function that is called after the function is executed + */ + onExecute?: (args: Parameters, throttler: LiteThrottler) => void + /** + * Whether to execute on the trailing edge of the timeout. + * Defaults to true. + */ + trailing?: boolean + /** + * Time window in milliseconds during which the function can only be executed once. + */ + wait: number +} + +/** + * A lightweight class that creates a throttled function. + * + * This is an alternative to the Throttler in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. 
Unlike the core Throttler, + * this version does not use TanStack Store for state management, has no devtools integration, + * and provides only essential throttling functionality. + * + * Throttling ensures a function is called at most once within a specified time window. + * Unlike debouncing which waits for a pause in calls, throttling guarantees consistent + * execution timing regardless of call frequency. + * + * Supports both leading and trailing edge execution: + * - Leading: Execute immediately on first call (default: true) + * - Trailing: Execute after wait period if called during throttle (default: true) + * + * Features: + * - Zero dependencies - no external libraries required + * - Minimal API surface - only essential methods (maybeExecute, flush, cancel) + * - Simple state management - uses basic private properties instead of reactive stores + * - Callback support for monitoring execution events + * - Lightweight - designed for use in npm packages where bundle size matters + * + * @example + * ```ts + * const throttler = new LiteThrottler((scrollY: number) => { + * updateScrollPosition(scrollY); + * }, { + * wait: 100, + * onExecute: (args, throttler) => { + * console.log('Updated scroll position:', args[0]); + * } + * }); + * + * // Will execute at most once per 100ms + * window.addEventListener('scroll', () => { + * throttler.maybeExecute(window.scrollY); + * }); + * ``` + */ +export class LiteThrottler { + private timeoutId: NodeJS.Timeout | undefined + private lastArgs: Parameters | undefined + private lastExecutionTime = 0 + private isPending = false + + constructor( + public fn: TFn, + public options: LiteThrottlerOptions, + ) { + // Default both leading and trailing to true if neither is specified + if ( + this.options.leading === undefined && + this.options.trailing === undefined + ) { + this.options.leading = true + this.options.trailing = true + } + } + + /** + * Attempts to execute the throttled function. 
The execution behavior depends on the throttler options: + * + * - If enough time has passed since the last execution (>= wait period): + * - With leading=true: Executes immediately + * - With leading=false: Waits for the next trailing execution + * + * - If within the wait period: + * - With trailing=true: Schedules execution for end of wait period + * - With trailing=false: Drops the execution + */ + maybeExecute = (...args: Parameters): void => { + const now = Date.now() + const timeSinceLastExecution = now - this.lastExecutionTime + + // Handle leading execution + if (this.options.leading && timeSinceLastExecution >= this.options.wait) { + this.execute(...args) + } else { + // Store the most recent arguments for potential trailing execution + this.lastArgs = args + + // Set up trailing execution if not already scheduled + if (!this.timeoutId && this.options.trailing) { + const timeoutDuration = this.options.wait - timeSinceLastExecution + this.isPending = true + this.timeoutId = setTimeout(() => { + if (this.lastArgs !== undefined) { + this.execute(...this.lastArgs) + } + }, timeoutDuration) + } + } + } + + private execute = (...args: Parameters): void => { + this.fn(...args) + this.options.onExecute?.(args, this) + this.lastExecutionTime = Date.now() + this.clearTimeout() + this.lastArgs = undefined + this.isPending = false + } + + /** + * Processes the current pending execution immediately. + * If there's a pending execution, it will be executed right away + * and the timeout will be cleared. + */ + flush = (): void => { + if (this.isPending && this.lastArgs) { + this.execute(...this.lastArgs) + } + } + + /** + * Cancels any pending trailing execution and clears internal state. + * If a trailing execution is scheduled, this will prevent that execution from occurring. 
+ */ + cancel = (): void => { + this.clearTimeout() + this.lastArgs = undefined + this.isPending = false + } + + private clearTimeout = (): void => { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + this.timeoutId = undefined + } + } +} + +/** + * Creates a lightweight throttled function that limits how often the provided function can execute. + * + * This is an alternative to the throttle function in the core @tanstack/pacer package, but is more + * suitable for libraries and npm packages that need minimal overhead. Unlike the core version, + * this function creates a throttler with no external dependencies, devtools integration, or reactive state. + * + * Throttling ensures a function executes at most once within a specified time window, + * regardless of how many times it is called. This is useful for rate-limiting + * expensive operations or UI updates. + * + * @example + * ```ts + * const throttledScroll = liteThrottle(() => { + * updateScrollIndicator(); + * }, { wait: 100 }); + * + * // Will execute at most once per 100ms + * window.addEventListener('scroll', throttledScroll); + * ``` + * + * @example + * ```ts + * // Leading edge execution - fires immediately then throttles + * const throttledResize = liteThrottle(() => { + * recalculateLayout(); + * }, { wait: 250, leading: true, trailing: false }); + * ``` + */ +export function liteThrottle( + fn: TFn, + options: LiteThrottlerOptions, +): (...args: Parameters) => void { + const throttler = new LiteThrottler(fn, options) + return throttler.maybeExecute +} diff --git a/packages/pacer-lite/tests/lite-batcher.test.ts b/packages/pacer-lite/tests/lite-batcher.test.ts new file mode 100644 index 00000000..4974557b --- /dev/null +++ b/packages/pacer-lite/tests/lite-batcher.test.ts @@ -0,0 +1,848 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { LiteBatcher, liteBatch } from '../src/lite-batcher' + +describe('LiteBatcher', () => { + beforeEach(() => { + 
vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Functionality', () => { + it('should create an empty batcher', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + expect(batcher.size).toBe(0) + expect(batcher.isEmpty).toBe(true) + expect(batcher.isPending).toBe(false) + }) + + it('should start with started: true by default', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, {}) + + expect(batcher.options.started).toBe(true) + }) + + it('should set default options correctly', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, {}) + + expect(batcher.options.maxSize).toBe(Infinity) + expect(batcher.options.started).toBe(true) + expect(batcher.options.wait).toBe(Infinity) + expect(typeof batcher.options.getShouldExecute).toBe('function') + expect(batcher.options.getShouldExecute!([], batcher)).toBe(false) + }) + }) + + describe('addItem', () => { + it('should add items to the batch', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + batcher.addItem(1) + expect(batcher.size).toBe(1) + expect(batcher.peekAllItems()).toEqual([1]) + expect(batcher.isEmpty).toBe(false) + }) + + it('should add multiple items to the batch', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + batcher.addItem(1) + batcher.addItem(2) + batcher.addItem(3) + + expect(batcher.size).toBe(3) + expect(batcher.peekAllItems()).toEqual([1, 2, 3]) + }) + + it('should set isPending when wait is configured', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { wait: 100, started: false }) + + expect(batcher.isPending).toBe(false) + + batcher.addItem(1) + expect(batcher.isPending).toBe(true) + }) + + it('should not set isPending when wait is Infinity', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { + wait: Infinity, + 
started: false, + }) + + batcher.addItem(1) + expect(batcher.isPending).toBe(false) + }) + }) + + describe('Size-based Batching', () => { + it('should process batch when maxSize is reached', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { maxSize: 3, started: false }) + + batcher.addItem(1) + batcher.addItem(2) + expect(mockFn).not.toHaveBeenCalled() + + batcher.addItem(3) + expect(mockFn).toHaveBeenCalledWith([1, 2, 3]) + expect(batcher.isEmpty).toBe(true) + }) + + it('should clear items after processing', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { maxSize: 2, started: false }) + + batcher.addItem('a') + batcher.addItem('b') + + expect(mockFn).toHaveBeenCalledWith(['a', 'b']) + expect(batcher.size).toBe(0) + expect(batcher.isEmpty).toBe(true) + }) + }) + + describe('Time-based Batching', () => { + it('should process batch after wait timeout', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { wait: 100, started: false }) + + batcher.addItem('test1') + batcher.addItem('test2') + + expect(mockFn).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + expect(batcher.isEmpty).toBe(true) + }) + + it('should reset timeout when new items are added', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { wait: 100, started: false }) + + batcher.addItem('test1') + vi.advanceTimersByTime(50) + + batcher.addItem('test2') + vi.advanceTimersByTime(50) // Should not trigger yet + + expect(mockFn).not.toHaveBeenCalled() + + vi.advanceTimersByTime(50) // Now it should trigger + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + }) + + it('should support function-based wait', () => { + const mockFn = vi.fn() + const getWait = vi.fn(() => 200) + const batcher = new LiteBatcher(mockFn, { + wait: getWait, + started: false, + }) + + batcher.addItem('test') + expect(getWait).toHaveBeenCalledWith(batcher) + 
+ vi.advanceTimersByTime(200) + expect(mockFn).toHaveBeenCalledWith(['test']) + }) + }) + + describe('Custom Condition Batching', () => { + it('should process immediately when getShouldExecute returns true', () => { + const mockFn = vi.fn() + const getShouldExecute = vi.fn((items: any[]) => items.length >= 2) + const batcher = new LiteBatcher(mockFn, { + getShouldExecute, + started: false, + }) + + batcher.addItem('test1') + expect(mockFn).not.toHaveBeenCalled() + + batcher.addItem('test2') + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + }) + + it('should pass items and batcher to getShouldExecute', () => { + const mockFn = vi.fn() + const getShouldExecute = vi.fn(() => true) + const batcher = new LiteBatcher(mockFn, { + getShouldExecute, + started: false, + }) + + batcher.addItem('test') + + expect(getShouldExecute).toHaveBeenCalledWith(['test'], batcher) + }) + + it('should combine with other triggers', () => { + const mockFn = vi.fn() + const getShouldExecute = vi.fn((items: any[]) => + items.some((item) => item.urgent), + ) + const batcher = new LiteBatcher(mockFn, { + maxSize: 5, + getShouldExecute, + started: false, + }) + + batcher.addItem({ name: 'normal', urgent: false }) + batcher.addItem({ name: 'normal2', urgent: false }) + expect(mockFn).not.toHaveBeenCalled() + + batcher.addItem({ name: 'urgent', urgent: true }) + expect(mockFn).toHaveBeenCalledWith([ + { name: 'normal', urgent: false }, + { name: 'normal2', urgent: false }, + { name: 'urgent', urgent: true }, + ]) + }) + }) + + describe('Manual Processing', () => { + describe('flush', () => { + it('should process all items immediately', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { + wait: 1000, + started: false, + }) + + batcher.addItem('test1') + batcher.addItem('test2') + batcher.addItem('test3') + + expect(mockFn).not.toHaveBeenCalled() + + batcher.flush() + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2', 'test3']) + 
expect(batcher.isEmpty).toBe(true) + }) + + it('should clear pending timeout when flushing', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { wait: 1000, started: false }) + + batcher.addItem('test') + expect(batcher.isPending).toBe(true) + + batcher.flush() + + expect(mockFn).toHaveBeenCalledWith(['test']) + expect(batcher.isPending).toBe(false) + + // Timeout should not trigger anymore + vi.advanceTimersByTime(1000) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should do nothing when batch is empty', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + batcher.flush() + expect(mockFn).not.toHaveBeenCalled() + }) + }) + + describe('clear', () => { + it('should remove all items from the batch', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + batcher.addItem('test1') + batcher.addItem('test2') + + expect(batcher.size).toBe(2) + + batcher.clear() + + expect(batcher.isEmpty).toBe(true) + expect(batcher.size).toBe(0) + expect(batcher.peekAllItems()).toEqual([]) + }) + + it('should reset isPending state', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { wait: 100, started: false }) + + batcher.addItem('test') + expect(batcher.isPending).toBe(true) + + batcher.clear() + expect(batcher.isPending).toBe(false) + }) + }) + + describe('cancel', () => { + it('should cancel pending execution without clearing items', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { wait: 100, started: false }) + + batcher.addItem('test1') + batcher.addItem('test2') + + expect(batcher.isPending).toBe(true) + expect(batcher.size).toBe(2) + + batcher.cancel() + + expect(batcher.isPending).toBe(false) + expect(batcher.size).toBe(2) // Items still there + expect(batcher.peekAllItems()).toEqual(['test1', 'test2']) + + vi.advanceTimersByTime(100) + expect(mockFn).not.toHaveBeenCalled() + }) + }) + }) + + 
describe('Utility Methods', () => { + describe('peekAllItems', () => { + it('should return copy of all items', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + batcher.addItem('test1') + batcher.addItem('test2') + + const items = batcher.peekAllItems() + expect(items).toEqual(['test1', 'test2']) + + // Should be a copy, not reference + items.push('test3') + expect(batcher.peekAllItems()).toEqual(['test1', 'test2']) + }) + + it('should return empty array when batch is empty', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + expect(batcher.peekAllItems()).toEqual([]) + }) + }) + }) + + describe('Edge Cases', () => { + it('should handle rapid item additions', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { maxSize: 100, started: false }) + + for (let i = 0; i < 50; i++) { + batcher.addItem(i) + } + + expect(batcher.size).toBe(50) + expect(mockFn).not.toHaveBeenCalled() + }) + + it('should handle zero wait time', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { wait: 0, started: false }) + + batcher.addItem('test1') + batcher.addItem('test2') + + vi.runAllTimers() + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + }) + + it('should handle adding items after batch is processed', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { maxSize: 2, started: false }) + + batcher.addItem('test1') + batcher.addItem('test2') + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + mockFn.mockClear() + + batcher.addItem('test3') + batcher.addItem('test4') + + expect(mockFn).toHaveBeenCalledWith(['test3', 'test4']) + }) + + it('should handle maxSize of 1', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { maxSize: 1, started: false }) + + batcher.addItem('test1') + expect(mockFn).toHaveBeenCalledWith(['test1']) + + batcher.addItem('test2') + 
expect(mockFn).toHaveBeenCalledWith(['test2']) + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should handle simultaneous triggers', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { + maxSize: 2, + wait: 100, + getShouldExecute: (items) => items.length >= 2, + started: false, + }) + + batcher.addItem('test1') + batcher.addItem('test2') // Should trigger immediately + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + expect(batcher.isEmpty).toBe(true) + + // Timeout should not trigger anything + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + }) + + describe('Options Mutability', () => { + it('should allow modifying options after creation', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { maxSize: 5, started: false }) + + expect(batcher.options.maxSize).toBe(5) + + batcher.options.maxSize = 2 + + batcher.addItem('test1') + batcher.addItem('test2') + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + }) + + it('should allow modifying getShouldExecute after creation', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + batcher.addItem('test1') + batcher.addItem('test2') + expect(mockFn).not.toHaveBeenCalled() + + batcher.options.getShouldExecute = (items) => items.length >= 2 + batcher.addItem('test3') + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2', 'test3']) + }) + }) + + describe('Callbacks', () => { + it('should call onExecute when batch is processed', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const batcher = new LiteBatcher(mockFn, { + maxSize: 2, + started: false, + onExecute, + }) + + batcher.addItem('item1') + batcher.addItem('item2') // Should trigger execution + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledWith(['item1', 'item2'], batcher) + expect(mockFn).toHaveBeenCalledWith(['item1', 'item2']) + }) + + it('should call onExecute with 
time-based batching', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const batcher = new LiteBatcher(mockFn, { + wait: 100, + started: false, + onExecute, + }) + + batcher.addItem('item1') + batcher.addItem('item2') + + expect(onExecute).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledWith(['item1', 'item2'], batcher) + }) + + it('should call onExecute during manual flush', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const batcher = new LiteBatcher(mockFn, { + wait: 1000, + started: false, + onExecute, + }) + + batcher.addItem('item1') + batcher.addItem('item2') + batcher.flush() + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledWith(['item1', 'item2'], batcher) + }) + + it('should call onItemsChange when items are added', () => { + const mockFn = vi.fn() + const onItemsChange = vi.fn() + const batcher = new LiteBatcher(mockFn, { + started: false, + onItemsChange, + }) + + batcher.addItem('item1') + expect(onItemsChange).toHaveBeenCalledTimes(1) + expect(onItemsChange).toHaveBeenCalledWith(batcher) + + batcher.addItem('item2') + expect(onItemsChange).toHaveBeenCalledTimes(2) + }) + + it('should call onItemsChange when batch is cleared', () => { + const mockFn = vi.fn() + const onItemsChange = vi.fn() + const batcher = new LiteBatcher(mockFn, { + started: false, + onItemsChange, + }) + + batcher.addItem('item1') + batcher.addItem('item2') + onItemsChange.mockClear() + + batcher.clear() + expect(onItemsChange).toHaveBeenCalledTimes(1) + expect(onItemsChange).toHaveBeenCalledWith(batcher) + }) + + it('should not call onItemsChange when clearing empty batch', () => { + const mockFn = vi.fn() + const onItemsChange = vi.fn() + const batcher = new LiteBatcher(mockFn, { + started: false, + onItemsChange, + }) + + batcher.clear() // Clear empty batch + expect(onItemsChange).not.toHaveBeenCalled() + }) + + it('should 
call onItemsChange when batch is processed', () => { + const mockFn = vi.fn() + const onItemsChange = vi.fn() + const batcher = new LiteBatcher(mockFn, { + maxSize: 2, + started: false, + onItemsChange, + }) + + batcher.addItem('item1') + batcher.addItem('item2') // Should trigger execution + + // Called once for each add, and once for clearing during execution + expect(onItemsChange).toHaveBeenCalledTimes(3) + }) + + it('should not call callbacks when not provided', () => { + const mockFn = vi.fn() + const batcher = new LiteBatcher(mockFn, { started: false }) + + // Should not throw when callbacks are undefined + expect(() => { + batcher.addItem('test1') + batcher.addItem('test2') + batcher.flush() + batcher.clear() + }).not.toThrow() + + expect(mockFn).toHaveBeenCalledTimes(1) // Once from flush + }) + + it('should call callbacks with custom execution condition', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const onItemsChange = vi.fn() + const batcher = new LiteBatcher(mockFn, { + getShouldExecute: (items) => items.some((item: any) => item.urgent), + started: false, + onExecute, + onItemsChange, + }) + + batcher.addItem({ name: 'normal', urgent: false }) + batcher.addItem({ name: 'urgent', urgent: true }) // Should trigger + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onItemsChange).toHaveBeenCalledTimes(3) // 2 adds + 1 clear from execution + }) + + it('should handle errors in onExecute callback gracefully', () => { + const mockFn = vi.fn() + const onExecute = vi.fn(() => { + throw new Error('Callback error') + }) + const batcher = new LiteBatcher(mockFn, { + maxSize: 1, + started: false, + onExecute, + }) + + // Callback errors should propagate (not handled gracefully in current implementation) + expect(() => batcher.addItem('test')).toThrow('Callback error') + expect(mockFn).toHaveBeenCalledWith(['test']) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should handle errors in onItemsChange callback gracefully', () => { + 
const mockFn = vi.fn() + const onItemsChange = vi.fn(() => { + throw new Error('Callback error') + }) + const batcher = new LiteBatcher(mockFn, { + started: false, + onItemsChange, + }) + + // Callback errors should propagate (not handled gracefully in current implementation) + expect(() => batcher.addItem('test')).toThrow('Callback error') + expect(onItemsChange).toHaveBeenCalledTimes(1) + }) + + it('should call callbacks with function-based wait', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const getWait = vi.fn(() => 200) + const batcher = new LiteBatcher(mockFn, { + wait: getWait, + started: false, + onExecute, + }) + + batcher.addItem('test') + expect(getWait).toHaveBeenCalledWith(batcher) + + vi.advanceTimersByTime(200) + expect(onExecute).toHaveBeenCalledWith(['test'], batcher) + }) + + it('should call onExecute after canceling and flushing', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const batcher = new LiteBatcher(mockFn, { + wait: 100, + started: false, + onExecute, + }) + + batcher.addItem('test1') + batcher.addItem('test2') + batcher.cancel() // Cancel timeout + + expect(onExecute).not.toHaveBeenCalled() + + batcher.flush() // Manual flush + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledWith(['test1', 'test2'], batcher) + }) + }) +}) + +describe('liteBatch helper function', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Functionality', () => { + it('should create a batch function', () => { + const mockFn = vi.fn() + const batchFn = liteBatch(mockFn, { started: false }) + + expect(typeof batchFn).toBe('function') + + batchFn('test') + expect(mockFn).not.toHaveBeenCalled() // Not started + }) + + it('should add items when called', () => { + const mockFn = vi.fn() + const batchFn = liteBatch(mockFn, { maxSize: 2, started: false }) + + batchFn('test1') + batchFn('test2') + + 
expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + }) + + it('should work with wait times', () => { + const mockFn = vi.fn() + const batchFn = liteBatch(mockFn, { wait: 100, started: false }) + + batchFn('test1') + batchFn('test2') + + expect(mockFn).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledWith(['test1', 'test2']) + }) + + it('should work with custom conditions', () => { + const mockFn = vi.fn() + const batchFn = liteBatch(mockFn, { + getShouldExecute: (items: any[]) => + items.some((item) => item.priority === 'high'), + started: false, + }) + + batchFn({ name: 'task1', priority: 'low' }) + batchFn({ name: 'task2', priority: 'medium' }) + expect(mockFn).not.toHaveBeenCalled() + + batchFn({ name: 'task3', priority: 'high' }) + expect(mockFn).toHaveBeenCalledWith([ + { name: 'task1', priority: 'low' }, + { name: 'task2', priority: 'medium' }, + { name: 'task3', priority: 'high' }, + ]) + }) + + it('should work with all triggers combined', () => { + const mockFn = vi.fn() + const batchFn = liteBatch(mockFn, { + maxSize: 5, + wait: 1000, + getShouldExecute: (items: any[]) => items.some((item) => item.urgent), + started: false, + }) + + // Test size trigger + for (let i = 0; i < 5; i++) { + batchFn({ id: i, urgent: false }) + } + expect(mockFn).toHaveBeenCalledTimes(1) + + // Test custom condition trigger + batchFn({ id: 'urgent', urgent: true }) + expect(mockFn).toHaveBeenCalledTimes(2) + + // Test time trigger + batchFn({ id: 'delayed', urgent: false }) + vi.advanceTimersByTime(1000) + expect(mockFn).toHaveBeenCalledTimes(3) + }) + }) + + describe('Callbacks', () => { + it('should work with onExecute callback', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const batchFn = liteBatch(mockFn, { + maxSize: 2, + started: false, + onExecute, + }) + + batchFn('item1') + batchFn('item2') // Should trigger execution + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenCalledTimes(1) 
+ }) + + it('should work with onItemsChange callback', () => { + const mockFn = vi.fn() + const onItemsChange = vi.fn() + const batchFn = liteBatch(mockFn, { + started: false, + onItemsChange, + }) + + batchFn('item1') + batchFn('item2') + + expect(onItemsChange).toHaveBeenCalledTimes(2) + }) + + it('should work with both callbacks together', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const onItemsChange = vi.fn() + const batchFn = liteBatch(mockFn, { + maxSize: 2, + started: false, + onExecute, + onItemsChange, + }) + + batchFn('item1') + batchFn('item2') // Should trigger execution + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onItemsChange).toHaveBeenCalledTimes(3) // 2 adds + 1 clear + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should work with time-based batching and onExecute', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const batchFn = liteBatch(mockFn, { + wait: 100, + started: false, + onExecute, + }) + + batchFn('item1') + batchFn('item2') + + expect(onExecute).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenCalledWith(['item1', 'item2']) + }) + + it('should work with custom execution conditions and callbacks', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const onItemsChange = vi.fn() + const batchFn = liteBatch(mockFn, { + getShouldExecute: (items: any[]) => items.some((item) => item.urgent), + started: false, + onExecute, + onItemsChange, + }) + + batchFn({ name: 'normal', urgent: false }) + batchFn({ name: 'urgent', urgent: true }) // Should trigger + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onItemsChange).toHaveBeenCalledTimes(3) // 2 adds + 1 clear + expect(mockFn).toHaveBeenCalledWith([ + { name: 'normal', urgent: false }, + { name: 'urgent', urgent: true }, + ]) + }) + }) +}) diff --git a/packages/pacer-lite/tests/lite-debouncer.test.ts 
b/packages/pacer-lite/tests/lite-debouncer.test.ts new file mode 100644 index 00000000..e872772a --- /dev/null +++ b/packages/pacer-lite/tests/lite-debouncer.test.ts @@ -0,0 +1,591 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { LiteDebouncer, liteDebounce } from '../src/lite-debouncer' + +describe('LiteDebouncer', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Debouncing', () => { + it('should not execute the function before the specified wait', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute() + expect(mockFn).not.toBeCalled() + }) + + it('should execute the function after the specified wait', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute() + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) + }) + + it('should debounce multiple calls', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute() + debouncer.maybeExecute() + debouncer.maybeExecute() + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) + }) + + it('should pass arguments to the debounced function', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute('test', 123) + vi.advanceTimersByTime(1000) + + expect(mockFn).toBeCalledWith('test', 123) + }) + + it('should use latest arguments from multiple calls', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute('first') + debouncer.maybeExecute('second') + debouncer.maybeExecute('third') + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledWith('third') + }) + }) + + describe('Execution Edge 
Cases', () => { + it('should execute immediately with leading option', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: true, + trailing: false, + }) + + debouncer.maybeExecute('test') + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('test') + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) + }) + + it('should respect leading edge timing', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: true, + trailing: false, + }) + + // First call - executes immediately + debouncer.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + // Call again before wait expires - should not execute + vi.advanceTimersByTime(500) + debouncer.maybeExecute('second') + expect(mockFn).toBeCalledTimes(1) + + // Advance to end of second call's wait period - should not execute + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) + + // Now that the full wait has passed since last call, this should execute + debouncer.maybeExecute('third') + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('third') + }) + + it('should support both leading and trailing execution', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: true, + trailing: true, + }) + + debouncer.maybeExecute('test1') + debouncer.maybeExecute('test2') + expect(mockFn).toBeCalledTimes(1) // Leading call + expect(mockFn).toBeCalledWith('test1') + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(2) // Trailing call + expect(mockFn).toHaveBeenLastCalledWith('test2') + }) + + it('should default to trailing-only execution', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute('test1') + debouncer.maybeExecute('test2') + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(1000) + 
expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('test2') + }) + + it('should handle case where both leading and trailing are false', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: false, + trailing: false, + }) + + debouncer.maybeExecute('test') + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(1000) + expect(mockFn).not.toBeCalled() + + // Should still reset canLeadingExecute flag + debouncer.maybeExecute('test2') + expect(mockFn).not.toBeCalled() + }) + }) + + describe('Execution Control', () => { + it('should cancel pending execution', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute() + debouncer.cancel() + + vi.advanceTimersByTime(1000) + expect(mockFn).not.toBeCalled() + }) + + it('should properly handle canLeadingExecute flag after cancellation', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: true, + trailing: false, + }) + + // First call - executes immediately + debouncer.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + // Cancel before wait expires + vi.advanceTimersByTime(500) + debouncer.cancel() + + // Should be able to execute immediately again after cancellation + debouncer.maybeExecute('second') + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + + it('should handle rapid calls with leading edge execution', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: true, + trailing: false, + }) + + // Make rapid calls + debouncer.maybeExecute('first') + debouncer.maybeExecute('second') + debouncer.maybeExecute('third') + debouncer.maybeExecute('fourth') + + // Only first call should execute immediately + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('first') + + // Wait for timeout + vi.advanceTimersByTime(1000) 
+ + // Next call should execute immediately + debouncer.maybeExecute('fifth') + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('fifth') + }) + }) + + describe('Flush Method', () => { + it('should execute pending function immediately', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute('test') + expect(mockFn).not.toBeCalled() + + debouncer.flush() + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('test') + }) + + it('should clear pending timeout when flushing', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute('test') + debouncer.flush() + + // Advance time to ensure timeout would have fired + vi.advanceTimersByTime(1000) + + expect(mockFn).toBeCalledTimes(1) + }) + + it('should do nothing when no pending execution', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.flush() + expect(mockFn).not.toBeCalled() + }) + + it('should work with leading and trailing execution', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: true, + trailing: true, + }) + + debouncer.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + debouncer.maybeExecute('second') + debouncer.flush() + + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + + it('should flush pending execution even with trailing: false', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 1000, + leading: true, + trailing: false, + }) + + debouncer.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + debouncer.maybeExecute('second') + debouncer.flush() + + // Since we have lastArgs, flush will execute even with trailing: false + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + }) + + 
describe('Callbacks', () => { + it('should call onExecute after leading execution', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 100, + leading: true, + onExecute, + }) + + debouncer.maybeExecute('test') + + expect(mockFn).toHaveBeenCalledWith('test') + expect(onExecute).toHaveBeenCalledWith(['test'], debouncer) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should call onExecute after trailing execution', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 100, + trailing: true, + onExecute, + }) + + debouncer.maybeExecute('test') + expect(onExecute).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledWith('test') + expect(onExecute).toHaveBeenCalledWith(['test'], debouncer) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should call onExecute with latest args after trailing execution', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 100, + trailing: true, + onExecute, + }) + + debouncer.maybeExecute('first') + debouncer.maybeExecute('second') + + vi.advanceTimersByTime(100) + + expect(onExecute).toHaveBeenCalledWith(['second'], debouncer) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should call onExecute after flush', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 100, + onExecute, + }) + + debouncer.maybeExecute('test') + debouncer.flush() + + expect(mockFn).toHaveBeenCalledWith('test') + expect(onExecute).toHaveBeenCalledWith(['test'], debouncer) + }) + + it('should not call onExecute when cancelled', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 100, + onExecute, + }) + + debouncer.maybeExecute('test') + debouncer.cancel() + + 
vi.advanceTimersByTime(100) + + expect(mockFn).not.toHaveBeenCalled() + expect(onExecute).not.toHaveBeenCalled() + }) + + it('should work with both leading and trailing enabled', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: 100, + leading: true, + trailing: true, + onExecute, + }) + + debouncer.maybeExecute('first') + expect(onExecute).toHaveBeenCalledWith(['first'], debouncer) + + debouncer.maybeExecute('second') + vi.advanceTimersByTime(100) + + expect(onExecute).toHaveBeenCalledWith(['second'], debouncer) + expect(onExecute).toHaveBeenCalledTimes(2) + }) + }) + + describe('Edge Cases and Error Handling', () => { + it('should handle wait time of 0', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 0 }) + + debouncer.maybeExecute() + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(0) + expect(mockFn).toBeCalledTimes(1) + }) + + it('should handle negative wait time', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: -1000 }) + + debouncer.maybeExecute() + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(0) + expect(mockFn).toBeCalledTimes(1) + }) + + it('should handle very large wait times', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { + wait: Number.MAX_SAFE_INTEGER, + }) + + debouncer.maybeExecute() + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(Number.MAX_SAFE_INTEGER) + expect(mockFn).toBeCalledTimes(1) + }) + + it('should handle NaN wait time', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: NaN }) + + debouncer.maybeExecute() + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(0) + expect(mockFn).toBeCalledTimes(1) + }) + + it('should handle undefined/null arguments', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + debouncer.maybeExecute(undefined, 
null) + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledWith(undefined, null) + }) + + it('should prevent memory leaks by clearing timeouts', () => { + const mockFn = vi.fn() + const debouncer = new LiteDebouncer(mockFn, { wait: 1000 }) + + // Create multiple pending executions + debouncer.maybeExecute() + debouncer.maybeExecute() + debouncer.maybeExecute() + + // Cancel all pending executions + debouncer.cancel() + + // Advance time to ensure no executions occur + vi.advanceTimersByTime(1000) + expect(mockFn).not.toBeCalled() + }) + }) +}) + +describe('liteDebounce helper function', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Functionality', () => { + it('should create a debounced function with default options', () => { + const mockFn = vi.fn() + const debouncedFn = liteDebounce(mockFn, { wait: 1000 }) + + debouncedFn('test') + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('test') + }) + + it('should pass arguments correctly', () => { + const mockFn = vi.fn() + const debouncedFn = liteDebounce(mockFn, { wait: 1000 }) + + debouncedFn(42, 'test', { foo: 'bar' }) + vi.advanceTimersByTime(1000) + + expect(mockFn).toBeCalledWith(42, 'test', { foo: 'bar' }) + }) + }) + + describe('Execution Options', () => { + it('should respect leading option', () => { + const mockFn = vi.fn() + const debouncedFn = liteDebounce(mockFn, { + wait: 1000, + leading: true, + trailing: false, + }) + + debouncedFn('first') + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('first') + + debouncedFn('second') + expect(mockFn).toBeCalledTimes(1) + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) + + debouncedFn('third') + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('third') + }) + + it('should handle multiple calls with trailing edge', () => { + const 
mockFn = vi.fn() + const debouncedFn = liteDebounce(mockFn, { wait: 1000 }) + + debouncedFn('a') + debouncedFn('b') + debouncedFn('c') + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(500) + debouncedFn('d') + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('d') + }) + + it('should support both leading and trailing execution', () => { + const mockFn = vi.fn() + const debouncedFn = liteDebounce(mockFn, { + wait: 1000, + leading: true, + trailing: true, + }) + + debouncedFn('first') + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('first') + + debouncedFn('second') + expect(mockFn).toBeCalledTimes(1) + + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + + it('should work with onExecute callback', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const debouncedFn = liteDebounce(mockFn, { + wait: 100, + onExecute, + }) + + debouncedFn('test') + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledWith('test') + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledWith(['test'], expect.any(Object)) + }) + }) +}) diff --git a/packages/pacer-lite/tests/lite-queuer.test.ts b/packages/pacer-lite/tests/lite-queuer.test.ts new file mode 100644 index 00000000..806fc907 --- /dev/null +++ b/packages/pacer-lite/tests/lite-queuer.test.ts @@ -0,0 +1,761 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { LiteQueuer, liteQueue } from '../src/lite-queuer' + +describe('LiteQueuer', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Functionality', () => { + it('should create an empty queuer', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + expect(queuer.size).toBe(0) + 
expect(queuer.isEmpty).toBe(true) + expect(queuer.isQueueRunning).toBe(false) + }) + + it('should start with started: true by default', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, {}) + + expect(queuer.isQueueRunning).toBe(true) + }) + + it('should respect maxSize option', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { maxSize: 2, started: false }) + + expect(queuer.addItem(1)).toBe(true) + expect(queuer.addItem(2)).toBe(true) + expect(queuer.addItem(3)).toBe(false) + expect(queuer.size).toBe(2) + }) + + it('should set default options correctly', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, {}) + + expect(queuer.options.addItemsTo).toBe('back') + expect(queuer.options.getItemsFrom).toBe('front') + expect(queuer.options.maxSize).toBe(Infinity) + expect(queuer.options.started).toBe(true) + expect(queuer.options.wait).toBe(0) + }) + }) + + describe('addItem', () => { + it('should add items to the queue', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + expect(queuer.addItem(1)).toBe(true) + expect(queuer.size).toBe(1) + expect(queuer.peekNextItem()).toBe(1) + expect(queuer.isEmpty).toBe(false) + }) + + it('should add items to back by default', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(3) + + expect(queuer.peekAllItems()).toEqual([1, 2, 3]) + }) + + it('should add items to front when specified', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem(1, 'front') + queuer.addItem(2, 'front') + queuer.addItem(3, 'front') + + expect(queuer.peekAllItems()).toEqual([3, 2, 1]) + }) + + it('should reject items when queue is full', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { maxSize: 1, started: false }) + + expect(queuer.addItem(1)).toBe(true) + 
expect(queuer.addItem(2)).toBe(false) + expect(queuer.size).toBe(1) + }) + }) + + describe('getNextItem', () => { + it('should remove and return items in FIFO order', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(3) + + expect(queuer.getNextItem()).toBe(1) + expect(queuer.getNextItem()).toBe(2) + expect(queuer.getNextItem()).toBe(3) + expect(queuer.getNextItem()).toBeUndefined() + }) + + it('should return undefined when queue is empty', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + expect(queuer.getNextItem()).toBeUndefined() + }) + + it('should support LIFO when getting from back', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(3) + + expect(queuer.getNextItem('back')).toBe(3) + expect(queuer.getNextItem('back')).toBe(2) + expect(queuer.getNextItem('back')).toBe(1) + }) + }) + + describe('execute', () => { + it('should execute function with next item', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem('test') + const result = queuer.execute() + + expect(result).toBe('test') + expect(mockFn).toHaveBeenCalledWith('test') + expect(queuer.size).toBe(0) + }) + + it('should return undefined when queue is empty', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + const result = queuer.execute() + + expect(result).toBeUndefined() + expect(mockFn).not.toHaveBeenCalled() + }) + }) + + describe('Priority System', () => { + it('should maintain priority order when adding items', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + getPriority: (item: any) => item.priority, + started: false, + }) + + queuer.addItem({ value: 'medium', priority: 2 }) + queuer.addItem({ value: 'high', priority: 
3 }) + queuer.addItem({ value: 'low', priority: 1 }) + + expect(queuer.peekAllItems()).toEqual([ + { value: 'high', priority: 3 }, + { value: 'medium', priority: 2 }, + { value: 'low', priority: 1 }, + ]) + }) + + it('should insert items in correct position based on priority', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + getPriority: (item: any) => item.priority, + started: false, + }) + + queuer.addItem({ value: 'lowest', priority: 0 }) + queuer.addItem({ value: 'highest', priority: 4 }) + queuer.addItem({ value: 'medium', priority: 2 }) + + expect(queuer.peekAllItems()).toEqual([ + { value: 'highest', priority: 4 }, + { value: 'medium', priority: 2 }, + { value: 'lowest', priority: 0 }, + ]) + }) + + it('should handle items with equal priorities', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + getPriority: (item: any) => item.priority, + started: false, + }) + + queuer.addItem({ value: 'first', priority: 1 }) + queuer.addItem({ value: 'second', priority: 1 }) + queuer.addItem({ value: 'third', priority: 1 }) + + // Equal priority items should maintain their insertion order + expect(queuer.peekAllItems()).toEqual([ + { value: 'first', priority: 1 }, + { value: 'second', priority: 1 }, + { value: 'third', priority: 1 }, + ]) + }) + + it('should handle items without priority when getPriority returns undefined', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + getPriority: (item: any) => item.priority, + started: false, + }) + + queuer.addItem({ value: 'no-priority' }) // priority undefined, added to back + queuer.addItem({ value: 'with-priority', priority: 5 }) + + // With-priority item should be inserted at correct priority position + expect(queuer.peekAllItems()).toEqual([ + { value: 'with-priority', priority: 5 }, + { value: 'no-priority' }, + ]) + }) + + it('should always get from front when priority function is provided', () => { + const mockFn = vi.fn() + const queuer = 
new LiteQueuer(mockFn, { + getPriority: (item: any) => item.priority, + started: false, + }) + + queuer.addItem({ value: 'low', priority: 1 }) + queuer.addItem({ value: 'high', priority: 3 }) + + // Even when requesting from back, should get highest priority (front) + expect(queuer.getNextItem('back')).toEqual({ value: 'high', priority: 3 }) + expect(queuer.getNextItem('back')).toEqual({ value: 'low', priority: 1 }) + }) + }) + + describe('Initial Items', () => { + it('should initialize queue with provided items', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + initialItems: [1, 2, 3], + started: false, + }) + + expect(queuer.size).toBe(3) + expect(queuer.peekAllItems()).toEqual([1, 2, 3]) + }) + + it('should sort initial items by priority if getPriority is provided', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + initialItems: [ + { value: 'low', priority: 1 }, + { value: 'high', priority: 3 }, + { value: 'medium', priority: 2 }, + ], + getPriority: (item: any) => item.priority, + started: false, + }) + + expect(queuer.peekAllItems()).toEqual([ + { value: 'high', priority: 3 }, + { value: 'medium', priority: 2 }, + { value: 'low', priority: 1 }, + ]) + }) + + it('should handle empty initialItems array', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + initialItems: [], + started: false, + }) + + expect(queuer.isEmpty).toBe(true) + }) + }) + + describe('Auto Processing', () => { + it('should auto-process items when started', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: true }) + + queuer.addItem('test1') + queuer.addItem('test2') + + vi.runAllTimers() + + expect(mockFn).toHaveBeenCalledWith('test1') + expect(mockFn).toHaveBeenCalledWith('test2') + expect(queuer.isEmpty).toBe(true) + }) + + it('should respect wait time between executions', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + started: true, + wait: 100, 
+ }) + + queuer.addItem('test1') + queuer.addItem('test2') + + expect(mockFn).toHaveBeenCalledWith('test1') + expect(mockFn).toHaveBeenCalledTimes(1) + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledWith('test2') + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should not auto-process when started: false', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem('test') + + vi.runAllTimers() + + expect(mockFn).not.toHaveBeenCalled() + expect(queuer.size).toBe(1) + }) + }) + + describe('Start/Stop Control', () => { + it('should start processing when start() is called', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem('test') + expect(mockFn).not.toHaveBeenCalled() + + queuer.start() + vi.runAllTimers() + + expect(mockFn).toHaveBeenCalledWith('test') + expect(queuer.isQueueRunning).toBe(true) + }) + + it('should stop processing when stop() is called', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + started: true, + wait: 100, + }) + + queuer.addItem('test1') + queuer.addItem('test2') + + expect(mockFn).toHaveBeenCalledWith('test1') + + queuer.stop() + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledTimes(1) + expect(queuer.isQueueRunning).toBe(false) + expect(queuer.size).toBe(1) // test2 still in queue + }) + + it('should clear pending timeout when stopped', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + started: true, + wait: 1000, + }) + + queuer.addItem('test1') + queuer.addItem('test2') + + expect(mockFn).toHaveBeenCalledWith('test1') + + queuer.stop() + + vi.advanceTimersByTime(1000) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + }) + + describe('Utility Methods', () => { + describe('peekNextItem', () => { + it('should return next item without removing it', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) 
+ + queuer.addItem(1) + queuer.addItem(2) + + expect(queuer.peekNextItem()).toBe(1) + expect(queuer.size).toBe(2) + expect(queuer.peekNextItem()).toBe(1) // Still there + }) + + it('should return undefined when queue is empty', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + expect(queuer.peekNextItem()).toBeUndefined() + }) + + it('should peek from back when specified', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(3) + + expect(queuer.peekNextItem('back')).toBe(3) + expect(queuer.size).toBe(3) + }) + + it('should always peek from front when priority function exists', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + getPriority: (item: any) => item.priority, + started: false, + }) + + queuer.addItem({ value: 'low', priority: 1 }) + queuer.addItem({ value: 'high', priority: 3 }) + + expect(queuer.peekNextItem('back')).toEqual({ + value: 'high', + priority: 3, + }) + }) + }) + + describe('peekAllItems', () => { + it('should return copy of all items', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(3) + + const items = queuer.peekAllItems() + expect(items).toEqual([1, 2, 3]) + + // Should be a copy, not reference + items.push(4) + expect(queuer.peekAllItems()).toEqual([1, 2, 3]) + }) + + it('should return empty array when queue is empty', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + expect(queuer.peekAllItems()).toEqual([]) + }) + }) + + describe('clear', () => { + it('should remove all items from queue', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.clear() + + expect(queuer.isEmpty).toBe(true) + expect(queuer.size).toBe(0) + 
expect(queuer.peekNextItem()).toBeUndefined() + }) + }) + + describe('flush', () => { + it('should process all items immediately', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + started: false, + wait: 1000, + }) + + queuer.addItem('test1') + queuer.addItem('test2') + queuer.addItem('test3') + + queuer.flush() + + expect(mockFn).toHaveBeenCalledWith('test1') + expect(mockFn).toHaveBeenCalledWith('test2') + expect(mockFn).toHaveBeenCalledWith('test3') + expect(queuer.isEmpty).toBe(true) + }) + + it('should process specified number of items', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem('test1') + queuer.addItem('test2') + queuer.addItem('test3') + + queuer.flush(2) + + expect(mockFn).toHaveBeenCalledTimes(2) + expect(queuer.size).toBe(1) + expect(queuer.peekNextItem()).toBe('test3') + }) + + it('should restart processing if queue is running and has remaining items', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + started: false, + wait: 100, + }) + + queuer.addItem('test1') + queuer.addItem('test2') + queuer.addItem('test3') + + // Manually flush 2 items + queuer.flush(2) + + expect(mockFn).toHaveBeenCalledWith('test1') + expect(mockFn).toHaveBeenCalledWith('test2') + expect(mockFn).toHaveBeenCalledTimes(2) + expect(queuer.size).toBe(1) + + // Now start processing - should process remaining item + queuer.start() + + vi.runAllTimers() + + expect(mockFn).toHaveBeenCalledWith('test3') + expect(mockFn).toHaveBeenCalledTimes(3) + }) + }) + + describe('flushAsBatch', () => { + it('should process all items as a batch', () => { + const mockFn = vi.fn() + const batchFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem('test1') + queuer.addItem('test2') + queuer.addItem('test3') + + queuer.flushAsBatch(batchFn) + + expect(batchFn).toHaveBeenCalledWith(['test1', 'test2', 'test3']) + 
expect(mockFn).not.toHaveBeenCalled() // Individual function not called + expect(queuer.isEmpty).toBe(true) + }) + + it('should clear queue after batch processing', () => { + const mockFn = vi.fn() + const batchFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.addItem('test') + queuer.flushAsBatch(batchFn) + + expect(queuer.size).toBe(0) + expect(queuer.isEmpty).toBe(true) + }) + + it('should handle empty queue', () => { + const mockFn = vi.fn() + const batchFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + queuer.flushAsBatch(batchFn) + + expect(batchFn).toHaveBeenCalledWith([]) + }) + }) + }) + + describe('Edge Cases', () => { + it('should handle rapid item additions', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + for (let i = 0; i < 100; i++) { + queuer.addItem(i) + } + + expect(queuer.size).toBe(100) + expect(queuer.peekNextItem()).toBe(0) + }) + + it('should handle zero wait time', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + started: true, + wait: 0, + }) + + queuer.addItem('test1') + queuer.addItem('test2') + + vi.runAllTimers() + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should handle adding items while processing', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { + started: true, + wait: 100, + }) + + queuer.addItem('test1') + expect(mockFn).toHaveBeenCalledWith('test1') + + queuer.addItem('test2') + queuer.addItem('test3') + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledWith('test2') + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledWith('test3') + }) + }) + + describe('Callbacks', () => { + it('should not call callbacks when not provided', () => { + const mockFn = vi.fn() + const queuer = new LiteQueuer(mockFn, { started: false }) + + // Should not throw when callbacks are undefined + expect(() => { + queuer.addItem('test') + queuer.execute() 
+ queuer.clear() + }).not.toThrow() + + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + // TODO: Add comprehensive callback tests once onExecute, onReject, and onItemsChange callbacks are implemented + // These tests will verify: + // - onExecute is called when items are processed + // - onReject is called when queue is full + // - onItemsChange is called when items are added/removed + // - Callbacks work with priority queues, auto-processing, and manual operations + // - Error handling in callbacks + }) +}) + +describe('liteQueue helper function', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Functionality', () => { + it('should create a queue function', () => { + const mockFn = vi.fn() + const queueFn = liteQueue(mockFn, { started: false }) + + expect(typeof queueFn).toBe('function') + + queueFn('test') + expect(mockFn).not.toHaveBeenCalled() // Not started + }) + + it('should add items when called', () => { + const mockFn = vi.fn() + const queueFn = liteQueue(mockFn, { started: true }) + + const result1 = queueFn('test1') + const result2 = queueFn('test2') + + expect(result1).toBe(true) + expect(result2).toBe(true) + + vi.runAllTimers() + + expect(mockFn).toHaveBeenCalledWith('test1') + expect(mockFn).toHaveBeenCalledWith('test2') + }) + + it('should return false when queue is full', () => { + const mockFn = vi.fn() + const queueFn = liteQueue(mockFn, { + maxSize: 1, + started: false, + }) + + expect(queueFn('test1')).toBe(true) + expect(queueFn('test2')).toBe(false) + }) + + it('should work with wait times', () => { + const mockFn = vi.fn() + const queueFn = liteQueue(mockFn, { + started: true, + wait: 100, + }) + + queueFn('test1') + queueFn('test2') + + expect(mockFn).toHaveBeenCalledWith('test1') + expect(mockFn).toHaveBeenCalledTimes(1) + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledWith('test2') + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should 
work with priority', () => { + const mockFn = vi.fn() + const queueFn = liteQueue(mockFn, { + getPriority: (item: any) => item.priority, + started: true, + wait: 0, // No wait to simplify test + }) + + queueFn({ value: 'low', priority: 1 }) + queueFn({ value: 'high', priority: 3 }) + + vi.runAllTimers() + + // First item added should process first (low priority) + // Second item (high priority) should process after due to immediate processing + expect(mockFn).toHaveBeenCalledTimes(2) + expect(mockFn).toHaveBeenNthCalledWith(1, { value: 'low', priority: 1 }) + expect(mockFn).toHaveBeenNthCalledWith(2, { value: 'high', priority: 3 }) + }) + }) + + describe('Callbacks', () => { + // TODO: Add comprehensive callback tests once onExecute, onReject, and onItemsChange callbacks are implemented + // These tests will verify: + // - Helper functions work correctly with all callback types + // - Callbacks are called at the right times during queue operations + // - Integration between helper functions and callback functionality + + it('should not throw when callbacks are not provided', () => { + const mockFn = vi.fn() + const queueFn = liteQueue(mockFn, { started: true, wait: 0 }) + + expect(() => { + queueFn('test1') + queueFn('test2') + vi.runAllTimers() + }).not.toThrow() + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + }) +}) diff --git a/packages/pacer-lite/tests/lite-rate-limiter.test.ts b/packages/pacer-lite/tests/lite-rate-limiter.test.ts new file mode 100644 index 00000000..aa9d75b9 --- /dev/null +++ b/packages/pacer-lite/tests/lite-rate-limiter.test.ts @@ -0,0 +1,827 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { LiteRateLimiter, liteRateLimit } from '../src/lite-rate-limiter' + +describe('LiteRateLimiter', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.useRealTimers() + }) + + describe('Basic Rate Limiting', () => { + it('should execute function when within limits', () => { + const mockFn 
= vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + }) + + expect(rateLimiter.maybeExecute()).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should reject execution when limit is reached', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + }) + + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should pass arguments to the function', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + }) + + rateLimiter.maybeExecute('arg1', 42, { test: 'value' }) + expect(mockFn).toHaveBeenCalledWith('arg1', 42, { test: 'value' }) + }) + + it('should allow execution again after window expires', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + }) + + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + // Advance time past the window + vi.advanceTimersByTime(1001) + + expect(rateLimiter.maybeExecute()).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(3) + }) + + it('should track remaining executions correctly', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + }) + + expect(rateLimiter.getRemainingInWindow()).toBe(3) + + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(2) + + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(1) + + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(0) + }) + }) + + describe('Window Types', () => { + it('should default to fixed window when windowType not specified', () => { + const mockFn = vi.fn() + const rateLimiter 
= new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + }) + + // Fill up the window + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + // Advance time by half the window - should still be blocked + vi.advanceTimersByTime(500) + expect(rateLimiter.maybeExecute()).toBe(false) + + // Advance time past the full window - should be allowed + vi.advanceTimersByTime(600) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(3) + }) + + it('should respect sliding window type', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + windowType: 'sliding', + }) + + // Fill up the window + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + // Advance time by 500ms - oldest execution should still be in window + vi.advanceTimersByTime(500) + expect(rateLimiter.maybeExecute()).toBe(false) + + // Advance time by 600ms more - oldest execution should be expired + vi.advanceTimersByTime(600) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(4) + }) + + it('should handle fixed window correctly with multiple resets', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + windowType: 'fixed', + }) + + // First window + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + // Wait for window to expire + vi.advanceTimersByTime(1001) + + // Second window + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + expect(mockFn).toHaveBeenCalledTimes(4) + }) + }) + + 
describe('Utility Methods', () => { + describe('getRemainingInWindow', () => { + it('should return correct remaining count', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + }) + + expect(rateLimiter.getRemainingInWindow()).toBe(3) + + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(2) + + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(1) + + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(0) + }) + + it('should never return negative values', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + }) + + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(0) + + // Try to execute more times + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(0) + }) + }) + + describe('getMsUntilNextWindow', () => { + it('should correctly calculate time until next window', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + }) + + rateLimiter.maybeExecute() + expect(rateLimiter.getMsUntilNextWindow()).toBe(1000) + + vi.advanceTimersByTime(500) + expect(rateLimiter.getMsUntilNextWindow()).toBe(500) + + vi.advanceTimersByTime(500) + expect(rateLimiter.getMsUntilNextWindow()).toBe(0) + }) + + it('should return 0 when executions are available', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + }) + + expect(rateLimiter.getMsUntilNextWindow()).toBe(0) + rateLimiter.maybeExecute() + expect(rateLimiter.getMsUntilNextWindow()).toBe(0) + }) + + it('should handle sliding window correctly', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + windowType: 'sliding', + }) + + rateLimiter.maybeExecute() + const 
timeUntilNext = rateLimiter.getMsUntilNextWindow() + expect(timeUntilNext).toBe(1000) + + vi.advanceTimersByTime(300) + expect(rateLimiter.getMsUntilNextWindow()).toBe(700) + }) + }) + + describe('reset', () => { + it('should reset execution state', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + }) + + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + expect(rateLimiter.getRemainingInWindow()).toBe(0) + + rateLimiter.reset() + expect(rateLimiter.getRemainingInWindow()).toBe(2) + expect(rateLimiter.maybeExecute()).toBe(true) + }) + + it('should clear execution history', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + }) + + rateLimiter.maybeExecute() + expect(rateLimiter.maybeExecute()).toBe(false) + + rateLimiter.reset() + expect(rateLimiter.maybeExecute()).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should clear all timeouts', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + windowType: 'sliding', + }) + + // Create some executions that would have timeouts + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + + rateLimiter.reset() + + // Should be able to execute full limit again + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.getRemainingInWindow()).toBe(0) + }) + }) + }) + + describe('Callbacks', () => { + it('should call onExecute when function executes successfully', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + onExecute, + } as any) + + rateLimiter.maybeExecute('arg1', 42) + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledWith(['arg1', 42], rateLimiter) + 
expect(mockFn).toHaveBeenCalledWith('arg1', 42) + }) + + it('should call onExecute with correct arguments for each execution', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + onExecute, + } as any) + + rateLimiter.maybeExecute('first') + rateLimiter.maybeExecute('second', 123) + rateLimiter.maybeExecute() + + expect(onExecute).toHaveBeenCalledTimes(3) + expect(onExecute).toHaveBeenNthCalledWith(1, ['first'], rateLimiter) + expect(onExecute).toHaveBeenNthCalledWith(2, ['second', 123], rateLimiter) + expect(onExecute).toHaveBeenNthCalledWith(3, [], rateLimiter) + }) + + it('should call onReject when rate limit is exceeded', () => { + const mockFn = vi.fn() + const onReject = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + onReject, + } as any) + + expect(rateLimiter.maybeExecute()).toBe(true) + expect(onReject).not.toHaveBeenCalled() + + expect(rateLimiter.maybeExecute()).toBe(false) + expect(onReject).toHaveBeenCalledTimes(1) + expect(onReject).toHaveBeenCalledWith(rateLimiter) + }) + + it('should call onReject for consecutive rejections', () => { + const mockFn = vi.fn() + const onReject = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + onReject, + } as any) + + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + + expect(onReject).toHaveBeenCalledTimes(3) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should not call callbacks when not provided', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + }) + + // Should not throw when callbacks are undefined + expect(() => { + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() // This should be rejected + }).not.toThrow() + + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should call callbacks with 
sliding window', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const onReject = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + windowType: 'sliding', + onExecute, + onReject, + } as any) + + rateLimiter.maybeExecute('first') + rateLimiter.maybeExecute('second') + rateLimiter.maybeExecute('third') // Should be rejected + + expect(onExecute).toHaveBeenCalledTimes(2) + expect(onReject).toHaveBeenCalledTimes(1) + + // After time passes, should execute again + vi.advanceTimersByTime(1001) + rateLimiter.maybeExecute('fourth') + + expect(onExecute).toHaveBeenCalledTimes(3) + expect(onExecute).toHaveBeenLastCalledWith(['fourth'], rateLimiter) + }) + + it('should call callbacks with fixed window', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const onReject = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + windowType: 'fixed', + onExecute, + onReject, + } as any) + + rateLimiter.maybeExecute('first') + rateLimiter.maybeExecute('second') + rateLimiter.maybeExecute('third') // Should be rejected + + expect(onExecute).toHaveBeenCalledTimes(2) + expect(onReject).toHaveBeenCalledTimes(1) + + // After window expires, should execute again + vi.advanceTimersByTime(1001) + rateLimiter.maybeExecute('fourth') + + expect(onExecute).toHaveBeenCalledTimes(3) + }) + + it('should handle errors in onExecute callback gracefully', () => { + const mockFn = vi.fn() + const onExecute = vi.fn(() => { + throw new Error('Callback error') + }) + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: 1000, + onExecute, + } as any) + + // Callback errors should propagate (not handled gracefully in current implementation) + expect(() => rateLimiter.maybeExecute()).toThrow('Callback error') + expect(mockFn).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should handle errors in onReject callback gracefully', () => { + const mockFn = 
vi.fn() + const onReject = vi.fn(() => { + throw new Error('Callback error') + }) + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + onReject, + } as any) + + rateLimiter.maybeExecute() + + // Callback errors should propagate (not handled gracefully in current implementation) + expect(() => rateLimiter.maybeExecute()).toThrow('Callback error') + expect(onReject).toHaveBeenCalledTimes(1) + }) + + it('should work with callbacks after reset', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const onReject = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + onExecute, + onReject, + } as any) + + rateLimiter.maybeExecute('before-reset') + rateLimiter.maybeExecute() // Should be rejected + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onReject).toHaveBeenCalledTimes(1) + + rateLimiter.reset() + + rateLimiter.maybeExecute('after-reset') + expect(onExecute).toHaveBeenCalledTimes(2) + expect(onExecute).toHaveBeenLastCalledWith(['after-reset'], rateLimiter) + }) + }) + + describe('Edge Cases', () => { + it('should handle zero limit correctly', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 0, + window: 1000, + }) + + expect(rateLimiter.maybeExecute()).toBe(false) + expect(mockFn).not.toHaveBeenCalled() + expect(rateLimiter.getRemainingInWindow()).toBe(0) + }) + + it('should handle zero window correctly', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { limit: 2, window: 0 }) + + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + // With zero window, should immediately allow execution again + vi.advanceTimersByTime(1) + expect(rateLimiter.maybeExecute()).toBe(true) + }) + + it('should handle negative window correctly', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + 
limit: 1, + window: -1000, + }) + + expect(rateLimiter.maybeExecute()).toBe(true) + // With negative window in sliding mode, current time - (-1000) results in a large positive number + // so the execution time is still within the window + expect(rateLimiter.maybeExecute()).toBe(true) + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should handle very large window values', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 2, + window: Number.MAX_SAFE_INTEGER, + }) + + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(true) + expect(rateLimiter.maybeExecute()).toBe(false) + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should handle rapid consecutive executions', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + }) + + // Execute rapidly + for (let i = 0; i < 10; i++) { + rateLimiter.maybeExecute() + } + + expect(mockFn).toHaveBeenCalledTimes(3) + expect(rateLimiter.getRemainingInWindow()).toBe(0) + }) + + it('should handle NaN window correctly', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { limit: 1, window: NaN }) + + expect(rateLimiter.maybeExecute()).toBe(true) + // With NaN window, Date.now() - NaN = NaN, and time > NaN is always false + // So executions never expire and we hit the limit + expect(rateLimiter.maybeExecute()).toBe(true) + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should handle undefined/null arguments', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + }) + + rateLimiter.maybeExecute(undefined, null) + expect(mockFn).toHaveBeenCalledWith(undefined, null) + }) + + it('should maintain consistent rate with sliding window under load', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 3, + window: 1000, + windowType: 'sliding', + }) 
+ + // Execute 3 times + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + rateLimiter.maybeExecute() + + // Advance time by 400ms + vi.advanceTimersByTime(400) + expect(rateLimiter.maybeExecute()).toBe(false) + + // Advance time by 700ms - first execution should be expired + vi.advanceTimersByTime(700) + expect(rateLimiter.maybeExecute()).toBe(true) + + // Advance time by another 100ms - second execution should be expired + vi.advanceTimersByTime(100) + expect(rateLimiter.maybeExecute()).toBe(true) + + expect(mockFn).toHaveBeenCalledTimes(5) + }) + + it('should handle multiple resets correctly', () => { + const mockFn = vi.fn() + const rateLimiter = new LiteRateLimiter(mockFn, { + limit: 1, + window: 1000, + }) + + rateLimiter.maybeExecute() + rateLimiter.reset() + rateLimiter.reset() + rateLimiter.reset() + + expect(rateLimiter.getRemainingInWindow()).toBe(1) + expect(rateLimiter.maybeExecute()).toBe(true) + }) + }) +}) + +describe('liteRateLimit helper function', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.useRealTimers() + }) + + describe('Basic Functionality', () => { + it('should create a rate-limited function', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { limit: 2, window: 1000 }) + + expect(rateLimitedFn()).toBe(true) + expect(rateLimitedFn()).toBe(true) + expect(rateLimitedFn()).toBe(false) + + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should pass arguments to the wrapped function', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { limit: 1, window: 1000 }) + + rateLimitedFn(42, 'test') + + expect(mockFn).toHaveBeenCalledWith(42, 'test') + }) + + it('should handle multiple executions with proper timing', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { limit: 2, window: 1000 }) + + // First burst + expect(rateLimitedFn('a')).toBe(true) + expect(rateLimitedFn('b')).toBe(true) + 
expect(rateLimitedFn('c')).toBe(false) + expect(mockFn).toHaveBeenCalledTimes(2) + + // Advance past window + vi.advanceTimersByTime(1001) + + // Should be able to execute again + expect(rateLimitedFn('d')).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(3) + expect(mockFn).toHaveBeenLastCalledWith('d') + }) + + it('should work with sliding window', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { + limit: 2, + window: 1000, + windowType: 'sliding', + }) + + expect(rateLimitedFn('a')).toBe(true) + expect(rateLimitedFn('b')).toBe(true) + expect(rateLimitedFn('c')).toBe(false) + + // Advance time by half window + vi.advanceTimersByTime(500) + expect(rateLimitedFn('d')).toBe(false) + + // Advance time to expire first execution + vi.advanceTimersByTime(600) + expect(rateLimitedFn('e')).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(3) + }) + + it('should work with fixed window', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { + limit: 2, + window: 1000, + windowType: 'fixed', + }) + + expect(rateLimitedFn('a')).toBe(true) + expect(rateLimitedFn('b')).toBe(true) + expect(rateLimitedFn('c')).toBe(false) + + // Advance time by half window - should still be blocked + vi.advanceTimersByTime(500) + expect(rateLimitedFn('d')).toBe(false) + + // Advance time past full window + vi.advanceTimersByTime(600) + expect(rateLimitedFn('e')).toBe(true) + expect(mockFn).toHaveBeenCalledTimes(3) + }) + }) + + describe('Callbacks', () => { + it('should work with onExecute callback', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { + limit: 2, + window: 1000, + onExecute, + } as any) + + rateLimitedFn('test1') + rateLimitedFn('test2') + + expect(onExecute).toHaveBeenCalledTimes(2) + expect(mockFn).toHaveBeenCalledTimes(2) + }) + + it('should work with onReject callback', () => { + const mockFn = vi.fn() + const onReject = vi.fn() + const rateLimitedFn = 
liteRateLimit(mockFn, { + limit: 1, + window: 1000, + onReject, + } as any) + + expect(rateLimitedFn('test1')).toBe(true) + expect(rateLimitedFn('test2')).toBe(false) + + expect(onReject).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should work with both callbacks together', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const onReject = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { + limit: 1, + window: 1000, + onExecute, + onReject, + } as any) + + expect(rateLimitedFn('accepted')).toBe(true) + expect(rateLimitedFn('rejected')).toBe(false) + + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onReject).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + }) + + describe('Edge Cases', () => { + it('should handle zero limit', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { limit: 0, window: 1000 }) + + expect(rateLimitedFn()).toBe(false) + expect(mockFn).not.toHaveBeenCalled() + }) + + it('should handle rapid successive calls', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { limit: 3, window: 1000 }) + + for (let i = 0; i < 10; i++) { + rateLimitedFn(`call-${i}`) + } + + expect(mockFn).toHaveBeenCalledTimes(3) + expect(mockFn).toHaveBeenCalledWith('call-0') + expect(mockFn).toHaveBeenCalledWith('call-1') + expect(mockFn).toHaveBeenCalledWith('call-2') + }) + + it('should handle large window values', () => { + const mockFn = vi.fn() + const rateLimitedFn = liteRateLimit(mockFn, { + limit: 1, + window: 1000000, + }) + + expect(rateLimitedFn()).toBe(true) + expect(rateLimitedFn()).toBe(false) + + vi.advanceTimersByTime(999999) + expect(rateLimitedFn()).toBe(false) + + vi.advanceTimersByTime(2) + expect(rateLimitedFn()).toBe(true) + }) + }) +}) diff --git a/packages/pacer-lite/tests/lite-throttler.test.ts b/packages/pacer-lite/tests/lite-throttler.test.ts new file mode 100644 index 00000000..8ecdb19c --- /dev/null +++ 
b/packages/pacer-lite/tests/lite-throttler.test.ts @@ -0,0 +1,655 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { LiteThrottler, liteThrottle } from '../src/lite-throttler' + +describe('LiteThrottler', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Throttling', () => { + it('should execute immediately with default options', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute() + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should not execute more than once within the wait period', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute() + throttler.maybeExecute() + throttler.maybeExecute() + + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should execute with trailing edge after wait period', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute('first') + throttler.maybeExecute('second') + throttler.maybeExecute('third') + + expect(mockFn).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenLastCalledWith('first') + + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('third') + }) + + it('should execute again after wait period', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute('first') + expect(mockFn).toHaveBeenCalledTimes(1) + + vi.advanceTimersByTime(100) + throttler.maybeExecute('second') + expect(mockFn).toHaveBeenCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + + it('should pass arguments correctly', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute('test', 123, { foo: 'bar' }) + 
expect(mockFn).toHaveBeenCalledWith('test', 123, { foo: 'bar' }) + }) + }) + + describe('Leading and Trailing Options', () => { + it('should not execute when leading and trailing are false', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + leading: false, + trailing: false, + }) + + throttler.maybeExecute() + expect(mockFn).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledTimes(0) + }) + + it('should not execute on trailing edge when trailing is false', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + leading: true, + trailing: false, + }) + + throttler.maybeExecute('first') + throttler.maybeExecute('second') + + expect(mockFn).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenCalledWith('first') + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledTimes(1) + }) + + it('should not execute immediately when leading is false', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + leading: false, + trailing: true, + }) + + throttler.maybeExecute('test') + expect(mockFn).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenCalledWith('test') + }) + + it('should default to both leading and trailing true when neither specified', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute('first') + expect(mockFn).toHaveBeenCalledTimes(1) // Leading execution + + throttler.maybeExecute('second') + expect(mockFn).toHaveBeenCalledTimes(1) // Still throttled + + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledTimes(2) // Trailing execution + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + }) + + describe('Timing and Multiple Executions', () => { + it('should handle multiple executions with proper timing', () => { + const mockFn = vi.fn() + 
const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + // First burst + throttler.maybeExecute('a') + throttler.maybeExecute('b') + expect(mockFn).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenLastCalledWith('a') + + // Advance halfway + vi.advanceTimersByTime(50) + throttler.maybeExecute('c') + expect(mockFn).toHaveBeenCalledTimes(1) + + // Complete first wait period + vi.advanceTimersByTime(50) + expect(mockFn).toHaveBeenCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('c') + + // New execution after wait period + vi.advanceTimersByTime(100) + throttler.maybeExecute('d') + expect(mockFn).toHaveBeenCalledTimes(3) + expect(mockFn).toHaveBeenLastCalledWith('d') + }) + + it('should handle zero wait time correctly', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 0 }) + + // Should execute immediately due to leading: true + throttler.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('first') + + // Should execute immediately again since wait is 0 + throttler.maybeExecute('second') + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + + it('should handle negative wait time correctly', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: -100 }) + + throttler.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + throttler.maybeExecute('second') + expect(mockFn).toBeCalledTimes(2) // Should execute immediately due to negative wait + }) + + it('should handle very large wait times', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 1000000 }) + + // First call should execute immediately + throttler.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('first') + + // Subsequent calls should be throttled + throttler.maybeExecute('second') + throttler.maybeExecute('third') + expect(mockFn).toBeCalledTimes(1) + + // Advance 
time by half the wait period + vi.advanceTimersByTime(500000) + expect(mockFn).toBeCalledTimes(1) + + // Complete the wait period + vi.advanceTimersByTime(500000) + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('third') + }) + }) + + describe('Execution Control', () => { + it('should cancel pending trailing execution', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute('first') + throttler.maybeExecute('second') + + expect(mockFn).toHaveBeenCalledTimes(1) + + throttler.cancel() + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledTimes(1) + expect(mockFn).toHaveBeenCalledWith('first') + }) + + it('should handle multiple cancellations', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + // First call + throttler.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + // Cancel before trailing execution + throttler.cancel() + vi.advanceTimersByTime(100) + expect(mockFn).toBeCalledTimes(1) + + // Second call + throttler.maybeExecute('second') + expect(mockFn).toBeCalledTimes(2) + + // Cancel again + throttler.cancel() + vi.advanceTimersByTime(100) + expect(mockFn).toBeCalledTimes(2) + }) + }) + + describe('Flush Method', () => { + it('should execute pending function immediately', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 1000 }) + + throttler.maybeExecute('test') + expect(mockFn).toBeCalledTimes(1) // Leading execution + + throttler.maybeExecute('pending') + expect(mockFn).toBeCalledTimes(1) // Still throttled + + throttler.flush() + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('pending') + }) + + it('should clear pending timeout when flushing', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 1000 }) + + throttler.maybeExecute('first') + throttler.maybeExecute('second') + throttler.flush() + + 
// Advance time to ensure timeout would have fired + vi.advanceTimersByTime(1000) + + expect(mockFn).toBeCalledTimes(2) + }) + + it('should do nothing when no pending execution', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 1000 }) + + throttler.flush() + expect(mockFn).not.toBeCalled() + }) + + it('should work with leading and trailing execution', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 1000, + leading: true, + trailing: true, + }) + + throttler.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + throttler.maybeExecute('second') + throttler.flush() + + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('second') + }) + + it('should work with trailing-only execution', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 1000, + leading: false, + trailing: true, + }) + + throttler.maybeExecute('first') + expect(mockFn).not.toBeCalled() + + throttler.flush() + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('first') + }) + + it('should not flush when leading only and no pending execution', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 1000, + leading: true, + trailing: false, + }) + + throttler.maybeExecute('first') + expect(mockFn).toBeCalledTimes(1) + + // No pending execution to flush + throttler.flush() + expect(mockFn).toBeCalledTimes(1) + }) + }) + + describe('Callbacks', () => { + it('should call onExecute after leading execution', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + leading: true, + onExecute, + }) + + throttler.maybeExecute('test') + + expect(mockFn).toHaveBeenCalledWith('test') + expect(onExecute).toHaveBeenCalledWith(['test'], throttler) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should call onExecute after trailing execution', () => { 
+ const mockFn = vi.fn() + const onExecute = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + leading: false, + trailing: true, + onExecute, + }) + + throttler.maybeExecute('test') + expect(onExecute).not.toHaveBeenCalled() + + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledWith('test') + expect(onExecute).toHaveBeenCalledWith(['test'], throttler) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should call onExecute with latest args after trailing execution', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + leading: false, + trailing: true, + onExecute, + }) + + throttler.maybeExecute('first') + throttler.maybeExecute('second') + + vi.advanceTimersByTime(100) + + expect(onExecute).toHaveBeenCalledWith(['second'], throttler) + expect(onExecute).toHaveBeenCalledTimes(1) + }) + + it('should call onExecute after flush', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + onExecute, + }) + + throttler.maybeExecute('first') // Leading execution + expect(onExecute).toHaveBeenCalledTimes(1) + + throttler.maybeExecute('second') // Pending for trailing + throttler.flush() + + expect(mockFn).toHaveBeenCalledWith('second') + expect(onExecute).toHaveBeenCalledWith(['second'], throttler) + expect(onExecute).toHaveBeenCalledTimes(2) + }) + + it('should not call onExecute when cancelled', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const throttler = new LiteThrottler(mockFn, { + wait: 100, + leading: false, + trailing: true, + onExecute, + }) + + throttler.maybeExecute('test') + throttler.cancel() + + vi.advanceTimersByTime(100) + + expect(mockFn).not.toHaveBeenCalled() + expect(onExecute).not.toHaveBeenCalled() + }) + + it('should work with both leading and trailing enabled', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const throttler = new 
LiteThrottler(mockFn, { + wait: 100, + leading: true, + trailing: true, + onExecute, + }) + + throttler.maybeExecute('first') + expect(onExecute).toHaveBeenCalledWith(['first'], throttler) + + throttler.maybeExecute('second') + vi.advanceTimersByTime(100) + + expect(onExecute).toHaveBeenCalledWith(['second'], throttler) + expect(onExecute).toHaveBeenCalledTimes(2) + }) + }) + + describe('Edge Cases', () => { + it('should handle undefined/null arguments', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 100 }) + + throttler.maybeExecute(undefined, null) + expect(mockFn).toHaveBeenCalledWith(undefined, null) + }) + + it('should handle NaN wait time', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: NaN }) + + // With NaN wait, timeSinceLastExecution >= NaN is false, so no leading execution + // But trailing execution will be scheduled with NaN timeout duration + throttler.maybeExecute('first') + expect(mockFn).toBeCalledTimes(0) // No leading execution + + // The trailing execution should happen with NaN timeout + vi.advanceTimersByTime(0) + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toHaveBeenCalledWith('first') + }) + + it('should prevent memory leaks by clearing timeouts', () => { + const mockFn = vi.fn() + const throttler = new LiteThrottler(mockFn, { wait: 1000 }) + + // Create pending execution + throttler.maybeExecute('first') + throttler.maybeExecute('second') + + // Cancel should clear timeout + throttler.cancel() + + // Advance time to ensure no executions occur + vi.advanceTimersByTime(1000) + expect(mockFn).toBeCalledTimes(1) // Only the leading execution + }) + }) +}) + +describe('liteThrottle helper function', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + describe('Basic Functionality', () => { + it('should create a throttled function with default options', () => { + const mockFn = vi.fn() + const throttledFn 
= liteThrottle(mockFn, { wait: 100 }) + + throttledFn('test') + expect(mockFn).toBeCalledTimes(1) // Leading edge + expect(mockFn).toBeCalledWith('test') + + throttledFn('ignored') + expect(mockFn).toBeCalledTimes(1) + + vi.advanceTimersByTime(100) + expect(mockFn).toBeCalledTimes(2) // Trailing edge + expect(mockFn).toHaveBeenLastCalledWith('ignored') + }) + + it('should pass arguments correctly', () => { + const mockFn = vi.fn() + const throttledFn = liteThrottle(mockFn, { wait: 100 }) + + throttledFn(42, 'test', { foo: 'bar' }) + expect(mockFn).toBeCalledWith(42, 'test', { foo: 'bar' }) + }) + }) + + describe('Execution Options', () => { + it('should respect leading: false option', () => { + const mockFn = vi.fn() + const throttledFn = liteThrottle(mockFn, { + wait: 100, + leading: false, + trailing: true, + }) + + throttledFn('first') + expect(mockFn).not.toBeCalled() // No leading edge execution + + throttledFn('second') // Add another call to ensure trailing edge triggers + + // Need to advance time by wait period to trigger trailing edge + vi.advanceTimersByTime(100) + expect(mockFn).toHaveBeenCalledTimes(1) // Trailing edge only + expect(mockFn).toHaveBeenCalledWith('second') // Should get last call + }) + + it('should respect trailing: false option', () => { + const mockFn = vi.fn() + const throttledFn = liteThrottle(mockFn, { + wait: 100, + leading: true, + trailing: false, + }) + + throttledFn('first') + expect(mockFn).toBeCalledTimes(1) // Leading edge + + throttledFn('second') + vi.advanceTimersByTime(100) + expect(mockFn).toBeCalledTimes(1) // No trailing edge + expect(mockFn).toHaveBeenCalledWith('first') + }) + + it('should handle multiple calls with proper timing', () => { + const mockFn = vi.fn() + const throttledFn = liteThrottle(mockFn, { wait: 100 }) + + // First burst + throttledFn('a') + throttledFn('b') + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('a') + + // Advance halfway and make another call + 
vi.advanceTimersByTime(50) + throttledFn('c') + expect(mockFn).toBeCalledTimes(1) + + // Complete first wait period + vi.advanceTimersByTime(50) + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('c') + + // Wait another period and make new call + vi.advanceTimersByTime(100) + throttledFn('d') + expect(mockFn).toBeCalledTimes(3) + expect(mockFn).toHaveBeenLastCalledWith('d') + }) + + it('should handle rapid successive calls', () => { + const mockFn = vi.fn() + const throttledFn = liteThrottle(mockFn, { wait: 100 }) + + // Rapid succession of calls + for (let i = 0; i < 5; i++) { + throttledFn(`call-${i}`) + } + expect(mockFn).toBeCalledTimes(1) + expect(mockFn).toBeCalledWith('call-0') + + // Should execute the last call after wait + vi.advanceTimersByTime(100) + expect(mockFn).toBeCalledTimes(2) + expect(mockFn).toHaveBeenLastCalledWith('call-4') + }) + + it('should work with both leading and trailing disabled', () => { + const mockFn = vi.fn() + const throttledFn = liteThrottle(mockFn, { + wait: 100, + leading: false, + trailing: false, + }) + + throttledFn('test') + expect(mockFn).not.toBeCalled() + + vi.advanceTimersByTime(100) + expect(mockFn).not.toBeCalled() + }) + + it('should work with onExecute callback', () => { + const mockFn = vi.fn() + const onExecute = vi.fn() + const throttledFn = liteThrottle(mockFn, { + wait: 100, + onExecute, + }) + + throttledFn('test') + expect(mockFn).toHaveBeenCalledWith('test') + expect(onExecute).toHaveBeenCalledTimes(1) + expect(onExecute).toHaveBeenCalledWith(['test'], expect.any(Object)) + + throttledFn('second') + vi.advanceTimersByTime(100) + + expect(mockFn).toHaveBeenCalledWith('second') + expect(onExecute).toHaveBeenCalledTimes(2) + }) + }) +}) diff --git a/packages/pacer-lite/tsconfig.json b/packages/pacer-lite/tsconfig.json new file mode 100644 index 00000000..b36d1bf9 --- /dev/null +++ b/packages/pacer-lite/tsconfig.json @@ -0,0 +1,4 @@ +{ + "extends": "../../tsconfig.json", + 
"include": ["src", "vite.config.ts"] +} diff --git a/packages/pacer-lite/vite.config.ts b/packages/pacer-lite/vite.config.ts new file mode 100644 index 00000000..3fd0ec49 --- /dev/null +++ b/packages/pacer-lite/vite.config.ts @@ -0,0 +1,22 @@ +import { defineConfig, mergeConfig } from 'vitest/config' +import { tanstackViteConfig } from '@tanstack/config/vite' +import packageJson from './package.json' + +const config = defineConfig({ + test: { + name: packageJson.name, + dir: './', + watch: false, + environment: 'happy-dom', + setupFiles: [], + globals: true, + }, +}) + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: ['./src/index.ts'], + srcDir: './src', + }), +) diff --git a/packages/pacer/README.md b/packages/pacer/README.md new file mode 100644 index 00000000..42347dcc --- /dev/null +++ b/packages/pacer/README.md @@ -0,0 +1,165 @@ +
+ +
+ +
+ + + + + +
+ +### [Become a Sponsor!](https://github.com/sponsors/tannerlinsley/) +
+ +# TanStack Pacer + +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. + +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. +- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of 
choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. +- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. + +### Read the docs → + +
+ +> [!NOTE] +> You may know **TanStack Pacer** by our adapter names, too! +> +> - [**React Pacer**](https://tanstack.com/pacer/latest/docs/framework/react/react-pacer) +> - [**Solid Pacer**](https://tanstack.com/pacer/latest/docs/framework/solid/solid-pacer) +> - Angular Pacer - needs a contributor! +> - Preact Pacer - Coming soon! (After React Pacer is more fleshed out) +> - Svelte Pacer - needs a contributor! +> - Vue Pacer - needs a contributor! + +## Get Involved + +- We welcome issues and pull requests! +- Participate in [GitHub discussions](https://github.com/TanStack/pacer/discussions) +- Chat with the community on [Discord](https://discord.com/invite/WrRKjPJ) +- See [CONTRIBUTING.md](./CONTRIBUTING.md) for setup instructions + +## Partners + + + + + + + 
+ + + + + CodeRabbit + + + + + + + + Cloudflare + + + + + + + + Unkey + + +
+ +
+Pacer & you? +

+We're looking for TanStack Pacer Partners to join our mission! Partner with us to push the boundaries of TanStack Pacer and build amazing things together. +

+LET'S CHAT +
+ + + +## Explore the TanStack Ecosystem + +- TanStack Config – Tooling for JS/TS packages +- TanStack DB – Reactive sync client store +- TanStack DevTools – Unified devtools panel +- TanStack Form – Type‑safe form state +- TanStack Query – Async state & caching +- TanStack Ranger – Range & slider primitives +- TanStack Router – Type‑safe routing, caching & URL state +- TanStack Start – Full‑stack SSR & streaming +- TanStack Store – Reactive data store +- TanStack Table – Headless datagrids +- TanStack Virtual – Virtualized rendering + +… and more at TanStack.com » + + diff --git a/packages/react-pacer-devtools/README.md b/packages/react-pacer-devtools/README.md new file mode 100644 index 00000000..42347dcc --- /dev/null +++ b/packages/react-pacer-devtools/README.md @@ -0,0 +1,165 @@ +
+ +
+ +
+ + + + + +
+ +### [Become a Sponsor!](https://github.com/sponsors/tannerlinsley/) +
+ +# TanStack Pacer + +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. + +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. +- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of 
choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. +- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. + +### Read the docs → + +
+ +> [!NOTE] +> You may know **TanStack Pacer** by our adapter names, too! +> +> - [**React Pacer**](https://tanstack.com/pacer/latest/docs/framework/react/react-pacer) +> - [**Solid Pacer**](https://tanstack.com/pacer/latest/docs/framework/solid/solid-pacer) +> - Angular Pacer - needs a contributor! +> - Preact Pacer - Coming soon! (After React Pacer is more fleshed out) +> - Svelte Pacer - needs a contributor! +> - Vue Pacer - needs a contributor! + +## Get Involved + +- We welcome issues and pull requests! +- Participate in [GitHub discussions](https://github.com/TanStack/pacer/discussions) +- Chat with the community on [Discord](https://discord.com/invite/WrRKjPJ) +- See [CONTRIBUTING.md](./CONTRIBUTING.md) for setup instructions + +## Partners + + + + + + + 
+ + + + + CodeRabbit + + + + + + + + Cloudflare + + + + + + + + Unkey + + +
+ +
+Pacer & you? +

+We're looking for TanStack Pacer Partners to join our mission! Partner with us to push the boundaries of TanStack Pacer and build amazing things together. +

+LET'S CHAT +
+ + + +## Explore the TanStack Ecosystem + +- TanStack Config – Tooling for JS/TS packages +- TanStack DB – Reactive sync client store +- TanStack DevTools – Unified devtools panel +- TanStack Form – Type‑safe form state +- TanStack Query – Async state & caching +- TanStack Ranger – Range & slider primitives +- TanStack Router – Type‑safe routing, caching & URL state +- TanStack Start – Full‑stack SSR & streaming +- TanStack Store – Reactive data store +- TanStack Table – Headless datagrids +- TanStack Virtual – Virtualized rendering + +… and more at TanStack.com » + + diff --git a/packages/react-pacer/README.md b/packages/react-pacer/README.md new file mode 100644 index 00000000..42347dcc --- /dev/null +++ b/packages/react-pacer/README.md @@ -0,0 +1,165 @@ +
+ +
+ +
+ + + + + +
+ +### [Become a Sponsor!](https://github.com/sponsors/tannerlinsley/) +
+ +# TanStack Pacer + +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. + +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. +- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of 
choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. +- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. + +### Read the docs → + +
+ +> [!NOTE] +> You may know **TanStack Pacer** by our adapter names, too! +> +> - [**React Pacer**](https://tanstack.com/pacer/latest/docs/framework/react/react-pacer) +> - [**Solid Pacer**](https://tanstack.com/pacer/latest/docs/framework/solid/solid-pacer) +> - Angular Pacer - needs a contributor! +> - Preact Pacer - Coming soon! (After React Pacer is more fleshed out) +> - Svelte Pacer - needs a contributor! +> - Vue Pacer - needs a contributor! + +## Get Involved + +- We welcome issues and pull requests! +- Participate in [GitHub discussions](https://github.com/TanStack/pacer/discussions) +- Chat with the community on [Discord](https://discord.com/invite/WrRKjPJ) +- See [CONTRIBUTING.md](./CONTRIBUTING.md) for setup instructions + +## Partners + + + + + + + +
+ + + + + CodeRabbit + + + + + + + + Cloudflare + + + + + + + + Unkey + + +
+ +
+Pacer & you? +

+We're looking for TanStack Pacer Partners to join our mission! Partner with us to push the boundaries of TanStack Pacer and build amazing things together. +

+LET'S CHAT +
+ + + +## Explore the TanStack Ecosystem + +- TanStack Config – Tooling for JS/TS packages +- TanStack DB – Reactive sync client store +- TanStack DevTools – Unified devtools panel +- TanStack Form – Type‑safe form state +- TanStack Query – Async state & caching +- TanStack Ranger – Range & slider primitives +- TanStack Router – Type‑safe routing, caching & URL state +- TanStack Start – Full‑stack SSR & streaming +- TanStack Store – Reactive data store +- TanStack Table – Headless datagrids +- TanStack Virtual – Virtualized rendering + +… and more at TanStack.com » + + diff --git a/packages/solid-pacer-devtools/README.md b/packages/solid-pacer-devtools/README.md new file mode 100644 index 00000000..42347dcc --- /dev/null +++ b/packages/solid-pacer-devtools/README.md @@ -0,0 +1,165 @@ +
+ +
+ +
+ + + + + +
+ +### [Become a Sponsor!](https://github.com/sponsors/tannerlinsley/) +
+ +# TanStack Pacer + +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. + +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. +- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of 
choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. +- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. + +### Read the docs → + +
+ +> [!NOTE] +> You may know **TanStack Pacer** by our adapter names, too! +> +> - [**React Pacer**](https://tanstack.com/pacer/latest/docs/framework/react/react-pacer) +> - [**Solid Pacer**](https://tanstack.com/pacer/latest/docs/framework/solid/solid-pacer) +> - Angular Pacer - needs a contributor! +> - Preact Pacer - Coming soon! (After React Pacer is more fleshed out) +> - Svelte Pacer - needs a contributor! +> - Vue Pacer - needs a contributor! + +## Get Involved + +- We welcome issues and pull requests! +- Participate in [GitHub discussions](https://github.com/TanStack/pacer/discussions) +- Chat with the community on [Discord](https://discord.com/invite/WrRKjPJ) +- See [CONTRIBUTING.md](./CONTRIBUTING.md) for setup instructions + +## Partners + + + + + + + +
+ + + + + CodeRabbit + + + + + + + + Cloudflare + + + + + + + + Unkey + + +
+ +
+Pacer & you? +

+We're looking for TanStack Pacer Partners to join our mission! Partner with us to push the boundaries of TanStack Pacer and build amazing things together. +

+LET'S CHAT +
+ + + +## Explore the TanStack Ecosystem + +- TanStack Config – Tooling for JS/TS packages +- TanStack DB – Reactive sync client store +- TanStack DevTools – Unified devtools panel +- TanStack Form – Type‑safe form state +- TanStack Query – Async state & caching +- TanStack Ranger – Range & slider primitives +- TanStack Router – Type‑safe routing, caching & URL state +- TanStack Start – Full‑stack SSR & streaming +- TanStack Store – Reactive data store +- TanStack Table – Headless datagrids +- TanStack Virtual – Virtualized rendering + +… and more at TanStack.com » + + diff --git a/packages/solid-pacer/README.md b/packages/solid-pacer/README.md new file mode 100644 index 00000000..42347dcc --- /dev/null +++ b/packages/solid-pacer/README.md @@ -0,0 +1,165 @@ +
+ +
+ +
+ + + + + +
+ +### [Become a Sponsor!](https://github.com/sponsors/tannerlinsley/) +
+ +# TanStack Pacer + +A lightweight timing and scheduling library for debouncing, throttling, rate limiting, queuing, and batching. + +> [!NOTE] +> TanStack Pacer is currently mostly a client-side only library, but it is being designed to be able to potentially be used on the server-side as well. + +- **Debouncing** + - Delay execution until after a period of inactivity for when you only care about the last execution in a sequence. + - Synchronous or Asynchronous Debounce utilities with promise support and error handling + - Control of leading, trailing, and enabled options +- **Throttling** + - Smoothly limit the rate at which a function can fire + - Synchronous or Asynchronous Throttle utilities with promise support and error handling + - Control of leading, trailing, and enabled options. +- **Rate Limiting** + - Limit the rate at which a function can fire over a period of time + - Synchronous or Asynchronous Rate Limiting utilities with promise support and error handling + - Fixed or Sliding Window variations of Rate Limiting +- **Queuing** + - Queue functions to be executed in a specific order + - Choose from FIFO, LIFO, and Priority queue implementations + - Control processing speed with configurable wait times or concurrency limits + - Manage queue execution with start/stop capabilities + - Expire items from the queue after a configurable duration +- **Batching** + - Chunk up multiple operations into larger batches to reduce total back-and-forth operations + - Batch by time period, batch size, whichever comes first, or a custom condition to trigger batch executions +- **Async or Sync Variations** + - Choose between synchronous and asynchronous versions of each utility + - Optional error, success, and settled handling for async variations + - Retry and Abort support for async variations +- **State Management** + - Uses TanStack Store under the hood for state management with fine-grained reactivity + - Easily integrate with your own state management library of 
choice + - Persist state to local or session storage for some utilities like rate limiting and queuing +- **Convenient Hooks** + - Reduce boilerplate code with pre-built hooks like `useDebouncedCallback`, `useThrottledValue`, and `useQueuedState`, and more. + - Multiple layers of abstraction to choose from depending on your use case. + - Works with each framework's default state management solutions, or with whatever custom state management library that you prefer. +- **Type Safety** + - Full type safety with TypeScript that makes sure that your functions will always be called with the correct arguments + - Generics for flexible and reusable utilities +- **Framework Adapters** + - React, Solid, and more +- **Tree Shaking** + - We, of course, get tree-shaking right for your applications by default, but we also provide extra deep imports for each utility, making it easier to embed these utilities into your libraries without increasing the bundle-phobia reports of your library. + +### Read the docs → + +
+ +> [!NOTE] +> You may know **TanStack Pacer** by our adapter names, too! +> +> - [**React Pacer**](https://tanstack.com/pacer/latest/docs/framework/react/react-pacer) +> - [**Solid Pacer**](https://tanstack.com/pacer/latest/docs/framework/solid/solid-pacer) +> - Angular Pacer - needs a contributor! +> - Preact Pacer - Coming soon! (After React Pacer is more fleshed out) +> - Svelte Pacer - needs a contributor! +> - Vue Pacer - needs a contributor! + +## Get Involved + +- We welcome issues and pull requests! +- Participate in [GitHub discussions](https://github.com/TanStack/pacer/discussions) +- Chat with the community on [Discord](https://discord.com/invite/WrRKjPJ) +- See [CONTRIBUTING.md](./CONTRIBUTING.md) for setup instructions + +## Partners + + + + + + + +
+ + + + + CodeRabbit + + + + + + + + Cloudflare + + + + + + + + Unkey + + +
+ +
+Pacer & you? +

+We're looking for TanStack Pacer Partners to join our mission! Partner with us to push the boundaries of TanStack Pacer and build amazing things together. +

+LET'S CHAT +
+ + + +## Explore the TanStack Ecosystem + +- TanStack Config – Tooling for JS/TS packages +- TanStack DB – Reactive sync client store +- TanStack DevTools – Unified devtools panel +- TanStack Form – Type‑safe form state +- TanStack Query – Async state & caching +- TanStack Ranger – Range & slider primitives +- TanStack Router – Type‑safe routing, caching & URL state +- TanStack Start – Full‑stack SSR & streaming +- TanStack Store – Reactive data store +- TanStack Table – Headless datagrids +- TanStack Virtual – Virtualized rendering + +… and more at TanStack.com » + + diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c2ab253d..812f5249 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1619,6 +1619,106 @@ importers: specifier: ^2.11.10 version: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.10)(vite@7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1)) + examples/vanilla/LiteBatcher: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/LiteDebouncer: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/LiteQueuer: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/LiteRateLimiter: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/LiteThrottler: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: 
link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/liteBatch: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/liteDebounce: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/liteQueue: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/liteRateLimit: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + + examples/vanilla/liteThrottle: + dependencies: + '@tanstack/pacer-lite': + specifier: 0.0.1 + version: link:../../../packages/pacer-lite + devDependencies: + vite: + specifier: ^7.2.2 + version: 7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1) + packages/pacer: dependencies: '@tanstack/devtools-event-client': @@ -1659,6 +1759,12 @@ importers: specifier: ^2.11.10 version: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.10)(vite@7.2.2(@types/node@24.10.1)(jiti@2.6.0)(yaml@2.8.1)) + packages/pacer-lite: + devDependencies: + '@tanstack/pacer': + specifier: workspace:* + version: link:../pacer + packages/react-pacer: dependencies: '@tanstack/pacer': diff --git a/vitest.workspace.js b/vitest.workspace.js index fcb88e20..e5a5d999 100644 --- a/vitest.workspace.js +++ b/vitest.workspace.js @@ -4,10 +4,11 @@ export default defineConfig({ test: { 
projects: [ './packages/pacer/vite.config.ts', - './packages/persister/vite.config.ts', + './packages/pacer-lite/vite.config.ts', './packages/react-pacer/vite.config.ts', - './packages/react-persister/vite.config.ts', + './packages/react-pacer-devtools/vite.config.ts', './packages/solid-pacer/vite.config.ts', + './packages/solid-pacer-devtools/vite.config.ts', ], }, })