From 83a7b1bee44a128714c31b8c369dedb5e963e21f Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 1 Oct 2024 22:19:57 +0600 Subject: [PATCH 01/45] inital commit: queueing event processor --- lib/event_processor/dispatch_strategy.ts | 55 +++++++++ lib/event_processor/eventProcessor.ts | 2 +- .../queueing_event_processor.ts | 36 ++++++ lib/utils/cache/cache.ts | 23 ++++ lib/utils/cache/store.ts | 104 ++++++++++++++++++ lib/utils/type.ts | 1 + 6 files changed, 220 insertions(+), 1 deletion(-) create mode 100644 lib/event_processor/dispatch_strategy.ts create mode 100644 lib/event_processor/queueing_event_processor.ts create mode 100644 lib/utils/cache/cache.ts create mode 100644 lib/utils/cache/store.ts diff --git a/lib/event_processor/dispatch_strategy.ts b/lib/event_processor/dispatch_strategy.ts new file mode 100644 index 000000000..12e3bce3e --- /dev/null +++ b/lib/event_processor/dispatch_strategy.ts @@ -0,0 +1,55 @@ +import { ExponentialBackoff } from '../utils/repeater/repeater'; +import { AsyncProducer } from '../utils/type'; + +export interface DispatchStrategy { + close(): void; + registerDispatcher(dispatcher: AsyncProducer): void; + notifyBatch(): void; +} + +enum State { + Idle, + BackingOff, + Dispatching, +} + +export class BackoffDispatchStrategy implements DispatchStrategy { + private dispatcher: AsyncProducer; + private backoff: ExponentialBackoff; + private state: State = State.Idle; + + constructor(backoff: ExponentialBackoff) { + this.backoff = backoff; + } + + public stop(): void { + } + + private async executeDispatcher(): Promise { + this.state = State.Dispatching; + this.dispatcher().then((hasMoreBatches) => { + this.state = State.Idle; + this.backoff.reset(); + if (hasMoreBatches) { + this.executeDispatcher(); + } + }).catch((err) => { + this.state = State.BackingOff; + setTimeout(() => { + this.executeDispatcher(); + }, this.backoff.backoff()); + }); + } + + public registerDispatcher(dispatcher: AsyncProducer): void { + this.dispatcher = dispatcher; + } + + public notifyBatch(): void { + if (this.state !== State.Idle) { + return; + } + + this.executeDispatcher(); + } +} diff --git a/lib/event_processor/eventProcessor.ts b/lib/event_processor/eventProcessor.ts index fa2cab200..b20b20e31 100644 --- a/lib/event_processor/eventProcessor.ts +++ b/lib/event_processor/eventProcessor.ts @@ -32,7 +32,7 @@ export type ProcessableEvent = ConversionEvent | ImpressionEvent export type EventDispatchResult = { result: boolean; event: ProcessableEvent } export interface EventProcessor extends Managed { - process(event: ProcessableEvent): void + process(event: ProcessableEvent): Promise } export function validateAndGetFlushInterval(flushInterval: number): number { diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts new file mode 100644 index 000000000..4d5a9b52e --- /dev/null +++ b/lib/event_processor/queueing_event_processor.ts @@ -0,0 +1,36 @@ +import { EventProcessor, ProcessableEvent } from "./eventProcessor"; +import { Cache } from "../utils/cache/cache"; +import { EventV1Request } from "./eventDispatcher"; +import { formatEvents } from "../core/event_builder/build_event_v1"; +export class QueueingEventProcessor implements EventProcessor { + private eventQueue: ProcessableEvent[] = []; + private readonly maxQueueSize: number; + private eventCache: Cache; + private pendingEventsCache: Cache + private maxPendingEvents: number; + + private async createNewEventBatch(): Promise { + const request = 
formatEvents(this.eventQueue); + const dispatchId = this.getDispatchId(); + await this.pendingEventsCache.set(dispatchId, request); + } + + private getDispatchId(): string { + const time = Date.now(); + return `${time}-${Math.random().toFixed(2)}`; + } + + process(event: ProcessableEvent): Promise { + if (this.eventQueue.length == this.maxQueueSize) { + + } + } + + start(): Promise { + throw new Error("Method not implemented."); + } + + stop(): Promise { + throw new Error("Method not implemented."); + } +} diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts new file mode 100644 index 000000000..50e4e750c --- /dev/null +++ b/lib/utils/cache/cache.ts @@ -0,0 +1,23 @@ +export interface SyncCache { + operation: 'sync'; + set(key: string, value: V): void; + get(key: string): V; + remove(key: string): void; + clear(): void; + getKeys(): string[]; + getAll(): Map; + size(): number; +}; + +export interface AsyncCache { + operation: 'async'; + set(key: string, value: V): Promise; + get(key: string): Promise; + remove(key: string): Promise; + clear(): Promise; + getKeys(): Promise; + getAll():Promise>; + size(): number; +}; + +export type Cache = SyncCache | AsyncCache; diff --git a/lib/utils/cache/store.ts b/lib/utils/cache/store.ts new file mode 100644 index 000000000..e29b89217 --- /dev/null +++ b/lib/utils/cache/store.ts @@ -0,0 +1,104 @@ +import { SyncCache, AsyncCache } from "./cache"; + +export class SyncPrefixStore implements SyncCache { + private cache: SyncCache; + private prefix: string; + public readonly operation = 'sync'; + + constructor(cache: SyncCache, prefix: string) { + this.cache = cache; + this.prefix = prefix; + } + + private addPrefix(key: string): string { + return `${this.prefix}:${key}`; + } + + private removePrefix(key: string): string { + return key.substring(this.prefix.length + 1); + } + + set(key: string, value: V): void { + return this.cache.set(this.addPrefix(key), value); + } + + get(key: string): V { + return this.cache.get(this.addPrefix(key)); + } + + remove(key: string): void { + return this.cache.remove(this.addPrefix(key)); + } + + clear(): void { + this.getInternalKeys().forEach((key) => this.cache.remove(key)); + } + + private getInternalKeys(): string[] { + return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); + } + + getKeys(): string[] { + return this.getInternalKeys().map((key) => this.removePrefix(key)); + } + + getAll(): Map { + const map = new Map(); + this.getInternalKeys().forEach((key) => + map.set(this.removePrefix(key), this.cache.get(key))); + return map; + } +} + + +export class AyncPrefixStore implements AsyncCache { + private cache: AsyncCache; + private prefix: string; + public readonly operation = 'async'; + + constructor(cache: AsyncCache, prefix: string) { + this.cache = cache; + this.prefix = prefix; + } + + private addPrefix(key: string): string { + return `${this.prefix}:${key}`; + } + + private removePrefix(key: string): string { + return key.substring(this.prefix.length + 1); + } + + set(key: string, value: V): Promise { + return this.cache.set(this.addPrefix(key), value); + } + + get(key: string): Promise { + return this.cache.get(this.addPrefix(key)); + } + + remove(key: string): Promise { + return this.cache.remove(this.addPrefix(key)); + } + + async clear(): Promise { + const keys = await this.getInternalKeys(); + await Promise.all(keys.map((key) => this.cache.remove(key))); + } + + private async getInternalKeys(): Promise { + return this.cache.getKeys().then((keys) => keys.filter((key) => 
key.startsWith(this.prefix))); + } + + async getKeys(): Promise { + return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key))); + } + + async getAll(): Promise> { + const keys = await this.getInternalKeys(); + const values = await Promise.all(keys.map((key) => this.cache.get(key))); + const map = new Map(); + keys.forEach((key, index) => map.set(this.removePrefix(key), values[index])); + return map; + } +} diff --git a/lib/utils/type.ts b/lib/utils/type.ts index 9c9a704dc..7b2df5bed 100644 --- a/lib/utils/type.ts +++ b/lib/utils/type.ts @@ -15,6 +15,7 @@ */ export type Fn = () => void; +export type AsyncFn = () => Promise; export type AsyncTransformer = (arg: A) => Promise; export type Transformer = (arg: A) => B; From 9a139e41a7ae6c7b07c25e2c4a71a538469f1b87 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Sat, 5 Oct 2024 03:56:05 +0600 Subject: [PATCH 02/45] saving --- lib/event_processor/dispatch_manager.ts | 32 ++++++++++ lib/utils/executor/backoff_retry_runner.ts | 43 ++++++++++++++ .../executor/concurrency_limited_executor.ts | 59 +++++++++++++++++++ lib/utils/executor/executor.ts | 6 ++ lib/utils/executor/task_runner.ts | 6 ++ lib/utils/queue/queue.ts | 31 ++++++++++ 6 files changed, 177 insertions(+) create mode 100644 lib/event_processor/dispatch_manager.ts create mode 100644 lib/utils/executor/backoff_retry_runner.ts create mode 100644 lib/utils/executor/concurrency_limited_executor.ts create mode 100644 lib/utils/executor/executor.ts create mode 100644 lib/utils/executor/task_runner.ts create mode 100644 lib/utils/queue/queue.ts diff --git a/lib/event_processor/dispatch_manager.ts b/lib/event_processor/dispatch_manager.ts new file mode 100644 index 000000000..c1c6501b5 --- /dev/null +++ b/lib/event_processor/dispatch_manager.ts @@ -0,0 +1,32 @@ +import { BaseService, Service } from "../service"; +import { Executor } from "../utils/executor/executor"; +import { EventDispatcher, EventV1Request } from "./eventDispatcher"; +import { Cache } from "../utils/cache/cache"; + +interface DispatchManager extends Service { + addRequest(request: EventV1Request): Promise +} + +class DispatchManagerImpl extends BaseService implements DispatchManager { + private eventDispatcher: EventDispatcher; + private executor: Executor; + private cache: Cache; + + start(): void { + throw new Error("Method not implemented."); + } + + stop(): void { + throw new Error("Method not implemented."); + } + + getId(): string { + throw new Error("Method not implemented."); + } + + async addRequest(request: EventV1Request): Promise { + const id = this.getId(); + await this.cache.set(id, request); + await this.executor.execute(() => this.eventDispatcher.dispatch(request)); + } +} diff --git a/lib/utils/executor/backoff_retry_runner.ts b/lib/utils/executor/backoff_retry_runner.ts new file mode 100644 index 000000000..505348fe5 --- /dev/null +++ b/lib/utils/executor/backoff_retry_runner.ts @@ -0,0 +1,43 @@ +import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromise"; +import { BackoffController } from "../repeater/repeater"; +import { AsyncFn } from "../type"; +import { scheduleMicrotask } from "../microtask"; +import { TaskRunner } from "./task_runner"; + +class BackoffRetryRunner implements TaskRunner { + private maxRetries?: number; + private backoff: BackoffController; + + constructor(backoff: BackoffController, maxRetries?: number) { + this.maxRetries = maxRetries; + this.backoff = backoff; + } + + private exectueWithBackoff(task: AsyncFn, nTry: number, backoff: 
BackoffController, returnPromise: ResolvablePromise): void { + if (this.maxRetries && nTry > this.maxRetries) { + returnPromise.reject(new Error(`Task failed after ${nTry} retries`)); + return; + } + + task().then(() => { + returnPromise.resolve(); + }).catch((e) => { + const delay = backoff.backoff(); + setTimeout(() => { + this.exectueWithBackoff(task, nTry + 1, backoff, returnPromise); + }, delay); + }); + } + + async run(task: AsyncFn): Promise { + const returnPromise = resolvablePromise(); + scheduleMicrotask(() => { + this.exectueWithBackoff(task, 1, this.backoff, returnPromise); + }); + return returnPromise.promise; + } + + async close(): Promise { + // this.backoff.close(); + } +} diff --git a/lib/utils/executor/concurrency_limited_executor.ts b/lib/utils/executor/concurrency_limited_executor.ts new file mode 100644 index 000000000..f1f79a818 --- /dev/null +++ b/lib/utils/executor/concurrency_limited_executor.ts @@ -0,0 +1,59 @@ +import { BaseService } from "../../service"; +import { scheduleMicrotask } from "../microtask"; +import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromise"; +import { AsyncFn } from "../type"; +import { Executor } from "./executor"; +import { TaskRunner } from "./task_runner"; + +type RunnerFactory = () => TaskRunner; + +class ConcurrencyLimitedExecutor extends BaseService implements Executor { + private maxConcurrent: number; + private queue: Queue<[AsyncFn, ResolvablePromise]>; + private nRunning = 0; + private runnerFactory: RunnerFactory; + + constructor(maxConcurrent: number, maxQueueLength: number, runnerFactory: RunnerFactory) { + super(); + this.maxConcurrent = maxConcurrent; + this.runnerFactory = runnerFactory; + this.queue = new Queue(maxQueueLength); + } + + start(): void { + throw new Error("Method not implemented."); + } + stop(): void { + throw new Error("Method not implemented."); + } + + private runFromQueue(): void { + if (this.nRunning == this.maxConcurrent) { + return; + } + + const task = this.queue.dequeue(); + if (!task) { + return; + } + + this.nRunning++; + this.runnerFactory().run(task[0]).then(() => { + task[1].resolve(); + }).catch((e) => { + task[1].reject(e); + }).finally(() => { + this.nRunning--; + this.runFromQueue(); + }); + } + + async execute(task: AsyncFn): Promise { + const result = resolvablePromise(); + this.queue.enqueue([task, result]); + scheduleMicrotask(() => { + this.runFromQueue(); + }); + return result.promise; + } +} diff --git a/lib/utils/executor/executor.ts b/lib/utils/executor/executor.ts new file mode 100644 index 000000000..76d0e9c95 --- /dev/null +++ b/lib/utils/executor/executor.ts @@ -0,0 +1,6 @@ +import { Service } from "../../service"; +import { AsyncFn } from "../type"; + +export interface Executor extends Service { + execute(task: AsyncFn): Promise; +} diff --git a/lib/utils/executor/task_runner.ts b/lib/utils/executor/task_runner.ts new file mode 100644 index 000000000..503fda6df --- /dev/null +++ b/lib/utils/executor/task_runner.ts @@ -0,0 +1,6 @@ +import { AsyncFn } from "../type"; + +export interface TaskRunner { + run(task: AsyncFn): Promise; + close(): Promise; +} diff --git a/lib/utils/queue/queue.ts b/lib/utils/queue/queue.ts new file mode 100644 index 000000000..e42c1c847 --- /dev/null +++ b/lib/utils/queue/queue.ts @@ -0,0 +1,31 @@ +class Queue { + private maxQueueSize: number; + private queue: T[]; + private nItems: number; + private tail: number; + + constructor(maxQueueSize: number) { + this.maxQueueSize = maxQueueSize; + this.queue = new 
Array(maxQueueSize); + this.nItems = 0; + this.tail = 0; + } + + enqueue(item: T): void { + if (this.queue.length === this.maxQueueSize) { + throw new Error("Queue is full"); + } + this.queue[this.tail] = item; + this.nItems++; + this.tail = (this.tail + 1) % this.maxQueueSize; + } + + dequeue(): T | undefined { + if (this.nItems === 0) { + return undefined; + } + const item = this.queue[(this.tail - this.nItems + this.maxQueueSize) % this.maxQueueSize]; + this.nItems--; + return item; + } +} From be3cbd33ea4391d520388965aaae34051254935f Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Mon, 7 Oct 2024 20:39:59 +0600 Subject: [PATCH 03/45] s --- ...atch_manager.ts => dispatch_controller.ts} | 6 +- lib/event_processor/eventProcessor.ts | 11 ++-- .../forwarding_event_processor.ts | 52 +++++++++------ lib/event_processor/managed.ts | 20 ------ .../queueing_event_processor.ts | 63 ++++++++++++++----- lib/optimizely/index.ts | 9 ++- lib/utils/executor/backoff_retry_runner.ts | 60 ++++++++---------- lib/utils/queue/queue.ts | 12 ++++ 8 files changed, 134 insertions(+), 99 deletions(-) rename lib/event_processor/{dispatch_manager.ts => dispatch_controller.ts} (86%) delete mode 100644 lib/event_processor/managed.ts diff --git a/lib/event_processor/dispatch_manager.ts b/lib/event_processor/dispatch_controller.ts similarity index 86% rename from lib/event_processor/dispatch_manager.ts rename to lib/event_processor/dispatch_controller.ts index c1c6501b5..e0f1fdec6 100644 --- a/lib/event_processor/dispatch_manager.ts +++ b/lib/event_processor/dispatch_controller.ts @@ -3,11 +3,11 @@ import { Executor } from "../utils/executor/executor"; import { EventDispatcher, EventV1Request } from "./eventDispatcher"; import { Cache } from "../utils/cache/cache"; -interface DispatchManager extends Service { - addRequest(request: EventV1Request): Promise +export interface DispatchController extends Service { + handleBatch(request: EventV1Request): Promise } -class DispatchManagerImpl extends BaseService implements DispatchManager { +class DispatchManagerImpl extends BaseService implements DispatchController { private eventDispatcher: EventDispatcher; private executor: Executor; private cache: Cache; diff --git a/lib/event_processor/eventProcessor.ts b/lib/event_processor/eventProcessor.ts index b20b20e31..b8e43ce89 100644 --- a/lib/event_processor/eventProcessor.ts +++ b/lib/event_processor/eventProcessor.ts @@ -13,14 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -// TODO change this to use Managed from js-sdk-models when available -import { Managed } from './managed' import { ConversionEvent, ImpressionEvent } from './events' import { EventV1Request } from './eventDispatcher' import { EventQueue, DefaultEventQueue, SingleEventQueue, EventQueueSink } from './eventQueue' import { getLogger } from '../modules/logging' import { NOTIFICATION_TYPES } from '../utils/enums' import { NotificationSender } from '../core/notification_center' +import { Service } from '../service' +import { Consumer, Fn } from '../utils/type'; export const DEFAULT_FLUSH_INTERVAL = 30000 // Unit is ms - default flush interval is 30s export const DEFAULT_BATCH_SIZE = 10 @@ -29,10 +29,9 @@ const logger = getLogger('EventProcessor') export type ProcessableEvent = ConversionEvent | ImpressionEvent -export type EventDispatchResult = { result: boolean; event: ProcessableEvent } - -export interface EventProcessor extends Managed { - process(event: ProcessableEvent): Promise +export interface EventProcessor extends Service { + process(event: ProcessableEvent): Promise; + onDispatch(handler: Consumer): Fn; } export function validateAndGetFlushInterval(flushInterval: number): number { diff --git a/lib/event_processor/forwarding_event_processor.ts b/lib/event_processor/forwarding_event_processor.ts index 919710c53..a22296fa2 100644 --- a/lib/event_processor/forwarding_event_processor.ts +++ b/lib/event_processor/forwarding_event_processor.ts @@ -16,6 +16,7 @@ import { EventProcessor, + EventV1Request, ProcessableEvent, } from '.'; import { NotificationSender } from '../core/notification_center'; @@ -23,36 +24,49 @@ import { NotificationSender } from '../core/notification_center'; import { EventDispatcher } from '../shared_types'; import { NOTIFICATION_TYPES } from '../utils/enums'; import { formatEvents } from '../core/event_builder/build_event_v1'; - -class ForwardingEventProcessor implements EventProcessor { +import { BaseService, ServiceState } from '../service'; +import { EventEmitter } from '../utils/event_emitter/event_emitter'; +import { Consumer, Fn } from '../utils/type'; +class ForwardingEventProcessor extends BaseService implements EventProcessor { + onDispatch(handler: Consumer): Fn { + throw new Error('Method not implemented.'); + } private dispatcher: EventDispatcher; - private NotificationSender?: NotificationSender; + private eventEmitter: EventEmitter<{ dispatch: EventV1Request }>; - constructor(dispatcher: EventDispatcher, notificationSender?: NotificationSender) { + constructor(dispatcher: EventDispatcher) { + super(); this.dispatcher = dispatcher; - this.NotificationSender = notificationSender; + this.eventEmitter = new EventEmitter(); } - process(event: ProcessableEvent): void { + process(event: ProcessableEvent): Promise { const formattedEvent = formatEvents([event]); - this.dispatcher.dispatchEvent(formattedEvent).catch(() => {}); - if (this.NotificationSender) { - this.NotificationSender.sendNotifications( - NOTIFICATION_TYPES.LOG_EVENT, - formattedEvent, - ) - } + const res = this.dispatcher.dispatchEvent(formattedEvent); + this.eventEmitter.emit('dispatch', formattedEvent); + return res; } - start(): Promise { - return Promise.resolve(); + start(): void { + if (!this.isNew()) { + return; + } + this.state = ServiceState.Running; + this.startPromise.resolve(); } - stop(): Promise { - return Promise.resolve(); + stop(): void { + if (this.isDone()) { + return; + } + this.state = ServiceState.Terminated; + if (this.isNew()) { + this.startPromise.reject(new 
Error('Service stopped before it was started')); + } + this.stopPromise.resolve(); } } -export function getForwardingEventProcessor(dispatcher: EventDispatcher, notificationSender?: NotificationSender): EventProcessor { - return new ForwardingEventProcessor(dispatcher, notificationSender); +export function getForwardingEventProcessor(dispatcher: EventDispatcher): EventProcessor { + return new ForwardingEventProcessor(dispatcher); } diff --git a/lib/event_processor/managed.ts b/lib/event_processor/managed.ts deleted file mode 100644 index dfb94e0f5..000000000 --- a/lib/event_processor/managed.ts +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export interface Managed { - start(): Promise - - stop(): Promise -} diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 4d5a9b52e..240e45389 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -2,27 +2,56 @@ import { EventProcessor, ProcessableEvent } from "./eventProcessor"; import { Cache } from "../utils/cache/cache"; import { EventV1Request } from "./eventDispatcher"; import { formatEvents } from "../core/event_builder/build_event_v1"; +import { Repeater } from "../utils/repeater/repeater"; +import { DispatchController } from "./dispatch_controller"; +import { LoggerFacade } from "../modules/logging"; + +type EventWithId = { + id: string; + event: ProcessableEvent; +}; + export class QueueingEventProcessor implements EventProcessor { - private eventQueue: ProcessableEvent[] = []; - private readonly maxQueueSize: number; - private eventCache: Cache; - private pendingEventsCache: Cache - private maxPendingEvents: number; - - private async createNewEventBatch(): Promise { - const request = formatEvents(this.eventQueue); - const dispatchId = this.getDispatchId(); - await this.pendingEventsCache.set(dispatchId, request); + private eventQueue: Queue = new Queue(1000); + private maxQueueSize: number = 1000; + private eventStore?: Cache; + private repeater: Repeater; + private dispatchController: DispatchController; + private logger?: LoggerFacade; + + private createNewBatch(): [EventV1Request, Array] | undefined { + if (this.eventQueue.isEmpty()) { + return + } + + const events: ProcessableEvent[] = []; + let event: EventWithId | undefined; + let ids: string[] = [] + while(event = this.eventQueue.dequeue()) { + events.push(event.event); + ids.push(event.id); + } + + return [formatEvents(events), ids]; } - private getDispatchId(): string { - const time = Date.now(); - return `${time}-${Math.random().toFixed(2)}`; + private async createNewEventBatch(): Promise { + return this.dispatchController.handleBatch(request).then(() => { + events.forEach((event) => { + this.eventStore?.remove(event.id); + }); + }).catch((err) => { + this.logger?.error('Failed to dispatch events', err); + }); } - process(event: ProcessableEvent): Promise { - if 
(this.eventQueue.length == this.maxQueueSize) { + constructor() { + } + + process(event: ProcessableEvent): Promise { + if (this.eventQueue.size() == this.maxQueueSize) { + } } @@ -33,4 +62,8 @@ export class QueueingEventProcessor implements EventProcessor { stop(): Promise { throw new Error("Method not implemented."); } + + public flushNow(): Promise { + throw new Error("Method not implemented."); + } } diff --git a/lib/optimizely/index.ts b/lib/optimizely/index.ts index c78154311..7d8edbfc0 100644 --- a/lib/optimizely/index.ts +++ b/lib/optimizely/index.ts @@ -171,12 +171,17 @@ export default class Optimizely implements Client { this.eventProcessor = config.eventProcessor; - const eventProcessorStartedPromise = this.eventProcessor ? this.eventProcessor.start() : + this.eventProcessor?.start(); + const eventProcessorRunningPromise = this.eventProcessor ? this.eventProcessor.onRunning() : Promise.resolve(undefined); + this.eventProcessor?.onDispatch((event) => { + this.notificationCenter.sendNotifications(NOTIFICATION_TYPES.LOG_EVENT, event as any); + }); + this.readyPromise = Promise.all([ projectConfigManagerRunningPromise, - eventProcessorStartedPromise, + eventProcessorRunningPromise, config.odpManager ? config.odpManager.onReady() : Promise.resolve(), ]); diff --git a/lib/utils/executor/backoff_retry_runner.ts b/lib/utils/executor/backoff_retry_runner.ts index 505348fe5..cc380fec3 100644 --- a/lib/utils/executor/backoff_retry_runner.ts +++ b/lib/utils/executor/backoff_retry_runner.ts @@ -2,42 +2,34 @@ import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromi import { BackoffController } from "../repeater/repeater"; import { AsyncFn } from "../type"; import { scheduleMicrotask } from "../microtask"; -import { TaskRunner } from "./task_runner"; -class BackoffRetryRunner implements TaskRunner { - private maxRetries?: number; - private backoff: BackoffController; - - constructor(backoff: BackoffController, maxRetries?: number) { - this.maxRetries = maxRetries; - this.backoff = backoff; - } - - private exectueWithBackoff(task: AsyncFn, nTry: number, backoff: BackoffController, returnPromise: ResolvablePromise): void { - if (this.maxRetries && nTry > this.maxRetries) { - returnPromise.reject(new Error(`Task failed after ${nTry} retries`)); +const runTask = ( + task: AsyncFn, + returnPromise: ResolvablePromise, + backoff?: BackoffController, + retryRemaining?: number, +): void => { + task().then(() => { + returnPromise.resolve(); + }).catch((e) => { + if (retryRemaining === 0) { + returnPromise.reject(e); return; } + const delay = backoff?.backoff() ?? 0; + setTimeout(() => { + retryRemaining = retryRemaining === undefined ? 
undefined : retryRemaining - 1; + runTask(task, returnPromise, backoff, retryRemaining); + }, delay); + }); +} - task().then(() => { - returnPromise.resolve(); - }).catch((e) => { - const delay = backoff.backoff(); - setTimeout(() => { - this.exectueWithBackoff(task, nTry + 1, backoff, returnPromise); - }, delay); - }); - } - - async run(task: AsyncFn): Promise { - const returnPromise = resolvablePromise(); - scheduleMicrotask(() => { - this.exectueWithBackoff(task, 1, this.backoff, returnPromise); - }); - return returnPromise.promise; - } - - async close(): Promise { - // this.backoff.close(); - } +export const runWithRetry = ( + task: AsyncFn, + backoff?: BackoffController, + maxRetries?: number +) => { + const returnPromise = resolvablePromise(); + scheduleMicrotask(() => runTask(task, returnPromise, backoff, maxRetries)); + return returnPromise.promise; } diff --git a/lib/utils/queue/queue.ts b/lib/utils/queue/queue.ts index e42c1c847..99b43fc46 100644 --- a/lib/utils/queue/queue.ts +++ b/lib/utils/queue/queue.ts @@ -28,4 +28,16 @@ class Queue { this.nItems--; return item; } + + size(): number { + return this.nItems; + } + + isEmpty(): boolean { + return this.nItems === 0; + } + + isFull(): boolean { + return this.nItems === this.maxQueueSize; + } } From e1f41cee459bdff92ab79b80aa6fea76436f57cb Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 8 Oct 2024 01:01:21 +0600 Subject: [PATCH 04/45] saving --- .../queueing_event_processor.ts | 47 ++++++++++++++++--- 1 file changed, 40 insertions(+), 7 deletions(-) diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 240e45389..6e406dc58 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -11,6 +11,8 @@ type EventWithId = { event: ProcessableEvent; }; +const idSuffixBase = 10_000; + export class QueueingEventProcessor implements EventProcessor { private eventQueue: Queue = new Queue(1000); private maxQueueSize: number = 1000; @@ -18,6 +20,10 @@ export class QueueingEventProcessor implements EventProcessor { private repeater: Repeater; private dispatchController: DispatchController; private logger?: LoggerFacade; + private idSuffixOffset: number = 0; + + constructor() { + } private createNewBatch(): [EventV1Request, Array] | undefined { if (this.eventQueue.isEmpty()) { @@ -35,24 +41,51 @@ export class QueueingEventProcessor implements EventProcessor { return [formatEvents(events), ids]; } - private async createNewEventBatch(): Promise { + private async dispatchNewBatch(): Promise { + const batch = this.createNewBatch(); + if (!batch) { + return; + } + + const [request, ids] = batch; + return this.dispatchController.handleBatch(request).then(() => { - events.forEach((event) => { - this.eventStore?.remove(event.id); + // if the dispatch controller succeeds, remove the events from the store + ids.forEach((id) => { + this.eventStore?.remove(id); }); }).catch((err) => { + // if the dispatch controller fails, the events will still be + // in the store for future processing this.logger?.error('Failed to dispatch events', err); }); } - constructor() { - + // getId returns an Id that generally increases with each call + // only exceptions are when idSuffix rotates back to 0 within the same millisecond + // or when the clock goes back + getId(): string { + const idSuffix = idSuffixBase + this.idSuffixOffset; + this.idSuffixOffset = (this.idSuffixOffset + 1) % idSuffixBase; + const timestamp = Date.now(); + return 
`${timestamp}${idSuffix}`; } - process(event: ProcessableEvent): Promise { + async process(event: ProcessableEvent): Promise { if (this.eventQueue.size() == this.maxQueueSize) { - + this.dispatchNewBatch(); } + + const eventWithId = { + id: this.getId(), + event: event, + }; + + await this.eventStore?.set(eventWithId.id, eventWithId); + this.eventQueue.enqueue({ + id: this.getId(), + event: event, + }); } start(): Promise { From 9c82bee13c2c27a7311119c93edacfc03702f073 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 8 Oct 2024 01:36:24 +0600 Subject: [PATCH 05/45] save --- lib/event_processor/dispatch_controller.ts | 2 +- .../queueing_event_processor.ts | 52 +++++++++++-------- lib/utils/id_generator/index.ts | 15 ++++++ lib/utils/repeater/repeater.ts | 2 +- 4 files changed, 48 insertions(+), 23 deletions(-) create mode 100644 lib/utils/id_generator/index.ts diff --git a/lib/event_processor/dispatch_controller.ts b/lib/event_processor/dispatch_controller.ts index e0f1fdec6..d6356d0b2 100644 --- a/lib/event_processor/dispatch_controller.ts +++ b/lib/event_processor/dispatch_controller.ts @@ -7,7 +7,7 @@ export interface DispatchController extends Service { handleBatch(request: EventV1Request): Promise } -class DispatchManagerImpl extends BaseService implements DispatchController { +class DispatchControllerImpl extends BaseService implements DispatchController { private eventDispatcher: EventDispatcher; private executor: Executor; private cache: Cache; diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 6e406dc58..9403f3c6a 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -2,27 +2,50 @@ import { EventProcessor, ProcessableEvent } from "./eventProcessor"; import { Cache } from "../utils/cache/cache"; import { EventV1Request } from "./eventDispatcher"; import { formatEvents } from "../core/event_builder/build_event_v1"; -import { Repeater } from "../utils/repeater/repeater"; +import { IntervalRepeater, Repeater } from "../utils/repeater/repeater"; import { DispatchController } from "./dispatch_controller"; import { LoggerFacade } from "../modules/logging"; +import { BaseService, ServiceState } from "../service"; +import { Consumer, Fn } from "../utils/type"; -type EventWithId = { +export type EventWithId = { id: string; event: ProcessableEvent; }; -const idSuffixBase = 10_000; +export type QueueingEventProcessorConfig = { + flushInterval: number, + maxQueueSize: 1000, + eventStore: Cache, + dispatchController: DispatchController, + logger?: LoggerFacade, +}; + -export class QueueingEventProcessor implements EventProcessor { +export class QueueingEventProcessor extends BaseService implements EventProcessor { private eventQueue: Queue = new Queue(1000); private maxQueueSize: number = 1000; + private flushInterval: number = 1000; private eventStore?: Cache; private repeater: Repeater; private dispatchController: DispatchController; private logger?: LoggerFacade; - private idSuffixOffset: number = 0; + private idGenerator: IdGenerator = new IdGenerator(); + + constructor(config: QueueingEventProcessorConfig) { + super(); + this.flushInterval = config.flushInterval; + this.maxQueueSize = config.maxQueueSize; + this.eventStore = config.eventStore; + this.dispatchController = config.dispatchController; + this.logger = config.logger; + + this.repeater = new IntervalRepeater(this.flushInterval); + this.repeater.setTask(this.dispatchNewBatch.bind(this)); + } - 
constructor() { + onDispatch(handler: Consumer): Fn { + throw new Error("Method not implemented."); } private createNewBatch(): [EventV1Request, Array] | undefined { @@ -61,31 +84,18 @@ export class QueueingEventProcessor implements EventProcessor { }); } - // getId returns an Id that generally increases with each call - // only exceptions are when idSuffix rotates back to 0 within the same millisecond - // or when the clock goes back - getId(): string { - const idSuffix = idSuffixBase + this.idSuffixOffset; - this.idSuffixOffset = (this.idSuffixOffset + 1) % idSuffixBase; - const timestamp = Date.now(); - return `${timestamp}${idSuffix}`; - } - async process(event: ProcessableEvent): Promise { if (this.eventQueue.size() == this.maxQueueSize) { this.dispatchNewBatch(); } const eventWithId = { - id: this.getId(), + id: this.idGenerator.getId(), event: event, }; await this.eventStore?.set(eventWithId.id, eventWithId); - this.eventQueue.enqueue({ - id: this.getId(), - event: event, - }); + this.eventQueue.enqueue(eventWithId); } start(): Promise { diff --git a/lib/utils/id_generator/index.ts b/lib/utils/id_generator/index.ts new file mode 100644 index 000000000..3a00fc56a --- /dev/null +++ b/lib/utils/id_generator/index.ts @@ -0,0 +1,15 @@ +const idSuffixBase = 10_000; + +class IdGenerator { + private idSuffixOffset: number = 0; + + // getId returns an Id that generally increases with each call. + // only exceptions are when idSuffix rotates back to 0 within the same millisecond + // or when the clock goes back + getId(): string { + const idSuffix = idSuffixBase + this.idSuffixOffset; + this.idSuffixOffset = (this.idSuffixOffset + 1) % idSuffixBase; + const timestamp = Date.now(); + return `${timestamp}${idSuffix}`; + } +} diff --git a/lib/utils/repeater/repeater.ts b/lib/utils/repeater/repeater.ts index f758f0dc9..1425db431 100644 --- a/lib/utils/repeater/repeater.ts +++ b/lib/utils/repeater/repeater.ts @@ -30,7 +30,7 @@ export interface Repeater { start(immediateExecution?: boolean): void; stop(): void; reset(): void; - setTask(task: AsyncTransformer): void; + setTask(task: AsyncTransformer): void; } export interface BackoffController { From 4a28d26051f95e6b6f8761ddfed47c465dd85b22 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 8 Oct 2024 23:46:11 +0600 Subject: [PATCH 06/45] saving --- lib/event_processor/dispatch_controller.ts | 45 ++++++++-- .../queueing_event_processor.ts | 1 - lib/service.ts | 4 + lib/utils/executor/backoff_retry_runner.ts | 27 ++++-- .../executor/concurrency_limited_executor.ts | 89 ++++++++++++++----- lib/utils/executor/executor.ts | 22 ++++- lib/utils/type.ts | 4 +- 7 files changed, 153 insertions(+), 39 deletions(-) diff --git a/lib/event_processor/dispatch_controller.ts b/lib/event_processor/dispatch_controller.ts index d6356d0b2..1eab0c1f1 100644 --- a/lib/event_processor/dispatch_controller.ts +++ b/lib/event_processor/dispatch_controller.ts @@ -2,15 +2,40 @@ import { BaseService, Service } from "../service"; import { Executor } from "../utils/executor/executor"; import { EventDispatcher, EventV1Request } from "./eventDispatcher"; import { Cache } from "../utils/cache/cache"; +import { EventEmitter } from '../utils/event_emitter/event_emitter'; +import { Consumer, Fn } from "../utils/type"; export interface DispatchController extends Service { handleBatch(request: EventV1Request): Promise + onDispatch(handler: Consumer): Fn; +} + +export type EventRequestWithId = { + id: string; + event: EventV1Request; +}; + +export type DispatchControllerConfig = { + 
eventDispatcher: EventDispatcher; + executor: Executor; + requestStore?: Cache; } class DispatchControllerImpl extends BaseService implements DispatchController { private eventDispatcher: EventDispatcher; private executor: Executor; - private cache: Cache; + private eventEmitter: EventEmitter<{ dispatch: EventV1Request }>; + + constructor(config: DispatchControllerConfig) { + super(); + this.eventDispatcher = config.eventDispatcher; + this.executor = config.executor; + this.eventEmitter = new EventEmitter(); + } + + onDispatch(handler: Consumer): Fn { + return this.eventEmitter.on('dispatch', handler); + } start(): void { throw new Error("Method not implemented."); @@ -20,13 +45,17 @@ class DispatchControllerImpl extends BaseService implements DispatchController { throw new Error("Method not implemented."); } - getId(): string { - throw new Error("Method not implemented."); - } + async handleBatch(request: EventV1Request): Promise { + const executorResponse = this.executor.submit(() => { + const dispatchRes = this.eventDispatcher.dispatchEvent(request); + this.eventEmitter.emit('dispatch', request); + return dispatchRes; + }); + + if (executorResponse.accepted) { + return executorResponse.result; + } - async addRequest(request: EventV1Request): Promise { - const id = this.getId(); - await this.cache.set(id, request); - await this.executor.execute(() => this.eventDispatcher.dispatch(request)); + return Promise.reject(executorResponse.error); } } diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 9403f3c6a..9ceaf8d50 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -21,7 +21,6 @@ export type QueueingEventProcessorConfig = { logger?: LoggerFacade, }; - export class QueueingEventProcessor extends BaseService implements EventProcessor { private eventQueue: Queue = new Queue(1000); private maxQueueSize: number = 1000; diff --git a/lib/service.ts b/lib/service.ts index 48ad8fbff..54f4924ed 100644 --- a/lib/service.ts +++ b/lib/service.ts @@ -77,6 +77,10 @@ export abstract class BaseService implements Service { return this.state === ServiceState.Starting; } + isRunning(): boolean { + return this.state === ServiceState.Running; + } + isNew(): boolean { return this.state === ServiceState.New; } diff --git a/lib/utils/executor/backoff_retry_runner.ts b/lib/utils/executor/backoff_retry_runner.ts index cc380fec3..86814a65a 100644 --- a/lib/utils/executor/backoff_retry_runner.ts +++ b/lib/utils/executor/backoff_retry_runner.ts @@ -1,14 +1,25 @@ import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromise"; import { BackoffController } from "../repeater/repeater"; -import { AsyncFn } from "../type"; +import { AsyncFn, Fn } from "../type"; import { scheduleMicrotask } from "../microtask"; +export type RunResult = { + result: Promise; + cancel: Fn; +}; + const runTask = ( task: AsyncFn, returnPromise: ResolvablePromise, backoff?: BackoffController, retryRemaining?: number, -): void => { +): Fn => { + let cancelled = false; + + const cancel = () => { + cancelled = true; + }; + task().then(() => { returnPromise.resolve(); }).catch((e) => { @@ -16,20 +27,26 @@ const runTask = ( returnPromise.reject(e); return; } + if (cancelled) { + returnPromise.reject(new Error('Retry cancelled')); + return; + } const delay = backoff?.backoff() ?? 0; setTimeout(() => { retryRemaining = retryRemaining === undefined ? 
undefined : retryRemaining - 1; runTask(task, returnPromise, backoff, retryRemaining); }, delay); }); + + return cancel; } export const runWithRetry = ( task: AsyncFn, backoff?: BackoffController, maxRetries?: number -) => { +): RunResult => { const returnPromise = resolvablePromise(); - scheduleMicrotask(() => runTask(task, returnPromise, backoff, maxRetries)); - return returnPromise.promise; + const cancel = runTask(task, returnPromise, backoff, maxRetries); + return { cancel, result: returnPromise.promise }; } diff --git a/lib/utils/executor/concurrency_limited_executor.ts b/lib/utils/executor/concurrency_limited_executor.ts index f1f79a818..072f78205 100644 --- a/lib/utils/executor/concurrency_limited_executor.ts +++ b/lib/utils/executor/concurrency_limited_executor.ts @@ -1,59 +1,104 @@ import { BaseService } from "../../service"; import { scheduleMicrotask } from "../microtask"; import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromise"; -import { AsyncFn } from "../type"; -import { Executor } from "./executor"; +import { BackoffController } from "../repeater/repeater"; +import { AsyncFn, Fn } from "../type"; +import { RunResult, runWithRetry } from "./backoff_retry_runner"; +import { SubmitResponse, Executor, RetryConfig } from "./executor"; import { TaskRunner } from "./task_runner"; -type RunnerFactory = () => TaskRunner; + +type TaskDefiniton = { + task: AsyncFn, + response: ResolvablePromise, + retryConfig?: RetryConfig, +} + +type RunningTask = { + result: Promise, + cancel?: Fn, +} class ConcurrencyLimitedExecutor extends BaseService implements Executor { private maxConcurrent: number; - private queue: Queue<[AsyncFn, ResolvablePromise]>; + private queue: Queue; private nRunning = 0; - private runnerFactory: RunnerFactory; + private runningTask: Map = new Map(); + private idGenerator: IdGenerator = new IdGenerator(); - constructor(maxConcurrent: number, maxQueueLength: number, runnerFactory: RunnerFactory) { + constructor(maxConcurrent: number, maxQueueLength: number) { super(); this.maxConcurrent = maxConcurrent; - this.runnerFactory = runnerFactory; this.queue = new Queue(maxQueueLength); } + forceExecuteAll(): Promise { + + } + start(): void { throw new Error("Method not implemented."); } + stop(): void { throw new Error("Method not implemented."); } - private runFromQueue(): void { - if (this.nRunning == this.maxConcurrent) { + + private handleTaskCompletion(id: string): void { + this.runningTask.delete(id); + this.nRunning--; + this.runFromQueue(); + } + + private runFromQueue(ignoreMaxConcurrency = false): void { + if (!this.isRunning()) { return; } - const task = this.queue.dequeue(); - if (!task) { + if (!ignoreMaxConcurrency && this.nRunning >= this.maxConcurrent) { return; } + const taskDefinition = this.queue.dequeue(); + if (!taskDefinition) { + return; + } + + const id = this.idGenerator.getId(); + + const { cancel, result } = taskDefinition.retryConfig ? 
+ runWithRetry(taskDefinition.task, taskDefinition.retryConfig.backoff, taskDefinition.retryConfig.maxRetries) : + { result: taskDefinition.task() }; + + this.runningTask.set(id, { result, cancel }); this.nRunning++; - this.runnerFactory().run(task[0]).then(() => { - task[1].resolve(); - }).catch((e) => { - task[1].reject(e); - }).finally(() => { - this.nRunning--; - this.runFromQueue(); + result.finally(() => { + this.handleTaskCompletion(id); }); } - async execute(task: AsyncFn): Promise { - const result = resolvablePromise(); - this.queue.enqueue([task, result]); + submit(task: AsyncFn, retryConfig?: RetryConfig): SubmitResponse { + if (!this.isRunning()) { + return { accepted: false, error: new Error('Executor is not running') }; + } + + if (this.queue.isFull()) { + return { accepted: false, error: new Error('Queue is full') }; + } + + const taskDefinition: TaskDefiniton = { + task, + response: resolvablePromise(), + retryConfig, + }; + + this.queue.enqueue(taskDefinition); + scheduleMicrotask(() => { this.runFromQueue(); }); - return result.promise; + + return { accepted: true, result: taskDefinition.response.promise }; } } diff --git a/lib/utils/executor/executor.ts b/lib/utils/executor/executor.ts index 76d0e9c95..ff34d00c6 100644 --- a/lib/utils/executor/executor.ts +++ b/lib/utils/executor/executor.ts @@ -1,6 +1,26 @@ import { Service } from "../../service"; +import { BackoffController } from "../repeater/repeater"; import { AsyncFn } from "../type"; +export type TaskAcceptedResponse = { + accepted: true, + result: Promise, +}; + +export type TaskRejectedResponse = { + accepted: false, + error: Error, +}; + +export type SubmitResponse = TaskAcceptedResponse | TaskRejectedResponse; + +export type RetryConfig = { + backoff?: BackoffController, + maxRetries?: number, +} + export interface Executor extends Service { - execute(task: AsyncFn): Promise; + submit(task: AsyncFn, retryConfig?: RetryConfig): SubmitResponse; + forceExecuteAll(): Promise; } + diff --git a/lib/utils/type.ts b/lib/utils/type.ts index 7b2df5bed..ee8a440b9 100644 --- a/lib/utils/type.ts +++ b/lib/utils/type.ts @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -export type Fn = () => void; -export type AsyncFn = () => Promise; +export type Fn = () => unknown; +export type AsyncFn = () => Promise; export type AsyncTransformer = (arg: A) => Promise; export type Transformer = (arg: A) => B; From 5fcafc129493c9e6e4b9598e29c02c08b089bc9c Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Wed, 9 Oct 2024 05:23:08 +0600 Subject: [PATCH 07/45] saving --- lib/event_processor/dispatch_controller.ts | 38 ++-- .../queueing_event_processor.react_native.ts | 40 +++++ .../queueing_event_processor.ts | 170 ++++++++++++++---- lib/utils/executor/backoff_retry_runner.ts | 28 +-- .../executor/concurrency_limited_executor.ts | 4 +- lib/utils/http_request_handler/http_util.ts | 4 + 6 files changed, 218 insertions(+), 66 deletions(-) create mode 100644 lib/event_processor/queueing_event_processor.react_native.ts create mode 100644 lib/utils/http_request_handler/http_util.ts diff --git a/lib/event_processor/dispatch_controller.ts b/lib/event_processor/dispatch_controller.ts index 1eab0c1f1..6e54a3642 100644 --- a/lib/event_processor/dispatch_controller.ts +++ b/lib/event_processor/dispatch_controller.ts @@ -1,35 +1,29 @@ import { BaseService, Service } from "../service"; import { Executor } from "../utils/executor/executor"; import { EventDispatcher, EventV1Request } from "./eventDispatcher"; -import { Cache } from "../utils/cache/cache"; import { EventEmitter } from '../utils/event_emitter/event_emitter'; import { Consumer, Fn } from "../utils/type"; +import { RunResult, runWithRetry } from "../utils/executor/backoff_retry_runner"; +import { ExponentialBackoff } from "../utils/repeater/repeater"; export interface DispatchController extends Service { handleBatch(request: EventV1Request): Promise onDispatch(handler: Consumer): Fn; } -export type EventRequestWithId = { - id: string; - event: EventV1Request; -}; - export type DispatchControllerConfig = { eventDispatcher: EventDispatcher; - executor: Executor; - requestStore?: Cache; } -class DispatchControllerImpl extends BaseService implements DispatchController { +class ImmediateDispatchDispatchController extends BaseService implements DispatchController { private eventDispatcher: EventDispatcher; - private executor: Executor; private eventEmitter: EventEmitter<{ dispatch: EventV1Request }>; + private runningTask: Map = new Map(); + private idGenerator: IdGenerator = new IdGenerator(); constructor(config: DispatchControllerConfig) { super(); this.eventDispatcher = config.eventDispatcher; - this.executor = config.executor; this.eventEmitter = new EventEmitter(); } @@ -46,16 +40,20 @@ class DispatchControllerImpl extends BaseService implements DispatchController { } async handleBatch(request: EventV1Request): Promise { - const executorResponse = this.executor.submit(() => { - const dispatchRes = this.eventDispatcher.dispatchEvent(request); - this.eventEmitter.emit('dispatch', request); - return dispatchRes; - }); - - if (executorResponse.accepted) { - return executorResponse.result; + if (!this.isRunning()) { + return; } - return Promise.reject(executorResponse.error); + const id = this.idGenerator.getId(); + + const backoff = new ExponentialBackoff(1000, 30000, 2); + const runResult = runWithRetry(() => this.eventDispatcher.dispatchEvent(request), backoff); + + this.runningTask.set(id, runResult); + runResult.result.finally(() => { + this.runningTask.delete(id); + }); + + return runResult.result; } } diff --git a/lib/event_processor/queueing_event_processor.react_native.ts 
b/lib/event_processor/queueing_event_processor.react_native.ts new file mode 100644 index 000000000..631631acb --- /dev/null +++ b/lib/event_processor/queueing_event_processor.react_native.ts @@ -0,0 +1,40 @@ +import { + NetInfoState, + addEventListener as addConnectionListener, +} from "@react-native-community/netinfo" + +import { QueueingEventProcessor, QueueingEventProcessorConfig } from "./queueing_event_processor"; +import { Fn } from "../utils/type"; + +class ReactNativeNetInfoEventProcessor extends QueueingEventProcessor { + private isInternetReachable = true; + private unsubscribeNetInfo?: Fn; + + constructor(config: QueueingEventProcessorConfig) { + super(config); + } + + private async connectionListener(state: NetInfoState) { + if (this.isInternetReachable && !state.isInternetReachable) { + this.isInternetReachable = false; + return; + } + + if (!this.isInternetReachable && state.isInternetReachable) { + this.isInternetReachable = true; + await this.retryFailedEvents() + } + } + + start(): void { + this.unsubscribeNetInfo = addConnectionListener(this.connectionListener.bind(this)) + super.start() + } + + stop(): void { + if (this.unsubscribeNetInfo) { + this.unsubscribeNetInfo() + } + super.stop() + } +} diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 9ceaf8d50..c5ef2c0c4 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -1,12 +1,14 @@ import { EventProcessor, ProcessableEvent } from "./eventProcessor"; import { Cache } from "../utils/cache/cache"; -import { EventV1Request } from "./eventDispatcher"; +import { EventDispatcher, EventDispatcherResponse, EventV1Request } from "./eventDispatcher"; import { formatEvents } from "../core/event_builder/build_event_v1"; -import { IntervalRepeater, Repeater } from "../utils/repeater/repeater"; -import { DispatchController } from "./dispatch_controller"; +import { ExponentialBackoff, IntervalRepeater, Repeater } from "../utils/repeater/repeater"; import { LoggerFacade } from "../modules/logging"; import { BaseService, ServiceState } from "../service"; import { Consumer, Fn } from "../utils/type"; +import { RunResult, runWithRetry } from "../utils/executor/backoff_retry_runner"; +import { isSuccessStatusCode } from "../utils/http_request_handler/http_util"; +import { EventEmitter } from "../utils/event_emitter/event_emitter"; export type EventWithId = { id: string; @@ -17,37 +19,104 @@ export type QueueingEventProcessorConfig = { flushInterval: number, maxQueueSize: 1000, eventStore: Cache, - dispatchController: DispatchController, + eventDispatcher: EventDispatcher, + closingEventDispatcher?: EventDispatcher, logger?: LoggerFacade, + retryMinBackoff?: number, + retryMaxBackoff?: number, + maxRetries?: number, }; +type EventBatch = { + request: EventV1Request, + ids: string[], +} + +const DEFAULT_RETRY_MIN_BACKOFF = 1000; +const DEFAULT_RETRY_MAX_BACKOFF = 30000; + export class QueueingEventProcessor extends BaseService implements EventProcessor { + private eventDispatcher: EventDispatcher; + private closingEventDispatcher?: EventDispatcher; private eventQueue: Queue = new Queue(1000); private maxQueueSize: number = 1000; private flushInterval: number = 1000; private eventStore?: Cache; - private repeater: Repeater; - private dispatchController: DispatchController; - private logger?: LoggerFacade; + private dispatchRepeater: Repeater; + private failedEventRepeater: Repeater; private idGenerator: IdGenerator 
= new IdGenerator(); + private runningTask: Map> = new Map(); + private dispatchingIds: Set = new Set(); + private retryMinBackoff: number; + private retryMaxBackoff: number; + private maxRetries?: number; + private logger?: LoggerFacade; + private eventEmitter: EventEmitter<{ dispatch: EventV1Request }> = new EventEmitter(); constructor(config: QueueingEventProcessorConfig) { super(); + this.eventDispatcher = config.eventDispatcher; + this.closingEventDispatcher = config.closingEventDispatcher; this.flushInterval = config.flushInterval; this.maxQueueSize = config.maxQueueSize; this.eventStore = config.eventStore; - this.dispatchController = config.dispatchController; this.logger = config.logger; + this.retryMinBackoff = config.retryMinBackoff || DEFAULT_RETRY_MIN_BACKOFF; + this.retryMaxBackoff = config.retryMaxBackoff || DEFAULT_RETRY_MAX_BACKOFF; + this.maxRetries = config.maxRetries; + + this.dispatchRepeater = new IntervalRepeater(this.flushInterval); + this.dispatchRepeater.setTask(() => this.flush()); - this.repeater = new IntervalRepeater(this.flushInterval); - this.repeater.setTask(this.dispatchNewBatch.bind(this)); + this.failedEventRepeater = new IntervalRepeater(this.flushInterval * 4); + this.failedEventRepeater.setTask(() => this.retryFailedEvents()); } onDispatch(handler: Consumer): Fn { - throw new Error("Method not implemented."); + return this.eventEmitter.on('dispatch', handler); } - private createNewBatch(): [EventV1Request, Array] | undefined { + public async retryFailedEvents(): Promise { + const failedEvents = await this.eventStore?.getAll(); + if (!failedEvents) { + return; + } + + if (failedEvents.size == 0) { + return; + } + + const failedEventsArray = Array.from(failedEvents.values()).sort(); + + let batches: EventBatch[] = []; + let currentBatch: EventWithId[] = []; + + failedEventsArray.forEach((event) => { + if (!this.dispatchingIds.has(event.id)) { + currentBatch.push(event); + if (currentBatch.length === this.maxQueueSize) { + batches.push({ + request: formatEvents(currentBatch.map((e) => e.event)), + ids: currentBatch.map((e) => e.id), + }); + currentBatch = []; + } + } + }); + + if (currentBatch.length > 0) { + batches.push({ + request: formatEvents(currentBatch.map((e) => e.event)), + ids: currentBatch.map((e) => e.id), + }); + } + + batches.forEach((batch) => { + this.dispatchBatch(batch, false); + }); + } + + private createNewBatch(): EventBatch | undefined { if (this.eventQueue.isEmpty()) { return } @@ -60,32 +129,50 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso ids.push(event.id); } - return [formatEvents(events), ids]; + return { request: formatEvents(events), ids }; } - private async dispatchNewBatch(): Promise { - const batch = this.createNewBatch(); - if (!batch) { - return; - } + private dispatchBatch(batch: EventBatch, closing: boolean): void { + const { request, ids } = batch; + ids.forEach((id) => this.dispatchingIds.add(id)); - const [request, ids] = batch; + const dispatcher = closing && this.closingEventDispatcher ? 
this.closingEventDispatcher : this.eventDispatcher; + const backoff = new ExponentialBackoff(this.retryMinBackoff, this.retryMaxBackoff, 500); + const runResult = runWithRetry(() => dispatcher.dispatchEvent(request), backoff, this.maxRetries); - return this.dispatchController.handleBatch(request).then(() => { - // if the dispatch controller succeeds, remove the events from the store + const taskId = this.idGenerator.getId(); + this.runningTask.set(taskId, runResult); + + runResult.result.then((res) => { + if (res.statusCode && !isSuccessStatusCode(res.statusCode)) { + return Promise.reject(new Error(`Failed to dispatch events: ${res.statusCode}`)); + } ids.forEach((id) => { this.eventStore?.remove(id); }); + return Promise.resolve(); }).catch((err) => { - // if the dispatch controller fails, the events will still be + // if the dispatch fails, the events will still be // in the store for future processing this.logger?.error('Failed to dispatch events', err); + }).finally(() => { + this.runningTask.delete(taskId); + ids.forEach((id) => this.dispatchingIds.delete(id)); }); } + private async flush(closing = false): Promise { + const batch = this.createNewBatch(); + if (!batch) { + return; + } + + this.dispatchBatch(batch, closing); + } + async process(event: ProcessableEvent): Promise { if (this.eventQueue.size() == this.maxQueueSize) { - this.dispatchNewBatch(); + this.flush(); } const eventWithId = { @@ -97,15 +184,38 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso this.eventQueue.enqueue(eventWithId); } - start(): Promise { - throw new Error("Method not implemented."); - } + start(): void { + if (!this.isNew()) { + return; + } + this.state = ServiceState.Running; + this.dispatchRepeater.start(); + this.failedEventRepeater.start(); - stop(): Promise { - throw new Error("Method not implemented."); + this.retryFailedEvents(); + this.startPromise.resolve(); } - public flushNow(): Promise { - throw new Error("Method not implemented."); + stop(): void { + if (this.isDone()) { + return; + } + + if (this.isNew()) { + // TOOD: replace message with imported constants + this.startPromise.reject(new Error('Event processor stopped before it could be started')); + } + + this.state = ServiceState.Stopping; + this.dispatchRepeater.stop(); + this.failedEventRepeater.stop(); + + this.flush(true); + this.runningTask.forEach((task) => task.cancelRetry()); + + Promise.allSettled(Array.from(this.runningTask.values()).map((task) => task.result)).then(() => { + this.state = ServiceState.Terminated; + this.stopPromise.resolve(); + }) } } diff --git a/lib/utils/executor/backoff_retry_runner.ts b/lib/utils/executor/backoff_retry_runner.ts index 86814a65a..c6ef1e04a 100644 --- a/lib/utils/executor/backoff_retry_runner.ts +++ b/lib/utils/executor/backoff_retry_runner.ts @@ -1,16 +1,16 @@ import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromise"; import { BackoffController } from "../repeater/repeater"; -import { AsyncFn, Fn } from "../type"; +import { AsyncFn, AsyncProducer, Fn } from "../type"; import { scheduleMicrotask } from "../microtask"; -export type RunResult = { - result: Promise; - cancel: Fn; +export type RunResult = { + result: Promise; + cancelRetry: Fn; }; -const runTask = ( - task: AsyncFn, - returnPromise: ResolvablePromise, +const runTask = ( + task: AsyncProducer, + returnPromise: ResolvablePromise, backoff?: BackoffController, retryRemaining?: number, ): Fn => { @@ -20,8 +20,8 @@ const runTask = ( cancelled = true; }; - task().then(() => 
{ - returnPromise.resolve(); + task().then((res) => { + returnPromise.resolve(res); }).catch((e) => { if (retryRemaining === 0) { returnPromise.reject(e); @@ -41,12 +41,12 @@ const runTask = ( return cancel; } -export const runWithRetry = ( - task: AsyncFn, +export const runWithRetry = ( + task: AsyncProducer, backoff?: BackoffController, maxRetries?: number -): RunResult => { - const returnPromise = resolvablePromise(); +): RunResult => { + const returnPromise = resolvablePromise(); const cancel = runTask(task, returnPromise, backoff, maxRetries); - return { cancel, result: returnPromise.promise }; + return { cancelRetry: cancel, result: returnPromise.promise }; } diff --git a/lib/utils/executor/concurrency_limited_executor.ts b/lib/utils/executor/concurrency_limited_executor.ts index 072f78205..ae480b831 100644 --- a/lib/utils/executor/concurrency_limited_executor.ts +++ b/lib/utils/executor/concurrency_limited_executor.ts @@ -33,7 +33,7 @@ class ConcurrencyLimitedExecutor extends BaseService implements Executor { } forceExecuteAll(): Promise { - + } start(): void { @@ -67,7 +67,7 @@ class ConcurrencyLimitedExecutor extends BaseService implements Executor { const id = this.idGenerator.getId(); - const { cancel, result } = taskDefinition.retryConfig ? + const { cancelRetry: cancel, result } = taskDefinition.retryConfig ? runWithRetry(taskDefinition.task, taskDefinition.retryConfig.backoff, taskDefinition.retryConfig.maxRetries) : { result: taskDefinition.task() }; diff --git a/lib/utils/http_request_handler/http_util.ts b/lib/utils/http_request_handler/http_util.ts new file mode 100644 index 000000000..c38217a40 --- /dev/null +++ b/lib/utils/http_request_handler/http_util.ts @@ -0,0 +1,4 @@ + +export const isSuccessStatusCode = (statusCode: number): boolean => { + return statusCode >= 200 && statusCode < 400; +} From 6769148d8a92a5c556354164e8ac6a97de198079 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 10 Oct 2024 04:39:00 +0600 Subject: [PATCH 08/45] update --- lib/event_processor/dispatch_controller.ts | 4 +- lib/event_processor/dispatch_strategy.ts | 55 --- lib/event_processor/index.ts | 1 - lib/event_processor/managed.ts | 4 + .../queueing_event_processor.spec.ts | 312 ++++++++++++++++++ .../queueing_event_processor.ts | 14 +- .../v1/v1EventProcessor.react_native.ts | 16 +- lib/event_processor/v1/v1EventProcessor.ts | 20 +- lib/optimizely/index.ts | 4 +- lib/tests/mock/create_event.ts | 41 +++ lib/tests/mock/mock_cache.ts | 30 ++ lib/utils/cache/cache.ts | 4 +- lib/utils/id_generator/index.ts | 2 +- lib/utils/queue/queue.ts | 4 +- lib/utils/repeater/repeater.spec.ts | 1 - vitest.config.mts | 2 +- 16 files changed, 440 insertions(+), 74 deletions(-) delete mode 100644 lib/event_processor/dispatch_strategy.ts create mode 100644 lib/event_processor/managed.ts create mode 100644 lib/event_processor/queueing_event_processor.spec.ts create mode 100644 lib/tests/mock/create_event.ts create mode 100644 lib/tests/mock/mock_cache.ts diff --git a/lib/event_processor/dispatch_controller.ts b/lib/event_processor/dispatch_controller.ts index 6e54a3642..8f34e83c5 100644 --- a/lib/event_processor/dispatch_controller.ts +++ b/lib/event_processor/dispatch_controller.ts @@ -1,6 +1,6 @@ import { BaseService, Service } from "../service"; import { Executor } from "../utils/executor/executor"; -import { EventDispatcher, EventV1Request } from "./eventDispatcher"; +import { EventDispatcher, EventDispatcherResponse, EventV1Request } from "./eventDispatcher"; import { EventEmitter } from 
'../utils/event_emitter/event_emitter'; import { Consumer, Fn } from "../utils/type"; import { RunResult, runWithRetry } from "../utils/executor/backoff_retry_runner"; @@ -18,7 +18,7 @@ export type DispatchControllerConfig = { class ImmediateDispatchDispatchController extends BaseService implements DispatchController { private eventDispatcher: EventDispatcher; private eventEmitter: EventEmitter<{ dispatch: EventV1Request }>; - private runningTask: Map = new Map(); + private runningTask: Map> = new Map(); private idGenerator: IdGenerator = new IdGenerator(); constructor(config: DispatchControllerConfig) { diff --git a/lib/event_processor/dispatch_strategy.ts b/lib/event_processor/dispatch_strategy.ts deleted file mode 100644 index 12e3bce3e..000000000 --- a/lib/event_processor/dispatch_strategy.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { ExponentialBackoff } from '../utils/repeater/repeater'; -import { AsyncProducer } from '../utils/type'; - -export interface DispatchStrategy { - close(): void; - registerDispatcher(dispatcher: AsyncProducer): void; - notifyBatch(): void; -} - -enum State { - Idle, - BackingOff, - Dispatching, -} - -export class BackoffDispatchStrategy implements DispatchStrategy { - private dispatcher: AsyncProducer; - private backoff: ExponentialBackoff; - private state: State = State.Idle; - - constructor(backoff: ExponentialBackoff) { - this.backoff = backoff; - } - - public stop(): void { - } - - private async executeDispatcher(): Promise { - this.state = State.Dispatching; - this.dispatcher().then((hasMoreBatches) => { - this.state = State.Idle; - this.backoff.reset(); - if (hasMoreBatches) { - this.executeDispatcher(); - } - }).catch((err) => { - this.state = State.BackingOff; - setTimeout(() => { - this.executeDispatcher(); - }, this.backoff.backoff()); - }); - } - - public registerDispatcher(dispatcher: AsyncProducer): void { - this.dispatcher = dispatcher; - } - - public notifyBatch(): void { - if (this.state !== State.Idle) { - return; - } - - this.executeDispatcher(); - } -} diff --git a/lib/event_processor/index.ts b/lib/event_processor/index.ts index c91ca2d21..0a266c4ae 100644 --- a/lib/event_processor/index.ts +++ b/lib/event_processor/index.ts @@ -17,7 +17,6 @@ export * from './events' export * from './eventProcessor' export * from './eventDispatcher' -export * from './managed' export * from './pendingEventsDispatcher' export * from './v1/buildEventV1' export * from './v1/v1EventProcessor' diff --git a/lib/event_processor/managed.ts b/lib/event_processor/managed.ts new file mode 100644 index 000000000..03f30d179 --- /dev/null +++ b/lib/event_processor/managed.ts @@ -0,0 +1,4 @@ +export interface Managed { + start(): Promise; + stop(): Promise; +} diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/queueing_event_processor.spec.ts new file mode 100644 index 000000000..eaee6929b --- /dev/null +++ b/lib/event_processor/queueing_event_processor.spec.ts @@ -0,0 +1,312 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { expect, describe, it, vi, beforeEach, afterEach, MockInstance } from 'vitest'; + +import { EventWithId, QueueingEventProcessor } from './queueing_event_processor'; +import { getMockSyncCache } from '../tests/mock/mock_cache'; +import { createImpressionEvent } from '../tests/mock/create_event'; +import { ProcessableEvent } from './eventProcessor'; +import { EventDispatcher } from './eventDispatcher'; +import { formatEvents } from './v1/buildEventV1'; +import { resolvablePromise } from '../utils/promise/resolvablePromise'; +import { advanceTimersByTime } from '../../tests/testUtils'; +import { getMockLogger } from '../tests/mock/mock_logger'; +import exp from 'constants'; + +const getMockDispatcher = () => { + return { + dispatchEvent: vi.fn(), + }; +}; + +describe('QueueingEventProcessor', async () => { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + it('should resolve onRunning() when start() is called', async () => { + const eventDispatcher = getMockDispatcher(); + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 1000, + }); + + processor.start(); + await expect(processor.onRunning()).resolves.not.toThrow(); + }); + + it('should dispatch failed events in correct batch size and order when start is called', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + let events: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + events.push(event); + cache.set(id, { id, event }); + } + + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 2, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + expect(mockDispatch).toHaveBeenCalledTimes(3); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + }); + + it('should dispatch failed events in correct batch size and order when retryFailedEvents is called', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 2, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + expect(mockDispatch).toHaveBeenCalledTimes(0); + + let events: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + events.push(event); + cache.set(id, { id, event }); + } + + await processor.retryFailedEvents(); + + expect(mockDispatch).toHaveBeenCalledTimes(3); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + }); + + describe('process', () => { + it('should enqueue event without dispatching immediately', 
async () => { + const eventDispatcher = getMockDispatcher(); + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + for(let i = 0; i < 100; i++) { + const event = createImpressionEvent(`id-${i}`); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + }); + + it('should dispatch events if queue is full and clear queue', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 100; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + + // we are using fake timers, so no dispatch will occur due to timeout + let event = createImpressionEvent('id-100'); + await processor.process(event); + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); + expect(eventDispatcher.dispatchEvent.mock.calls[0][0]).toEqual(formatEvents(events)); + + events = [event]; + for(let i = 101; i < 200; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); + + event = createImpressionEvent('id-200'); + await processor.process(event); + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(2); + expect(eventDispatcher.dispatchEvent.mock.calls[1][0]).toEqual(formatEvents(events)); + }); + + it('should store the event in the eventStore with increasing ids', async () => { + const eventDispatcher = getMockDispatcher(); + const eventStore = getMockSyncCache(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 100, + eventStore, + }); + + processor.start(); + await processor.onRunning(); + + const events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event) + } + + expect(eventStore.size()).toEqual(10); + + const eventsInStore = Array.from(eventStore.getAll().values()) + .sort((a, b) => a < b ? 
-1 : 1).map(e => e.event); + + expect(events).toEqual(eventsInStore); + }); + }); + + // TODO: test retry of dispatch: specified number of times and infinite retry + + it('should remove the events from the eventStore after dispatch is successfull', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + const dispatchResponse = resolvablePromise(); + + mockDispatch.mockResolvedValue(dispatchResponse.promise); + + const eventStore = getMockSyncCache(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 10, + eventStore, + }); + + processor.start(); + await processor.onRunning(); + + const events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event) + } + + expect(eventStore.size()).toEqual(10); + + const event = createImpressionEvent('id-10'); + await processor.process(event); + + expect(mockDispatch).toHaveBeenCalledTimes(1); + // the dispatch is not resolved yet, so all the events should still be in the store + expect(eventStore.size()).toEqual(11); + + dispatchResponse.resolve({ statusCode: 200 }); + + // to ensure that microtask queue is cleared several times + for(let i = 0; i < 100; i++) { + await Promise.resolve(); + } + + expect(eventStore.size()).toEqual(1); + }); + + it('should log error and keep events in store if dispatch return 5xx response', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + const dispatchResponse = resolvablePromise(); + const logger = getMockLogger(); + + mockDispatch.mockResolvedValue(dispatchResponse.promise); + + const eventStore = getMockSyncCache(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 10, + eventStore, + logger, + }); + + processor.start(); + await processor.onRunning(); + + const events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event) + } + + expect(eventStore.size()).toEqual(10); + + const event = createImpressionEvent('id-10'); + await processor.process(event); + + expect(mockDispatch).toHaveBeenCalledTimes(1); + // the dispatch is not resolved yet, so all the events should still be in the store + expect(eventStore.size()).toEqual(11); + + dispatchResponse.resolve({ statusCode: 500 }); + + // to ensure that microtask queue is cleared several times + for(let i = 0; i < 100; i++) { + await Promise.resolve(); + } + + expect(eventStore.size()).toEqual(11); + expect(logger.error).toHaveBeenCalledTimes(1); + }); +}); diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index c5ef2c0c4..500648f8a 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -9,6 +9,8 @@ import { Consumer, Fn } from "../utils/type"; import { RunResult, runWithRetry } from "../utils/executor/backoff_retry_runner"; import { isSuccessStatusCode } from "../utils/http_request_handler/http_util"; import { EventEmitter } from "../utils/event_emitter/event_emitter"; +import { Queue } from "../utils/queue/queue"; +import { IdGenerator } from "../utils/id_generator"; export type EventWithId = { id: string; @@ -17,8 +19,8 @@ export type EventWithId = { export type 
QueueingEventProcessorConfig = { flushInterval: number, - maxQueueSize: 1000, - eventStore: Cache, + maxQueueSize: number, + eventStore?: Cache, eventDispatcher: EventDispatcher, closingEventDispatcher?: EventDispatcher, logger?: LoggerFacade, @@ -38,9 +40,9 @@ const DEFAULT_RETRY_MAX_BACKOFF = 30000; export class QueueingEventProcessor extends BaseService implements EventProcessor { private eventDispatcher: EventDispatcher; private closingEventDispatcher?: EventDispatcher; - private eventQueue: Queue = new Queue(1000); - private maxQueueSize: number = 1000; - private flushInterval: number = 1000; + private eventQueue: Queue; + private maxQueueSize: number; + private flushInterval: number; private eventStore?: Cache; private dispatchRepeater: Repeater; private failedEventRepeater: Repeater; @@ -65,6 +67,8 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso this.retryMaxBackoff = config.retryMaxBackoff || DEFAULT_RETRY_MAX_BACKOFF; this.maxRetries = config.maxRetries; + this.eventQueue = new Queue(this.maxQueueSize); + this.dispatchRepeater = new IntervalRepeater(this.flushInterval); this.dispatchRepeater.setTask(() => this.flush()); diff --git a/lib/event_processor/v1/v1EventProcessor.react_native.ts b/lib/event_processor/v1/v1EventProcessor.react_native.ts index f4998a37b..58592f0ec 100644 --- a/lib/event_processor/v1/v1EventProcessor.react_native.ts +++ b/lib/event_processor/v1/v1EventProcessor.react_native.ts @@ -45,6 +45,8 @@ import { EventDispatcher, } from '../eventDispatcher' import { PersistentCacheProvider } from '../../shared_types' +import { ServiceState } from '../../service' +import { Consumer, Fn } from '../../utils/type' const logger = getLogger('ReactNativeEventProcessor') @@ -119,6 +121,18 @@ export class LogTierV1EventProcessor implements EventProcessor { persistentCacheProvider && persistentCacheProvider(), ) } + onDispatch(handler: Consumer): Fn { + throw new Error('Method not implemented.') + } + getState(): ServiceState { + throw new Error('Method not implemented.') + } + onRunning(): Promise { + throw new Error('Method not implemented.') + } + onTerminated(): Promise { + throw new Error('Method not implemented.') + } private async connectionListener(state: NetInfoState) { if (this.isInternetReachable && !state.isInternetReachable) { @@ -230,7 +244,7 @@ export class LogTierV1EventProcessor implements EventProcessor { events.forEach(this.process.bind(this)) } - public process(event: ProcessableEvent): void { + public async process(event: ProcessableEvent): Promise { // Adding events to buffer store. 
If app closes before dispatch, we can reprocess next time the app initializes this.eventBufferStore.set(event.uuid, event).then(() => { this.queue.enqueue(event) diff --git a/lib/event_processor/v1/v1EventProcessor.ts b/lib/event_processor/v1/v1EventProcessor.ts index aac5103ef..a8132311a 100644 --- a/lib/event_processor/v1/v1EventProcessor.ts +++ b/lib/event_processor/v1/v1EventProcessor.ts @@ -16,7 +16,7 @@ import { getLogger } from '../../modules/logging' import { NotificationSender } from '../../core/notification_center' -import { EventDispatcher } from '../eventDispatcher' +import { EventDispatcher, EventV1Request } from '../eventDispatcher' import { getQueue, EventProcessor, @@ -31,6 +31,8 @@ import { EventQueue } from '../eventQueue' import RequestTracker from '../requestTracker' import { areEventContextsEqual } from '../events' import { formatEvents } from './buildEventV1' +import { ServiceState } from '../../service' +import { Consumer, Fn } from '../../utils/type' const logger = getLogger('LogTierV1EventProcessor') @@ -70,6 +72,20 @@ export class LogTierV1EventProcessor implements EventProcessor { ); } + onDispatch(handler: Consumer): Fn { + return () => {} + } + + getState(): ServiceState { + throw new Error('Method not implemented.') + } + onRunning(): Promise { + throw new Error('Method not implemented.') + } + onTerminated(): Promise { + throw new Error('Method not implemented.') + } + private drainQueue(useClosingDispatcher: boolean, buffer: ProcessableEvent[]): Promise { const reqPromise = new Promise(resolve => { logger.debug('draining queue with %s events', buffer.length) @@ -94,7 +110,7 @@ export class LogTierV1EventProcessor implements EventProcessor { return reqPromise } - process(event: ProcessableEvent): void { + async process(event: ProcessableEvent): Promise { this.queue.enqueue(event) } diff --git a/lib/optimizely/index.ts b/lib/optimizely/index.ts index 7d8edbfc0..023c68d40 100644 --- a/lib/optimizely/index.ts +++ b/lib/optimizely/index.ts @@ -1320,7 +1320,9 @@ export default class Optimizely implements Client { this.notificationCenter.clearAllNotificationListeners(); - const eventProcessorStoppedPromise = this.eventProcessor ? this.eventProcessor.stop() : + this.eventProcessor?.stop(); + + const eventProcessorStoppedPromise = this.eventProcessor ? 
this.eventProcessor.onTerminated() : Promise.resolve(); if (this.disposeOnUpdate) { diff --git a/lib/tests/mock/create_event.ts b/lib/tests/mock/create_event.ts new file mode 100644 index 000000000..f283ffc50 --- /dev/null +++ b/lib/tests/mock/create_event.ts @@ -0,0 +1,41 @@ +export function createImpressionEvent(id: string = 'uuid') { + return { + type: 'impression' as const, + timestamp: 69, + uuid: id, + + context: { + accountId: 'accountId', + projectId: 'projectId', + clientName: 'node-sdk', + clientVersion: '3.0.0', + revision: '1', + botFiltering: true, + anonymizeIP: true, + }, + + user: { + id: 'userId', + attributes: [{ entityId: 'attr1-id', key: 'attr1-key', value: 'attr1-value' }], + }, + + layer: { + id: 'layerId', + }, + + experiment: { + id: 'expId', + key: 'expKey', + }, + + variation: { + id: 'varId', + key: 'varKey', + }, + + ruleKey: 'expKey', + flagKey: 'flagKey1', + ruleType: 'experiment', + enabled: true, + } +} \ No newline at end of file diff --git a/lib/tests/mock/mock_cache.ts b/lib/tests/mock/mock_cache.ts new file mode 100644 index 000000000..ef7fb516b --- /dev/null +++ b/lib/tests/mock/mock_cache.ts @@ -0,0 +1,30 @@ +export const getMockSyncCache = () => { + const cache = { + operation: 'sync' as const, + data: new Map(), + remove(key: string): void { + this.data.delete(key); + }, + clear(): void { + this.data.clear(); + }, + getKeys(): string[] { + return Array.from(this.data.keys()); + }, + getAll(): Map { + return this.data; + }, + size(): number { + return this.data.size; + }, + get(key: string): T | undefined { + return this.data.get(key); + }, + set(key: string, value: T): void { + this.data.set(key, value); + } + } + + return cache; +}; + diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index 50e4e750c..39b79c991 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -1,7 +1,7 @@ export interface SyncCache { operation: 'sync'; set(key: string, value: V): void; - get(key: string): V; + get(key: string): V | undefined; remove(key: string): void; clear(): void; getKeys(): string[]; @@ -12,7 +12,7 @@ export interface SyncCache { export interface AsyncCache { operation: 'async'; set(key: string, value: V): Promise; - get(key: string): Promise; + get(key: string): Promise; remove(key: string): Promise; clear(): Promise; getKeys(): Promise; diff --git a/lib/utils/id_generator/index.ts b/lib/utils/id_generator/index.ts index 3a00fc56a..9715af842 100644 --- a/lib/utils/id_generator/index.ts +++ b/lib/utils/id_generator/index.ts @@ -1,6 +1,6 @@ const idSuffixBase = 10_000; -class IdGenerator { +export class IdGenerator { private idSuffixOffset: number = 0; // getId returns an Id that generally increases with each call. 
diff --git a/lib/utils/queue/queue.ts b/lib/utils/queue/queue.ts index 99b43fc46..adabbcaff 100644 --- a/lib/utils/queue/queue.ts +++ b/lib/utils/queue/queue.ts @@ -1,4 +1,4 @@ -class Queue { +export class Queue { private maxQueueSize: number; private queue: T[]; private nItems: number; @@ -12,7 +12,7 @@ class Queue { } enqueue(item: T): void { - if (this.queue.length === this.maxQueueSize) { + if (this.nItems === this.maxQueueSize) { throw new Error("Queue is full"); } this.queue[this.tail] = item; diff --git a/lib/utils/repeater/repeater.spec.ts b/lib/utils/repeater/repeater.spec.ts index cebb17e38..7d998e7b6 100644 --- a/lib/utils/repeater/repeater.spec.ts +++ b/lib/utils/repeater/repeater.spec.ts @@ -16,7 +16,6 @@ import { expect, vi, it, beforeEach, afterEach, describe } from 'vitest'; import { ExponentialBackoff, IntervalRepeater } from './repeater'; import { advanceTimersByTime } from '../../../tests/testUtils'; -import { ad } from 'vitest/dist/chunks/reporters.C_zwCd4j'; import { resolvablePromise } from '../promise/resolvablePromise'; describe("ExponentialBackoff", () => { diff --git a/vitest.config.mts b/vitest.config.mts index 673f7d1c6..10d4e4d17 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/*.spec.ts'], + include: ['**/queueing_event_processor.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 7f13bec46d1860f09e419463c5402343c259f3e0 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Mon, 14 Oct 2024 20:08:29 +0600 Subject: [PATCH 09/45] update --- .../queueing_event_processor.spec.ts | 50 ++++++++++++++++++- .../queueing_event_processor.ts | 18 +++---- 2 files changed, 58 insertions(+), 10 deletions(-) diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/queueing_event_processor.spec.ts index eaee6929b..ce4942f98 100644 --- a/lib/event_processor/queueing_event_processor.spec.ts +++ b/lib/event_processor/queueing_event_processor.spec.ts @@ -216,7 +216,7 @@ describe('QueueingEventProcessor', async () => { }); // TODO: test retry of dispatch: specified number of times and infinite retry - + it('should remove the events from the eventStore after dispatch is successfull', async () => { const eventDispatcher = getMockDispatcher(); const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; @@ -309,4 +309,52 @@ describe('QueueingEventProcessor', async () => { expect(eventStore.size()).toEqual(11); expect(logger.error).toHaveBeenCalledTimes(1); }); + + it('should log error and keep events in store if dispatch promise fails', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + const dispatchResponse = resolvablePromise(); + const logger = getMockLogger(); + + mockDispatch.mockResolvedValue(dispatchResponse.promise); + + const eventStore = getMockSyncCache(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + flushInterval: 2000, + maxQueueSize: 10, + eventStore, + logger, + }); + + processor.start(); + await processor.onRunning(); + + const events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event) + } + + expect(eventStore.size()).toEqual(10); + + const event = createImpressionEvent('id-10'); + await processor.process(event); + + expect(mockDispatch).toHaveBeenCalledTimes(1); + // the dispatch is 
not resolved yet, so all the events should still be in the store + expect(eventStore.size()).toEqual(11); + + dispatchResponse.reject(new Error()); + + // to ensure that microtask queue is cleared several times + for(let i = 0; i < 100; i++) { + await Promise.resolve(); + } + + expect(eventStore.size()).toEqual(11); + // expect(logger.error).toHaveBeenCalledTimes(1); + }); }); diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 500648f8a..835543ee9 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -40,7 +40,7 @@ const DEFAULT_RETRY_MAX_BACKOFF = 30000; export class QueueingEventProcessor extends BaseService implements EventProcessor { private eventDispatcher: EventDispatcher; private closingEventDispatcher?: EventDispatcher; - private eventQueue: Queue; + private eventQueue: EventWithId[] = []; private maxQueueSize: number; private flushInterval: number; private eventStore?: Cache; @@ -67,7 +67,6 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso this.retryMaxBackoff = config.retryMaxBackoff || DEFAULT_RETRY_MAX_BACKOFF; this.maxRetries = config.maxRetries; - this.eventQueue = new Queue(this.maxQueueSize); this.dispatchRepeater = new IntervalRepeater(this.flushInterval); this.dispatchRepeater.setTask(() => this.flush()); @@ -121,18 +120,19 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso } private createNewBatch(): EventBatch | undefined { - if (this.eventQueue.isEmpty()) { + if (this.eventQueue.length == 0) { return } const events: ProcessableEvent[] = []; - let event: EventWithId | undefined; - let ids: string[] = [] - while(event = this.eventQueue.dequeue()) { + let ids: string[] = []; + + this.eventQueue.forEach((event) => { events.push(event.event); ids.push(event.id); - } + }); + this.eventQueue = []; return { request: formatEvents(events), ids }; } @@ -175,7 +175,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso } async process(event: ProcessableEvent): Promise { - if (this.eventQueue.size() == this.maxQueueSize) { + if (this.eventQueue.length == this.maxQueueSize) { this.flush(); } @@ -185,7 +185,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso }; await this.eventStore?.set(eventWithId.id, eventWithId); - this.eventQueue.enqueue(eventWithId); + this.eventQueue.push(eventWithId); } start(): void { From 6bed2a1e861202dd93d751104eb7041dee40c56d Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 15 Oct 2024 00:43:47 +0600 Subject: [PATCH 10/45] update --- .../queueing_event_processor.spec.ts | 404 +++++++++++++++--- .../queueing_event_processor.ts | 71 +-- 2 files changed, 392 insertions(+), 83 deletions(-) diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/queueing_event_processor.spec.ts index ce4942f98..ad2637087 100644 --- a/lib/event_processor/queueing_event_processor.spec.ts +++ b/lib/event_processor/queueing_event_processor.spec.ts @@ -25,6 +25,10 @@ import { resolvablePromise } from '../utils/promise/resolvablePromise'; import { advanceTimersByTime } from '../../tests/testUtils'; import { getMockLogger } from '../tests/mock/mock_logger'; import exp from 'constants'; +import { getMockRepeater } from '../tests/mock/mock_repeater'; +import event from 'sinon/lib/sinon/util/event'; +import { reset } from 'sinon/lib/sinon/collection'; +import logger from 
'../modules/logging/logger'; const getMockDispatcher = () => { return { @@ -32,6 +36,12 @@ const getMockDispatcher = () => { }; }; +const exhaustMicrotasks = async (loop: number = 100) => { + for(let i = 0; i < loop; i++) { + await Promise.resolve(); + } +} + describe('QueueingEventProcessor', async () => { beforeEach(() => { vi.useFakeTimers(); @@ -45,7 +55,7 @@ describe('QueueingEventProcessor', async () => { const eventDispatcher = getMockDispatcher(); const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, + dispatchRepeater: getMockRepeater(), maxQueueSize: 1000, }); @@ -70,7 +80,7 @@ describe('QueueingEventProcessor', async () => { const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, + dispatchRepeater: getMockRepeater(), maxQueueSize: 2, eventStore: cache, }); @@ -93,7 +103,7 @@ describe('QueueingEventProcessor', async () => { const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, + dispatchRepeater: getMockRepeater(), maxQueueSize: 2, eventStore: cache, }); @@ -125,7 +135,7 @@ describe('QueueingEventProcessor', async () => { const eventDispatcher = getMockDispatcher(); const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, + dispatchRepeater: getMockRepeater(), maxQueueSize: 100, }); @@ -146,7 +156,7 @@ describe('QueueingEventProcessor', async () => { const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, + dispatchRepeater: getMockRepeater(), maxQueueSize: 100, }); @@ -162,7 +172,6 @@ describe('QueueingEventProcessor', async () => { expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); - // we are using fake timers, so no dispatch will occur due to timeout let event = createImpressionEvent('id-100'); await processor.process(event); @@ -191,7 +200,7 @@ describe('QueueingEventProcessor', async () => { const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, + dispatchRepeater: getMockRepeater(), maxQueueSize: 100, eventStore, }); @@ -215,7 +224,171 @@ describe('QueueingEventProcessor', async () => { }); }); - // TODO: test retry of dispatch: specified number of times and infinite retry + it('should dispatch events when dispatchRepeater is triggered', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + const dispatchRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + await dispatchRepeater.execute(0); + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); + expect(eventDispatcher.dispatchEvent.mock.calls[0][0]).toEqual(formatEvents(events)); + + events = []; + for(let i = 1; i < 15; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + await dispatchRepeater.execute(0); + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(2); + expect(eventDispatcher.dispatchEvent.mock.calls[1][0]).toEqual(formatEvents(events)); + }); + + it('should not retry failed dispatch if retryConfig is not provided', 
async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockRejectedValue(new Error()); + const dispatchRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + await dispatchRepeater.execute(0); + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); + }); + + it('should retry specified number of times using the provided backoffController', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockRejectedValue(new Error()); + const dispatchRepeater = getMockRepeater(); + + const backoffController = { + backoff: vi.fn().mockReturnValue(1000), + reset: vi.fn(), + }; + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + retryConfig: { + retry: true, + backoffProvider: () => backoffController, + maxRetries: 3, + }, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + await dispatchRepeater.execute(0); + + for(let i = 0; i < 10; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(1000); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(4); + expect(backoffController.backoff).toHaveBeenCalledTimes(3); + + const request = formatEvents(events); + for(let i = 0; i < 4; i++) { + expect(eventDispatcher.dispatchEvent.mock.calls[i][0]).toEqual(request); + } + }); + + it('should retry indefinitely using the provided backoffController if maxRetry is undefined', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockRejectedValue(new Error()); + const dispatchRepeater = getMockRepeater(); + + const backoffController = { + backoff: vi.fn().mockReturnValue(1000), + reset: vi.fn(), + }; + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + retryConfig: { + retry: true, + backoffProvider: () => backoffController, + }, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + await dispatchRepeater.execute(0); + + for(let i = 0; i < 200; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(1000); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(201); + expect(backoffController.backoff).toHaveBeenCalledTimes(200); + + const request = formatEvents(events); + for(let i = 0; i < 201; i++) { + expect(eventDispatcher.dispatchEvent.mock.calls[i][0]).toEqual(request); + } + }); + it('should remove the events from the eventStore after dispatch is successfull', async () 
=> { const eventDispatcher = getMockDispatcher(); @@ -225,11 +398,12 @@ describe('QueueingEventProcessor', async () => { mockDispatch.mockResolvedValue(dispatchResponse.promise); const eventStore = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, - maxQueueSize: 10, + dispatchRepeater, + maxQueueSize: 100, eventStore, }); @@ -244,40 +418,34 @@ describe('QueueingEventProcessor', async () => { } expect(eventStore.size()).toEqual(10); - - const event = createImpressionEvent('id-10'); - await processor.process(event); + await dispatchRepeater.execute(0); expect(mockDispatch).toHaveBeenCalledTimes(1); // the dispatch is not resolved yet, so all the events should still be in the store - expect(eventStore.size()).toEqual(11); + expect(eventStore.size()).toEqual(10); dispatchResponse.resolve({ statusCode: 200 }); - // to ensure that microtask queue is cleared several times - for(let i = 0; i < 100; i++) { - await Promise.resolve(); - } + await exhaustMicrotasks(); - expect(eventStore.size()).toEqual(1); + expect(eventStore.size()).toEqual(0); }); - it('should log error and keep events in store if dispatch return 5xx response', async () => { + it('should remove the events from the eventStore after dispatch is successfull', async () => { const eventDispatcher = getMockDispatcher(); const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; const dispatchResponse = resolvablePromise(); - const logger = getMockLogger(); mockDispatch.mockResolvedValue(dispatchResponse.promise); const eventStore = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); const processor = new QueueingEventProcessor({ eventDispatcher, - flushInterval: 2000, - maxQueueSize: 10, + dispatchRepeater, + maxQueueSize: 100, eventStore, - logger, }); processor.start(); @@ -291,41 +459,45 @@ describe('QueueingEventProcessor', async () => { } expect(eventStore.size()).toEqual(10); - - const event = createImpressionEvent('id-10'); - await processor.process(event); + await dispatchRepeater.execute(0); expect(mockDispatch).toHaveBeenCalledTimes(1); // the dispatch is not resolved yet, so all the events should still be in the store - expect(eventStore.size()).toEqual(11); + expect(eventStore.size()).toEqual(10); - dispatchResponse.resolve({ statusCode: 500 }); + dispatchResponse.resolve({ statusCode: 200 }); - // to ensure that microtask queue is cleared several times - for(let i = 0; i < 100; i++) { - await Promise.resolve(); - } + await exhaustMicrotasks(); - expect(eventStore.size()).toEqual(11); - expect(logger.error).toHaveBeenCalledTimes(1); + expect(eventStore.size()).toEqual(0); }); - it('should log error and keep events in store if dispatch promise fails', async () => { + it('should remove the events from the eventStore after dispatch is successfull after retries', async () => { const eventDispatcher = getMockDispatcher(); const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; - const dispatchResponse = resolvablePromise(); - const logger = getMockLogger(); - mockDispatch.mockResolvedValue(dispatchResponse.promise); + mockDispatch.mockResolvedValueOnce({ statusCode: 500 }) + .mockResolvedValueOnce({ statusCode: 500 }) + .mockResolvedValueOnce({ statusCode: 200 }); const eventStore = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + + const backoffController = { + backoff: vi.fn().mockReturnValue(1000), + reset: vi.fn(), + }; const processor = new QueueingEventProcessor({ 
eventDispatcher, - flushInterval: 2000, - maxQueueSize: 10, + dispatchRepeater, + maxQueueSize: 100, eventStore, - logger, + retryConfig: { + retry: true, + backoffProvider: () => backoffController, + maxRetries: 3, + }, }); processor.start(); @@ -339,22 +511,148 @@ describe('QueueingEventProcessor', async () => { } expect(eventStore.size()).toEqual(10); + await dispatchRepeater.execute(0); - const event = createImpressionEvent('id-10'); - await processor.process(event); + for(let i = 0; i < 10; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(1000); + } - expect(mockDispatch).toHaveBeenCalledTimes(1); - // the dispatch is not resolved yet, so all the events should still be in the store - expect(eventStore.size()).toEqual(11); + expect(mockDispatch).toHaveBeenCalledTimes(3); + expect(eventStore.size()).toEqual(0); + }); + + it('should log error and keep events in store if dispatch return 5xx response', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({ statusCode: 500 }); + const dispatchRepeater = getMockRepeater(); + + const backoffController = { + backoff: vi.fn().mockReturnValue(1000), + reset: vi.fn(), + }; + + const eventStore = getMockSyncCache(); + const logger = getMockLogger(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + eventStore, + retryConfig: { + retry: true, + backoffProvider: () => backoffController, + maxRetries: 3, + }, + maxQueueSize: 100, + logger, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + expect(eventStore.size()).toEqual(10); - dispatchResponse.reject(new Error()); + await dispatchRepeater.execute(0); - // to ensure that microtask queue is cleared several times - for(let i = 0; i < 100; i++) { - await Promise.resolve(); + for(let i = 0; i < 10; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(1000); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(4); + expect(backoffController.backoff).toHaveBeenCalledTimes(3); + expect(eventStore.size()).toEqual(10); + expect(logger.error).toHaveBeenCalledOnce(); + }); + + it('should log error and keep events in store if dispatch promise fails', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockRejectedValue(new Error()); + const dispatchRepeater = getMockRepeater(); + + const backoffController = { + backoff: vi.fn().mockReturnValue(1000), + reset: vi.fn(), + }; + + const eventStore = getMockSyncCache(); + const logger = getMockLogger(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + eventStore, + retryConfig: { + retry: true, + backoffProvider: () => backoffController, + maxRetries: 3, + }, + maxQueueSize: 100, + logger, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); } - expect(eventStore.size()).toEqual(11); - // expect(logger.error).toHaveBeenCalledTimes(1); + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + 
expect(eventStore.size()).toEqual(10); + + await dispatchRepeater.execute(0); + + for(let i = 0; i < 10; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(1000); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(4); + expect(backoffController.backoff).toHaveBeenCalledTimes(3); + expect(eventStore.size()).toEqual(10); + expect(logger.error).toHaveBeenCalledOnce(); + }); + + it('should emit dispatch event when dispatching events', async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 100, + }); + + const event = createImpressionEvent('id-1'); + const event2 = createImpressionEvent('id-2'); + + const dispatchEvent = vi.fn(); + processor.onDispatch(dispatchEvent); + + processor.start(); + await processor.onRunning(); + + await processor.process(event); + await processor.process(event2); + + await dispatchRepeater.execute(0); + + expect(dispatchEvent).toHaveBeenCalledTimes(1); + expect(dispatchEvent.mock.calls[0][0]).toEqual(formatEvents([event, event2])); }); }); diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 835543ee9..3eb2f05b7 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -2,14 +2,13 @@ import { EventProcessor, ProcessableEvent } from "./eventProcessor"; import { Cache } from "../utils/cache/cache"; import { EventDispatcher, EventDispatcherResponse, EventV1Request } from "./eventDispatcher"; import { formatEvents } from "../core/event_builder/build_event_v1"; -import { ExponentialBackoff, IntervalRepeater, Repeater } from "../utils/repeater/repeater"; +import { BackoffController, ExponentialBackoff, IntervalRepeater, Repeater } from "../utils/repeater/repeater"; import { LoggerFacade } from "../modules/logging"; import { BaseService, ServiceState } from "../service"; -import { Consumer, Fn } from "../utils/type"; +import { Consumer, Fn, Producer } from "../utils/type"; import { RunResult, runWithRetry } from "../utils/executor/backoff_retry_runner"; import { isSuccessStatusCode } from "../utils/http_request_handler/http_util"; import { EventEmitter } from "../utils/event_emitter/event_emitter"; -import { Queue } from "../utils/queue/queue"; import { IdGenerator } from "../utils/id_generator"; export type EventWithId = { @@ -17,8 +16,17 @@ export type EventWithId = { event: ProcessableEvent; }; +export type RetryConfig = { + retry: false; +} | { + retry: true; + maxRetries?: number; + backoffProvider: Producer; +} + export type QueueingEventProcessorConfig = { - flushInterval: number, + dispatchRepeater: Repeater, + failedEventRepeater?: Repeater, maxQueueSize: number, eventStore?: Cache, eventDispatcher: EventDispatcher, @@ -26,7 +34,7 @@ export type QueueingEventProcessorConfig = { logger?: LoggerFacade, retryMinBackoff?: number, retryMaxBackoff?: number, - maxRetries?: number, + retryConfig?: RetryConfig; }; type EventBatch = { @@ -34,45 +42,35 @@ type EventBatch = { ids: string[], } -const DEFAULT_RETRY_MIN_BACKOFF = 1000; -const DEFAULT_RETRY_MAX_BACKOFF = 30000; - export class QueueingEventProcessor extends BaseService implements EventProcessor { private eventDispatcher: EventDispatcher; private closingEventDispatcher?: EventDispatcher; private eventQueue: EventWithId[] = []; private maxQueueSize: number; - private flushInterval: number; private eventStore?: 
Cache; private dispatchRepeater: Repeater; - private failedEventRepeater: Repeater; + private failedEventRepeater?: Repeater; private idGenerator: IdGenerator = new IdGenerator(); private runningTask: Map> = new Map(); private dispatchingIds: Set = new Set(); - private retryMinBackoff: number; - private retryMaxBackoff: number; - private maxRetries?: number; private logger?: LoggerFacade; private eventEmitter: EventEmitter<{ dispatch: EventV1Request }> = new EventEmitter(); + private retryConfig?: RetryConfig; constructor(config: QueueingEventProcessorConfig) { super(); this.eventDispatcher = config.eventDispatcher; this.closingEventDispatcher = config.closingEventDispatcher; - this.flushInterval = config.flushInterval; this.maxQueueSize = config.maxQueueSize; this.eventStore = config.eventStore; this.logger = config.logger; - this.retryMinBackoff = config.retryMinBackoff || DEFAULT_RETRY_MIN_BACKOFF; - this.retryMaxBackoff = config.retryMaxBackoff || DEFAULT_RETRY_MAX_BACKOFF; - this.maxRetries = config.maxRetries; + this.retryConfig = config.retryConfig; - - this.dispatchRepeater = new IntervalRepeater(this.flushInterval); + this.dispatchRepeater = config.dispatchRepeater; this.dispatchRepeater.setTask(() => this.flush()); - this.failedEventRepeater = new IntervalRepeater(this.flushInterval * 4); - this.failedEventRepeater.setTask(() => this.retryFailedEvents()); + this.failedEventRepeater = config.failedEventRepeater; + this.failedEventRepeater?.setTask(() => this.retryFailedEvents()); } onDispatch(handler: Consumer): Fn { @@ -136,21 +134,34 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso return { request: formatEvents(events), ids }; } + private async executeDispatch(request: EventV1Request, closing = false): Promise { + const dispatcher = closing && this.closingEventDispatcher ? this.closingEventDispatcher : this.eventDispatcher; + return dispatcher.dispatchEvent(request).then((res) => { + if (res.statusCode && !isSuccessStatusCode(res.statusCode)) { + return Promise.reject(new Error(`Failed to dispatch events: ${res.statusCode}`)); + } + return Promise.resolve(res); + }); + } + private dispatchBatch(batch: EventBatch, closing: boolean): void { const { request, ids } = batch; ids.forEach((id) => this.dispatchingIds.add(id)); - const dispatcher = closing && this.closingEventDispatcher ? this.closingEventDispatcher : this.eventDispatcher; - const backoff = new ExponentialBackoff(this.retryMinBackoff, this.retryMaxBackoff, 500); - const runResult = runWithRetry(() => dispatcher.dispatchEvent(request), backoff, this.maxRetries); + const runResult: RunResult = this.retryConfig?.retry + ? 
runWithRetry( + () => this.executeDispatch(request, closing), this.retryConfig.backoffProvider(), this.retryConfig.maxRetries + ) : { + result: this.executeDispatch(request, closing), + cancelRetry: () => {}, + }; + + this.eventEmitter.emit('dispatch', request); const taskId = this.idGenerator.getId(); this.runningTask.set(taskId, runResult); runResult.result.then((res) => { - if (res.statusCode && !isSuccessStatusCode(res.statusCode)) { - return Promise.reject(new Error(`Failed to dispatch events: ${res.statusCode}`)); - } ids.forEach((id) => { this.eventStore?.remove(id); }); @@ -194,7 +205,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso } this.state = ServiceState.Running; this.dispatchRepeater.start(); - this.failedEventRepeater.start(); + this.failedEventRepeater?.start(); this.retryFailedEvents(); this.startPromise.resolve(); @@ -212,7 +223,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso this.state = ServiceState.Stopping; this.dispatchRepeater.stop(); - this.failedEventRepeater.stop(); + this.failedEventRepeater?.stop(); this.flush(true); this.runningTask.forEach((task) => task.cancelRetry()); @@ -220,6 +231,6 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso Promise.allSettled(Array.from(this.runningTask.values()).map((task) => task.result)).then(() => { this.state = ServiceState.Terminated; this.stopPromise.resolve(); - }) + }); } } From 66b217f9c95a3214966af73a4ec1e2b07ec2951a Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 8 Nov 2024 02:00:02 +0600 Subject: [PATCH 11/45] upd --- .../queueing_event_processor.spec.ts | 165 +++++++++++++----- .../queueing_event_processor.ts | 9 +- 2 files changed, 129 insertions(+), 45 deletions(-) diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/queueing_event_processor.spec.ts index ad2637087..abdb3b6da 100644 --- a/lib/event_processor/queueing_event_processor.spec.ts +++ b/lib/event_processor/queueing_event_processor.spec.ts @@ -51,47 +51,66 @@ describe('QueueingEventProcessor', async () => { vi.useRealTimers(); }); - it('should resolve onRunning() when start() is called', async () => { - const eventDispatcher = getMockDispatcher(); - const processor = new QueueingEventProcessor({ - eventDispatcher, - dispatchRepeater: getMockRepeater(), - maxQueueSize: 1000, + describe('start', () => { + it('should resolve onRunning() when start() is called', async () => { + const eventDispatcher = getMockDispatcher(); + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater: getMockRepeater(), + maxQueueSize: 1000, + }); + + processor.start(); + await expect(processor.onRunning()).resolves.not.toThrow(); }); - processor.start(); - await expect(processor.onRunning()).resolves.not.toThrow(); - }); - - it('should dispatch failed events in correct batch size and order when start is called', async () => { - const eventDispatcher = getMockDispatcher(); - const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; - mockDispatch.mockResolvedValue({}); - - const cache = getMockSyncCache(); - let events: ProcessableEvent[] = []; - - for(let i = 0; i < 5; i++) { - const id = `id-${i}`; - const event = createImpressionEvent(id); - events.push(event); - cache.set(id, { id, event }); - } + it('should start dispatchRepeater and failedEventRepeater', () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = 
getMockRepeater(); - const processor = new QueueingEventProcessor({ - eventDispatcher, - dispatchRepeater: getMockRepeater(), - maxQueueSize: 2, - eventStore: cache, + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + maxQueueSize: 1000, + }); + + processor.start(); + expect(dispatchRepeater.start).toHaveBeenCalledOnce(); + expect(failedEventRepeater.start).toHaveBeenCalledOnce(); }); - processor.start(); - await processor.onRunning(); - - expect(mockDispatch).toHaveBeenCalledTimes(3); - expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); - expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); - expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + it('should dispatch failed events in correct batch sizes and order', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + let events: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + events.push(event); + cache.set(id, { id, event }); + } + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater: getMockRepeater(), + maxQueueSize: 2, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + expect(mockDispatch).toHaveBeenCalledTimes(3); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + }); }); it('should dispatch failed events in correct batch size and order when retryFailedEvents is called', async () => { @@ -128,6 +147,11 @@ describe('QueueingEventProcessor', async () => { expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + + cache.clear(); + + // this event is + await processor.process(createImpressionEvent('id-5')) }); describe('process', () => { @@ -389,7 +413,6 @@ describe('QueueingEventProcessor', async () => { } }); - it('should remove the events from the eventStore after dispatch is successfull', async () => { const eventDispatcher = getMockDispatcher(); const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; @@ -641,8 +664,8 @@ describe('QueueingEventProcessor', async () => { const event = createImpressionEvent('id-1'); const event2 = createImpressionEvent('id-2'); - const dispatchEvent = vi.fn(); - processor.onDispatch(dispatchEvent); + const dispatchListener = vi.fn(); + processor.onDispatch(dispatchListener); processor.start(); await processor.onRunning(); @@ -652,7 +675,67 @@ describe('QueueingEventProcessor', async () => { await dispatchRepeater.execute(0); - expect(dispatchEvent).toHaveBeenCalledTimes(1); - expect(dispatchEvent.mock.calls[0][0]).toEqual(formatEvents([event, event2])); + expect(dispatchListener).toHaveBeenCalledTimes(1); + expect(dispatchListener.mock.calls[0][0]).toEqual(formatEvents([event, event2])); + }); + + it('should remove event handler when function returned from onDispatch is called', async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = 
getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 100, + }); + + const dispatchListener = vi.fn(); + + const unsub = processor.onDispatch(dispatchListener); + + processor.start(); + await processor.onRunning(); + + const event = createImpressionEvent('id-1'); + const event2 = createImpressionEvent('id-2'); + + await processor.process(event); + await processor.process(event2); + + await dispatchRepeater.execute(0); + + expect(dispatchListener).toHaveBeenCalledTimes(1); + expect(dispatchListener.mock.calls[0][0]).toEqual(formatEvents([event, event2])); + + unsub(); + + const event3 = createImpressionEvent('id-3'); + const event4 = createImpressionEvent('id-4'); + + await dispatchRepeater.execute(0); + expect(dispatchListener).toHaveBeenCalledTimes(1); + }); + + describe('retry failed event', () => { + + }); + + describe('stop', () => { + it('should reject onRunning if stop is called before the processor is started', async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 100, + }); + + processor.stop(); + + await expect(processor.onRunning()).rejects.toThrow(); + }); + + it('should ') }); }); diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 3eb2f05b7..da9b9963d 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -52,7 +52,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso private failedEventRepeater?: Repeater; private idGenerator: IdGenerator = new IdGenerator(); private runningTask: Map> = new Map(); - private dispatchingIds: Set = new Set(); + private activeEventIds: Set = new Set(); private logger?: LoggerFacade; private eventEmitter: EventEmitter<{ dispatch: EventV1Request }> = new EventEmitter(); private retryConfig?: RetryConfig; @@ -93,7 +93,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso let currentBatch: EventWithId[] = []; failedEventsArray.forEach((event) => { - if (!this.dispatchingIds.has(event.id)) { + if (!this.activeEventIds.has(event.id)) { currentBatch.push(event); if (currentBatch.length === this.maxQueueSize) { batches.push({ @@ -146,7 +146,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso private dispatchBatch(batch: EventBatch, closing: boolean): void { const { request, ids } = batch; - ids.forEach((id) => this.dispatchingIds.add(id)); + ids.forEach((id) => this.activeEventIds.add(id)); const runResult: RunResult = this.retryConfig?.retry ? 
runWithRetry( @@ -172,7 +172,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso this.logger?.error('Failed to dispatch events', err); }).finally(() => { this.runningTask.delete(taskId); - ids.forEach((id) => this.dispatchingIds.delete(id)); + ids.forEach((id) => this.activeEventIds.delete(id)); }); } @@ -196,6 +196,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso }; await this.eventStore?.set(eventWithId.id, eventWithId); + this.activeEventIds.add(eventWithId.id); this.eventQueue.push(eventWithId); } From bc7fcd7f7333d5f4bd8cbad6ba6015a5733fb1ac Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 8 Nov 2024 22:52:20 +0600 Subject: [PATCH 12/45] test --- .../queueing_event_processor.spec.ts | 270 +++++++++++++++--- .../queueing_event_processor.ts | 4 +- 2 files changed, 230 insertions(+), 44 deletions(-) diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/queueing_event_processor.spec.ts index abdb3b6da..b97627c92 100644 --- a/lib/event_processor/queueing_event_processor.spec.ts +++ b/lib/event_processor/queueing_event_processor.spec.ts @@ -29,6 +29,7 @@ import { getMockRepeater } from '../tests/mock/mock_repeater'; import event from 'sinon/lib/sinon/util/event'; import { reset } from 'sinon/lib/sinon/collection'; import logger from '../modules/logging/logger'; +import * as retry from '../utils/executor/backoff_retry_runner'; const getMockDispatcher = () => { return { @@ -113,47 +114,6 @@ describe('QueueingEventProcessor', async () => { }); }); - it('should dispatch failed events in correct batch size and order when retryFailedEvents is called', async () => { - const eventDispatcher = getMockDispatcher(); - const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; - mockDispatch.mockResolvedValue({}); - - const cache = getMockSyncCache(); - - const processor = new QueueingEventProcessor({ - eventDispatcher, - dispatchRepeater: getMockRepeater(), - maxQueueSize: 2, - eventStore: cache, - }); - - processor.start(); - await processor.onRunning(); - - expect(mockDispatch).toHaveBeenCalledTimes(0); - - let events: ProcessableEvent[] = []; - - for(let i = 0; i < 5; i++) { - const id = `id-${i}`; - const event = createImpressionEvent(id); - events.push(event); - cache.set(id, { id, event }); - } - - await processor.retryFailedEvents(); - - expect(mockDispatch).toHaveBeenCalledTimes(3); - expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); - expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); - expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); - - cache.clear(); - - // this event is - await processor.process(createImpressionEvent('id-5')) - }); - describe('process', () => { it('should enqueue event without dispatching immediately', async () => { const eventDispatcher = getMockDispatcher(); @@ -651,6 +611,67 @@ describe('QueueingEventProcessor', async () => { expect(logger.error).toHaveBeenCalledOnce(); }); + it('should dispatch only failed events in correct batch size and order when retryFailedEvents is called', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 2, + eventStore: cache, + }); + + 
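// Illustrative sketch only, not the SDK's retryFailedEvents(): this test seeds an event
// store and expects the stored events to be redispatched in batches of `maxQueueSize`
// (2 here), skipping ids that are still in flight. The names below (StoredEvent,
// batchFailedEvents) are hypothetical, standalone versions of that batching step.

type StoredEvent = { id: string; payload: unknown };

function batchFailedEvents(
  stored: StoredEvent[],
  activeIds: Set<string>,
  maxBatchSize: number
): StoredEvent[][] {
  const batches: StoredEvent[][] = [];
  let current: StoredEvent[] = [];

  for (const entry of stored) {
    if (activeIds.has(entry.id)) continue; // events still dispatching are left alone
    current.push(entry);
    if (current.length === maxBatchSize) {
      batches.push(current);
      current = [];
    }
  }
  if (current.length > 0) batches.push(current);
  return batches;
}

// Five stored events with a batch size of 2 yield batches of 2, 2 and 1, matching the
// three dispatchEvent calls asserted in this test.
const seeded = ['id-0', 'id-1', 'id-2', 'id-3', 'id-4'].map((id) => ({ id, payload: {} }));
console.log(batchFailedEvents(seeded, new Set<string>(), 2).map((b) => b.length)); // [2, 2, 1]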
processor.start(); + await processor.onRunning(); + + expect(mockDispatch).toHaveBeenCalledTimes(0); + + // these events should be active and should not be reomoved from store or dispatched with failed events + const eventA = createImpressionEvent('id-A'); + const eventB = createImpressionEvent('id-B'); + + await processor.process(eventA); + await processor.process(eventB); + + let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB) + ])); + + + let events: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + events.push(event); + cache.set(id, { id, event }); + } + + await processor.retryFailedEvents(); + + expect(mockDispatch).toHaveBeenCalledTimes(3); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + + await exhaustMicrotasks(); + + expect(cache.size()).toBe(2); + eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB) + ])); + }); + + it('should emit dispatch event when dispatching events', async () => { const eventDispatcher = getMockDispatcher(); const dispatchRepeater = getMockRepeater(); @@ -716,10 +737,69 @@ describe('QueueingEventProcessor', async () => { expect(dispatchListener).toHaveBeenCalledTimes(1); }); - describe('retry failed event', () => { + it('should dispatch only failed events in correct batch size and order when failedEventRepeater is triggered', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + maxQueueSize: 2, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + expect(mockDispatch).toHaveBeenCalledTimes(0); + + // these events should be active and should not be reomoved from store or dispatched with failed events + const eventA = createImpressionEvent('id-A'); + const eventB = createImpressionEvent('id-B'); + + await processor.process(eventA); + await processor.process(eventB); + + let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? 
-1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB) + ])); + + + let events: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + events.push(event); + cache.set(id, { id, event }); + } + + await failedEventRepeater.execute(0); + + expect(mockDispatch).toHaveBeenCalledTimes(3); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + + await exhaustMicrotasks(); + + expect(cache.size()).toBe(2); + eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB) + ])); }); + describe('stop', () => { it('should reject onRunning if stop is called before the processor is started', async () => { const eventDispatcher = getMockDispatcher(); @@ -736,6 +816,110 @@ describe('QueueingEventProcessor', async () => { await expect(processor.onRunning()).rejects.toThrow(); }); - it('should ') + it('should stop dispatchRepeater and failedEventRepeater', async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + processor.stop(); + expect(dispatchRepeater.stop).toHaveBeenCalledOnce(); + expect(failedEventRepeater.stop).toHaveBeenCalledOnce(); + }); + + it('should disptach the events in queue using the closing dispatcher if available', async () => { + const eventDispatcher = getMockDispatcher(); + const closingEventDispatcher = getMockDispatcher(); + closingEventDispatcher.dispatchEvent.mockResolvedValue({}); + + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); + + const processor = new QueueingEventProcessor({ + eventDispatcher, + closingEventDispatcher, + dispatchRepeater, + failedEventRepeater, + maxQueueSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + let events: ProcessableEvent[] = []; + for(let i = 0; i < 10; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + expect(closingEventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + + processor.stop(); + expect(closingEventDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); + expect(closingEventDispatcher.dispatchEvent).toHaveBeenCalledWith(formatEvents(events)); + }); + + it('should cancel retry of active dispatches', async () => { + const runWithRetrySpy = vi.spyOn(retry, 'runWithRetry'); + const cancel1 = vi.fn(); + const cancel2 = vi.fn(); + runWithRetrySpy.mockReturnValueOnce({ + cancelRetry: cancel1, + result: resolvablePromise().promise, + }).mockReturnValueOnce({ + cancelRetry: cancel2, + result: resolvablePromise().promise, + }); + + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + + const backoffController = { + backoff: vi.fn().mockReturnValue(1000), + reset: vi.fn(), + }; + + const 
processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 100, + retryConfig: { + retry: true, + backoffProvider: () => backoffController, + maxRetries: 3, + } + }); + + processor.start(); + await processor.onRunning(); + + await processor.process(createImpressionEvent('id-1')); + await dispatchRepeater.execute(0); + + expect(runWithRetrySpy).toHaveBeenCalledTimes(1); + + await processor.process(createImpressionEvent('id-2')); + await dispatchRepeater.execute(0); + + expect(runWithRetrySpy).toHaveBeenCalledTimes(2); + + processor.stop(); + + expect(cancel1).toHaveBeenCalledOnce(); + expect(cancel2).toHaveBeenCalledOnce(); + + runWithRetrySpy.mockReset(); + }); }); }); diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index da9b9963d..4a8ec68a1 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -146,7 +146,6 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso private dispatchBatch(batch: EventBatch, closing: boolean): void { const { request, ids } = batch; - ids.forEach((id) => this.activeEventIds.add(id)); const runResult: RunResult = this.retryConfig?.retry ? runWithRetry( @@ -161,8 +160,11 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso const taskId = this.idGenerator.getId(); this.runningTask.set(taskId, runResult); + console.log(runResult); + runResult.result.then((res) => { ids.forEach((id) => { + this.activeEventIds.delete(id); this.eventStore?.remove(id); }); return Promise.resolve(); From 32928869f32f98c7d28b7d49c6f27c746012b012 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 8 Nov 2024 23:49:37 +0600 Subject: [PATCH 13/45] tests --- .../queueing_event_processor.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/queueing_event_processor.spec.ts index b97627c92..622e4707e 100644 --- a/lib/event_processor/queueing_event_processor.spec.ts +++ b/lib/event_processor/queueing_event_processor.spec.ts @@ -30,6 +30,7 @@ import event from 'sinon/lib/sinon/util/event'; import { reset } from 'sinon/lib/sinon/collection'; import logger from '../modules/logging/logger'; import * as retry from '../utils/executor/backoff_retry_runner'; +import { ServiceState } from '../service'; const getMockDispatcher = () => { return { @@ -921,5 +922,51 @@ describe('QueueingEventProcessor', async () => { runWithRetrySpy.mockReset(); }); + + it('should resolve onTerminated when all active dispatch requests settles' , async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRes1 = resolvablePromise(); + const dispatchRes2 = resolvablePromise(); + eventDispatcher.dispatchEvent.mockReturnValueOnce(dispatchRes1.promise) + .mockReturnValueOnce(dispatchRes2.promise); + + const dispatchRepeater = getMockRepeater(); + + const backoffController = { + backoff: vi.fn().mockReturnValue(1000), + reset: vi.fn(), + }; + + const processor = new QueueingEventProcessor({ + eventDispatcher, + dispatchRepeater, + maxQueueSize: 100, + }); + + processor.start() + await processor.onRunning(); + + await processor.process(createImpressionEvent('id-1')); + await dispatchRepeater.execute(0); + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); + + await processor.process(createImpressionEvent('id-2')); + await dispatchRepeater.execute(0); + 
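// The dispatchRes1/dispatchRes2 fixtures in this test come from a resolvablePromise()
// helper. A minimal sketch of that "deferred" pattern is shown here for reference; the
// SDK's actual helper may differ in detail, and the names Deferred/makeDeferred are
// illustrative only.

type Deferred<T> = {
  promise: Promise<T>;
  resolve: (value: T | PromiseLike<T>) => void;
  reject: (reason?: unknown) => void;
};

function makeDeferred<T = void>(): Deferred<T> {
  let resolve!: (value: T | PromiseLike<T>) => void;
  let reject!: (reason?: unknown) => void;
  const promise = new Promise<T>((res, rej) => {
    resolve = res;
    reject = rej;
  });
  return { promise, resolve, reject };
}

// Usage: hand out the pending promise, then settle it when the test is ready. Because
// the processor waits on its in-flight dispatches with Promise.allSettled, onTerminated()
// resolves whether each dispatch promise is resolved or rejected, which is what the
// final assertions in this test rely on.
const dispatchResult = makeDeferred<{ statusCode?: number }>();
// eventDispatcher.dispatchEvent.mockReturnValueOnce(dispatchResult.promise);
dispatchResult.resolve({ statusCode: 200 });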
expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(2); + + const onStop = vi.fn(); + processor.onTerminated().then(onStop); + + processor.stop(); + + await exhaustMicrotasks(); + expect(onStop).not.toHaveBeenCalled(); + expect(processor.getState()).toEqual(ServiceState.Stopping); + + dispatchRes1.resolve(); + dispatchRes2.reject(new Error()); + + await expect(processor.onTerminated()).resolves.not.toThrow(); + }); }); }); From 78908c6dc734e16fbe8cf68ad497dc832da9632b Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Sat, 9 Nov 2024 02:06:09 +0600 Subject: [PATCH 14/45] tests --- .../forwarding_event_processor.spec.ts | 140 +++++++++++------- .../forwarding_event_processor.ts | 11 +- vitest.config.mts | 2 +- 3 files changed, 91 insertions(+), 62 deletions(-) diff --git a/lib/event_processor/forwarding_event_processor.spec.ts b/lib/event_processor/forwarding_event_processor.spec.ts index 72da66633..b6d1b5303 100644 --- a/lib/event_processor/forwarding_event_processor.spec.ts +++ b/lib/event_processor/forwarding_event_processor.spec.ts @@ -16,49 +16,11 @@ import { expect, describe, it, vi } from 'vitest'; import { getForwardingEventProcessor } from './forwarding_event_processor'; -import { EventDispatcher, makeBatchedEventV1 } from '.'; +import { EventDispatcher, formatEvents, makeBatchedEventV1 } from '.'; -function createImpressionEvent() { - return { - type: 'impression' as const, - timestamp: 69, - uuid: 'uuid', - - context: { - accountId: 'accountId', - projectId: 'projectId', - clientName: 'node-sdk', - clientVersion: '3.0.0', - revision: '1', - botFiltering: true, - anonymizeIP: true, - }, - - user: { - id: 'userId', - attributes: [{ entityId: 'attr1-id', key: 'attr1-key', value: 'attr1-value' }], - }, - - layer: { - id: 'layerId', - }, - - experiment: { - id: 'expId', - key: 'expKey', - }, - - variation: { - id: 'varId', - key: 'varKey', - }, - - ruleKey: 'expKey', - flagKey: 'flagKey1', - ruleType: 'experiment', - enabled: true, - } -} +import { createImpressionEvent } from '../tests/mock/create_event'; +import exp from 'constants'; +import { ServiceState } from '../service'; const getMockEventDispatcher = (): EventDispatcher => { return { @@ -66,33 +28,97 @@ const getMockEventDispatcher = (): EventDispatcher => { }; }; -const getMockNotificationCenter = () => { - return { - sendNotifications: vi.fn(), - }; -} +describe('ForwardingEventProcessor', () => { + it('should resolve onRunning() when start is called', async () => { + const dispatcher = getMockEventDispatcher(); + const mockDispatch = vi.mocked(dispatcher.dispatchEvent); -describe('ForwardingEventProcessor', function() { - it('should dispatch event immediately when process is called', () => { + const processor = getForwardingEventProcessor(dispatcher); + + processor.start(); + await expect(processor.onRunning()).resolves.not.toThrow(); + }); + + it('should dispatch event immediately when process is called', async() => { const dispatcher = getMockEventDispatcher(); const mockDispatch = vi.mocked(dispatcher.dispatchEvent); - const notificationCenter = getMockNotificationCenter(); - const processor = getForwardingEventProcessor(dispatcher, notificationCenter); + + const processor = getForwardingEventProcessor(dispatcher); + processor.start(); + await processor.onRunning(); + const event = createImpressionEvent(); processor.process(event); expect(dispatcher.dispatchEvent).toHaveBeenCalledOnce(); const data = mockDispatch.mock.calls[0][0].params; expect(data).toEqual(makeBatchedEventV1([event])); - 
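// The onDispatch tests that follow rely on a subscription that hands back an
// unsubscribe function (onDispatch returns eventEmitter.on('dispatch', handler)).
// A minimal, self-contained sketch of that pattern, illustrative only and not the
// SDK's EventEmitter:

type Handler<T> = (payload: T) => void;

class TinyEmitter<T> {
  private handlers = new Set<Handler<T>>();

  // Register a handler and return a function that removes it again.
  on(handler: Handler<T>): () => void {
    this.handlers.add(handler);
    return () => {
      this.handlers.delete(handler);
    };
  }

  emit(payload: T): void {
    this.handlers.forEach((handler) => handler(payload));
  }
}

// const unsub = emitter.on(listener); emitter.emit(x); unsub(); // listener no longer fires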
expect(notificationCenter.sendNotifications).toHaveBeenCalledOnce(); }); - it('should return a resolved promise when stop is called', async () => { + it('should emit dispatch event when event is dispatched', async() => { + const dispatcher = getMockEventDispatcher(); + const mockDispatch = vi.mocked(dispatcher.dispatchEvent); + + const processor = getForwardingEventProcessor(dispatcher); + + processor.start(); + await processor.onRunning(); + + const listener = vi.fn(); + processor.onDispatch(listener); + + const event = createImpressionEvent(); + processor.process(event); + expect(dispatcher.dispatchEvent).toHaveBeenCalledOnce(); + expect(dispatcher.dispatchEvent).toHaveBeenCalledWith(formatEvents([event])); + expect(listener).toHaveBeenCalledOnce(); + expect(listener).toHaveBeenCalledWith(formatEvents([event])); + }); + + it('should remove dispatch listener when the function returned from onDispatch is called', async() => { + const dispatcher = getMockEventDispatcher(); + const mockDispatch = vi.mocked(dispatcher.dispatchEvent); + + const processor = getForwardingEventProcessor(dispatcher); + + processor.start(); + await processor.onRunning(); + + const listener = vi.fn(); + const unsub = processor.onDispatch(listener); + + let event = createImpressionEvent(); + processor.process(event); + expect(dispatcher.dispatchEvent).toHaveBeenCalledOnce(); + expect(dispatcher.dispatchEvent).toHaveBeenCalledWith(formatEvents([event])); + expect(listener).toHaveBeenCalledOnce(); + expect(listener).toHaveBeenCalledWith(formatEvents([event])); + + unsub(); + event = createImpressionEvent('id-a'); + processor.process(event); + expect(listener).toHaveBeenCalledOnce(); + }); + + it('should resolve onTerminated promise when stop is called', async () => { const dispatcher = getMockEventDispatcher(); - const notificationCenter = getMockNotificationCenter(); - const processor = getForwardingEventProcessor(dispatcher, notificationCenter); + const processor = getForwardingEventProcessor(dispatcher); processor.start(); - const stopPromise = processor.stop(); - expect(stopPromise).resolves.not.toThrow(); + await processor.onRunning(); + + expect(processor.getState()).toEqual(ServiceState.Running); + + processor.stop(); + await expect(processor.onTerminated()).resolves.not.toThrow(); + }); + + it('should reject onRunning promise when stop is called in New state', async () => { + const dispatcher = getMockEventDispatcher(); + const processor = getForwardingEventProcessor(dispatcher); + + expect(processor.getState()).toEqual(ServiceState.New); + + processor.stop(); + await expect(processor.onRunning()).rejects.toThrow(); }); }); diff --git a/lib/event_processor/forwarding_event_processor.ts b/lib/event_processor/forwarding_event_processor.ts index a22296fa2..83acb2b33 100644 --- a/lib/event_processor/forwarding_event_processor.ts +++ b/lib/event_processor/forwarding_event_processor.ts @@ -28,9 +28,6 @@ import { BaseService, ServiceState } from '../service'; import { EventEmitter } from '../utils/event_emitter/event_emitter'; import { Consumer, Fn } from '../utils/type'; class ForwardingEventProcessor extends BaseService implements EventProcessor { - onDispatch(handler: Consumer): Fn { - throw new Error('Method not implemented.'); - } private dispatcher: EventDispatcher; private eventEmitter: EventEmitter<{ dispatch: EventV1Request }>; @@ -59,12 +56,18 @@ class ForwardingEventProcessor extends BaseService implements EventProcessor { if (this.isDone()) { return; } - this.state = ServiceState.Terminated; + if 
(this.isNew()) { this.startPromise.reject(new Error('Service stopped before it was started')); } + + this.state = ServiceState.Terminated; this.stopPromise.resolve(); } + + onDispatch(handler: Consumer): Fn { + return this.eventEmitter.on('dispatch', handler); + } } export function getForwardingEventProcessor(dispatcher: EventDispatcher): EventProcessor { diff --git a/vitest.config.mts b/vitest.config.mts index 10d4e4d17..673f7d1c6 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/queueing_event_processor.spec.ts'], + include: ['**/*.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 6754a1d0db1dd98034cb118b23e4c9488748b494 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Sat, 9 Nov 2024 03:16:01 +0600 Subject: [PATCH 15/45] up --- lib/index.react_native.ts | 6 +++--- lib/optimizely/index.tests.js | 24 +++++++++++----------- lib/optimizely_user_context/index.tests.js | 9 ++++---- 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/lib/index.react_native.ts b/lib/index.react_native.ts index b2654823d..205158298 100644 --- a/lib/index.react_native.ts +++ b/lib/index.react_native.ts @@ -28,7 +28,7 @@ import { OptimizelyDecideOption, Client, Config } from './shared_types'; import { BrowserOdpManager } from './plugins/odp_manager/index.browser'; import * as commonExports from './common_exports'; import { createPollingProjectConfigManager } from './project_config/config_manager_factory.react_native'; -import { createForwardingEventProcessor } from './event_processor/event_processor_factory.react_native'; +import { getForwardingEventProcessor } from './event_processor/event_processor_factory.react_native'; import 'fast-text-encoding'; import 'react-native-get-random-values'; @@ -147,7 +147,7 @@ export { createInstance, OptimizelyDecideOption, createPollingProjectConfigManager, - createForwardingEventProcessor, + getForwardingEventProcessor as createForwardingEventProcessor, }; export * from './common_exports'; @@ -163,7 +163,7 @@ export default { createInstance, OptimizelyDecideOption, createPollingProjectConfigManager, - createForwardingEventProcessor, + createForwardingEventProcessor: getForwardingEventProcessor, }; export * from './export_types'; diff --git a/lib/optimizely/index.tests.js b/lib/optimizely/index.tests.js index ca375151b..624fa2f72 100644 --- a/lib/optimizely/index.tests.js +++ b/lib/optimizely/index.tests.js @@ -9690,6 +9690,9 @@ describe('lib/optimizely', function() { process: sinon.stub(), start: sinon.stub(), stop: sinon.stub(), + onRunning: sinon.stub(), + onTerminated: sinon.stub(), + onDispatch: sinon.stub(), }; }); @@ -9729,10 +9732,11 @@ describe('lib/optimizely', function() { }); }); - describe('when the event processor stop method returns a promise that rejects', function() { + describe('when the event processor onTerminated() method returns a promise that rejects', function() { beforeEach(function() { eventProcessorStopPromise = Promise.reject(new Error('Failed to stop')); - mockEventProcessor.stop.returns(eventProcessorStopPromise); + eventProcessorStopPromise.catch(() => {}); + mockEventProcessor.onTerminated.returns(eventProcessorStopPromise); const mockConfigManager = getMockProjectConfigManager({ initConfig: createProjectConfig(testData.getTestProjectConfig()), }); @@ -9779,11 +9783,9 @@ describe('lib/optimizely', function() { var notificationCenter = createNotificationCenter({ logger: createdLogger, 
errorHandler: errorHandler }); var eventDispatcher = getMockEventDispatcher(); - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher + ); beforeEach(function() { sinon.stub(errorHandler, 'handleError'); @@ -10107,11 +10109,9 @@ describe('lib/optimizely', function() { beforeEach(function() { bucketStub = sinon.stub(bucketer, 'bucket'); eventDispatcherSpy = sinon.spy(() => Promise.resolve({ statusCode: 200 })); - eventProcessor = createEventProcessor({ - dispatcher: { dispatchEvent: eventDispatcherSpy }, - batchSize: 1, - notificationCenter: notificationCenter, - }); + eventProcessor = getForwardingEventProcessor( + { dispatchEvent: eventDispatcherSpy }, + ); const datafile = testData.getTestProjectConfig(); const mockConfigManager = getMockProjectConfigManager(); diff --git a/lib/optimizely_user_context/index.tests.js b/lib/optimizely_user_context/index.tests.js index 54d34a953..8ddfc631c 100644 --- a/lib/optimizely_user_context/index.tests.js +++ b/lib/optimizely_user_context/index.tests.js @@ -32,6 +32,7 @@ import testData from '../tests/test_data'; import { OptimizelyDecideOption } from '../shared_types'; import { getMockProjectConfigManager } from '../tests/mock/mock_project_config_manager'; import { createProjectConfig } from '../project_config/project_config'; +import { getForwardingEventProcessor } from '../event_processor/forwarding_event_processor'; const getMockEventDispatcher = () => { const dispatcher = { @@ -358,11 +359,9 @@ describe('lib/optimizely_user_context', function() { var optlyInstance; var notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); var eventDispatcher = getMockEventDispatcher(); - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); beforeEach(function() { optlyInstance = new Optimizely({ clientEngine: 'node-sdk', From 5bf1eb83c0d3b019e898bbf720626193df4f99a6 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Sat, 9 Nov 2024 05:10:31 +0600 Subject: [PATCH 16/45] update --- lib/optimizely/index.tests.js | 863 ++++++++++----------- lib/optimizely_user_context/index.tests.js | 106 ++- 2 files changed, 473 insertions(+), 496 deletions(-) diff --git a/lib/optimizely/index.tests.js b/lib/optimizely/index.tests.js index 624fa2f72..7c233c74a 100644 --- a/lib/optimizely/index.tests.js +++ b/lib/optimizely/index.tests.js @@ -60,6 +60,34 @@ const getMockEventProcessor = (notificationCenter) => { return getForwardingEventProcessor(getMockEventDispatcher(), notificationCenter); } +const getOptlyInstance = ({ datafileObj, defaultDecideOptions }) => { + const mockConfigManager = getMockProjectConfigManager({ + initConfig: createProjectConfig(datafileObj), + }); + const eventDispatcher = getMockEventDispatcher(); + const eventProcessor = getForwardingEventProcessor(eventDispatcher); + + const notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); + var createdLogger = logger.createLogger({ logLevel: LOG_LEVEL.INFO }); + + const optlyInstance = new Optimizely({ + clientEngine: 'node-sdk', + projectConfigManager: mockConfigManager, + errorHandler: errorHandler, + eventProcessor, + jsonSchemaValidator: jsonSchemaValidator, + logger: createdLogger, + isValidInstance: true, 
+ eventBatchSize: 1, + defaultDecideOptions: defaultDecideOptions || [], + notificationCenter, + }); + + sinon.stub(notificationCenter, 'sendNotifications'); + + return { optlyInstance, eventProcessor, eventDispatcher, notificationCenter, createdLogger } +} + describe('lib/optimizely', function() { var ProjectConfigManagerStub; var globalStubErrorHandler; @@ -4474,11 +4502,9 @@ describe('lib/optimizely', function() { }); var notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); var eventDispatcher = getMockEventDispatcher(); - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); describe('#createUserContext', function() { beforeEach(function() { @@ -4591,26 +4617,14 @@ describe('lib/optimizely', function() { describe('#decide', function() { var userId = 'tester'; describe('with empty default decide options', function() { + let optlyInstance, notificationCenter, createdLogger; beforeEach(function() { - const mockConfigManager = getMockProjectConfigManager({ - initConfig: createProjectConfig(testData.getTestDecideProjectConfig()), - }); + + ({ optlyInstance, notificationCenter, createdLogger, eventDispatcher} = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + })); - optlyInstance = new Optimizely({ - clientEngine: 'node-sdk', - projectConfigManager: mockConfigManager, - errorHandler: errorHandler, - eventDispatcher: eventDispatcher, - jsonSchemaValidator: jsonSchemaValidator, - logger: createdLogger, - isValidInstance: true, - eventBatchSize: 1, - defaultDecideOptions: [], - notificationCenter, - eventProcessor, - }); - sinon.stub(optlyInstance.notificationCenter, 'sendNotifications'); sinon.stub(errorHandler, 'handleError'); sinon.stub(createdLogger, 'log'); sinon.stub(fns, 'uuid').returns('a68cf1ad-0393-4e18-af87-efe8f01a7c9c'); @@ -4621,7 +4635,7 @@ describe('lib/optimizely', function() { errorHandler.handleError.restore(); createdLogger.log.restore(); fns.uuid.restore(); - optlyInstance.notificationCenter.sendNotifications.restore(); + notificationCenter.sendNotifications.restore(); }); it('should return error decision object when provided flagKey is invalid and do not dispatch an event', function() { @@ -4738,8 +4752,8 @@ describe('lib/optimizely', function() { }; var callArgs = eventDispatcher.dispatchEvent.getCalls()[0].args; assert.deepEqual(callArgs[0], expectedImpressionEvent); - sinon.assert.callCount(optlyInstance.notificationCenter.sendNotifications, 4); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(3).args; + sinon.assert.callCount(notificationCenter.sendNotifications, 4); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(3).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -4779,8 +4793,8 @@ describe('lib/optimizely', function() { }; assert.deepEqual(decision, expectedDecision); sinon.assert.notCalled(eventDispatcher.dispatchEvent); - sinon.assert.calledTwice(optlyInstance.notificationCenter.sendNotifications); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(1).args; + sinon.assert.calledTwice(notificationCenter.sendNotifications); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(1).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -4822,8 +4836,8 @@ 
describe('lib/optimizely', function() { }; assert.deepEqual(decision, expectedDecision); sinon.assert.notCalled(eventDispatcher.dispatchEvent); - sinon.assert.calledOnce(optlyInstance.notificationCenter.sendNotifications); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(0).args; + sinon.assert.calledOnce(notificationCenter.sendNotifications); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(0).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -4845,6 +4859,11 @@ describe('lib/optimizely', function() { }); it('should make a decision for rollout and dispatch an event when sendFlagDecisions is set to true', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance( + { + datafileObj: testData.getTestDecideProjectConfig(), + } + ) var flagKey = 'feature_1'; var expectedVariables = optlyInstance.getAllFeatureVariables(flagKey, userId); var user = new OptimizelyUserContext({ @@ -4863,8 +4882,8 @@ describe('lib/optimizely', function() { }; assert.deepEqual(decision, expectedDecision); sinon.assert.calledOnce(eventDispatcher.dispatchEvent); - sinon.assert.callCount(optlyInstance.notificationCenter.sendNotifications, 4); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(3).args; + sinon.assert.callCount(notificationCenter.sendNotifications, 4); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(3).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -4886,6 +4905,12 @@ describe('lib/optimizely', function() { }); it('should make a decision for rollout and do not dispatch an event when sendFlagDecisions is set to false', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance( + { + datafileObj: testData.getTestDecideProjectConfig(), + } + ) + var newConfig = optlyInstance.projectConfigManager.getConfig(); newConfig.sendFlagDecisions = false; optlyInstance.projectConfigManager.getConfig = sinon.stub().returns(newConfig); @@ -4907,8 +4932,8 @@ describe('lib/optimizely', function() { }; assert.deepEqual(decision, expectedDecision); sinon.assert.notCalled(eventDispatcher.dispatchEvent); - sinon.assert.calledTwice(optlyInstance.notificationCenter.sendNotifications); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(1).args; + sinon.assert.calledTwice(notificationCenter.sendNotifications); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(1).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -4930,6 +4955,11 @@ describe('lib/optimizely', function() { }); it('should make a decision when variation is null and dispatch an event', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance( + { + datafileObj: testData.getTestDecideProjectConfig(), + } + ) var flagKey = 'feature_3'; var expectedVariables = optlyInstance.getAllFeatureVariables(flagKey, userId); var user = new OptimizelyUserContext({ @@ -4948,8 +4978,8 @@ describe('lib/optimizely', function() { }; assert.deepEqual(decision, expectedDecision); sinon.assert.calledOnce(eventDispatcher.dispatchEvent); - sinon.assert.callCount(optlyInstance.notificationCenter.sendNotifications, 4); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(3).args; + sinon.assert.callCount(notificationCenter.sendNotifications, 4); + var notificationCallArgs = 
notificationCenter.sendNotifications.getCall(3).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -4972,40 +5002,11 @@ describe('lib/optimizely', function() { }); describe('with EXCLUDE_VARIABLES flag in default decide options', function() { - beforeEach(function() { - const mockConfigManager = getMockProjectConfigManager({ - initConfig: createProjectConfig(testData.getTestDecideProjectConfig()), - }); - - optlyInstance = new Optimizely({ - clientEngine: 'node-sdk', - projectConfigManager: mockConfigManager, - errorHandler: errorHandler, - eventProcessor, - jsonSchemaValidator: jsonSchemaValidator, - logger: createdLogger, - isValidInstance: true, - eventBatchSize: 1, - defaultDecideOptions: [OptimizelyDecideOption.EXCLUDE_VARIABLES], - eventProcessor, - notificationCenter, - }); - - sinon.stub(optlyInstance.notificationCenter, 'sendNotifications'); - sinon.stub(errorHandler, 'handleError'); - sinon.stub(createdLogger, 'log'); - sinon.stub(fns, 'uuid').returns('a68cf1ad-0393-4e18-af87-efe8f01a7c9c'); - }); - - afterEach(function() { - eventDispatcher.dispatchEvent.reset(); - optlyInstance.notificationCenter.sendNotifications.restore(); - errorHandler.handleError.restore(); - createdLogger.log.restore(); - fns.uuid.restore(); - }); - it('should exclude variables in decision object and dispatch an event', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + defaultDecideOptions: [OptimizelyDecideOption.EXCLUDE_VARIABLES], + }) var flagKey = 'feature_2'; var user = new OptimizelyUserContext({ optimizely: optlyInstance, @@ -5023,8 +5024,8 @@ describe('lib/optimizely', function() { }; assert.deepEqual(decision, expectedDecisionObj); sinon.assert.calledOnce(eventDispatcher.dispatchEvent); - sinon.assert.calledThrice(optlyInstance.notificationCenter.sendNotifications); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(2).args; + sinon.assert.calledThrice(notificationCenter.sendNotifications); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(2).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -5046,6 +5047,11 @@ describe('lib/optimizely', function() { }); it('should exclude variables in decision object and do not dispatch an event when DISABLE_DECISION_EVENT is passed in decide options', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + defaultDecideOptions: [OptimizelyDecideOption.EXCLUDE_VARIABLES], + }) + var flagKey = 'feature_2'; var user = new OptimizelyUserContext({ optimizely: optlyInstance, @@ -5063,8 +5069,8 @@ describe('lib/optimizely', function() { }; assert.deepEqual(decision, expectedDecisionObj); sinon.assert.notCalled(eventDispatcher.dispatchEvent); - sinon.assert.calledOnce(optlyInstance.notificationCenter.sendNotifications); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(0).args; + sinon.assert.calledOnce(notificationCenter.sendNotifications); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(0).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -5779,40 +5785,15 @@ describe('lib/optimizely', function() { }); }); + describe('#decideForKeys', function() { var userId = 'tester'; - beforeEach(function() { - eventDispatcher.dispatchEvent.reset(); - const mockConfigManager = 
getMockProjectConfigManager({ - initConfig: createProjectConfig(testData.getTestDecideProjectConfig()), - }); - - optlyInstance = new Optimizely({ - clientEngine: 'node-sdk', - projectConfigManager: mockConfigManager, - errorHandler: errorHandler, - eventProcessor, - jsonSchemaValidator: jsonSchemaValidator, - logger: createdLogger, - isValidInstance: true, - eventBatchSize: 1, - defaultDecideOptions: [], - notificationCenter, - eventProcessor, - }); - - sinon.stub(optlyInstance.notificationCenter, 'sendNotifications'); - }); - - afterEach(function() { - eventDispatcher.dispatchEvent.reset(); - optlyInstance.notificationCenter.sendNotifications.restore(); - }); - it('should return decision results map with single flag key provided for feature_test and dispatch an event', function() { var flagKey = 'feature_2'; + const { optlyInstance, eventDispatcher } = getOptlyInstance({ datafileObj: testData.getTestDecideProjectConfig() }); var user = optlyInstance.createUserContext(userId); var expectedVariables = optlyInstance.getAllFeatureVariables(flagKey, userId); + var decisionsMap = optlyInstance.decideForKeys(user, [flagKey]); var decision = decisionsMap[flagKey]; var expectedDecision = { @@ -5835,7 +5816,9 @@ describe('lib/optimizely', function() { it('should return decision results map with two flag keys provided and dispatch events', function() { var flagKeysArray = ['feature_1', 'feature_2']; + const { optlyInstance, eventDispatcher } = getOptlyInstance({ datafileObj: testData.getTestDecideProjectConfig() }); var user = optlyInstance.createUserContext(userId); + var expectedVariables1 = optlyInstance.getAllFeatureVariables(flagKeysArray[0], userId); var expectedVariables2 = optlyInstance.getAllFeatureVariables(flagKeysArray[1], userId); var decisionsMap = optlyInstance.decideForKeys(user, flagKeysArray); @@ -5868,6 +5851,7 @@ describe('lib/optimizely', function() { it('should return decision results map with only enabled flags when ENABLED_FLAGS_ONLY flag is passed in and dispatch events', function() { var flagKey1 = 'feature_2'; var flagKey2 = 'feature_3'; + const { optlyInstance, eventDispatcher } = getOptlyInstance({ datafileObj: testData.getTestDecideProjectConfig() }); var user = optlyInstance.createUserContext(userId, { gender: 'female' }); var expectedVariables = optlyInstance.getAllFeatureVariables(flagKey1, userId); var decisionsMap = optlyInstance.decideForKeys( @@ -5894,36 +5878,11 @@ describe('lib/optimizely', function() { describe('#decideAll', function() { var userId = 'tester'; describe('with empty default decide options', function() { - beforeEach(function() { - const mockConfigManager = getMockProjectConfigManager({ - initConfig: createProjectConfig(testData.getTestDecideProjectConfig()), - }); - - optlyInstance = new Optimizely({ - clientEngine: 'node-sdk', - projectConfigManager: mockConfigManager, - errorHandler: errorHandler, - eventProcessor, - jsonSchemaValidator: jsonSchemaValidator, - logger: createdLogger, - isValidInstance: true, - eventBatchSize: 1, - defaultDecideOptions: [], - notificationCenter, - eventProcessor, - }); - - sinon.stub(optlyInstance.notificationCenter, 'sendNotifications'); - }); - - afterEach(function() { - eventDispatcher.dispatchEvent.reset(); - optlyInstance.notificationCenter.sendNotifications.restore(); - }); it('should return decision results map with all flag keys provided and dispatch events', function() { + const { optlyInstance, eventDispatcher } = getOptlyInstance({ datafileObj: testData.getTestDecideProjectConfig() }); var configObj 
= optlyInstance.projectConfigManager.getConfig(); - var allFlagKeysArray = Object.keys(configObj.featureKeyMap); + var allFlagKeysArray = Object.keys(configObj.featureKeyMap); var user = optlyInstance.createUserContext(userId); var expectedVariables1 = optlyInstance.getAllFeatureVariables(allFlagKeysArray[0], userId); var expectedVariables2 = optlyInstance.getAllFeatureVariables(allFlagKeysArray[1], userId); @@ -5969,6 +5928,7 @@ describe('lib/optimizely', function() { it('should return decision results map with only enabled flags when ENABLED_FLAGS_ONLY flag is passed in and dispatch events', function() { var flagKey1 = 'feature_1'; var flagKey2 = 'feature_2'; + const { optlyInstance, eventDispatcher } = getOptlyInstance({ datafileObj: testData.getTestDecideProjectConfig() }); var user = optlyInstance.createUserContext(userId, { gender: 'female' }); var expectedVariables1 = optlyInstance.getAllFeatureVariables(flagKey1, userId); var expectedVariables2 = optlyInstance.getAllFeatureVariables(flagKey2, userId); @@ -6001,35 +5961,13 @@ describe('lib/optimizely', function() { }); describe('with ENABLED_FLAGS_ONLY flag in default decide options', function() { - beforeEach(function() { - const mockConfigManager = getMockProjectConfigManager({ - initConfig: createProjectConfig(testData.getTestDecideProjectConfig()), - }); - - optlyInstance = new Optimizely({ - clientEngine: 'node-sdk', - projectConfigManager: mockConfigManager, - errorHandler: errorHandler, - eventProcessor, - jsonSchemaValidator: jsonSchemaValidator, - logger: createdLogger, - isValidInstance: true, - eventBatchSize: 1, - defaultDecideOptions: [OptimizelyDecideOption.ENABLED_FLAGS_ONLY], - notificationCenter, - }); - - sinon.stub(optlyInstance.notificationCenter, 'sendNotifications'); - }); - - afterEach(function() { - eventDispatcher.dispatchEvent.reset(); - optlyInstance.notificationCenter.sendNotifications.restore(); - }); - it('should return decision results map with only enabled flags and dispatch events', function() { var flagKey1 = 'feature_1'; var flagKey2 = 'feature_2'; + const { optlyInstance, eventDispatcher } = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + defaultDecideOptions: [OptimizelyDecideOption.ENABLED_FLAGS_ONLY] + }); var user = optlyInstance.createUserContext(userId, { gender: 'female' }); var expectedVariables1 = optlyInstance.getAllFeatureVariables(flagKey1, userId); var expectedVariables2 = optlyInstance.getAllFeatureVariables(flagKey2, userId); @@ -6063,6 +6001,12 @@ describe('lib/optimizely', function() { it('should return decision results map with only enabled flags and excluded variables when EXCLUDE_VARIABLES_FLAG is passed in', function() { var flagKey1 = 'feature_1'; var flagKey2 = 'feature_2'; + + const { optlyInstance, eventDispatcher } = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + defaultDecideOptions: [OptimizelyDecideOption.ENABLED_FLAGS_ONLY] + }); + var user = optlyInstance.createUserContext(userId, { gender: 'female' }); var decisionsMap = optlyInstance.decideAll(user, [OptimizelyDecideOption.EXCLUDE_VARIABLES]); var decision1 = decisionsMap[flagKey1]; @@ -6085,6 +6029,7 @@ describe('lib/optimizely', function() { userContext: user, reasons: [], }; + console.log(decisionsMap); assert.deepEqual(Object.values(decisionsMap).length, 2); assert.deepEqual(decision1, expectedDecision1); assert.deepEqual(decision2, expectedDecision2); @@ -6103,11 +6048,9 @@ describe('lib/optimizely', function() { }); var notificationCenter = 
createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); var eventDispatcher = getMockEventDispatcher(); - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); beforeEach(function() { const mockConfigManager = getMockProjectConfigManager({ initConfig: createProjectConfig(testData.getTestProjectConfig()), @@ -6177,11 +6120,9 @@ describe('lib/optimizely', function() { dispatchEvent: () => Promise.resolve({ statusCode: 200 }), }; - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); beforeEach(function() { const mockConfigManager = getMockProjectConfigManager({ @@ -9023,11 +8964,9 @@ describe('lib/optimizely', function() { var eventDispatcher = { dispatchEvent: () => Promise.resolve({ statusCode: 200 }), }; - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); beforeEach(function() { const mockConfigManager = getMockProjectConfigManager({ initConfig: createProjectConfig(testData.getTypedAudiencesConfig()), @@ -9171,11 +9110,9 @@ describe('lib/optimizely', function() { var eventDispatcher = { dispatchEvent: () => Promise.resolve({ statusCode: 200 }), }; - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); beforeEach(function() { const mockConfigManager = getMockProjectConfigManager({ initConfig: createProjectConfig(testData.getTypedAudiencesConfig()), @@ -9377,12 +9314,9 @@ describe('lib/optimizely', function() { sinon.stub(fns, 'uuid').returns('a68cf1ad-0393-4e18-af87-efe8f01a7c9c'); notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); eventDispatcher = getMockEventDispatcher(); - eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 3, - notificationCenter: notificationCenter, - flushInterval: 100, - }); + eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); }); afterEach(function() { @@ -9393,293 +9327,294 @@ describe('lib/optimizely', function() { fns.uuid.restore(); }); - describe('when eventBatchSize = 3 and eventFlushInterval = 100', function() { - var optlyInstance; - - beforeEach(function() { - const mockConfigManager = getMockProjectConfigManager({ - initConfig: createProjectConfig(testData.getTestProjectConfig()), - }); - - optlyInstance = new Optimizely({ - clientEngine: 'node-sdk', - projectConfigManager: mockConfigManager, - errorHandler: errorHandler, - eventProcessor, - jsonSchemaValidator: jsonSchemaValidator, - logger: createdLogger, - isValidInstance: true, - eventBatchSize: 3, - eventFlushInterval: 100, - eventProcessor, - notificationCenter, - }); - }); - - afterEach(function() { - optlyInstance.close(); - }); - - it('should send batched events when the maxQueueSize is reached', function() { - fakeDecisionResponse = { - result: '111129', - reasons: [], - }; - bucketStub.returns(fakeDecisionResponse); - var activate = optlyInstance.activate('testExperiment', 'testUser'); - assert.strictEqual(activate, 'variation'); - 
- optlyInstance.track('testEvent', 'testUser'); - optlyInstance.track('testEvent', 'testUser'); - - sinon.assert.calledOnce(eventDispatcher.dispatchEvent); - - var expectedObj = { - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: { - account_id: '12001', - project_id: '111001', - visitors: [ - { - snapshots: [ - { - decisions: [ - { - campaign_id: '4', - experiment_id: '111127', - variation_id: '111129', - metadata: { - flag_key: '', - rule_key: 'testExperiment', - rule_type: 'experiment', - variation_key: 'variation', - enabled: true, - }, - }, - ], - events: [ - { - entity_id: '4', - timestamp: Math.round(new Date().getTime()), - key: 'campaign_activated', - uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', - }, - ], - }, - ], - visitor_id: 'testUser', - attributes: [], - }, - { - attributes: [], - snapshots: [ - { - events: [ - { - entity_id: '111095', - key: 'testEvent', - timestamp: new Date().getTime(), - uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', - }, - ], - }, - ], - visitor_id: 'testUser', - }, - { - attributes: [], - snapshots: [ - { - events: [ - { - entity_id: '111095', - key: 'testEvent', - timestamp: new Date().getTime(), - uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', - }, - ], - }, - ], - visitor_id: 'testUser', - }, - ], - revision: '42', - client_name: 'node-sdk', - client_version: enums.CLIENT_VERSION, - anonymize_ip: false, - enrich_decisions: true, - }, - }; - var eventDispatcherCall = eventDispatcher.dispatchEvent.args[0]; - assert.deepEqual(eventDispatcherCall[0], expectedObj); - }); - - it('should flush the queue when the flushInterval occurs', function() { - var timestamp = new Date().getTime(); - fakeDecisionResponse = { - result: '111129', - reasons: [], - }; - bucketStub.returns(fakeDecisionResponse); - var activate = optlyInstance.activate('testExperiment', 'testUser'); - assert.strictEqual(activate, 'variation'); - - optlyInstance.track('testEvent', 'testUser'); - - sinon.assert.notCalled(eventDispatcher.dispatchEvent); - - clock.tick(100); - - sinon.assert.calledOnce(eventDispatcher.dispatchEvent); - - var expectedObj = { - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: { - account_id: '12001', - project_id: '111001', - visitors: [ - { - snapshots: [ - { - decisions: [ - { - campaign_id: '4', - experiment_id: '111127', - variation_id: '111129', - metadata: { - flag_key: '', - rule_key: 'testExperiment', - rule_type: 'experiment', - variation_key: 'variation', - enabled: true, - }, - }, - ], - events: [ - { - entity_id: '4', - timestamp: timestamp, - key: 'campaign_activated', - uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', - }, - ], - }, - ], - visitor_id: 'testUser', - attributes: [], - }, - { - attributes: [], - snapshots: [ - { - events: [ - { - entity_id: '111095', - key: 'testEvent', - timestamp: timestamp, - uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', - }, - ], - }, - ], - visitor_id: 'testUser', - }, - ], - revision: '42', - client_name: 'node-sdk', - client_version: enums.CLIENT_VERSION, - anonymize_ip: false, - enrich_decisions: true, - }, - }; - var eventDispatcherCall = eventDispatcher.dispatchEvent.args[0]; - assert.deepEqual(eventDispatcherCall[0], expectedObj); - }); - - it('should flush the queue when optimizely.close() is called', function() { - fakeDecisionResponse = { - result: '111129', - reasons: [], - }; - bucketStub.returns(fakeDecisionResponse); - var activate = optlyInstance.activate('testExperiment', 'testUser'); - assert.strictEqual(activate, 'variation'); - - 
optlyInstance.track('testEvent', 'testUser'); + // TODO: these tests does not belong here, these belong in EventProcessor tests + // describe('when eventBatchSize = 3 and eventFlushInterval = 100', function() { + // var optlyInstance; + + // beforeEach(function() { + // const mockConfigManager = getMockProjectConfigManager({ + // initConfig: createProjectConfig(testData.getTestProjectConfig()), + // }); + + // optlyInstance = new Optimizely({ + // clientEngine: 'node-sdk', + // projectConfigManager: mockConfigManager, + // errorHandler: errorHandler, + // eventProcessor, + // jsonSchemaValidator: jsonSchemaValidator, + // logger: createdLogger, + // isValidInstance: true, + // eventBatchSize: 3, + // eventFlushInterval: 100, + // eventProcessor, + // notificationCenter, + // }); + // }); - sinon.assert.notCalled(eventDispatcher.dispatchEvent); + // afterEach(function() { + // optlyInstance.close(); + // }); - optlyInstance.close(); + // it('should send batched events when the maxQueueSize is reached', function() { + // fakeDecisionResponse = { + // result: '111129', + // reasons: [], + // }; + // bucketStub.returns(fakeDecisionResponse); + // var activate = optlyInstance.activate('testExperiment', 'testUser'); + // assert.strictEqual(activate, 'variation'); + + // optlyInstance.track('testEvent', 'testUser'); + // optlyInstance.track('testEvent', 'testUser'); + + // sinon.assert.calledOnce(eventDispatcher.dispatchEvent); + + // var expectedObj = { + // url: 'https://logx.optimizely.com/v1/events', + // httpVerb: 'POST', + // params: { + // account_id: '12001', + // project_id: '111001', + // visitors: [ + // { + // snapshots: [ + // { + // decisions: [ + // { + // campaign_id: '4', + // experiment_id: '111127', + // variation_id: '111129', + // metadata: { + // flag_key: '', + // rule_key: 'testExperiment', + // rule_type: 'experiment', + // variation_key: 'variation', + // enabled: true, + // }, + // }, + // ], + // events: [ + // { + // entity_id: '4', + // timestamp: Math.round(new Date().getTime()), + // key: 'campaign_activated', + // uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', + // }, + // ], + // }, + // ], + // visitor_id: 'testUser', + // attributes: [], + // }, + // { + // attributes: [], + // snapshots: [ + // { + // events: [ + // { + // entity_id: '111095', + // key: 'testEvent', + // timestamp: new Date().getTime(), + // uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', + // }, + // ], + // }, + // ], + // visitor_id: 'testUser', + // }, + // { + // attributes: [], + // snapshots: [ + // { + // events: [ + // { + // entity_id: '111095', + // key: 'testEvent', + // timestamp: new Date().getTime(), + // uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', + // }, + // ], + // }, + // ], + // visitor_id: 'testUser', + // }, + // ], + // revision: '42', + // client_name: 'node-sdk', + // client_version: enums.CLIENT_VERSION, + // anonymize_ip: false, + // enrich_decisions: true, + // }, + // }; + // var eventDispatcherCall = eventDispatcher.dispatchEvent.args[0]; + // assert.deepEqual(eventDispatcherCall[0], expectedObj); + // }); - sinon.assert.calledOnce(eventDispatcher.dispatchEvent); + // it('should flush the queue when the flushInterval occurs', function() { + // var timestamp = new Date().getTime(); + // fakeDecisionResponse = { + // result: '111129', + // reasons: [], + // }; + // bucketStub.returns(fakeDecisionResponse); + // var activate = optlyInstance.activate('testExperiment', 'testUser'); + // assert.strictEqual(activate, 'variation'); + + // optlyInstance.track('testEvent', 
'testUser'); + + // sinon.assert.notCalled(eventDispatcher.dispatchEvent); + + // clock.tick(100); + + // sinon.assert.calledOnce(eventDispatcher.dispatchEvent); + + // var expectedObj = { + // url: 'https://logx.optimizely.com/v1/events', + // httpVerb: 'POST', + // params: { + // account_id: '12001', + // project_id: '111001', + // visitors: [ + // { + // snapshots: [ + // { + // decisions: [ + // { + // campaign_id: '4', + // experiment_id: '111127', + // variation_id: '111129', + // metadata: { + // flag_key: '', + // rule_key: 'testExperiment', + // rule_type: 'experiment', + // variation_key: 'variation', + // enabled: true, + // }, + // }, + // ], + // events: [ + // { + // entity_id: '4', + // timestamp: timestamp, + // key: 'campaign_activated', + // uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', + // }, + // ], + // }, + // ], + // visitor_id: 'testUser', + // attributes: [], + // }, + // { + // attributes: [], + // snapshots: [ + // { + // events: [ + // { + // entity_id: '111095', + // key: 'testEvent', + // timestamp: timestamp, + // uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', + // }, + // ], + // }, + // ], + // visitor_id: 'testUser', + // }, + // ], + // revision: '42', + // client_name: 'node-sdk', + // client_version: enums.CLIENT_VERSION, + // anonymize_ip: false, + // enrich_decisions: true, + // }, + // }; + // var eventDispatcherCall = eventDispatcher.dispatchEvent.args[0]; + // assert.deepEqual(eventDispatcherCall[0], expectedObj); + // }); - var expectedObj = { - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: { - account_id: '12001', - project_id: '111001', - visitors: [ - { - snapshots: [ - { - decisions: [ - { - campaign_id: '4', - experiment_id: '111127', - variation_id: '111129', - metadata: { - flag_key: '', - rule_key: 'testExperiment', - rule_type: 'experiment', - variation_key: 'variation', - enabled: true, - }, - }, - ], - events: [ - { - entity_id: '4', - timestamp: Math.round(new Date().getTime()), - key: 'campaign_activated', - uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', - }, - ], - }, - ], - visitor_id: 'testUser', - attributes: [], - }, - { - attributes: [], - snapshots: [ - { - events: [ - { - entity_id: '111095', - key: 'testEvent', - timestamp: new Date().getTime(), - uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', - }, - ], - }, - ], - visitor_id: 'testUser', - }, - ], - revision: '42', - client_name: 'node-sdk', - client_version: enums.CLIENT_VERSION, - anonymize_ip: false, - enrich_decisions: true, - }, - }; - var eventDispatcherCall = eventDispatcher.dispatchEvent.args[0]; - assert.deepEqual(eventDispatcherCall[0], expectedObj); - }); - }); + // it('should flush the queue when optimizely.close() is called', function() { + // fakeDecisionResponse = { + // result: '111129', + // reasons: [], + // }; + // bucketStub.returns(fakeDecisionResponse); + // var activate = optlyInstance.activate('testExperiment', 'testUser'); + // assert.strictEqual(activate, 'variation'); + + // optlyInstance.track('testEvent', 'testUser'); + + // sinon.assert.notCalled(eventDispatcher.dispatchEvent); + + // optlyInstance.close(); + + // sinon.assert.calledOnce(eventDispatcher.dispatchEvent); + + // var expectedObj = { + // url: 'https://logx.optimizely.com/v1/events', + // httpVerb: 'POST', + // params: { + // account_id: '12001', + // project_id: '111001', + // visitors: [ + // { + // snapshots: [ + // { + // decisions: [ + // { + // campaign_id: '4', + // experiment_id: '111127', + // variation_id: '111129', + // metadata: { + // 
flag_key: '', + // rule_key: 'testExperiment', + // rule_type: 'experiment', + // variation_key: 'variation', + // enabled: true, + // }, + // }, + // ], + // events: [ + // { + // entity_id: '4', + // timestamp: Math.round(new Date().getTime()), + // key: 'campaign_activated', + // uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', + // }, + // ], + // }, + // ], + // visitor_id: 'testUser', + // attributes: [], + // }, + // { + // attributes: [], + // snapshots: [ + // { + // events: [ + // { + // entity_id: '111095', + // key: 'testEvent', + // timestamp: new Date().getTime(), + // uuid: 'a68cf1ad-0393-4e18-af87-efe8f01a7c9c', + // }, + // ], + // }, + // ], + // visitor_id: 'testUser', + // }, + // ], + // revision: '42', + // client_name: 'node-sdk', + // client_version: enums.CLIENT_VERSION, + // anonymize_ip: false, + // enrich_decisions: true, + // }, + // }; + // var eventDispatcherCall = eventDispatcher.dispatchEvent.args[0]; + // assert.deepEqual(eventDispatcherCall[0], expectedObj); + // }); + // }); describe('close method', function() { var eventProcessorStopPromise; @@ -9696,10 +9631,10 @@ describe('lib/optimizely', function() { }; }); - describe('when the event processor stop method returns a promise that fulfills', function() { + describe('when the event processor onTerminated method returns a promise that fulfills', function() { beforeEach(function() { eventProcessorStopPromise = Promise.resolve(); - mockEventProcessor.stop.returns(eventProcessorStopPromise); + mockEventProcessor.onTerminated.returns(eventProcessorStopPromise); const mockConfigManager = getMockProjectConfigManager({ initConfig: createProjectConfig(testData.getTestProjectConfig()), }); diff --git a/lib/optimizely_user_context/index.tests.js b/lib/optimizely_user_context/index.tests.js index 8ddfc631c..7e1d4ed1d 100644 --- a/lib/optimizely_user_context/index.tests.js +++ b/lib/optimizely_user_context/index.tests.js @@ -33,6 +33,7 @@ import { OptimizelyDecideOption } from '../shared_types'; import { getMockProjectConfigManager } from '../tests/mock/mock_project_config_manager'; import { createProjectConfig } from '../project_config/project_config'; import { getForwardingEventProcessor } from '../event_processor/forwarding_event_processor'; +import * as logger from '../plugins/logger'; const getMockEventDispatcher = () => { const dispatcher = { @@ -41,6 +42,33 @@ const getMockEventDispatcher = () => { return dispatcher; } +const getOptlyInstance = ({ datafileObj, defaultDecideOptions }) => { + const mockConfigManager = getMockProjectConfigManager({ + initConfig: createProjectConfig(datafileObj), + }); + const eventDispatcher = getMockEventDispatcher(); + const eventProcessor = getForwardingEventProcessor(eventDispatcher); + + const notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); + var createdLogger = logger.createLogger({ logLevel: LOG_LEVEL.INFO }); + + const optlyInstance = new Optimizely({ + clientEngine: 'node-sdk', + projectConfigManager: mockConfigManager, + errorHandler: errorHandler, + eventProcessor, + logger: createdLogger, + isValidInstance: true, + eventBatchSize: 1, + defaultDecideOptions: defaultDecideOptions || [], + notificationCenter, + }); + + sinon.stub(notificationCenter, 'sendNotifications'); + + return { optlyInstance, eventProcessor, eventDispatcher, notificationCenter, createdLogger } +} + describe('lib/optimizely_user_context', function() { describe('APIs', function() { var fakeOptimizely; @@ -306,16 +334,26 @@ 
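// A minimal usage sketch of the getOptlyInstance() helper introduced above. It assumes the
// test file's existing sinon/testData fixtures; 'feature_1' is the flag key the surrounding
// tests already exercise, and the dispatch assertion assumes the flag delivers a decision.
const { optlyInstance, eventDispatcher, notificationCenter } = getOptlyInstance({
  datafileObj: testData.getTestDecideProjectConfig(),
});
const user = optlyInstance.createUserContext('tester-1');
user.decide('feature_1');
sinon.assert.calledOnce(eventDispatcher.dispatchEvent);
sinon.assert.called(notificationCenter.sendNotifications);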
describe('lib/optimizely_user_context', function() { logToConsole: false, }); var stubLogHandler; + let optlyInstance, notificationCenter, createdLogger, eventDispatcher; + beforeEach(function() { stubLogHandler = { log: sinon.stub(), }; logging.setLogLevel('notset'); logging.setLogHandler(stubLogHandler); + + ({ optlyInstance, notificationCenter, createdLogger, eventDispatcher} = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + })); }); + afterEach(function() { logging.resetLogger(); + eventDispatcher.dispatchEvent.reset(); + notificationCenter.sendNotifications.restore(); }); + it('should return true when client is not ready', function() { fakeOptimizely = { isValidInstance: sinon.stub().returns(false), @@ -458,6 +496,10 @@ describe('lib/optimizely_user_context', function() { }); it('should return forced decision object when forced decision is set for a flag and dispatch an event', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + }); + var user = optlyInstance.createUserContext(userId); var featureKey = 'feature_1'; var variationKey = '3324490562'; @@ -496,8 +538,8 @@ describe('lib/optimizely_user_context', function() { assert.equal(metadata.variation_key, variationKey); assert.equal(metadata.enabled, true); - sinon.assert.callCount(optlyInstance.notificationCenter.sendNotifications, 3); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(2).args; + sinon.assert.callCount(notificationCenter.sendNotifications, 3); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(2).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -533,6 +575,9 @@ describe('lib/optimizely_user_context', function() { }); it('should return forced decision object when forced decision is set for an experiment rule and dispatch an event', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + }); var attributes = { country: 'US' }; var user = optlyInstance.createUserContext(userId, attributes); var featureKey = 'feature_1'; @@ -577,8 +622,8 @@ describe('lib/optimizely_user_context', function() { assert.equal(metadata.variation_key, 'b'); assert.equal(metadata.enabled, false); - sinon.assert.callCount(optlyInstance.notificationCenter.sendNotifications, 3); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(2).args; + sinon.assert.callCount(notificationCenter.sendNotifications, 3); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(2).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -615,6 +660,9 @@ describe('lib/optimizely_user_context', function() { }); it('should return forced decision object when forced decision is set for a delivery rule and dispatch an event', function() { + const { optlyInstance, notificationCenter, eventDispatcher } = getOptlyInstance({ + datafileObj: testData.getTestDecideProjectConfig(), + }); var user = optlyInstance.createUserContext(userId); var featureKey = 'feature_1'; var variationKey = '3324490633'; @@ -631,17 +679,17 @@ describe('lib/optimizely_user_context', function() { assert.deepEqual(Object.keys(decision.userContext.forcedDecisionsMap[featureKey]).length, 1); assert.deepEqual(decision.userContext.forcedDecisionsMap[featureKey][ruleKey], { variationKey }); - 
sinon.assert.called(stubLogHandler.log); - var logMessage = optlyInstance.decisionService.logger.log.args[4]; - assert.strictEqual(logMessage[0], 2); - assert.strictEqual( - logMessage[1], - 'Variation (%s) is mapped to flag (%s), rule (%s) and user (%s) in the forced decision map.' - ); - assert.strictEqual(logMessage[2], variationKey); - assert.strictEqual(logMessage[3], featureKey); - assert.strictEqual(logMessage[4], ruleKey); - assert.strictEqual(logMessage[5], userId); + // sinon.assert.called(stubLogHandler.log); + // var logMessage = optlyInstance.decisionService.logger.log.args[4]; + // assert.strictEqual(logMessage[0], 2); + // assert.strictEqual( + // logMessage[1], + // 'Variation (%s) is mapped to flag (%s), rule (%s) and user (%s) in the forced decision map.' + // ); + // assert.strictEqual(logMessage[2], variationKey); + // assert.strictEqual(logMessage[3], featureKey); + // assert.strictEqual(logMessage[4], ruleKey); + // assert.strictEqual(logMessage[5], userId); sinon.assert.calledOnce(eventDispatcher.dispatchEvent); var callArgs = eventDispatcher.dispatchEvent.getCalls()[0].args; @@ -658,8 +706,8 @@ describe('lib/optimizely_user_context', function() { assert.equal(metadata.variation_key, '3324490633'); assert.equal(metadata.enabled, true); - sinon.assert.callCount(optlyInstance.notificationCenter.sendNotifications, 3); - var notificationCallArgs = optlyInstance.notificationCenter.sendNotifications.getCall(2).args; + sinon.assert.callCount(notificationCenter.sendNotifications, 3); + var notificationCallArgs = notificationCenter.sendNotifications.getCall(2).args; var expectedNotificationCallArgs = [ NOTIFICATION_TYPES.DECISION, { @@ -692,11 +740,9 @@ describe('lib/optimizely_user_context', function() { var optlyInstance; var notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); var eventDispatcher = getMockEventDispatcher(); - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); beforeEach(function() { optlyInstance = new Optimizely({ clientEngine: 'node-sdk', @@ -801,11 +847,9 @@ describe('lib/optimizely_user_context', function() { }); var notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); var eventDispatcher = getMockEventDispatcher(); - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); beforeEach(function() { optlyInstance = new Optimizely({ clientEngine: 'node-sdk', @@ -851,11 +895,9 @@ describe('lib/optimizely_user_context', function() { }); var notificationCenter = createNotificationCenter({ logger: createdLogger, errorHandler: errorHandler }); var eventDispatcher = getMockEventDispatcher(); - var eventProcessor = createEventProcessor({ - dispatcher: eventDispatcher, - batchSize: 1, - notificationCenter: notificationCenter, - }); + var eventProcessor = getForwardingEventProcessor( + eventDispatcher, + ); var optlyInstance = new Optimizely({ clientEngine: 'node-sdk', projectConfigManager: getMockProjectConfigManager({ From 51dd14325e5c5b6482df51d20f7dfe6d96f7f8f6 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Sat, 9 Nov 2024 05:14:35 +0600 Subject: [PATCH 17/45] up --- lib/index.react_native.ts | 6 +++--- 
lib/utils/executor/concurrency_limited_executor.ts | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/index.react_native.ts b/lib/index.react_native.ts index 205158298..b2654823d 100644 --- a/lib/index.react_native.ts +++ b/lib/index.react_native.ts @@ -28,7 +28,7 @@ import { OptimizelyDecideOption, Client, Config } from './shared_types'; import { BrowserOdpManager } from './plugins/odp_manager/index.browser'; import * as commonExports from './common_exports'; import { createPollingProjectConfigManager } from './project_config/config_manager_factory.react_native'; -import { getForwardingEventProcessor } from './event_processor/event_processor_factory.react_native'; +import { createForwardingEventProcessor } from './event_processor/event_processor_factory.react_native'; import 'fast-text-encoding'; import 'react-native-get-random-values'; @@ -147,7 +147,7 @@ export { createInstance, OptimizelyDecideOption, createPollingProjectConfigManager, - getForwardingEventProcessor as createForwardingEventProcessor, + createForwardingEventProcessor, }; export * from './common_exports'; @@ -163,7 +163,7 @@ export default { createInstance, OptimizelyDecideOption, createPollingProjectConfigManager, - createForwardingEventProcessor: getForwardingEventProcessor, + createForwardingEventProcessor, }; export * from './export_types'; diff --git a/lib/utils/executor/concurrency_limited_executor.ts b/lib/utils/executor/concurrency_limited_executor.ts index ae480b831..088ec12cb 100644 --- a/lib/utils/executor/concurrency_limited_executor.ts +++ b/lib/utils/executor/concurrency_limited_executor.ts @@ -69,7 +69,7 @@ class ConcurrencyLimitedExecutor extends BaseService implements Executor { const { cancelRetry: cancel, result } = taskDefinition.retryConfig ? 
runWithRetry(taskDefinition.task, taskDefinition.retryConfig.backoff, taskDefinition.retryConfig.maxRetries) : - { result: taskDefinition.task() }; + { result: taskDefinition.task(), cancelRetry: undefined }; this.runningTask.set(id, { result, cancel }); this.nRunning++; From b1cf28fe6631056a298178457aa47080d7246689 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Sat, 9 Nov 2024 05:35:05 +0600 Subject: [PATCH 18/45] up --- lib/event_processor/dispatch_controller.ts | 59 ---------- .../queueing_event_processor.spec.ts | 20 ++-- .../queueing_event_processor.ts | 4 +- lib/tests/mock/create_event.ts | 5 +- lib/tests/mock/mock_cache.ts | 4 +- lib/utils/cache/cache.ts | 6 +- lib/utils/cache/store.ts | 19 +++- .../executor/concurrency_limited_executor.ts | 104 ------------------ lib/utils/executor/executor.ts | 26 ----- lib/utils/executor/task_runner.ts | 6 - lib/utils/id_generator/index.ts | 2 +- 11 files changed, 36 insertions(+), 219 deletions(-) delete mode 100644 lib/event_processor/dispatch_controller.ts delete mode 100644 lib/utils/executor/concurrency_limited_executor.ts delete mode 100644 lib/utils/executor/executor.ts delete mode 100644 lib/utils/executor/task_runner.ts diff --git a/lib/event_processor/dispatch_controller.ts b/lib/event_processor/dispatch_controller.ts deleted file mode 100644 index 8f34e83c5..000000000 --- a/lib/event_processor/dispatch_controller.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { BaseService, Service } from "../service"; -import { Executor } from "../utils/executor/executor"; -import { EventDispatcher, EventDispatcherResponse, EventV1Request } from "./eventDispatcher"; -import { EventEmitter } from '../utils/event_emitter/event_emitter'; -import { Consumer, Fn } from "../utils/type"; -import { RunResult, runWithRetry } from "../utils/executor/backoff_retry_runner"; -import { ExponentialBackoff } from "../utils/repeater/repeater"; - -export interface DispatchController extends Service { - handleBatch(request: EventV1Request): Promise - onDispatch(handler: Consumer): Fn; -} - -export type DispatchControllerConfig = { - eventDispatcher: EventDispatcher; -} - -class ImmediateDispatchDispatchController extends BaseService implements DispatchController { - private eventDispatcher: EventDispatcher; - private eventEmitter: EventEmitter<{ dispatch: EventV1Request }>; - private runningTask: Map> = new Map(); - private idGenerator: IdGenerator = new IdGenerator(); - - constructor(config: DispatchControllerConfig) { - super(); - this.eventDispatcher = config.eventDispatcher; - this.eventEmitter = new EventEmitter(); - } - - onDispatch(handler: Consumer): Fn { - return this.eventEmitter.on('dispatch', handler); - } - - start(): void { - throw new Error("Method not implemented."); - } - - stop(): void { - throw new Error("Method not implemented."); - } - - async handleBatch(request: EventV1Request): Promise { - if (!this.isRunning()) { - return; - } - - const id = this.idGenerator.getId(); - - const backoff = new ExponentialBackoff(1000, 30000, 2); - const runResult = runWithRetry(() => this.eventDispatcher.dispatchEvent(request), backoff); - - this.runningTask.set(id, runResult); - runResult.result.finally(() => { - this.runningTask.delete(id); - }); - - return runResult.result; - } -} diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/queueing_event_processor.spec.ts index 622e4707e..019572f6f 100644 --- a/lib/event_processor/queueing_event_processor.spec.ts +++ b/lib/event_processor/queueing_event_processor.spec.ts @@ -38,7 +38,7 @@ 
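// A small sketch of the retry helper used by the deleted dispatch controller above and by the
// batch processor. Parameter meanings are inferred from those call sites, not documented API;
// eventDispatcher and request are assumed to be in scope.
import { runWithRetry } from '../utils/executor/backoff_retry_runner';
import { ExponentialBackoff } from '../utils/repeater/repeater';

const backoff = new ExponentialBackoff(1000, 30000, 2);
const { result, cancelRetry } = runWithRetry(
  () => eventDispatcher.dispatchEvent(request), // retried with exponential backoff on rejection
  backoff,
  3, // optional maxRetries (the deleted controller above omitted it)
);
result.catch(() => { /* rejected once retries are exhausted */ });
// cancelRetry() stops further attempts, mirroring the cancel handle kept by the executor.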
const getMockDispatcher = () => { }; }; -const exhaustMicrotasks = async (loop: number = 100) => { +const exhaustMicrotasks = async (loop = 100) => { for(let i = 0; i < loop; i++) { await Promise.resolve(); } @@ -89,7 +89,7 @@ describe('QueueingEventProcessor', async () => { mockDispatch.mockResolvedValue({}); const cache = getMockSyncCache(); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 5; i++) { const id = `id-${i}`; @@ -264,7 +264,7 @@ describe('QueueingEventProcessor', async () => { processor.start(); await processor.onRunning(); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 10; i++) { const event = createImpressionEvent(`id-${i}`); events.push(event); @@ -302,7 +302,7 @@ describe('QueueingEventProcessor', async () => { processor.start(); await processor.onRunning(); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 10; i++) { const event = createImpressionEvent(`id-${i}`); events.push(event); @@ -350,7 +350,7 @@ describe('QueueingEventProcessor', async () => { processor.start(); await processor.onRunning(); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 10; i++) { const event = createImpressionEvent(`id-${i}`); events.push(event); @@ -536,7 +536,7 @@ describe('QueueingEventProcessor', async () => { processor.start(); await processor.onRunning(); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 10; i++) { const event = createImpressionEvent(`id-${i}`); events.push(event); @@ -589,7 +589,7 @@ describe('QueueingEventProcessor', async () => { processor.start(); await processor.onRunning(); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 10; i++) { const event = createImpressionEvent(`id-${i}`); events.push(event); @@ -646,7 +646,7 @@ describe('QueueingEventProcessor', async () => { ])); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 5; i++) { const id = `id-${i}`; @@ -774,7 +774,7 @@ describe('QueueingEventProcessor', async () => { ])); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 5; i++) { const id = `id-${i}`; @@ -856,7 +856,7 @@ describe('QueueingEventProcessor', async () => { processor.start(); await processor.onRunning(); - let events: ProcessableEvent[] = []; + const events: ProcessableEvent[] = []; for(let i = 0; i < 10; i++) { const event = createImpressionEvent(`id-${i}`); events.push(event); diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index 4a8ec68a1..d805718d4 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -89,7 +89,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso const failedEventsArray = Array.from(failedEvents.values()).sort(); - let batches: EventBatch[] = []; + const batches: EventBatch[] = []; let currentBatch: EventWithId[] = []; failedEventsArray.forEach((event) => { @@ -123,7 +123,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso } const events: ProcessableEvent[] = []; - let ids: string[] = []; + const ids: string[] = []; this.eventQueue.forEach((event) => { events.push(event.event); diff --git a/lib/tests/mock/create_event.ts 
b/lib/tests/mock/create_event.ts index f283ffc50..5d84bf534 100644 --- a/lib/tests/mock/create_event.ts +++ b/lib/tests/mock/create_event.ts @@ -1,4 +1,7 @@ -export function createImpressionEvent(id: string = 'uuid') { +import { EventV1 } from "../../event_processor"; +import { Event } from "../../shared_types"; + +export function createImpressionEvent(id = 'uuid'): any { return { type: 'impression' as const, timestamp: 69, diff --git a/lib/tests/mock/mock_cache.ts b/lib/tests/mock/mock_cache.ts index ef7fb516b..f8d637f2a 100644 --- a/lib/tests/mock/mock_cache.ts +++ b/lib/tests/mock/mock_cache.ts @@ -1,4 +1,6 @@ -export const getMockSyncCache = () => { +import { SyncCache } from "../../utils/cache/cache"; + +export const getMockSyncCache = (): SyncCache => { const cache = { operation: 'sync' as const, data: new Map(), diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index 39b79c991..697cdf6de 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -6,8 +6,7 @@ export interface SyncCache { clear(): void; getKeys(): string[]; getAll(): Map; - size(): number; -}; +} export interface AsyncCache { operation: 'async'; @@ -17,7 +16,6 @@ export interface AsyncCache { clear(): Promise; getKeys(): Promise; getAll():Promise>; - size(): number; -}; +} export type Cache = SyncCache | AsyncCache; diff --git a/lib/utils/cache/store.ts b/lib/utils/cache/store.ts index e29b89217..eda40f0bc 100644 --- a/lib/utils/cache/store.ts +++ b/lib/utils/cache/store.ts @@ -22,7 +22,7 @@ export class SyncPrefixStore implements SyncCache { return this.cache.set(this.addPrefix(key), value); } - get(key: string): V { + get(key: string): V | undefined{ return this.cache.get(this.addPrefix(key)); } @@ -44,8 +44,12 @@ export class SyncPrefixStore implements SyncCache { getAll(): Map { const map = new Map(); - this.getInternalKeys().forEach((key) => - map.set(this.removePrefix(key), this.cache.get(key))); + this.getInternalKeys().forEach((key) => { + const value = this.cache.get(key); + if (value) { + map.set(this.removePrefix(key), value); + } + }); return map; } } @@ -73,7 +77,7 @@ export class AyncPrefixStore implements AsyncCache { return this.cache.set(this.addPrefix(key), value); } - get(key: string): Promise { + get(key: string): Promise { return this.cache.get(this.addPrefix(key)); } @@ -98,7 +102,12 @@ export class AyncPrefixStore implements AsyncCache { const keys = await this.getInternalKeys(); const values = await Promise.all(keys.map((key) => this.cache.get(key))); const map = new Map(); - keys.forEach((key, index) => map.set(this.removePrefix(key), values[index])); + keys.forEach((key, index) => { + const value = values[index]; + if (value) { + map.set(this.removePrefix(key), value) + } + }); return map; } } diff --git a/lib/utils/executor/concurrency_limited_executor.ts b/lib/utils/executor/concurrency_limited_executor.ts deleted file mode 100644 index 088ec12cb..000000000 --- a/lib/utils/executor/concurrency_limited_executor.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { BaseService } from "../../service"; -import { scheduleMicrotask } from "../microtask"; -import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromise"; -import { BackoffController } from "../repeater/repeater"; -import { AsyncFn, Fn } from "../type"; -import { RunResult, runWithRetry } from "./backoff_retry_runner"; -import { SubmitResponse, Executor, RetryConfig } from "./executor"; -import { TaskRunner } from "./task_runner"; - - -type TaskDefiniton = { - task: AsyncFn, - response: 
ResolvablePromise, - retryConfig?: RetryConfig, -} - -type RunningTask = { - result: Promise, - cancel?: Fn, -} - -class ConcurrencyLimitedExecutor extends BaseService implements Executor { - private maxConcurrent: number; - private queue: Queue; - private nRunning = 0; - private runningTask: Map = new Map(); - private idGenerator: IdGenerator = new IdGenerator(); - - constructor(maxConcurrent: number, maxQueueLength: number) { - super(); - this.maxConcurrent = maxConcurrent; - this.queue = new Queue(maxQueueLength); - } - - forceExecuteAll(): Promise { - - } - - start(): void { - throw new Error("Method not implemented."); - } - - stop(): void { - throw new Error("Method not implemented."); - } - - - private handleTaskCompletion(id: string): void { - this.runningTask.delete(id); - this.nRunning--; - this.runFromQueue(); - } - - private runFromQueue(ignoreMaxConcurrency = false): void { - if (!this.isRunning()) { - return; - } - - if (!ignoreMaxConcurrency && this.nRunning >= this.maxConcurrent) { - return; - } - - const taskDefinition = this.queue.dequeue(); - if (!taskDefinition) { - return; - } - - const id = this.idGenerator.getId(); - - const { cancelRetry: cancel, result } = taskDefinition.retryConfig ? - runWithRetry(taskDefinition.task, taskDefinition.retryConfig.backoff, taskDefinition.retryConfig.maxRetries) : - { result: taskDefinition.task(), cancelRetry: undefined }; - - this.runningTask.set(id, { result, cancel }); - this.nRunning++; - result.finally(() => { - this.handleTaskCompletion(id); - }); - } - - submit(task: AsyncFn, retryConfig?: RetryConfig): SubmitResponse { - if (!this.isRunning()) { - return { accepted: false, error: new Error('Executor is not running') }; - } - - if (this.queue.isFull()) { - return { accepted: false, error: new Error('Queue is full') }; - } - - const taskDefinition: TaskDefiniton = { - task, - response: resolvablePromise(), - retryConfig, - }; - - this.queue.enqueue(taskDefinition); - - scheduleMicrotask(() => { - this.runFromQueue(); - }); - - return { accepted: true, result: taskDefinition.response.promise }; - } -} diff --git a/lib/utils/executor/executor.ts b/lib/utils/executor/executor.ts deleted file mode 100644 index ff34d00c6..000000000 --- a/lib/utils/executor/executor.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Service } from "../../service"; -import { BackoffController } from "../repeater/repeater"; -import { AsyncFn } from "../type"; - -export type TaskAcceptedResponse = { - accepted: true, - result: Promise, -}; - -export type TaskRejectedResponse = { - accepted: false, - error: Error, -}; - -export type SubmitResponse = TaskAcceptedResponse | TaskRejectedResponse; - -export type RetryConfig = { - backoff?: BackoffController, - maxRetries?: number, -} - -export interface Executor extends Service { - submit(task: AsyncFn, retryConfig?: RetryConfig): SubmitResponse; - forceExecuteAll(): Promise; -} - diff --git a/lib/utils/executor/task_runner.ts b/lib/utils/executor/task_runner.ts deleted file mode 100644 index 503fda6df..000000000 --- a/lib/utils/executor/task_runner.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { AsyncFn } from "../type"; - -export interface TaskRunner { - run(task: AsyncFn): Promise; - close(): Promise; -} diff --git a/lib/utils/id_generator/index.ts b/lib/utils/id_generator/index.ts index 9715af842..80fa7ca2b 100644 --- a/lib/utils/id_generator/index.ts +++ b/lib/utils/id_generator/index.ts @@ -1,7 +1,7 @@ const idSuffixBase = 10_000; export class IdGenerator { - private idSuffixOffset: number = 0; + private 
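// (Usage sketch, inferred from the comment below and from batch_event_processor.ts, where
// these ids key stored events so failed batches can be rebuilt in order:)
//
//   const ids = new IdGenerator();
//   const first = ids.getId();
//   const second = ids.getId(); // sorts after `first`, except at the idSuffix rollover
//                               // edge case described in the comment below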
idSuffixOffset = 0; // getId returns an Id that generally increases with each call. // only exceptions are when idSuffix rotates back to 0 within the same millisecond From 1171325d19dc7fef124e8fbc88e5b30dee187eb5 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Wed, 13 Nov 2024 02:20:29 +0600 Subject: [PATCH 19/45] u --- .../event_processor_factory.browser.ts | 20 ++ .../event_processor_factory.ts | 87 +++++ .../queueing_event_processor.ts | 13 +- lib/service.ts | 27 +- .../cache/async_storage_cache.react_native.ts | 39 ++ lib/utils/cache/cache.ts | 164 ++++++++- .../cache/local_storage_cache.browser.ts | 37 ++ lib/utils/cache/store.ts | 336 ++++++++++++------ 8 files changed, 586 insertions(+), 137 deletions(-) create mode 100644 lib/event_processor/event_processor_factory.ts create mode 100644 lib/utils/cache/async_storage_cache.react_native.ts create mode 100644 lib/utils/cache/local_storage_cache.browser.ts diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index ea4d2d2b1..e790649e4 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -17,10 +17,30 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import { EventDispatcher } from './eventDispatcher'; import { EventProcessor } from './eventProcessor'; +import { QueueingEventProcessor, QueueingEventProcessorConfig } from './queueing_event_processor'; +import { getQueuingEventProcessor, QueueingEventProcessorOptions } from './event_processor_factory'; import defaultEventDispatcher from './default_dispatcher.browser'; +import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; + +const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; // 1 minute export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, ): EventProcessor => { return getForwardingEventProcessor(eventDispatcher); }; + +export const createQueueingEventProcessor = ( + options: QueueingEventProcessorOptions +): EventProcessor => { + return getQueuingEventProcessor({ + eventDispatcher: options.eventDispatcher || defaultEventDispatcher, + closingEventDispatcher: options.closingEventDispatcher || + (options.eventDispatcher ? 
options.eventDispatcher : sendBeaconEventDispatcher), + flushInterval: options.flushInterval, + batchSize: options.batchSize, + maxQueueSize: options.maxQueueSize, + retryOptions: {}, + failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, + }); +} diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts new file mode 100644 index 000000000..b29cd6761 --- /dev/null +++ b/lib/event_processor/event_processor_factory.ts @@ -0,0 +1,87 @@ +import { LogLevel } from "../common_exports"; +import { StartupLog } from "../service"; +import { ExponentialBackoff, IntervalRepeater } from "../utils/repeater/repeater"; +import { EventDispatcher } from "./eventDispatcher"; +import { EventProcessor } from "./eventProcessor"; +import { QueueingEventProcessor, RetryConfig } from "./queueing_event_processor"; + +export const DEFAULT_EVENT_BATCH_SIZE = 10; +export const DEFAULT_EVENT_FLUSH_INTERVAL = 1000; +export const DEFAULT_EVENT_MAX_QUEUE_SIZE = 10000; +export const DEFAULT_MIN_BACKOFF = 1000; +export const DEFAULT_MAX_BACKOFF = 32000; + +export type QueueingEventProcessorOptions = { + eventDispatcher?: EventDispatcher; + closingEventDispatcher?: EventDispatcher; + flushInterval?: number; + batchSize?: number; + maxQueueSize?: number; +}; + +export type QueueingEventProcessorFactoryOptions = Omit & { + eventDispatcher: EventDispatcher; + failedEventRetryInterval?: number; + retryOptions?: { + maxRetries?: number; + minBackoff?: number; + maxBackoff?: number; + }; +} + +export const getQueuingEventProcessor = ( + options: QueueingEventProcessorFactoryOptions, + EventProcessorConstructor: typeof QueueingEventProcessor = QueueingEventProcessor + ): EventProcessor => { + const { eventDispatcher, closingEventDispatcher, retryOptions } = options; + + const retryConfig: RetryConfig | undefined = retryOptions ? { + maxRetries: retryOptions.maxRetries, + backoffProvider: () => { + const minBackoff = retryOptions?.minBackoff ?? DEFAULT_MIN_BACKOFF; + const maxBackoff = retryOptions?.maxBackoff ?? DEFAULT_MAX_BACKOFF; + return new ExponentialBackoff(minBackoff, maxBackoff, 500); + } + } : undefined; + + const startupLogs: StartupLog[] = []; + + let flushInterval = DEFAULT_EVENT_FLUSH_INTERVAL; + if (options.flushInterval === undefined || options.flushInterval <= 0) { + startupLogs.push({ + level: LogLevel.WARNING, + message: 'Invalid eventFlushInterval %s, defaulting to %s', + params: [options.flushInterval, DEFAULT_EVENT_FLUSH_INTERVAL], + }); + } else { + flushInterval = options.flushInterval; + } + + let batchSize = DEFAULT_EVENT_BATCH_SIZE; + if (options.batchSize === undefined || options.batchSize <= 0) { + startupLogs.push({ + level: LogLevel.WARNING, + message: 'Invalid eventBatchSize %s, defaulting to %s', + params: [options.batchSize, DEFAULT_EVENT_BATCH_SIZE], + }); + } else { + batchSize = options.batchSize; + } + + const maxQueueSize = options.maxQueueSize ?? DEFAULT_EVENT_MAX_QUEUE_SIZE; + + const dispatchRepeater = new IntervalRepeater(flushInterval); + const failedEventRepeater = options.failedEventRetryInterval ? 
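// (Illustration of the defaulting above, assuming an eventDispatcher and a logger are in
// scope: a missing or non-positive flushInterval/batchSize falls back to
// DEFAULT_EVENT_FLUSH_INTERVAL / DEFAULT_EVENT_BATCH_SIZE and queues a WARNING startup log,
// which BaseService prints once the processor is started with a logger attached.)
//
//   const processor = getQueuingEventProcessor({
//     eventDispatcher,
//     flushInterval: -1, // invalid: defaults to 1000 and records a startup warning
//     batchSize: 0,      // invalid: defaults to 10 and records a startup warning
//   });
//   processor.start();   // startup warnings are emitted here via printStartupLogs()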
+ new IntervalRepeater(options.failedEventRetryInterval) : undefined; + + return new EventProcessorConstructor({ + eventDispatcher, + closingEventDispatcher, + dispatchRepeater, + failedEventRepeater, + retryConfig, + batchSize, + maxQueueSize, + startupLogs + }); +}; diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/queueing_event_processor.ts index d805718d4..9c2da11d7 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/queueing_event_processor.ts @@ -4,7 +4,7 @@ import { EventDispatcher, EventDispatcherResponse, EventV1Request } from "./even import { formatEvents } from "../core/event_builder/build_event_v1"; import { BackoffController, ExponentialBackoff, IntervalRepeater, Repeater } from "../utils/repeater/repeater"; import { LoggerFacade } from "../modules/logging"; -import { BaseService, ServiceState } from "../service"; +import { BaseService, ServiceState, StartupLog } from "../service"; import { Consumer, Fn, Producer } from "../utils/type"; import { RunResult, runWithRetry } from "../utils/executor/backoff_retry_runner"; import { isSuccessStatusCode } from "../utils/http_request_handler/http_util"; @@ -17,9 +17,6 @@ export type EventWithId = { }; export type RetryConfig = { - retry: false; -} | { - retry: true; maxRetries?: number; backoffProvider: Producer; } @@ -27,6 +24,7 @@ export type RetryConfig = { export type QueueingEventProcessorConfig = { dispatchRepeater: Repeater, failedEventRepeater?: Repeater, + batchSize: number, maxQueueSize: number, eventStore?: Cache, eventDispatcher: EventDispatcher, @@ -35,6 +33,7 @@ export type QueueingEventProcessorConfig = { retryMinBackoff?: number, retryMaxBackoff?: number, retryConfig?: RetryConfig; + startupLogs?: StartupLog[]; }; type EventBatch = { @@ -53,12 +52,11 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso private idGenerator: IdGenerator = new IdGenerator(); private runningTask: Map> = new Map(); private activeEventIds: Set = new Set(); - private logger?: LoggerFacade; private eventEmitter: EventEmitter<{ dispatch: EventV1Request }> = new EventEmitter(); private retryConfig?: RetryConfig; constructor(config: QueueingEventProcessorConfig) { - super(); + super(config.startupLogs); this.eventDispatcher = config.eventDispatcher; this.closingEventDispatcher = config.closingEventDispatcher; this.maxQueueSize = config.maxQueueSize; @@ -147,7 +145,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso private dispatchBatch(batch: EventBatch, closing: boolean): void { const { request, ids } = batch; - const runResult: RunResult = this.retryConfig?.retry + const runResult: RunResult = this.retryConfig ? runWithRetry( () => this.executeDispatch(request, closing), this.retryConfig.backoffProvider(), this.retryConfig.maxRetries ) : { @@ -206,6 +204,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso if (!this.isNew()) { return; } + super.start(); this.state = ServiceState.Running; this.dispatchRepeater.start(); this.failedEventRepeater?.start(); diff --git a/lib/service.ts b/lib/service.ts index 54f4924ed..459488027 100644 --- a/lib/service.ts +++ b/lib/service.ts @@ -14,6 +14,7 @@ * limitations under the License. 
*/ +import { LoggerFacade, LogLevel } from "./modules/logging"; import { resolvablePromise, ResolvablePromise } from "./utils/promise/resolvablePromise"; @@ -32,6 +33,12 @@ export enum ServiceState { Failed, } +export type StartupLog = { + level: LogLevel; + message: string; + params: any[]; +} + export interface Service { getState(): ServiceState; start(): void; @@ -50,17 +57,30 @@ export abstract class BaseService implements Service { protected state: ServiceState; protected startPromise: ResolvablePromise; protected stopPromise: ResolvablePromise; + protected logger?: LoggerFacade; + protected startupLogs: StartupLog[]; - constructor() { + constructor(startupLogs: StartupLog[] = []) { this.state = ServiceState.New; this.startPromise = resolvablePromise(); this.stopPromise = resolvablePromise(); + this.startupLogs = startupLogs; // avoid unhandled promise rejection this.startPromise.promise.catch(() => {}); this.stopPromise.promise.catch(() => {}); } + setLogger(logger: LoggerFacade): void { + this.logger = logger; + } + + protected printStartupLogs(): void { + this.startupLogs.forEach(({ level, message, params }) => { + this.logger?.log(level, message, ...params); + }); + } + onRunning(): Promise { return this.startPromise.promise; } @@ -93,6 +113,9 @@ export abstract class BaseService implements Service { ].includes(this.state); } - abstract start(): void; + start(): void { + this.printStartupLogs(); + } + abstract stop(): void; } diff --git a/lib/utils/cache/async_storage_cache.react_native.ts b/lib/utils/cache/async_storage_cache.react_native.ts new file mode 100644 index 000000000..9fa25e48e --- /dev/null +++ b/lib/utils/cache/async_storage_cache.react_native.ts @@ -0,0 +1,39 @@ +import { AsyncCache } from "./cache"; +import AsyncStorage from '@react-native-async-storage/async-storage'; + +export class AsyncStorageCache implements AsyncCache { + public readonly operation = 'async'; + + async get(key: string): Promise { + const value = await AsyncStorage.getItem(key); + return value ? JSON.parse(value) : undefined; + } + + async remove(key: string): Promise { + return AsyncStorage.removeItem(key); + } + + async set(key: string, val: V): Promise { + return AsyncStorage.setItem(key, JSON.stringify(val)); + } + + async clear(): Promise { + return AsyncStorage.clear(); + } + + async getKeys(): Promise { + return [... await AsyncStorage.getAllKeys()]; + } + + async getAll(): Promise> { + const keys = await AsyncStorage.getAllKeys(); + const items = await AsyncStorage.multiGet(keys); + const map = new Map(); + items.forEach(([key, value]) => { + if (value) { + map.set(key, JSON.parse(value)); + } + }); + return map; + } +} diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index 697cdf6de..eebce86ac 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -1,21 +1,151 @@ -export interface SyncCache { - operation: 'sync'; - set(key: string, value: V): void; - get(key: string): V | undefined; - remove(key: string): void; - clear(): void; - getKeys(): string[]; - getAll(): Map; +import { Transformer } from '../../utils/type'; + +export type CacheOp = 'sync' | 'async'; +export type OpValue = Op extends 'sync' ? 
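// (Usage sketch for the AsyncStorageCache added above: values are JSON-serialized into
// React Native AsyncStorage, so any JSON-compatible V round-trips.)
//
//   const cache = new AsyncStorageCache<{ id: string }>();
//   await cache.set('event-1', { id: 'event-1' });
//   const hit = await cache.get('event-1'); // { id: 'event-1' }, or undefined on a miss
//   await cache.remove('event-1');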
V : Promise; + +export interface CacheWithOp { + operation: Op; + set(key: string, value: V): OpValue; + get(key: string): OpValue; + remove(key: string): OpValue; + clear(): OpValue; + getKeys(): OpValue; + getAll(): OpValue>; } -export interface AsyncCache { - operation: 'async'; - set(key: string, value: V): Promise; - get(key: string): Promise; - remove(key: string): Promise; - clear(): Promise; - getKeys(): Promise; - getAll():Promise>; +export type SyncCache = CacheWithOp<'sync', V>; +export type AsyncCache = CacheWithOp<'async', V>; +export type Cache = SyncCache | AsyncCache; + +export class SyncPrefixCache implements SyncCache { + private cache: SyncCache; + private prefix: string; + private transformTo: Transformer; + private transformFrom: Transformer; + + public readonly operation = 'sync'; + + constructor( + cache: SyncCache, + prefix: string, + transformTo: Transformer, + transformFrom: Transformer + ) { + this.cache = cache; + this.prefix = prefix; + this.transformTo = transformTo; + this.transformFrom = transformFrom; + } + + private addPrefix(key: string): string { + return `${this.prefix}${key}`; + } + + private removePrefix(key: string): string { + return key.substring(this.prefix.length); + } + + set(key: string, value: V): unknown { + return this.cache.set(this.addPrefix(key), this.transformFrom(value)); + } + + get(key: string): V | undefined { + const value = this.cache.get(this.addPrefix(key)); + return value ? this.transformTo(value) : undefined; + } + + remove(key: string): unknown { + return this.cache.remove(this.addPrefix(key)); + } + + clear(): void { + this.getInternalKeys().forEach((key) => this.cache.remove(key)); + } + + private getInternalKeys(): string[] { + return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); + } + + getKeys(): string[] { + return this.getInternalKeys().map((key) => this.removePrefix(key)); + } + + getAll(): Map { + const map = new Map(); + this.getInternalKeys().forEach((key) => { + const value = this.cache.get(key); + if (value) { + map.set(this.removePrefix(key), this.transformTo(value)); + } + }); + return map; + } } -export type Cache = SyncCache | AsyncCache; +export class AyncPrefixStore implements AsyncCache { + private cache: AsyncCache; + private prefix: string; + private transformTo: Transformer; + private transformFrom: Transformer; + + public readonly operation = 'async'; + + constructor( + cache: AsyncCache, + prefix: string, + transformTo: Transformer, + transformFrom: Transformer + ) { + this.cache = cache; + this.prefix = prefix; + this.transformTo = transformTo; + this.transformFrom = transformFrom; + } + + private addPrefix(key: string): string { + return `${this.prefix}${key}`; + } + + private removePrefix(key: string): string { + return key.substring(this.prefix.length); + } + + set(key: string, value: V): Promise { + return this.cache.set(this.addPrefix(key), this.transformFrom(value)); + } + + async get(key: string): Promise { + const value = await this.cache.get(this.addPrefix(key)); + return value ? 
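// (Usage sketch for SyncPrefixCache above, assuming a SyncCache<string> implementation such
// as the LocalStorageCache added in this commit, and an EventWithId value in scope; the
// transformer pair converts between the stored string and the value type exposed to callers.)
//
//   const eventCache = new SyncPrefixCache<string, EventWithId>(
//     new LocalStorageCache<string>(),
//     'optly_event:',                   // every key is stored as 'optly_event:<key>'
//     (raw) => JSON.parse(raw),         // transformTo: string -> EventWithId
//     (event) => JSON.stringify(event), // transformFrom: EventWithId -> string
//   );
//   eventCache.set('1', { id: '1', event: impressionEvent }); // stored under 'optly_event:1'
//   eventCache.getKeys();                                     // ['1'], prefix stripped again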
this.transformTo(value) : undefined; + } + + remove(key: string): Promise { + return this.cache.remove(this.addPrefix(key)); + } + + async clear(): Promise { + const keys = await this.getInternalKeys(); + await Promise.all(keys.map((key) => this.cache.remove(key))); + } + + private async getInternalKeys(): Promise { + return this.cache.getKeys().then((keys) => keys.filter((key) => key.startsWith(this.prefix))); + } + + async getKeys(): Promise { + return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key))); + } + + async getAll(): Promise> { + const keys = await this.getInternalKeys(); + const values = await Promise.all(keys.map((key) => this.cache.get(key))); + const map = new Map(); + keys.forEach((key, index) => { + const value = values[index]; + if (value) { + map.set(this.removePrefix(key), this.transformTo(value)); + } + }); + return map; + } +} diff --git a/lib/utils/cache/local_storage_cache.browser.ts b/lib/utils/cache/local_storage_cache.browser.ts new file mode 100644 index 000000000..21f721f65 --- /dev/null +++ b/lib/utils/cache/local_storage_cache.browser.ts @@ -0,0 +1,37 @@ +import { SyncCache } from "./cache"; + +export class LocalStorageCache implements SyncCache { + public readonly operation = 'sync'; + + public set(key: string, value: V): void { + localStorage.setItem(key, JSON.stringify(value)); + } + + public get(key: string): V | undefined { + const value = localStorage.getItem(key); + return value ? JSON.parse(value) : undefined; + } + + public remove(key: string): void { + localStorage.removeItem(key); + } + + public clear(): void { + localStorage.clear(); + } + + public getKeys(): string[] { + return Object.keys(localStorage); + } + + public getAll(): Map { + const map = new Map(); + this.getKeys().forEach((key) => { + const value = this.get(key); + if (value) { + map.set(key, value); + } + }); + return map; + } +} diff --git a/lib/utils/cache/store.ts b/lib/utils/cache/store.ts index eda40f0bc..6ed9fd1ef 100644 --- a/lib/utils/cache/store.ts +++ b/lib/utils/cache/store.ts @@ -1,113 +1,227 @@ -import { SyncCache, AsyncCache } from "./cache"; - -export class SyncPrefixStore implements SyncCache { - private cache: SyncCache; - private prefix: string; - public readonly operation = 'sync'; - - constructor(cache: SyncCache, prefix: string) { - this.cache = cache; - this.prefix = prefix; - } - - private addPrefix(key: string): string { - return `${this.prefix}:${key}`; - } - - private removePrefix(key: string): string { - return key.substring(this.prefix.length + 1); - } - - set(key: string, value: V): void { - return this.cache.set(this.addPrefix(key), value); - } - - get(key: string): V | undefined{ - return this.cache.get(this.addPrefix(key)); - } - - remove(key: string): void { - return this.cache.remove(this.addPrefix(key)); - } - - clear(): void { - this.getInternalKeys().forEach((key) => this.cache.remove(key)); - } +// import { SyncCache, AsyncCache, Cache, CacheOp, CacheWithOp, OpValue, OperationOf } from "./cache"; + +// export const isAsync = (operation: 'sync' | 'async'): operation is 'async' => { +// return operation === 'async'; +// } + +// const transform = (op: Op, value: OpValue, transformer: (source: VS) => VT): OpValue => { +// if (op === 'async') { +// const val: Promise = value as any; +// const ret: Promise = val.then((v) => transformer(v)); +// return ret as OpValue; +// } + +// return transformer(value as VS) as OpValue; +// } + +// export const transformCache = ( +// cache: Cache, +// prefix: string, +// transformTo: 
(value: VS) => VT, +// transformFrom: (value: VT) => VS +// ): CacheWithOp, VT> => { +// const addPrefix = (key: string): string => { +// return `${prefix}${key}`; +// }; + +// const removePrefix = (key: string): string => { +// return key.substring(prefix.length); +// }; + +// const transformedCache: CacheWithOp, VT> = { +// operation: cache.operation, +// set: (key: string, value: VT) => cache.set(addPrefix(key), transformFrom(value)), +// get: (key: string) => { +// const prefixedKey = addPrefix(key); +// if (cache.operation === 'async') { +// const value = cache.get(prefixedKey); +// return value.then((v) => v ? transformTo(v) : undefined); +// } +// const value = cache.get(prefixedKey); +// return value ? transformTo(value) : undefined; +// }, +// remove: (key: string) => cache.remove(addPrefix(key)), +// clear: () => cache.clear(), +// getKeys: () => { +// if (cache.operation === 'async') { + +// } +// cache.getKeys(), +// } +// getAll: () => { +// const map = new Map(); +// cache.getAll().forEach((value, key) => { +// map.set(key, transformTo(value)); +// }); +// return map; +// } +// }; + +// return transformedCache; +// } + +// export class SyncPrefixStore implements SyncCache { +// private cache: SyncCache; +// private prefix: string; +// public readonly operation = 'sync'; + +// constructor(cache: SyncCache, prefix: string) { +// this.cache = cache; +// this.prefix = prefix; +// } + +// private addPrefix(key: string): string { +// return `${this.prefix}:${key}`; +// } + +// private removePrefix(key: string): string { +// return key.substring(this.prefix.length + 1); +// } + +// set(key: string, value: V): void { +// return this.cache.set(this.addPrefix(key), value); +// } + +// get(key: string): V | undefined{ +// return this.cache.get(this.addPrefix(key)); +// } + +// remove(key: string): void { +// return this.cache.remove(this.addPrefix(key)); +// } + +// clear(): void { +// this.getInternalKeys().forEach((key) => this.cache.remove(key)); +// } + +// private getInternalKeys(): string[] { +// return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); +// } + +// getKeys(): string[] { +// return this.getInternalKeys().map((key) => this.removePrefix(key)); +// } + +// getAll(): Map { +// const map = new Map(); +// this.getInternalKeys().forEach((key) => { +// const value = this.cache.get(key); +// if (value) { +// map.set(this.removePrefix(key), value); +// } +// }); +// return map; +// } +// } + +// // export class SyncPrefixStore implements SyncCache { +// // private cache: SyncCache; +// // private prefix: string; +// // public readonly operation = 'sync'; + +// // constructor(cache: SyncCache, prefix: string) { +// // this.cache = cache; +// // this.prefix = prefix; +// // } + +// // private addPrefix(key: string): string { +// // return `${this.prefix}:${key}`; +// // } + +// // private removePrefix(key: string): string { +// // return key.substring(this.prefix.length + 1); +// // } + +// // set(key: string, value: V): void { +// // return this.cache.set(this.addPrefix(key), value); +// // } + +// // get(key: string): V | undefined{ +// // return this.cache.get(this.addPrefix(key)); +// // } + +// // remove(key: string): void { +// // return this.cache.remove(this.addPrefix(key)); +// // } + +// // clear(): void { +// // this.getInternalKeys().forEach((key) => this.cache.remove(key)); +// // } - private getInternalKeys(): string[] { - return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); - } - - getKeys(): string[] { - return 
this.getInternalKeys().map((key) => this.removePrefix(key)); - } - - getAll(): Map { - const map = new Map(); - this.getInternalKeys().forEach((key) => { - const value = this.cache.get(key); - if (value) { - map.set(this.removePrefix(key), value); - } - }); - return map; - } -} - - -export class AyncPrefixStore implements AsyncCache { - private cache: AsyncCache; - private prefix: string; - public readonly operation = 'async'; - - constructor(cache: AsyncCache, prefix: string) { - this.cache = cache; - this.prefix = prefix; - } - - private addPrefix(key: string): string { - return `${this.prefix}:${key}`; - } - - private removePrefix(key: string): string { - return key.substring(this.prefix.length + 1); - } - - set(key: string, value: V): Promise { - return this.cache.set(this.addPrefix(key), value); - } - - get(key: string): Promise { - return this.cache.get(this.addPrefix(key)); - } - - remove(key: string): Promise { - return this.cache.remove(this.addPrefix(key)); - } - - async clear(): Promise { - const keys = await this.getInternalKeys(); - await Promise.all(keys.map((key) => this.cache.remove(key))); - } +// // private getInternalKeys(): string[] { +// // return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); +// // } + +// // getKeys(): string[] { +// // return this.getInternalKeys().map((key) => this.removePrefix(key)); +// // } + +// // getAll(): Map { +// // const map = new Map(); +// // this.getInternalKeys().forEach((key) => { +// // const value = this.cache.get(key); +// // if (value) { +// // map.set(this.removePrefix(key), value); +// // } +// // }); +// // return map; +// // } +// // } + + +// export class AyncPrefixStore implements AsyncCache { +// private cache: AsyncCache; +// private prefix: string; +// public readonly operation = 'async'; + +// constructor(cache: AsyncCache, prefix: string) { +// this.cache = cache; +// this.prefix = prefix; +// } + +// private addPrefix(key: string): string { +// return `${this.prefix}:${key}`; +// } + +// private removePrefix(key: string): string { +// return key.substring(this.prefix.length + 1); +// } + +// set(key: string, value: V): Promise { +// return this.cache.set(this.addPrefix(key), value); +// } + +// get(key: string): Promise { +// return this.cache.get(this.addPrefix(key)); +// } + +// remove(key: string): Promise { +// return this.cache.remove(this.addPrefix(key)); +// } + +// async clear(): Promise { +// const keys = await this.getInternalKeys(); +// await Promise.all(keys.map((key) => this.cache.remove(key))); +// } - private async getInternalKeys(): Promise { - return this.cache.getKeys().then((keys) => keys.filter((key) => key.startsWith(this.prefix))); - } - - async getKeys(): Promise { - return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key))); - } - - async getAll(): Promise> { - const keys = await this.getInternalKeys(); - const values = await Promise.all(keys.map((key) => this.cache.get(key))); - const map = new Map(); - keys.forEach((key, index) => { - const value = values[index]; - if (value) { - map.set(this.removePrefix(key), value) - } - }); - return map; - } -} +// private async getInternalKeys(): Promise { +// return this.cache.getKeys().then((keys) => keys.filter((key) => key.startsWith(this.prefix))); +// } + +// async getKeys(): Promise { +// return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key))); +// } + +// async getAll(): Promise> { +// const keys = await this.getInternalKeys(); +// const values = await 
Promise.all(keys.map((key) => this.cache.get(key))); +// const map = new Map(); +// keys.forEach((key, index) => { +// const value = values[index]; +// if (value) { +// map.set(this.removePrefix(key), value) +// } +// }); +// return map; +// } +// } From 4eb8910a66f25fce1dccb555e4434c8eaa51e2c7 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 14 Nov 2024 02:51:10 +0600 Subject: [PATCH 20/45] queing processor: wip updates --- ....spec.ts => batch_event_processor.spec.ts} | 229 ++++++++++++------ ..._processor.ts => batch_event_processor.ts} | 77 +++--- .../event_processor_factory.browser.ts | 5 +- .../event_processor_factory.ts | 25 +- .../queueing_event_processor.react_native.ts | 6 +- .../polling_datafile_manager.ts | 5 - lib/project_config/project_config_manager.ts | 5 - lib/tests/mock/mock_cache.ts | 11 +- .../cache/async_storage_cache.react_native.ts | 12 +- lib/utils/cache/cache.ts | 31 +-- .../cache/local_storage_cache.browser.ts | 14 +- lib/utils/type.ts | 2 + vitest.config.mts | 2 +- 13 files changed, 251 insertions(+), 173 deletions(-) rename lib/event_processor/{queueing_event_processor.spec.ts => batch_event_processor.spec.ts} (82%) rename lib/event_processor/{queueing_event_processor.ts => batch_event_processor.ts} (78%) diff --git a/lib/event_processor/queueing_event_processor.spec.ts b/lib/event_processor/batch_event_processor.spec.ts similarity index 82% rename from lib/event_processor/queueing_event_processor.spec.ts rename to lib/event_processor/batch_event_processor.spec.ts index 019572f6f..145cce8b8 100644 --- a/lib/event_processor/queueing_event_processor.spec.ts +++ b/lib/event_processor/batch_event_processor.spec.ts @@ -15,22 +15,19 @@ */ import { expect, describe, it, vi, beforeEach, afterEach, MockInstance } from 'vitest'; -import { EventWithId, QueueingEventProcessor } from './queueing_event_processor'; +import { EventWithId, BatchEventProcessor } from './batch_event_processor'; import { getMockSyncCache } from '../tests/mock/mock_cache'; import { createImpressionEvent } from '../tests/mock/create_event'; import { ProcessableEvent } from './eventProcessor'; import { EventDispatcher } from './eventDispatcher'; import { formatEvents } from './v1/buildEventV1'; -import { resolvablePromise } from '../utils/promise/resolvablePromise'; +import { ResolvablePromise, resolvablePromise } from '../utils/promise/resolvablePromise'; import { advanceTimersByTime } from '../../tests/testUtils'; import { getMockLogger } from '../tests/mock/mock_logger'; -import exp from 'constants'; import { getMockRepeater } from '../tests/mock/mock_repeater'; -import event from 'sinon/lib/sinon/util/event'; -import { reset } from 'sinon/lib/sinon/collection'; -import logger from '../modules/logging/logger'; import * as retry from '../utils/executor/backoff_retry_runner'; import { ServiceState } from '../service'; +import { EventDispatchResult } from '../modules/event_processor/eventProcessor'; const getMockDispatcher = () => { return { @@ -56,10 +53,10 @@ describe('QueueingEventProcessor', async () => { describe('start', () => { it('should resolve onRunning() when start() is called', async () => { const eventDispatcher = getMockDispatcher(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater: getMockRepeater(), - maxQueueSize: 1000, + batchSize: 1000, }); processor.start(); @@ -71,11 +68,11 @@ describe('QueueingEventProcessor', async () => { const dispatchRepeater = getMockRepeater(); const failedEventRepeater = 
getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, failedEventRepeater, - maxQueueSize: 1000, + batchSize: 1000, }); processor.start(); @@ -98,16 +95,18 @@ describe('QueueingEventProcessor', async () => { cache.set(id, { id, event }); } - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater: getMockRepeater(), - maxQueueSize: 2, + batchSize: 2, eventStore: cache, }); processor.start(); await processor.onRunning(); + await exhaustMicrotasks(); + expect(mockDispatch).toHaveBeenCalledTimes(3); expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); @@ -116,12 +115,23 @@ describe('QueueingEventProcessor', async () => { }); describe('process', () => { + it('should return a promise that rejects if processor is not running', async () => { + const eventDispatcher = getMockDispatcher(); + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater: getMockRepeater(), + batchSize: 100, + }); + + expect(processor.process(createImpressionEvent('id-1'))).rejects.toThrow(); + }); + it('should enqueue event without dispatching immediately', async () => { const eventDispatcher = getMockDispatcher(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater: getMockRepeater(), - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -139,10 +149,10 @@ describe('QueueingEventProcessor', async () => { const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; mockDispatch.mockResolvedValue({}); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater: getMockRepeater(), - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -179,14 +189,51 @@ describe('QueueingEventProcessor', async () => { expect(eventDispatcher.dispatchEvent.mock.calls[1][0]).toEqual(formatEvents(events)); }); + it('should flush queue is context of the new event is different and enqueue the new event', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const dispatchRepeater = getMockRepeater(); + + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater, + batchSize: 100, + }); + + processor.start(); + await processor.onRunning(); + + const events: ProcessableEvent[] = []; + for(let i = 0; i < 80; i++) { + const event = createImpressionEvent(`id-${i}`); + events.push(event); + await processor.process(event); + } + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(0); + + const newEvent = createImpressionEvent('id-a'); + newEvent.context.accountId = 'account-' + Math.random(); + await processor.process(newEvent); + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); + expect(eventDispatcher.dispatchEvent.mock.calls[0][0]).toEqual(formatEvents(events)); + + await dispatchRepeater.execute(0); + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledTimes(2); + expect(eventDispatcher.dispatchEvent.mock.calls[1][0]).toEqual(formatEvents([newEvent])); + }); + it('should store the event in the eventStore with increasing ids', async () => { const eventDispatcher = getMockDispatcher(); const eventStore = 
getMockSyncCache(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater: getMockRepeater(), - maxQueueSize: 100, + batchSize: 100, eventStore, }); @@ -215,10 +262,10 @@ describe('QueueingEventProcessor', async () => { mockDispatch.mockResolvedValue({}); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -255,10 +302,10 @@ describe('QueueingEventProcessor', async () => { mockDispatch.mockRejectedValue(new Error()); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -288,15 +335,14 @@ describe('QueueingEventProcessor', async () => { reset: vi.fn(), }; - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, retryConfig: { - retry: true, backoffProvider: () => backoffController, maxRetries: 3, }, - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -337,14 +383,13 @@ describe('QueueingEventProcessor', async () => { reset: vi.fn(), }; - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, retryConfig: { - retry: true, backoffProvider: () => backoffController, }, - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -384,10 +429,10 @@ describe('QueueingEventProcessor', async () => { const eventStore = getMockSyncCache(); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, eventStore, }); @@ -425,10 +470,10 @@ describe('QueueingEventProcessor', async () => { const eventStore = getMockSyncCache(); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, eventStore, }); @@ -472,13 +517,12 @@ describe('QueueingEventProcessor', async () => { reset: vi.fn(), }; - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, eventStore, retryConfig: { - retry: true, backoffProvider: () => backoffController, maxRetries: 3, }, @@ -520,16 +564,15 @@ describe('QueueingEventProcessor', async () => { const eventStore = getMockSyncCache(); const logger = getMockLogger(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, eventStore, retryConfig: { - retry: true, backoffProvider: () => backoffController, maxRetries: 3, }, - maxQueueSize: 100, + batchSize: 100, logger, }); @@ -573,16 +616,15 @@ describe('QueueingEventProcessor', async () => { const eventStore = getMockSyncCache(); const logger = getMockLogger(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, eventStore, retryConfig: { - retry: true, backoffProvider: () => backoffController, maxRetries: 3, }, - maxQueueSize: 100, + batchSize: 100, logger, }); @@ -612,18 
+654,24 @@ describe('QueueingEventProcessor', async () => { expect(logger.error).toHaveBeenCalledOnce(); }); - it('should dispatch only failed events in correct batch size and order when retryFailedEvents is called', async () => { + it.only('should dispatch only failed events in correct batch size and order when retryFailedEvents is called', async () => { const eventDispatcher = getMockDispatcher(); const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; - mockDispatch.mockResolvedValue({}); + + const dispatchResults: ResolvablePromise[] = []; + for (let i = 0; i < 10; i++) { + const reuslt = resolvablePromise(); + dispatchResults.push(reuslt); + mockDispatch.mockReturnValueOnce(reuslt); + } const cache = getMockSyncCache(); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 2, + batchSize: 2, eventStore: cache, }); @@ -632,43 +680,77 @@ describe('QueueingEventProcessor', async () => { expect(mockDispatch).toHaveBeenCalledTimes(0); - // these events should be active and should not be reomoved from store or dispatched with failed events + // these events should be in dispatching state and should not be reomoved from store or dispatched with failed events const eventA = createImpressionEvent('id-A'); const eventB = createImpressionEvent('id-B'); - await processor.process(eventA); await processor.process(eventB); + dispatchRepeater.execute(0); + await exhaustMicrotasks(); + + expect(mockDispatch).toHaveBeenCalledTimes(1); + + // these events should be in the queue and should not be removed from store or dispatched with failed events + const eventC = createImpressionEvent('id-C'); + const eventD = createImpressionEvent('id-D'); + await processor.process(eventC); + await processor.process(eventD); + await exhaustMicrotasks(); + let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? 
-1 : 1).map(e => e.event); expect(eventsInStore).toEqual(expect.arrayContaining([ expect.objectContaining(eventA), - expect.objectContaining(eventB) + expect.objectContaining(eventB), + expect.objectContaining(eventC), + expect.objectContaining(eventD), ])); - - const events: ProcessableEvent[] = []; + const failedEvents: ProcessableEvent[] = []; for(let i = 0; i < 5; i++) { const id = `id-${i}`; const event = createImpressionEvent(id); - events.push(event); + failedEvents.push(event); cache.set(id, { id, event }); } await processor.retryFailedEvents(); - expect(mockDispatch).toHaveBeenCalledTimes(3); - expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); - expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); - expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); + expect(mockDispatch).toHaveBeenCalledTimes(4); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([failedEvents[0], failedEvents[1]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([failedEvents[2], failedEvents[3]])); + expect(mockDispatch.mock.calls[3][0]).toEqual(formatEvents([failedEvents[4]])); await exhaustMicrotasks(); - expect(cache.size()).toBe(2); + for(let i = 5; i < 10; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + failedEvents.push(event); + cache.set(id, { id, event }); + } + + await processor.retryFailedEvents(); + + expect(mockDispatch).toHaveBeenCalledTimes(7); + expect(mockDispatch.mock.calls[4][0]).toEqual(formatEvents([failedEvents[5], failedEvents[6]])); + expect(mockDispatch.mock.calls[5][0]).toEqual(formatEvents([failedEvents[7], failedEvents[8]])); + expect(mockDispatch.mock.calls[6][0]).toEqual(formatEvents([failedEvents[9]])); + + for(let i = 0; i < 7; i++) { + dispatchResults[i].resolve({}); + } + + await exhaustMicrotasks(); + + expect(cache.size()).toBe(4); eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? 
-1 : 1).map(e => e.event); expect(eventsInStore).toEqual(expect.arrayContaining([ expect.objectContaining(eventA), - expect.objectContaining(eventB) + expect.objectContaining(eventB), + expect.objectContaining(eventC), + expect.objectContaining(eventD) ])); }); @@ -677,10 +759,10 @@ describe('QueueingEventProcessor', async () => { const eventDispatcher = getMockDispatcher(); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, }); const event = createImpressionEvent('id-1'); @@ -705,10 +787,10 @@ describe('QueueingEventProcessor', async () => { const eventDispatcher = getMockDispatcher(); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, }); const dispatchListener = vi.fn(); @@ -747,11 +829,11 @@ describe('QueueingEventProcessor', async () => { const dispatchRepeater = getMockRepeater(); const failedEventRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, failedEventRepeater, - maxQueueSize: 2, + batchSize: 2, eventStore: cache, }); @@ -806,10 +888,10 @@ describe('QueueingEventProcessor', async () => { const eventDispatcher = getMockDispatcher(); const dispatchRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, }); processor.stop(); @@ -822,11 +904,11 @@ describe('QueueingEventProcessor', async () => { const dispatchRepeater = getMockRepeater(); const failedEventRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, failedEventRepeater, - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -845,12 +927,12 @@ describe('QueueingEventProcessor', async () => { const dispatchRepeater = getMockRepeater(); const failedEventRepeater = getMockRepeater(); - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, closingEventDispatcher, dispatchRepeater, failedEventRepeater, - maxQueueSize: 100, + batchSize: 100, }); processor.start(); @@ -891,12 +973,11 @@ describe('QueueingEventProcessor', async () => { reset: vi.fn(), }; - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, retryConfig: { - retry: true, backoffProvider: () => backoffController, maxRetries: 3, } @@ -937,10 +1018,10 @@ describe('QueueingEventProcessor', async () => { reset: vi.fn(), }; - const processor = new QueueingEventProcessor({ + const processor = new BatchEventProcessor({ eventDispatcher, dispatchRepeater, - maxQueueSize: 100, + batchSize: 100, }); processor.start() diff --git a/lib/event_processor/queueing_event_processor.ts b/lib/event_processor/batch_event_processor.ts similarity index 78% rename from lib/event_processor/queueing_event_processor.ts rename to lib/event_processor/batch_event_processor.ts index 9c2da11d7..189ea98b0 100644 --- a/lib/event_processor/queueing_event_processor.ts +++ b/lib/event_processor/batch_event_processor.ts @@ -10,6 +10,7 @@ import { RunResult, 
runWithRetry } from "../utils/executor/backoff_retry_runner" import { isSuccessStatusCode } from "../utils/http_request_handler/http_util"; import { EventEmitter } from "../utils/event_emitter/event_emitter"; import { IdGenerator } from "../utils/id_generator"; +import { areEventContextsEqual } from "./events"; export type EventWithId = { id: string; @@ -21,17 +22,14 @@ export type RetryConfig = { backoffProvider: Producer; } -export type QueueingEventProcessorConfig = { +export type BatchEventProcessorConfig = { dispatchRepeater: Repeater, failedEventRepeater?: Repeater, batchSize: number, - maxQueueSize: number, eventStore?: Cache, eventDispatcher: EventDispatcher, closingEventDispatcher?: EventDispatcher, logger?: LoggerFacade, - retryMinBackoff?: number, - retryMaxBackoff?: number, retryConfig?: RetryConfig; startupLogs?: StartupLog[]; }; @@ -41,25 +39,25 @@ type EventBatch = { ids: string[], } -export class QueueingEventProcessor extends BaseService implements EventProcessor { +export class BatchEventProcessor extends BaseService implements EventProcessor { private eventDispatcher: EventDispatcher; private closingEventDispatcher?: EventDispatcher; private eventQueue: EventWithId[] = []; - private maxQueueSize: number; + private batchSize: number; private eventStore?: Cache; private dispatchRepeater: Repeater; private failedEventRepeater?: Repeater; private idGenerator: IdGenerator = new IdGenerator(); private runningTask: Map> = new Map(); - private activeEventIds: Set = new Set(); + private dispatchingEventIds: Set = new Set(); private eventEmitter: EventEmitter<{ dispatch: EventV1Request }> = new EventEmitter(); private retryConfig?: RetryConfig; - constructor(config: QueueingEventProcessorConfig) { + constructor(config: BatchEventProcessorConfig) { super(config.startupLogs); this.eventDispatcher = config.eventDispatcher; this.closingEventDispatcher = config.closingEventDispatcher; - this.maxQueueSize = config.maxQueueSize; + this.batchSize = config.batchSize; this.eventStore = config.eventStore; this.logger = config.logger; this.retryConfig = config.retryConfig; @@ -68,7 +66,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso this.dispatchRepeater.setTask(() => this.flush()); this.failedEventRepeater = config.failedEventRepeater; - this.failedEventRepeater?.setTask(() => this.retryFailedEvents()); + this.failedEventRepeater?.setTask(() => this.retryFailedEvents()); } onDispatch(handler: Consumer): Fn { @@ -76,31 +74,41 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso } public async retryFailedEvents(): Promise { - const failedEvents = await this.eventStore?.getAll(); - if (!failedEvents) { + if (!this.eventStore) { return; } - if (failedEvents.size == 0) { + const keys = (await this.eventStore.getKeys()).filter( + (k) => !this.dispatchingEventIds.has(k) && !this.eventQueue.find((e) => e.id === k) + ); + + const events = await this.eventStore.getBatched(keys); + const failedEvents: EventWithId[] = []; + events.forEach((e) => { + if(e) { + failedEvents.push(e); + } + }); + + if (failedEvents.length == 0) { return; } - const failedEventsArray = Array.from(failedEvents.values()).sort(); + failedEvents.sort((a, b) => a.id < b.id ? 
-1 : 1); const batches: EventBatch[] = []; let currentBatch: EventWithId[] = []; - failedEventsArray.forEach((event) => { - if (!this.activeEventIds.has(event.id)) { - currentBatch.push(event); - if (currentBatch.length === this.maxQueueSize) { - batches.push({ - request: formatEvents(currentBatch.map((e) => e.event)), - ids: currentBatch.map((e) => e.id), - }); - currentBatch = []; - } + failedEvents.forEach((event) => { + if (currentBatch.length === this.batchSize || + (currentBatch.length > 0 && !areEventContextsEqual(currentBatch[0].event, event.event))) { + batches.push({ + request: formatEvents(currentBatch.map((e) => e.event)), + ids: currentBatch.map((e) => e.id), + }); + currentBatch = []; } + currentBatch.push(event); }); if (currentBatch.length > 0) { @@ -144,6 +152,10 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso private dispatchBatch(batch: EventBatch, closing: boolean): void { const { request, ids } = batch; + + ids.forEach((id) => { + this.dispatchingEventIds.add(id); + }); const runResult: RunResult = this.retryConfig ? runWithRetry( @@ -158,11 +170,9 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso const taskId = this.idGenerator.getId(); this.runningTask.set(taskId, runResult); - console.log(runResult); - runResult.result.then((res) => { ids.forEach((id) => { - this.activeEventIds.delete(id); + this.dispatchingEventIds.delete(id); this.eventStore?.remove(id); }); return Promise.resolve(); @@ -172,7 +182,7 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso this.logger?.error('Failed to dispatch events', err); }).finally(() => { this.runningTask.delete(taskId); - ids.forEach((id) => this.activeEventIds.delete(id)); + ids.forEach((id) => this.dispatchingEventIds.delete(id)); }); } @@ -186,7 +196,11 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso } async process(event: ProcessableEvent): Promise { - if (this.eventQueue.length == this.maxQueueSize) { + if (!this.isRunning()) { + return Promise.reject('Event processor is not running'); + } + + if (this.eventQueue.length == this.batchSize) { this.flush(); } @@ -196,7 +210,10 @@ export class QueueingEventProcessor extends BaseService implements EventProcesso }; await this.eventStore?.set(eventWithId.id, eventWithId); - this.activeEventIds.add(eventWithId.id); + + if (this.eventQueue.length > 0 && !areEventContextsEqual(this.eventQueue[0].event, event)) { + this.flush(); + } this.eventQueue.push(eventWithId); } diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index e790649e4..e1d03a42a 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -17,7 +17,7 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import { EventDispatcher } from './eventDispatcher'; import { EventProcessor } from './eventProcessor'; -import { QueueingEventProcessor, QueueingEventProcessorConfig } from './queueing_event_processor'; +import { BatchEventProcessor, BatchEventProcessorConfig } from './batch_event_processor'; import { getQueuingEventProcessor, QueueingEventProcessorOptions } from './event_processor_factory'; import defaultEventDispatcher from './default_dispatcher.browser'; import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; @@ -39,8 +39,7 @@ export const createQueueingEventProcessor = ( 
(options.eventDispatcher ? options.eventDispatcher : sendBeaconEventDispatcher), flushInterval: options.flushInterval, batchSize: options.batchSize, - maxQueueSize: options.maxQueueSize, retryOptions: {}, - failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, + failedEventRetryOptions: {}, }); } diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index b29cd6761..a1d94074f 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -3,25 +3,29 @@ import { StartupLog } from "../service"; import { ExponentialBackoff, IntervalRepeater } from "../utils/repeater/repeater"; import { EventDispatcher } from "./eventDispatcher"; import { EventProcessor } from "./eventProcessor"; -import { QueueingEventProcessor, RetryConfig } from "./queueing_event_processor"; +import { BatchEventProcessor, RetryConfig } from "./batch_event_processor"; export const DEFAULT_EVENT_BATCH_SIZE = 10; export const DEFAULT_EVENT_FLUSH_INTERVAL = 1000; export const DEFAULT_EVENT_MAX_QUEUE_SIZE = 10000; export const DEFAULT_MIN_BACKOFF = 1000; export const DEFAULT_MAX_BACKOFF = 32000; +export const DEFAULT_FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; +export const DEFAULT_FAILED_EVENT_RETRY_BATCH_COUNT = 2; export type QueueingEventProcessorOptions = { eventDispatcher?: EventDispatcher; closingEventDispatcher?: EventDispatcher; flushInterval?: number; batchSize?: number; - maxQueueSize?: number; }; export type QueueingEventProcessorFactoryOptions = Omit & { eventDispatcher: EventDispatcher; - failedEventRetryInterval?: number; + failedEventRetryOptions?: { + interval?: number, + batchCountPerRetry?: number, + }, retryOptions?: { maxRetries?: number; minBackoff?: number; @@ -31,7 +35,7 @@ export type QueueingEventProcessorFactoryOptions = Omit { const { eventDispatcher, closingEventDispatcher, retryOptions } = options; @@ -68,20 +72,21 @@ export const getQueuingEventProcessor = ( batchSize = options.batchSize; } - const maxQueueSize = options.maxQueueSize ?? DEFAULT_EVENT_MAX_QUEUE_SIZE; - const dispatchRepeater = new IntervalRepeater(flushInterval); - const failedEventRepeater = options.failedEventRetryInterval ? - new IntervalRepeater(options.failedEventRetryInterval) : undefined; + + const failedEventRetryOptions = options.failedEventRetryOptions ? 
{ + repeater: new IntervalRepeater(options.failedEventRetryOptions.interval + || DEFAULT_FAILED_EVENT_RETRY_INTERVAL), + batchCountPerRetry: options.failedEventRetryOptions.batchCountPerRetry || DEFAULT_FAILED_EVENT_RETRY_BATCH_COUNT, + } : undefined; return new EventProcessorConstructor({ eventDispatcher, closingEventDispatcher, dispatchRepeater, - failedEventRepeater, + failedEventRetryOptions, retryConfig, batchSize, - maxQueueSize, startupLogs }); }; diff --git a/lib/event_processor/queueing_event_processor.react_native.ts b/lib/event_processor/queueing_event_processor.react_native.ts index 631631acb..43242b3bb 100644 --- a/lib/event_processor/queueing_event_processor.react_native.ts +++ b/lib/event_processor/queueing_event_processor.react_native.ts @@ -3,14 +3,14 @@ import { addEventListener as addConnectionListener, } from "@react-native-community/netinfo" -import { QueueingEventProcessor, QueueingEventProcessorConfig } from "./queueing_event_processor"; +import { BatchEventProcessor, BatchEventProcessorConfig } from "./batch_event_processor"; import { Fn } from "../utils/type"; -class ReactNativeNetInfoEventProcessor extends QueueingEventProcessor { +class ReactNativeNetInfoEventProcessor extends BatchEventProcessor { private isInternetReachable = true; private unsubscribeNetInfo?: Fn; - constructor(config: QueueingEventProcessorConfig) { + constructor(config: BatchEventProcessorConfig) { super(config); } diff --git a/lib/project_config/polling_datafile_manager.ts b/lib/project_config/polling_datafile_manager.ts index 3784fbfd6..585cb0949 100644 --- a/lib/project_config/polling_datafile_manager.ts +++ b/lib/project_config/polling_datafile_manager.ts @@ -47,7 +47,6 @@ export class PollingDatafileManager extends BaseService implements DatafileManag private cache?: PersistentKeyValueCache; private sdkKey: string; private datafileAccessToken?: string; - private logger?: LoggerFacade; constructor(config: DatafileManagerConfig) { super(); @@ -80,10 +79,6 @@ export class PollingDatafileManager extends BaseService implements DatafileManag this.datafileUrl = sprintf(urlTemplateToUse, this.sdkKey); } - setLogger(logger: LoggerFacade): void { - this.logger = logger; - } - onUpdate(listener: Consumer): Fn { return this.emitter.on('update', listener); } diff --git a/lib/project_config/project_config_manager.ts b/lib/project_config/project_config_manager.ts index c03ee9b4c..94c83902b 100644 --- a/lib/project_config/project_config_manager.ts +++ b/lib/project_config/project_config_manager.ts @@ -53,7 +53,6 @@ export class ProjectConfigManagerImpl extends BaseService implements ProjectConf public jsonSchemaValidator?: Transformer; public datafileManager?: DatafileManager; private eventEmitter: EventEmitter<{ update: ProjectConfig }> = new EventEmitter(); - private logger?: LoggerFacade; constructor(config: ProjectConfigManagerConfig) { super(); @@ -63,10 +62,6 @@ export class ProjectConfigManagerImpl extends BaseService implements ProjectConf this.datafileManager = config.datafileManager; } - setLogger(logger: LoggerFacade): void { - this.logger = logger; - } - start(): void { if (!this.isNew()) { return; diff --git a/lib/tests/mock/mock_cache.ts b/lib/tests/mock/mock_cache.ts index f8d637f2a..3f8e3928c 100644 --- a/lib/tests/mock/mock_cache.ts +++ b/lib/tests/mock/mock_cache.ts @@ -1,6 +1,12 @@ import { SyncCache } from "../../utils/cache/cache"; +import { Maybe } from "../../utils/type"; -export const getMockSyncCache = (): SyncCache => { +type SyncCacheWithAddOn = SyncCache & { + size(): number; 
+ getAll(): Map; +}; + +export const getMockSyncCache = (): SyncCacheWithAddOn => { const cache = { operation: 'sync' as const, data: new Map(), @@ -16,6 +22,9 @@ export const getMockSyncCache = (): SyncCache => { getAll(): Map { return this.data; }, + getBatched(keys: string[]): Maybe[] { + return keys.map((key) => this.get(key)); + }, size(): number { return this.data.size; }, diff --git a/lib/utils/cache/async_storage_cache.react_native.ts b/lib/utils/cache/async_storage_cache.react_native.ts index 9fa25e48e..5fe64b0bb 100644 --- a/lib/utils/cache/async_storage_cache.react_native.ts +++ b/lib/utils/cache/async_storage_cache.react_native.ts @@ -1,3 +1,4 @@ +import { Maybe } from "../type"; import { AsyncCache } from "./cache"; import AsyncStorage from '@react-native-async-storage/async-storage'; @@ -25,15 +26,8 @@ export class AsyncStorageCache implements AsyncCache { return [... await AsyncStorage.getAllKeys()]; } - async getAll(): Promise> { - const keys = await AsyncStorage.getAllKeys(); + async getBatched(keys: string[]): Promise[]> { const items = await AsyncStorage.multiGet(keys); - const map = new Map(); - items.forEach(([key, value]) => { - if (value) { - map.set(key, JSON.parse(value)); - } - }); - return map; + return items.map(([key, value]) => value ? JSON.parse(value) : undefined); } } diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index eebce86ac..c2142cb5d 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -1,4 +1,5 @@ import { Transformer } from '../../utils/type'; +import { Maybe } from '../../utils/type'; export type CacheOp = 'sync' | 'async'; export type OpValue = Op extends 'sync' ? V : Promise; @@ -6,11 +7,11 @@ export type OpValue = Op extends 'sync' ? V : Promise; export interface CacheWithOp { operation: Op; set(key: string, value: V): OpValue; - get(key: string): OpValue; + get(key: string): OpValue>; remove(key: string): OpValue; clear(): OpValue; getKeys(): OpValue; - getAll(): OpValue>; + getBatched(keys: string[]): OpValue[]>; } export type SyncCache = CacheWithOp<'sync', V>; @@ -70,15 +71,9 @@ export class SyncPrefixCache implements SyncCache { return this.getInternalKeys().map((key) => this.removePrefix(key)); } - getAll(): Map { - const map = new Map(); - this.getInternalKeys().forEach((key) => { - const value = this.cache.get(key); - if (value) { - map.set(this.removePrefix(key), this.transformTo(value)); - } - }); - return map; + getBatched(keys: string[]): Maybe[] { + return this.cache.getBatched(keys.map((key) => this.addPrefix(key))) + .map((value) => value ? this.transformTo(value) : undefined); } } @@ -136,16 +131,8 @@ export class AyncPrefixStore implements AsyncCache { return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key))); } - async getAll(): Promise> { - const keys = await this.getInternalKeys(); - const values = await Promise.all(keys.map((key) => this.cache.get(key))); - const map = new Map(); - keys.forEach((key, index) => { - const value = values[index]; - if (value) { - map.set(this.removePrefix(key), this.transformTo(value)); - } - }); - return map; + async getBatched(keys: string[]): Promise[]> { + const values = await this.cache.getBatched(keys.map((key) => this.addPrefix(key))); + return values.map((value) => value ? 
this.transformTo(value) : undefined); } } diff --git a/lib/utils/cache/local_storage_cache.browser.ts b/lib/utils/cache/local_storage_cache.browser.ts index 21f721f65..1bdf129d5 100644 --- a/lib/utils/cache/local_storage_cache.browser.ts +++ b/lib/utils/cache/local_storage_cache.browser.ts @@ -1,3 +1,4 @@ +import { Maybe } from "../type"; import { SyncCache } from "./cache"; export class LocalStorageCache implements SyncCache { @@ -7,7 +8,7 @@ export class LocalStorageCache implements SyncCache { localStorage.setItem(key, JSON.stringify(value)); } - public get(key: string): V | undefined { + public get(key: string): Maybe { const value = localStorage.getItem(key); return value ? JSON.parse(value) : undefined; } @@ -24,14 +25,7 @@ export class LocalStorageCache implements SyncCache { return Object.keys(localStorage); } - public getAll(): Map { - const map = new Map(); - this.getKeys().forEach((key) => { - const value = this.get(key); - if (value) { - map.set(key, value); - } - }); - return map; + getBatched(keys: string[]): Maybe[] { + return keys.map((k) => this.get(k)); } } diff --git a/lib/utils/type.ts b/lib/utils/type.ts index ee8a440b9..ddf3871aa 100644 --- a/lib/utils/type.ts +++ b/lib/utils/type.ts @@ -24,3 +24,5 @@ export type AsyncComsumer = (arg: T) => Promise; export type Producer = () => T; export type AsyncProducer = () => Promise; + +export type Maybe = T | undefined; diff --git a/vitest.config.mts b/vitest.config.mts index 673f7d1c6..61452ea16 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/*.spec.ts'], + include: ['**/batch_event_processor.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 4cebabc1f9b304658ecca81906dc1dfd87bed0f3 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 14 Nov 2024 17:14:12 +0600 Subject: [PATCH 21/45] update --- .../batch_event_processor.spec.ts | 424 ++++++++++++------ 1 file changed, 281 insertions(+), 143 deletions(-) diff --git a/lib/event_processor/batch_event_processor.spec.ts b/lib/event_processor/batch_event_processor.spec.ts index 145cce8b8..4cbf53f49 100644 --- a/lib/event_processor/batch_event_processor.spec.ts +++ b/lib/event_processor/batch_event_processor.spec.ts @@ -654,106 +654,307 @@ describe('QueueingEventProcessor', async () => { expect(logger.error).toHaveBeenCalledOnce(); }); - it.only('should dispatch only failed events in correct batch size and order when retryFailedEvents is called', async () => { - const eventDispatcher = getMockDispatcher(); - const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + describe('retryFailedEvents', () => { + it('should disptach only failed events from the store and not dispatch queued events', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater, + batchSize: 100, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + // these events should be in queue and should not be reomoved from store or dispatched with failed events + const eventA = createImpressionEvent('id-A'); + const eventB = createImpressionEvent('id-B'); + await processor.process(eventA); + await processor.process(eventB); + + const failedEvents: ProcessableEvent[] = 
[]; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + failedEvents.push(event); + cache.set(id, { id, event }); + } + + await processor.retryFailedEvents(); + await exhaustMicrotasks(); - const dispatchResults: ResolvablePromise[] = []; - for (let i = 0; i < 10; i++) { - const reuslt = resolvablePromise(); - dispatchResults.push(reuslt); - mockDispatch.mockReturnValueOnce(reuslt); - } + expect(mockDispatch).toHaveBeenCalledTimes(1); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents(failedEvents)); - const cache = getMockSyncCache(); - const dispatchRepeater = getMockRepeater(); + let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB), + ])); + }); - const processor = new BatchEventProcessor({ - eventDispatcher, - dispatchRepeater, - batchSize: 2, - eventStore: cache, + it('should disptach only failed events from the store and not dispatch events that are being dispatched', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + const mockResult1 = resolvablePromise(); + const mockResult2 = resolvablePromise(); + mockDispatch.mockResolvedValueOnce(mockResult1.promise).mockRejectedValueOnce(mockResult2.promise); + + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater, + batchSize: 100, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + // these events should be in dispatch and should not be reomoved from store or dispatched with failed events + const eventA = createImpressionEvent('id-A'); + const eventB = createImpressionEvent('id-B'); + await processor.process(eventA); + await processor.process(eventB); + + dispatchRepeater.execute(0); + await exhaustMicrotasks(); + expect(mockDispatch).toHaveBeenCalledTimes(1); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([eventA, eventB])); + + const failedEvents: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + failedEvents.push(event); + cache.set(id, { id, event }); + } + + await processor.retryFailedEvents(); + await exhaustMicrotasks(); + + expect(mockDispatch).toHaveBeenCalledTimes(2); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents(failedEvents)); + + mockResult2.resolve({}); + await exhaustMicrotasks(); + + let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? 
-1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB), + ])); }); - processor.start(); - await processor.onRunning(); + it('should disptach events in correct batch size and separate events with differnt contexts in separate batch', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater, + batchSize: 3, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + const failedEvents: ProcessableEvent[] = []; + + for(let i = 0; i < 8; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); - expect(mockDispatch).toHaveBeenCalledTimes(0); + if (i == 2 || i == 3) { + event.context.accountId = 'new-account'; + } - // these events should be in dispatching state and should not be reomoved from store or dispatched with failed events - const eventA = createImpressionEvent('id-A'); - const eventB = createImpressionEvent('id-B'); - await processor.process(eventA); - await processor.process(eventB); + failedEvents.push(event); + cache.set(id, { id, event }); + } + + await processor.retryFailedEvents(); + await exhaustMicrotasks(); - dispatchRepeater.execute(0); - await exhaustMicrotasks(); + // events 0 1 4 5 6 7 have one context, and 2 3 have different context + // batches should be [0, 1], [2, 3], [4, 5, 6], [7] + expect(mockDispatch).toHaveBeenCalledTimes(4); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([failedEvents[0], failedEvents[1]])); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([failedEvents[2], failedEvents[3]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([failedEvents[4], failedEvents[5], failedEvents[6]])); + expect(mockDispatch.mock.calls[3][0]).toEqual(formatEvents([failedEvents[7]])); + }); + }); + + describe('when failedEventRepeater is fired', () => { + it('should disptach only failed events from the store and not dispatch queued events', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); + + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + batchSize: 100, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + // these events should be in queue and should not be reomoved from store or dispatched with failed events + const eventA = createImpressionEvent('id-A'); + const eventB = createImpressionEvent('id-B'); + await processor.process(eventA); + await processor.process(eventB); + + const failedEvents: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + failedEvents.push(event); + cache.set(id, { id, event }); + } + + failedEventRepeater.execute(0); + await exhaustMicrotasks(); - expect(mockDispatch).toHaveBeenCalledTimes(1); - - // these events should be in the queue and should not be removed from store or dispatched with failed events - const eventC = createImpressionEvent('id-C'); - const eventD = 
createImpressionEvent('id-D'); - await processor.process(eventC); - await processor.process(eventD); - await exhaustMicrotasks(); + expect(mockDispatch).toHaveBeenCalledTimes(1); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents(failedEvents)); - let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); - expect(eventsInStore).toEqual(expect.arrayContaining([ - expect.objectContaining(eventA), - expect.objectContaining(eventB), - expect.objectContaining(eventC), - expect.objectContaining(eventD), - ])); - - const failedEvents: ProcessableEvent[] = []; - - for(let i = 0; i < 5; i++) { - const id = `id-${i}`; - const event = createImpressionEvent(id); - failedEvents.push(event); - cache.set(id, { id, event }); - } + let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB), + ])); + }); - await processor.retryFailedEvents(); + it('should disptach only failed events from the store and not dispatch events that are being dispatched', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + const mockResult1 = resolvablePromise(); + const mockResult2 = resolvablePromise(); + mockDispatch.mockResolvedValueOnce(mockResult1.promise).mockRejectedValueOnce(mockResult2.promise); + + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); - expect(mockDispatch).toHaveBeenCalledTimes(4); - expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([failedEvents[0], failedEvents[1]])); - expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([failedEvents[2], failedEvents[3]])); - expect(mockDispatch.mock.calls[3][0]).toEqual(formatEvents([failedEvents[4]])); + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + batchSize: 100, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + // these events should be in dispatch and should not be reomoved from store or dispatched with failed events + const eventA = createImpressionEvent('id-A'); + const eventB = createImpressionEvent('id-B'); + await processor.process(eventA); + await processor.process(eventB); - await exhaustMicrotasks(); + dispatchRepeater.execute(0); + await exhaustMicrotasks(); + expect(mockDispatch).toHaveBeenCalledTimes(1); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([eventA, eventB])); + + const failedEvents: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + failedEvents.push(event); + cache.set(id, { id, event }); + } + + failedEventRepeater.execute(0); + await exhaustMicrotasks(); - for(let i = 5; i < 10; i++) { - const id = `id-${i}`; - const event = createImpressionEvent(id); - failedEvents.push(event); - cache.set(id, { id, event }); - } + expect(mockDispatch).toHaveBeenCalledTimes(2); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents(failedEvents)); - await processor.retryFailedEvents(); + mockResult2.resolve({}); + await exhaustMicrotasks(); - expect(mockDispatch).toHaveBeenCalledTimes(7); - expect(mockDispatch.mock.calls[4][0]).toEqual(formatEvents([failedEvents[5], failedEvents[6]])); - expect(mockDispatch.mock.calls[5][0]).toEqual(formatEvents([failedEvents[7], 
failedEvents[8]])); - expect(mockDispatch.mock.calls[6][0]).toEqual(formatEvents([failedEvents[9]])); + let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + expect(eventsInStore).toEqual(expect.arrayContaining([ + expect.objectContaining(eventA), + expect.objectContaining(eventB), + ])); + }); - for(let i = 0; i < 7; i++) { - dispatchResults[i].resolve({}); - } + it('should disptach events in correct batch size and separate events with differnt contexts in separate batch', async () => { + const eventDispatcher = getMockDispatcher(); + const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; + mockDispatch.mockResolvedValue({}); + + const cache = getMockSyncCache(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); - await exhaustMicrotasks(); + const processor = new BatchEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + batchSize: 3, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + const failedEvents: ProcessableEvent[] = []; + + for(let i = 0; i < 8; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); - expect(cache.size()).toBe(4); - eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); - expect(eventsInStore).toEqual(expect.arrayContaining([ - expect.objectContaining(eventA), - expect.objectContaining(eventB), - expect.objectContaining(eventC), - expect.objectContaining(eventD) - ])); - }); + if (i == 2 || i == 3) { + event.context.accountId = 'new-account'; + } + + failedEvents.push(event); + cache.set(id, { id, event }); + } + + failedEventRepeater.execute(0); + await exhaustMicrotasks(); + // events 0 1 4 5 6 7 have one context, and 2 3 have different context + // batches should be [0, 1], [2, 3], [4, 5, 6], [7] + expect(mockDispatch).toHaveBeenCalledTimes(4); + expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([failedEvents[0], failedEvents[1]])); + expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([failedEvents[2], failedEvents[3]])); + expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([failedEvents[4], failedEvents[5], failedEvents[6]])); + expect(mockDispatch.mock.calls[3][0]).toEqual(formatEvents([failedEvents[7]])); + }); + }); it('should emit dispatch event when dispatching events', async () => { const eventDispatcher = getMockDispatcher(); @@ -820,69 +1021,6 @@ describe('QueueingEventProcessor', async () => { expect(dispatchListener).toHaveBeenCalledTimes(1); }); - it('should dispatch only failed events in correct batch size and order when failedEventRepeater is triggered', async () => { - const eventDispatcher = getMockDispatcher(); - const mockDispatch: MockInstance = eventDispatcher.dispatchEvent; - mockDispatch.mockResolvedValue({}); - - const cache = getMockSyncCache(); - const dispatchRepeater = getMockRepeater(); - const failedEventRepeater = getMockRepeater(); - - const processor = new BatchEventProcessor({ - eventDispatcher, - dispatchRepeater, - failedEventRepeater, - batchSize: 2, - eventStore: cache, - }); - - processor.start(); - await processor.onRunning(); - - expect(mockDispatch).toHaveBeenCalledTimes(0); - - // these events should be active and should not be reomoved from store or dispatched with failed events - const eventA = createImpressionEvent('id-A'); - const eventB = createImpressionEvent('id-B'); - - await processor.process(eventA); - await processor.process(eventB); - - let 
eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); - expect(eventsInStore).toEqual(expect.arrayContaining([ - expect.objectContaining(eventA), - expect.objectContaining(eventB) - ])); - - - const events: ProcessableEvent[] = []; - - for(let i = 0; i < 5; i++) { - const id = `id-${i}`; - const event = createImpressionEvent(id); - events.push(event); - cache.set(id, { id, event }); - } - - await failedEventRepeater.execute(0); - - expect(mockDispatch).toHaveBeenCalledTimes(3); - expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents([events[0], events[1]])); - expect(mockDispatch.mock.calls[1][0]).toEqual(formatEvents([events[2], events[3]])); - expect(mockDispatch.mock.calls[2][0]).toEqual(formatEvents([events[4]])); - - await exhaustMicrotasks(); - - expect(cache.size()).toBe(2); - eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); - expect(eventsInStore).toEqual(expect.arrayContaining([ - expect.objectContaining(eventA), - expect.objectContaining(eventB) - ])); - }); - - describe('stop', () => { it('should reject onRunning if stop is called before the processor is started', async () => { const eventDispatcher = getMockDispatcher(); From a2b7fe8cb8f64d09b02d3e1d5a8d09656360ffb4 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 14 Nov 2024 18:14:10 +0600 Subject: [PATCH 22/45] up --- .../event_processor_factory.browser.ts | 7 +- .../event_processor_factory.ts | 17 +---- lib/index.browser.ts | 1 - lib/index.node.ts | 1 - lib/index.react_native.ts | 1 - lib/utils/cache/cache.spec.ts | 0 .../cache/local_storage_cache.browser.spec.ts | 9 +++ .../index.tests.js | 74 ------------------- .../event_processor_config_validator/index.ts | 45 ----------- 9 files changed, 17 insertions(+), 138 deletions(-) create mode 100644 lib/utils/cache/cache.spec.ts create mode 100644 lib/utils/cache/local_storage_cache.browser.spec.ts delete mode 100644 lib/utils/event_processor_config_validator/index.tests.js delete mode 100644 lib/utils/event_processor_config_validator/index.ts diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index e1d03a42a..9641eeddb 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -22,7 +22,7 @@ import { getQueuingEventProcessor, QueueingEventProcessorOptions } from './event import defaultEventDispatcher from './default_dispatcher.browser'; import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; -const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; // 1 minute +export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, @@ -40,6 +40,7 @@ export const createQueueingEventProcessor = ( flushInterval: options.flushInterval, batchSize: options.batchSize, retryOptions: {}, - failedEventRetryOptions: {}, + failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, }); -} +}; + diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index a1d94074f..92b8db15a 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -10,8 +10,6 @@ export const DEFAULT_EVENT_FLUSH_INTERVAL = 1000; export const DEFAULT_EVENT_MAX_QUEUE_SIZE = 10000; export const DEFAULT_MIN_BACKOFF = 1000; export const DEFAULT_MAX_BACKOFF = 
32000; -export const DEFAULT_FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; -export const DEFAULT_FAILED_EVENT_RETRY_BATCH_COUNT = 2; export type QueueingEventProcessorOptions = { eventDispatcher?: EventDispatcher; @@ -22,10 +20,7 @@ export type QueueingEventProcessorOptions = { export type QueueingEventProcessorFactoryOptions = Omit & { eventDispatcher: EventDispatcher; - failedEventRetryOptions?: { - interval?: number, - batchCountPerRetry?: number, - }, + failedEventRetryInterval?: number; retryOptions?: { maxRetries?: number; minBackoff?: number; @@ -73,18 +68,14 @@ export const getQueuingEventProcessor = ( } const dispatchRepeater = new IntervalRepeater(flushInterval); - - const failedEventRetryOptions = options.failedEventRetryOptions ? { - repeater: new IntervalRepeater(options.failedEventRetryOptions.interval - || DEFAULT_FAILED_EVENT_RETRY_INTERVAL), - batchCountPerRetry: options.failedEventRetryOptions.batchCountPerRetry || DEFAULT_FAILED_EVENT_RETRY_BATCH_COUNT, - } : undefined; + const failedEventRepeater = options.failedEventRetryInterval ? + new IntervalRepeater(options.failedEventRetryInterval) : undefined; return new EventProcessorConstructor({ eventDispatcher, closingEventDispatcher, dispatchRepeater, - failedEventRetryOptions, + failedEventRepeater, retryConfig, batchSize, startupLogs diff --git a/lib/index.browser.ts b/lib/index.browser.ts index fd92d72c9..68c276e3b 100644 --- a/lib/index.browser.ts +++ b/lib/index.browser.ts @@ -23,7 +23,6 @@ import defaultEventDispatcher from './event_processor/default_dispatcher.browser import sendBeaconEventDispatcher from './plugins/event_dispatcher/send_beacon_dispatcher'; import * as enums from './utils/enums'; import * as loggerPlugin from './plugins/logger'; -import eventProcessorConfigValidator from './utils/event_processor_config_validator'; import { createNotificationCenter } from './core/notification_center'; import { default as eventProcessor } from './plugins/event_processor'; import { OptimizelyDecideOption, Client, Config, OptimizelyOptions } from './shared_types'; diff --git a/lib/index.node.ts b/lib/index.node.ts index 98efc5d64..554f8b9c1 100644 --- a/lib/index.node.ts +++ b/lib/index.node.ts @@ -21,7 +21,6 @@ import * as loggerPlugin from './plugins/logger'; import configValidator from './utils/config_validator'; import defaultErrorHandler from './plugins/error_handler'; import defaultEventDispatcher from './event_processor/default_dispatcher.node'; -import eventProcessorConfigValidator from './utils/event_processor_config_validator'; import { createNotificationCenter } from './core/notification_center'; import { createEventProcessor } from './plugins/event_processor'; import { OptimizelyDecideOption, Client, Config } from './shared_types'; diff --git a/lib/index.react_native.ts b/lib/index.react_native.ts index b2654823d..7fa16c6ca 100644 --- a/lib/index.react_native.ts +++ b/lib/index.react_native.ts @@ -21,7 +21,6 @@ import configValidator from './utils/config_validator'; import defaultErrorHandler from './plugins/error_handler'; import * as loggerPlugin from './plugins/logger/index.react_native'; import defaultEventDispatcher from './event_processor/default_dispatcher.browser'; -import eventProcessorConfigValidator from './utils/event_processor_config_validator'; import { createNotificationCenter } from './core/notification_center'; import { createEventProcessor } from './plugins/event_processor/index.react_native'; import { OptimizelyDecideOption, Client, Config } from './shared_types'; diff --git 
a/lib/utils/cache/cache.spec.ts b/lib/utils/cache/cache.spec.ts new file mode 100644 index 000000000..e69de29bb diff --git a/lib/utils/cache/local_storage_cache.browser.spec.ts b/lib/utils/cache/local_storage_cache.browser.spec.ts new file mode 100644 index 000000000..35db8d123 --- /dev/null +++ b/lib/utils/cache/local_storage_cache.browser.spec.ts @@ -0,0 +1,9 @@ +import { vi, describe, it, expect, beforeEach } from 'vitest'; + +describe('LocalStorageCache', () => { + let mockStorage = new Map(); + + beforeEach(() => { + + }) +}); \ No newline at end of file diff --git a/lib/utils/event_processor_config_validator/index.tests.js b/lib/utils/event_processor_config_validator/index.tests.js deleted file mode 100644 index 6ecc6a134..000000000 --- a/lib/utils/event_processor_config_validator/index.tests.js +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Copyright 2019-2020, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { assert } from 'chai'; - -import eventProcessorConfigValidator from './index'; - -describe('utils/event_processor_config_validator', function() { - describe('validateEventFlushInterval', function() { - it('returns false for null & undefined', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventFlushInterval(null)); - assert.isFalse(eventProcessorConfigValidator.validateEventFlushInterval(undefined)); - }); - - it('returns false for a string', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventFlushInterval('not a number')); - }); - - it('returns false for an object', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventFlushInterval({ value: 'not a number' })); - }); - - it('returns false for a negative integer', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventFlushInterval(-1000)); - }); - - it('returns false for 0', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventFlushInterval(0)); - }); - - it('returns true for a positive integer', function() { - assert.isTrue(eventProcessorConfigValidator.validateEventFlushInterval(30000)); - }); - }); - - describe('validateEventBatchSize', function() { - it('returns false for null & undefined', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventBatchSize(null)); - assert.isFalse(eventProcessorConfigValidator.validateEventBatchSize(undefined)); - }); - - it('returns false for a string', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventBatchSize('not a number')); - }); - - it('returns false for an object', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventBatchSize({ value: 'not a number' })); - }); - - it('returns false for a negative integer', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventBatchSize(-1000)); - }); - - it('returns false for 0', function() { - assert.isFalse(eventProcessorConfigValidator.validateEventBatchSize(0)); - }); - - it('returns true for a positive integer', function() { - 
assert.isTrue(eventProcessorConfigValidator.validateEventBatchSize(10)); - }); - }); -}); diff --git a/lib/utils/event_processor_config_validator/index.ts b/lib/utils/event_processor_config_validator/index.ts deleted file mode 100644 index e6bd304bb..000000000 --- a/lib/utils/event_processor_config_validator/index.ts +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Copyright 2019-2020, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import fns from '../fns'; - -/** - * Return true if the argument is a valid event batch size, false otherwise - * @param {unknown} eventBatchSize - * @returns {boolean} - */ -const validateEventBatchSize = function(eventBatchSize: unknown): boolean { - if (typeof eventBatchSize === 'number' && fns.isSafeInteger(eventBatchSize)) { - return eventBatchSize >= 1; - } - return false; -} - -/** - * Return true if the argument is a valid event flush interval, false otherwise - * @param {unknown} eventFlushInterval - * @returns {boolean} - */ -const validateEventFlushInterval = function(eventFlushInterval: unknown): boolean { - if (typeof eventFlushInterval === 'number' && fns.isSafeInteger(eventFlushInterval)) { - return eventFlushInterval > 0; - } - return false; -} - -export default { - validateEventBatchSize: validateEventBatchSize, - validateEventFlushInterval: validateEventFlushInterval, -} From 37aba3400379e8fa25589e5628a39c8f77d941b1 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 14 Nov 2024 22:06:54 +0600 Subject: [PATCH 23/45] update --- .../cache/local_storage_cache.browser.spec.ts | 68 +++++++++++++++++-- .../cache/local_storage_cache.browser.ts | 9 ++- tests/index.react_native.spec.ts | 1 - vitest.config.mts | 2 +- 4 files changed, 73 insertions(+), 7 deletions(-) diff --git a/lib/utils/cache/local_storage_cache.browser.spec.ts b/lib/utils/cache/local_storage_cache.browser.spec.ts index 35db8d123..703ce1a9a 100644 --- a/lib/utils/cache/local_storage_cache.browser.spec.ts +++ b/lib/utils/cache/local_storage_cache.browser.spec.ts @@ -1,9 +1,69 @@ import { vi, describe, it, expect, beforeEach } from 'vitest'; +import { LocalStorageCache } from './local_storage_cache.browser'; + +type TestData = { + a: number; + b: string; + d: { e: boolean }; +} describe('LocalStorageCache', () => { - let mockStorage = new Map(); - beforeEach(() => { + localStorage.clear(); + }); + + it('should store a stringified value in local storage', () => { + const cache = new LocalStorageCache(); + const data = { a: 1, b: '2', d: { e: true } }; + cache.set('key', data); + expect(localStorage.getItem('key')).toBe(JSON.stringify(data)); + }); + + it('should return undefined if get is called for a nonexistent key', () => { + const cache = new LocalStorageCache(); + expect(cache.get('nonexistent')).toBeUndefined(); + }); + + it('should return the value if get is called for an existing key', () => { + const cache = new LocalStorageCache(); + cache.set('key', 'value'); + expect(cache.get('key')).toBe('value'); + }); + + it('should return the value after json parsing if 
get is called for an existing key', () => { + const cache = new LocalStorageCache(); + const data = { a: 1, b: '2', d: { e: true } }; + cache.set('key', data); + expect(cache.get('key')).toEqual(data); + }); + + it('should remove the key from local storage when remove is called', () => { + const cache = new LocalStorageCache(); + cache.set('key', 'value'); + cache.remove('key'); + expect(localStorage.getItem('key')).toBeNull(); + }); + + it('should remove all keys from local storage when clear is called', () => { + const cache = new LocalStorageCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + expect(localStorage.length).toBe(2); + cache.clear(); + expect(localStorage.length).toBe(0); + }); + + it('should return all keys when getKeys is called', () => { + const cache = new LocalStorageCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + expect(cache.getKeys()).toEqual(['key1', 'key2']); + }); - }) -}); \ No newline at end of file + it('should return an array of values for an array of keys when getBatched is called', () => { + const cache = new LocalStorageCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + expect(cache.getBatched(['key1', 'key2'])).toEqual(['value1', 'value2']); + }); +}); diff --git a/lib/utils/cache/local_storage_cache.browser.ts b/lib/utils/cache/local_storage_cache.browser.ts index 1bdf129d5..b5d7413bb 100644 --- a/lib/utils/cache/local_storage_cache.browser.ts +++ b/lib/utils/cache/local_storage_cache.browser.ts @@ -22,7 +22,14 @@ export class LocalStorageCache implements SyncCache { } public getKeys(): string[] { - return Object.keys(localStorage); + const keys: string[] = []; + for(let i = 0; i < localStorage.length; i++) { + const key = localStorage.key(i); + if (key) { + keys.push(key); + } + } + return keys; } getBatched(keys: string[]): Maybe[] { diff --git a/tests/index.react_native.spec.ts b/tests/index.react_native.spec.ts index 6f076e614..0d8648403 100644 --- a/tests/index.react_native.spec.ts +++ b/tests/index.react_native.spec.ts @@ -23,7 +23,6 @@ import testData from '../lib/tests/test_data'; import packageJSON from '../package.json'; import optimizelyFactory from '../lib/index.react_native'; import configValidator from '../lib/utils/config_validator'; -import eventProcessorConfigValidator from '../lib/utils/event_processor_config_validator'; import { getMockProjectConfigManager } from '../lib/tests/mock/mock_project_config_manager'; import { createProjectConfig } from '../lib/project_config/project_config'; diff --git a/vitest.config.mts b/vitest.config.mts index 61452ea16..ef2b08c8b 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/batch_event_processor.spec.ts'], + include: ['**/local_storage_cache.browser.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 48c9e1da1faaf89c29bfe2f2f5d4bb2f4abc93e8 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 14 Nov 2024 23:38:03 +0600 Subject: [PATCH 24/45] upd --- .../async_storage_cache.react_native.spec.ts | 98 +++++++++++++++++++ .../cache/local_storage_cache.browser.spec.ts | 2 +- vitest.config.mts | 2 +- 3 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 lib/utils/cache/async_storage_cache.react_native.spec.ts diff --git a/lib/utils/cache/async_storage_cache.react_native.spec.ts b/lib/utils/cache/async_storage_cache.react_native.spec.ts new file mode 100644 index 000000000..dbb8d3ca5 
--- /dev/null +++ b/lib/utils/cache/async_storage_cache.react_native.spec.ts @@ -0,0 +1,98 @@ + + +vi.mock('@react-native-async-storage/async-storage', () => { + const MockAsyncStorage = { + data: new Map(), + async setItem(key: string, value: string) { + this.data.set(key, value); + }, + async getItem(key: string) { + return this.data.get(key) || null; + }, + async removeItem(key: string) { + this.data.delete(key); + }, + async getAllKeys() { + return Array.from(this.data.keys()); + }, + async clear() { + this.data.clear(); + }, + async multiGet(keys: string[]) { + return keys.map(key => [key, this.data.get(key)]); + }, + } + return { default: MockAsyncStorage }; +}); + +import { vi, describe, it, expect, beforeEach } from 'vitest'; +import { AsyncStorageCache } from './async_storage_cache.react_native'; +import AsyncStorage from '@react-native-async-storage/async-storage'; + +type TestData = { + a: number; + b: string; + d: { e: boolean }; +} + + +describe('AsyncStorageCache', () => { + beforeEach(async () => { + await AsyncStorage.clear(); + }); + + it('should store a stringified value in asyncstorage', async () => { + const cache = new AsyncStorageCache(); + const data = { a: 1, b: '2', d: { e: true } }; + await cache.set('key', data); + expect(await AsyncStorage.getItem('key')).toBe(JSON.stringify(data)); + }); + + it('should return undefined if get is called for a nonexistent key', async () => { + const cache = new AsyncStorageCache(); + expect(await cache.get('nonexistent')).toBeUndefined(); + }); + + it('should return the value if get is called for an existing key', async () => { + const cache = new AsyncStorageCache(); + await cache.set('key', 'value'); + expect(await cache.get('key')).toBe('value'); + }); + + it('should return the value after json parsing if get is called for an existing key', async () => { + const cache = new AsyncStorageCache(); + const data = { a: 1, b: '2', d: { e: true } }; + await cache.set('key', data); + expect(await cache.get('key')).toEqual(data); + }); + + it('should remove the key from async storage when remove is called', async () => { + const cache = new AsyncStorageCache(); + await cache.set('key', 'value'); + await cache.remove('key'); + expect(await AsyncStorage.getItem('key')).toBeNull(); + }); + + it('should remove all keys from async storage when clear is called', async () => { + const cache = new AsyncStorageCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + expect((await AsyncStorage.getAllKeys()).length).toBe(2); + cache.clear(); + expect((await AsyncStorage.getAllKeys()).length).toBe(0); + }); + + it('should return all keys when getKeys is called', async () => { + const cache = new AsyncStorageCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + expect(await cache.getKeys()).toEqual(['key1', 'key2']); + }); + + it('should return an array of values for an array of keys when getBatched is called', async () => { + const cache = new AsyncStorageCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + expect(await cache.getBatched(['key1', 'key2'])).toEqual(['value1', 'value2']); + }); +}); diff --git a/lib/utils/cache/local_storage_cache.browser.spec.ts b/lib/utils/cache/local_storage_cache.browser.spec.ts index 703ce1a9a..e052246c5 100644 --- a/lib/utils/cache/local_storage_cache.browser.spec.ts +++ b/lib/utils/cache/local_storage_cache.browser.spec.ts @@ -1,4 +1,4 @@ -import { vi, describe, it, expect, beforeEach } from 'vitest'; +import { describe, 
it, expect, beforeEach } from 'vitest'; import { LocalStorageCache } from './local_storage_cache.browser'; type TestData = { diff --git a/vitest.config.mts b/vitest.config.mts index ef2b08c8b..ffdc58dba 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/local_storage_cache.browser.spec.ts'], + include: ['**/async_storage_cache.react_native.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From ee194aeee283ccf7b4a31f7081a6b2f0fc5c9867 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 15 Nov 2024 00:59:01 +0600 Subject: [PATCH 25/45] retry runner tests --- .../batch_event_processor.spec.ts | 1 - .../executor/backoff_retry_runner.spec.ts | 141 ++++++++++++++++++ lib/utils/executor/backoff_retry_runner.ts | 27 ++-- vitest.config.mts | 2 +- 4 files changed, 156 insertions(+), 15 deletions(-) create mode 100644 lib/utils/executor/backoff_retry_runner.spec.ts diff --git a/lib/event_processor/batch_event_processor.spec.ts b/lib/event_processor/batch_event_processor.spec.ts index 4cbf53f49..1555702d2 100644 --- a/lib/event_processor/batch_event_processor.spec.ts +++ b/lib/event_processor/batch_event_processor.spec.ts @@ -27,7 +27,6 @@ import { getMockLogger } from '../tests/mock/mock_logger'; import { getMockRepeater } from '../tests/mock/mock_repeater'; import * as retry from '../utils/executor/backoff_retry_runner'; import { ServiceState } from '../service'; -import { EventDispatchResult } from '../modules/event_processor/eventProcessor'; const getMockDispatcher = () => { return { diff --git a/lib/utils/executor/backoff_retry_runner.spec.ts b/lib/utils/executor/backoff_retry_runner.spec.ts new file mode 100644 index 000000000..db4383df0 --- /dev/null +++ b/lib/utils/executor/backoff_retry_runner.spec.ts @@ -0,0 +1,141 @@ +import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { runWithRetry } from './backoff_retry_runner'; +import { advanceTimersByTime } from '../../../tests/testUtils'; + +const exhaustMicrotasks = async (loop = 100) => { + for(let i = 0; i < loop; i++) { + await Promise.resolve(); + } +} + +describe('runWithRetry', () => { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + it('should return the result of the task if it succeeds in first try', async () => { + const task = async () => 1; + const { result } = runWithRetry(task); + expect(await result).toBe(1); + }); + + it('should retry the task if it fails', async () => { + let count = 0; + const task = async () => { + count++; + if (count === 1) { + throw new Error('error'); + } + return 1; + }; + const { result } = runWithRetry(task); + + await exhaustMicrotasks(); + await advanceTimersByTime(0); + + expect(await result).toBe(1); + }); + + it('should retry the task up to the maxRetries before failing', async () => { + let count = 0; + const task = async () => { + count++; + throw new Error('error'); + }; + const { result } = runWithRetry(task, undefined, 5); + + for(let i = 0; i < 5; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(0); + } + + try { + await result; + } catch (e) { + expect(count).toBe(6); + } + }); + + it('should retry idefinitely if maxRetries is undefined', async () => { + let count = 0; + const task = async () => { + count++; + if (count < 500) { + throw new Error('error'); + } + return 1; + }; + + const { result } = runWithRetry(task); + + for(let i = 0; i < 500; i++) { + await 
exhaustMicrotasks(); + await advanceTimersByTime(0); + } + expect(await result).toBe(1); + expect(count).toBe(500); + }); + + it('should use the backoff controller to delay retries', async () => { + let count = 0; + const task = vi.fn().mockImplementation(async () => { + count++; + throw new Error('error'); + }); + + const delays = [7, 13, 19, 20, 27]; + + let backoffCount = 0; + const backoff = { + backoff: () => { + return delays[backoffCount++]; + }, + reset: () => {}, + }; + + const { result } = runWithRetry(task, backoff, 5); + result.catch(() => {}); + + expect(task).toHaveBeenCalledTimes(1); + + for(let i = 1; i <= 5; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(delays[i - 1] - 1); + expect(task).toHaveBeenCalledTimes(i); + await advanceTimersByTime(1); + expect(task).toHaveBeenCalledTimes(i + 1); + } + }); + + it('should cancel the retry if the cancel function is called', async () => { + let count = 0; + const task = async () => { + count++; + throw new Error('error'); + }; + + const { result, cancelRetry } = runWithRetry(task, undefined, 100); + + for(let i = 0; i < 5; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(0); + } + + cancelRetry(); + + for(let i = 0; i < 100; i++) { + await exhaustMicrotasks(); + await advanceTimersByTime(0); + } + + try { + await result; + } catch (e) { + expect(count).toBe(6); + } + }); +}); diff --git a/lib/utils/executor/backoff_retry_runner.ts b/lib/utils/executor/backoff_retry_runner.ts index c6ef1e04a..88f6063ec 100644 --- a/lib/utils/executor/backoff_retry_runner.ts +++ b/lib/utils/executor/backoff_retry_runner.ts @@ -8,18 +8,17 @@ export type RunResult = { cancelRetry: Fn; }; +type CancelSignal = { + cancelled: boolean; +} + const runTask = ( task: AsyncProducer, returnPromise: ResolvablePromise, + cancelSignal: CancelSignal, backoff?: BackoffController, retryRemaining?: number, -): Fn => { - let cancelled = false; - - const cancel = () => { - cancelled = true; - }; - +): void => { task().then((res) => { returnPromise.resolve(res); }).catch((e) => { @@ -27,18 +26,16 @@ const runTask = ( returnPromise.reject(e); return; } - if (cancelled) { + if (cancelSignal.cancelled) { returnPromise.reject(new Error('Retry cancelled')); return; } const delay = backoff?.backoff() ?? 0; setTimeout(() => { retryRemaining = retryRemaining === undefined ? 
undefined : retryRemaining - 1; - runTask(task, returnPromise, backoff, retryRemaining); + runTask(task, returnPromise, cancelSignal, backoff, retryRemaining); }, delay); }); - - return cancel; } export const runWithRetry = ( @@ -47,6 +44,10 @@ export const runWithRetry = ( maxRetries?: number ): RunResult => { const returnPromise = resolvablePromise(); - const cancel = runTask(task, returnPromise, backoff, maxRetries); - return { cancelRetry: cancel, result: returnPromise.promise }; + const cancelSignal = { cancelled: false }; + const cancelRetry = () => { + cancelSignal.cancelled = true; + } + runTask(task, returnPromise, cancelSignal, backoff, maxRetries); + return { cancelRetry, result: returnPromise.promise }; } diff --git a/vitest.config.mts b/vitest.config.mts index ffdc58dba..7e68e6efd 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/async_storage_cache.react_native.spec.ts'], + include: ['**/backoff_retry_runner.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 227c7b093d54e5f88d09cca2c201c37b9e45ca99 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 15 Nov 2024 01:01:58 +0600 Subject: [PATCH 26/45] rem --- lib/utils/cache/store.ts | 227 --------------------------------------- lib/utils/queue/queue.ts | 43 -------- 2 files changed, 270 deletions(-) delete mode 100644 lib/utils/cache/store.ts delete mode 100644 lib/utils/queue/queue.ts diff --git a/lib/utils/cache/store.ts b/lib/utils/cache/store.ts deleted file mode 100644 index 6ed9fd1ef..000000000 --- a/lib/utils/cache/store.ts +++ /dev/null @@ -1,227 +0,0 @@ -// import { SyncCache, AsyncCache, Cache, CacheOp, CacheWithOp, OpValue, OperationOf } from "./cache"; - -// export const isAsync = (operation: 'sync' | 'async'): operation is 'async' => { -// return operation === 'async'; -// } - -// const transform = (op: Op, value: OpValue, transformer: (source: VS) => VT): OpValue => { -// if (op === 'async') { -// const val: Promise = value as any; -// const ret: Promise = val.then((v) => transformer(v)); -// return ret as OpValue; -// } - -// return transformer(value as VS) as OpValue; -// } - -// export const transformCache = ( -// cache: Cache, -// prefix: string, -// transformTo: (value: VS) => VT, -// transformFrom: (value: VT) => VS -// ): CacheWithOp, VT> => { -// const addPrefix = (key: string): string => { -// return `${prefix}${key}`; -// }; - -// const removePrefix = (key: string): string => { -// return key.substring(prefix.length); -// }; - -// const transformedCache: CacheWithOp, VT> = { -// operation: cache.operation, -// set: (key: string, value: VT) => cache.set(addPrefix(key), transformFrom(value)), -// get: (key: string) => { -// const prefixedKey = addPrefix(key); -// if (cache.operation === 'async') { -// const value = cache.get(prefixedKey); -// return value.then((v) => v ? transformTo(v) : undefined); -// } -// const value = cache.get(prefixedKey); -// return value ? 
transformTo(value) : undefined; -// }, -// remove: (key: string) => cache.remove(addPrefix(key)), -// clear: () => cache.clear(), -// getKeys: () => { -// if (cache.operation === 'async') { - -// } -// cache.getKeys(), -// } -// getAll: () => { -// const map = new Map(); -// cache.getAll().forEach((value, key) => { -// map.set(key, transformTo(value)); -// }); -// return map; -// } -// }; - -// return transformedCache; -// } - -// export class SyncPrefixStore implements SyncCache { -// private cache: SyncCache; -// private prefix: string; -// public readonly operation = 'sync'; - -// constructor(cache: SyncCache, prefix: string) { -// this.cache = cache; -// this.prefix = prefix; -// } - -// private addPrefix(key: string): string { -// return `${this.prefix}:${key}`; -// } - -// private removePrefix(key: string): string { -// return key.substring(this.prefix.length + 1); -// } - -// set(key: string, value: V): void { -// return this.cache.set(this.addPrefix(key), value); -// } - -// get(key: string): V | undefined{ -// return this.cache.get(this.addPrefix(key)); -// } - -// remove(key: string): void { -// return this.cache.remove(this.addPrefix(key)); -// } - -// clear(): void { -// this.getInternalKeys().forEach((key) => this.cache.remove(key)); -// } - -// private getInternalKeys(): string[] { -// return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); -// } - -// getKeys(): string[] { -// return this.getInternalKeys().map((key) => this.removePrefix(key)); -// } - -// getAll(): Map { -// const map = new Map(); -// this.getInternalKeys().forEach((key) => { -// const value = this.cache.get(key); -// if (value) { -// map.set(this.removePrefix(key), value); -// } -// }); -// return map; -// } -// } - -// // export class SyncPrefixStore implements SyncCache { -// // private cache: SyncCache; -// // private prefix: string; -// // public readonly operation = 'sync'; - -// // constructor(cache: SyncCache, prefix: string) { -// // this.cache = cache; -// // this.prefix = prefix; -// // } - -// // private addPrefix(key: string): string { -// // return `${this.prefix}:${key}`; -// // } - -// // private removePrefix(key: string): string { -// // return key.substring(this.prefix.length + 1); -// // } - -// // set(key: string, value: V): void { -// // return this.cache.set(this.addPrefix(key), value); -// // } - -// // get(key: string): V | undefined{ -// // return this.cache.get(this.addPrefix(key)); -// // } - -// // remove(key: string): void { -// // return this.cache.remove(this.addPrefix(key)); -// // } - -// // clear(): void { -// // this.getInternalKeys().forEach((key) => this.cache.remove(key)); -// // } - -// // private getInternalKeys(): string[] { -// // return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); -// // } - -// // getKeys(): string[] { -// // return this.getInternalKeys().map((key) => this.removePrefix(key)); -// // } - -// // getAll(): Map { -// // const map = new Map(); -// // this.getInternalKeys().forEach((key) => { -// // const value = this.cache.get(key); -// // if (value) { -// // map.set(this.removePrefix(key), value); -// // } -// // }); -// // return map; -// // } -// // } - - -// export class AyncPrefixStore implements AsyncCache { -// private cache: AsyncCache; -// private prefix: string; -// public readonly operation = 'async'; - -// constructor(cache: AsyncCache, prefix: string) { -// this.cache = cache; -// this.prefix = prefix; -// } - -// private addPrefix(key: string): string { -// return `${this.prefix}:${key}`; -// } - -// 
private removePrefix(key: string): string { -// return key.substring(this.prefix.length + 1); -// } - -// set(key: string, value: V): Promise { -// return this.cache.set(this.addPrefix(key), value); -// } - -// get(key: string): Promise { -// return this.cache.get(this.addPrefix(key)); -// } - -// remove(key: string): Promise { -// return this.cache.remove(this.addPrefix(key)); -// } - -// async clear(): Promise { -// const keys = await this.getInternalKeys(); -// await Promise.all(keys.map((key) => this.cache.remove(key))); -// } - -// private async getInternalKeys(): Promise { -// return this.cache.getKeys().then((keys) => keys.filter((key) => key.startsWith(this.prefix))); -// } - -// async getKeys(): Promise { -// return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key))); -// } - -// async getAll(): Promise> { -// const keys = await this.getInternalKeys(); -// const values = await Promise.all(keys.map((key) => this.cache.get(key))); -// const map = new Map(); -// keys.forEach((key, index) => { -// const value = values[index]; -// if (value) { -// map.set(this.removePrefix(key), value) -// } -// }); -// return map; -// } -// } diff --git a/lib/utils/queue/queue.ts b/lib/utils/queue/queue.ts deleted file mode 100644 index adabbcaff..000000000 --- a/lib/utils/queue/queue.ts +++ /dev/null @@ -1,43 +0,0 @@ -export class Queue { - private maxQueueSize: number; - private queue: T[]; - private nItems: number; - private tail: number; - - constructor(maxQueueSize: number) { - this.maxQueueSize = maxQueueSize; - this.queue = new Array(maxQueueSize); - this.nItems = 0; - this.tail = 0; - } - - enqueue(item: T): void { - if (this.nItems === this.maxQueueSize) { - throw new Error("Queue is full"); - } - this.queue[this.tail] = item; - this.nItems++; - this.tail = (this.tail + 1) % this.maxQueueSize; - } - - dequeue(): T | undefined { - if (this.nItems === 0) { - return undefined; - } - const item = this.queue[(this.tail - this.nItems + this.maxQueueSize) % this.maxQueueSize]; - this.nItems--; - return item; - } - - size(): number { - return this.nItems; - } - - isEmpty(): boolean { - return this.nItems === 0; - } - - isFull(): boolean { - return this.nItems === this.maxQueueSize; - } -} From 20771dc20bccf5005a2d89029780b1f1455d9626 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 15 Nov 2024 18:51:58 +0600 Subject: [PATCH 27/45] more test --- lib/tests/mock/mock_cache.ts | 40 +++- lib/utils/cache/cache.spec.ts | 335 ++++++++++++++++++++++++++++++++++ lib/utils/cache/cache.ts | 2 +- vitest.config.mts | 2 +- 4 files changed, 376 insertions(+), 3 deletions(-) diff --git a/lib/tests/mock/mock_cache.ts b/lib/tests/mock/mock_cache.ts index 3f8e3928c..a86bc8bcd 100644 --- a/lib/tests/mock/mock_cache.ts +++ b/lib/tests/mock/mock_cache.ts @@ -1,4 +1,4 @@ -import { SyncCache } from "../../utils/cache/cache"; +import { SyncCache, AsyncCache } from "../../utils/cache/cache"; import { Maybe } from "../../utils/type"; type SyncCacheWithAddOn = SyncCache & { @@ -6,6 +6,11 @@ type SyncCacheWithAddOn = SyncCache & { getAll(): Map; }; +type AsyncCacheWithAddOn = AsyncCache & { + size(): Promise; + getAll(): Promise>; +}; + export const getMockSyncCache = (): SyncCacheWithAddOn => { const cache = { operation: 'sync' as const, @@ -39,3 +44,36 @@ export const getMockSyncCache = (): SyncCacheWithAddOn => { return cache; }; + +export const getMockAsyncCache = (): AsyncCacheWithAddOn => { + const cache = { + operation: 'async' as const, + data: new Map(), + async remove(key: string): 
Promise { + this.data.delete(key); + }, + async clear(): Promise { + this.data.clear(); + }, + async getKeys(): Promise { + return Array.from(this.data.keys()); + }, + async getAll(): Promise> { + return this.data; + }, + async getBatched(keys: string[]): Promise[]> { + return Promise.all(keys.map((key) => this.get(key))); + }, + async size(): Promise { + return this.data.size; + }, + async get(key: string): Promise> { + return this.data.get(key); + }, + async set(key: string, value: T): Promise { + this.data.set(key, value); + } + } + + return cache; +}; diff --git a/lib/utils/cache/cache.spec.ts b/lib/utils/cache/cache.spec.ts index e69de29bb..32e2cbaea 100644 --- a/lib/utils/cache/cache.spec.ts +++ b/lib/utils/cache/cache.spec.ts @@ -0,0 +1,335 @@ +import { describe, it, expect, beforeEach } from 'vitest'; +import { SyncPrefixCache, AsyncPrefixCache } from './cache'; +import { getMockSyncCache, getMockAsyncCache } from '../../tests/mock/mock_cache'; + +describe('SyncPrefixCache', () => { + describe('set', () => { + it('should add prefix to key when setting in the underlying cache', () => { + const cache = getMockSyncCache(); + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + prefixCache.set('key', 'value'); + expect(cache.get('prefix:key')).toEqual('value'); + }); + + it('should transform value when setting in the underlying cache', () => { + const cache = getMockSyncCache(); + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + prefixCache.set('key', 'value'); + expect(cache.get('prefix:key')).toEqual('VALUE'); + }); + + it('should work correctly with empty prefix', () => { + const cache = getMockSyncCache(); + const prefixCache = new SyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + prefixCache.set('key', 'value'); + expect(cache.get('key')).toEqual('VALUE'); + }); + }); + + describe('get', () => { + it('should remove prefix from key when getting from the underlying cache', () => { + const cache = getMockSyncCache(); + cache.set('prefix:key', 'value'); + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + expect(prefixCache.get('key')).toEqual('value'); + }); + + it('should transform value after getting from the underlying cache', () => { + const cache = getMockSyncCache(); + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + cache.set('prefix:key', 'VALUE'); + expect(prefixCache.get('key')).toEqual('value'); + }); + + + it('should work correctly with empty prefix', () => { + const cache = getMockSyncCache(); + const prefixCache = new SyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + cache.set('key', 'VALUE'); + expect(prefixCache.get('key')).toEqual('value'); + }); + }); + + describe('remove', () => { + it('should remove the correct value from the underlying cache', () => { + const cache = getMockSyncCache(); + cache.set('prefix:key', 'value'); + cache.set('key', 'value'); + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + prefixCache.remove('key'); + expect(cache.get('prefix:key')).toBeUndefined(); + expect(cache.get('key')).toEqual('value'); + }); + + it('should work with empty prefix', () => { + const cache = getMockSyncCache(); + cache.set('key', 'value'); + const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); + prefixCache.remove('key'); + expect(cache.get('key')).toBeUndefined(); + }); + }); + + 
describe('clear', () => { + it('should remove keys with correct prefix from the underlying cache', () => { + const cache = getMockSyncCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + cache.set('prefix:key1', 'value1'); + cache.set('prefix:key2', 'value2'); + + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + prefixCache.clear(); + + expect(cache.get('key1')).toEqual('value1'); + expect(cache.get('key2')).toEqual('value2'); + expect(cache.get('prefix:key1')).toBeUndefined(); + expect(cache.get('prefix:key2')).toBeUndefined(); + }); + + it('should work with empty prefix', () => { + const cache = getMockSyncCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + + const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); + prefixCache.clear(); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBeUndefined(); + }); + }); + + describe('getKeys', () => { + it('should return keys with correct prefix', () => { + const cache = getMockSyncCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + cache.set('prefix:key3', 'value1'); + cache.set('prefix:key4', 'value2'); + + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + + const keys = prefixCache.getKeys(); + expect(keys).toEqual(expect.arrayContaining(['key3', 'key4'])); + }); + + it('should work with empty prefix', () => { + const cache = getMockSyncCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + + const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); + + const keys = prefixCache.getKeys(); + expect(keys).toEqual(expect.arrayContaining(['key1', 'key2'])); + }); + }); + + describe('getBatched', () => { + it('should return values with correct prefix', () => { + const cache = getMockSyncCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + cache.set('key3', 'value3'); + cache.set('prefix:key1', 'prefix:value1'); + cache.set('prefix:key2', 'prefix:value2'); + + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + + const values = prefixCache.getBatched(['key1', 'key2', 'key3']); + expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); + }); + + it('should transform values after getting from the underlying cache', () => { + const cache = getMockSyncCache(); + cache.set('key1', 'VALUE1'); + cache.set('key2', 'VALUE2'); + cache.set('key3', 'VALUE3'); + cache.set('prefix:key1', 'PREFIX:VALUE1'); + cache.set('prefix:key2', 'PREFIX:VALUE2'); + + const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v.toLocaleLowerCase(), (v) => v.toUpperCase()); + + const values = prefixCache.getBatched(['key1', 'key2', 'key3']); + expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); + }); + + it('should work with empty prefix', () => { + const cache = getMockSyncCache(); + cache.set('key1', 'value1'); + cache.set('key2', 'value2'); + + const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); + + const values = prefixCache.getBatched(['key1', 'key2']); + expect(values).toEqual(expect.arrayContaining(['value1', 'value2'])); + }); + }); +}); + +describe('AsyncPrefixCache', () => { + describe('set', () => { + it('should add prefix to key when setting in the underlying cache', async () => { + const cache = getMockAsyncCache(); + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + await 
prefixCache.set('key', 'value'); + expect(await cache.get('prefix:key')).toEqual('value'); + }); + + it('should transform value when setting in the underlying cache', async () => { + const cache = getMockAsyncCache(); + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + await prefixCache.set('key', 'value'); + expect(await cache.get('prefix:key')).toEqual('VALUE'); + }); + + it('should work correctly with empty prefix', async () => { + const cache = getMockAsyncCache(); + const prefixCache = new AsyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + await prefixCache.set('key', 'value'); + expect(await cache.get('key')).toEqual('VALUE'); + }); + }); + + describe('get', () => { + it('should remove prefix from key when getting from the underlying cache', async () => { + const cache = getMockAsyncCache(); + await cache.set('prefix:key', 'value'); + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + expect(await prefixCache.get('key')).toEqual('value'); + }); + + it('should transform value after getting from the underlying cache', async () => { + const cache = getMockAsyncCache(); + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + await cache.set('prefix:key', 'VALUE'); + expect(await prefixCache.get('key')).toEqual('value'); + }); + + + it('should work correctly with empty prefix', async () => { + const cache = getMockAsyncCache(); + const prefixCache = new AsyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + await cache.set('key', 'VALUE'); + expect(await prefixCache.get('key')).toEqual('value'); + }); + }); + + describe('remove', () => { + it('should remove the correct value from the underlying cache', async () => { + const cache = getMockAsyncCache(); + cache.set('prefix:key', 'value'); + cache.set('key', 'value'); + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + await prefixCache.remove('key'); + expect(await cache.get('prefix:key')).toBeUndefined(); + expect(await cache.get('key')).toEqual('value'); + }); + + it('should work with empty prefix', async () => { + const cache = getMockAsyncCache(); + await cache.set('key', 'value'); + const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); + await prefixCache.remove('key'); + expect(await cache.get('key')).toBeUndefined(); + }); + }); + + describe('clear', () => { + it('should remove keys with correct prefix from the underlying cache', async () => { + const cache = getMockAsyncCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + await cache.set('prefix:key1', 'value1'); + await cache.set('prefix:key2', 'value2'); + + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + await prefixCache.clear(); + + expect(await cache.get('key1')).toEqual('value1'); + expect(await cache.get('key2')).toEqual('value2'); + expect(await cache.get('prefix:key1')).toBeUndefined(); + expect(await cache.get('prefix:key2')).toBeUndefined(); + }); + + it('should work with empty prefix', async () => { + const cache = getMockAsyncCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + + const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); + await prefixCache.clear(); + + expect(await cache.get('key1')).toBeUndefined(); + expect(await cache.get('key2')).toBeUndefined(); + }); + }); + + describe('getKeys', () => { + 
it('should return keys with correct prefix', async () => { + const cache = getMockAsyncCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + await cache.set('prefix:key3', 'value1'); + await cache.set('prefix:key4', 'value2'); + + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + + const keys = await prefixCache.getKeys(); + expect(keys).toEqual(expect.arrayContaining(['key3', 'key4'])); + }); + + it('should work with empty prefix', async () => { + const cache = getMockAsyncCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + + const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); + + const keys = await prefixCache.getKeys(); + expect(keys).toEqual(expect.arrayContaining(['key1', 'key2'])); + }); + }); + + describe('getBatched', () => { + it('should return values with correct prefix', async () => { + const cache = getMockAsyncCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + await cache.set('key3', 'value3'); + await cache.set('prefix:key1', 'prefix:value1'); + await cache.set('prefix:key2', 'prefix:value2'); + + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + + const values = await prefixCache.getBatched(['key1', 'key2', 'key3']); + expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); + }); + + it('should transform values after getting from the underlying cache', async () => { + const cache = getMockAsyncCache(); + await cache.set('key1', 'VALUE1'); + await cache.set('key2', 'VALUE2'); + await cache.set('key3', 'VALUE3'); + await cache.set('prefix:key1', 'PREFIX:VALUE1'); + await cache.set('prefix:key2', 'PREFIX:VALUE2'); + + const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v.toLocaleLowerCase(), (v) => v.toUpperCase()); + + const values = await prefixCache.getBatched(['key1', 'key2', 'key3']); + expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); + }); + + it('should work with empty prefix', async () => { + const cache = getMockAsyncCache(); + await cache.set('key1', 'value1'); + await cache.set('key2', 'value2'); + + const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); + + const values = await prefixCache.getBatched(['key1', 'key2']); + expect(values).toEqual(expect.arrayContaining(['value1', 'value2'])); + }); + }); +}); \ No newline at end of file diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index c2142cb5d..5daa5d0db 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -77,7 +77,7 @@ export class SyncPrefixCache implements SyncCache { } } -export class AyncPrefixStore implements AsyncCache { +export class AsyncPrefixCache implements AsyncCache { private cache: AsyncCache; private prefix: string; private transformTo: Transformer; diff --git a/vitest.config.mts b/vitest.config.mts index 7e68e6efd..68f166c0b 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/backoff_retry_runner.spec.ts'], + include: ['**/cache.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From e92f027603535c7fa9caac9852783459b6d63d29 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 15 Nov 2024 20:10:08 +0600 Subject: [PATCH 28/45] test --- lib/service.spec.ts | 34 ++++++++++++++++++++++++++++++---- vitest.config.mts | 2 +- 2 files changed, 
31 insertions(+), 5 deletions(-) diff --git a/lib/service.spec.ts b/lib/service.spec.ts index 1faae69ac..12df4feff 100644 --- a/lib/service.spec.ts +++ b/lib/service.spec.ts @@ -15,14 +15,16 @@ */ import { it, expect } from 'vitest'; -import { BaseService, ServiceState } from './service'; - +import { BaseService, ServiceState, StartupLog } from './service'; +import { LogLevel } from './modules/logging'; +import { getMockLogger } from './tests/mock/mock_logger'; class TestService extends BaseService { - constructor() { - super(); + constructor(startUpLogs?: StartupLog[]) { + super(startUpLogs); } start(): void { + super.start(); this.setState(ServiceState.Running); this.startPromise.resolve(); } @@ -64,6 +66,30 @@ it('should return correct state when getState() is called', () => { expect(service.getState()).toBe(ServiceState.Failed); }); +it('should log startupLogs on start', () => { + const startUpLogs: StartupLog[] = [ + { + level: LogLevel.WARNING, + message: 'warn message', + params: [1, 2] + }, + { + level: LogLevel.ERROR, + message: 'error message', + params: [3, 4] + }, + ]; + + const logger = getMockLogger(); + const service = new TestService(startUpLogs); + service.setLogger(logger); + service.start(); + + expect(logger.log).toHaveBeenCalledTimes(2); + expect(logger.log).toHaveBeenNthCalledWith(1, LogLevel.WARNING, 'warn message', 1, 2); + expect(logger.log).toHaveBeenNthCalledWith(2, LogLevel.ERROR, 'error message', 3, 4); +}); + it('should return an appropriate promise when onRunning() is called', () => { const service1 = new TestService(); const onRunning1 = service1.onRunning(); diff --git a/vitest.config.mts b/vitest.config.mts index 68f166c0b..3cdb0691b 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/cache.spec.ts'], + include: ['**/service.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From e1df4a3bad46c25c0881785cb725bde3d35e345a Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 15 Nov 2024 20:21:25 +0600 Subject: [PATCH 29/45] more test --- .../batch_event_processor.spec.ts | 35 ++++++++++++++++++- vitest.config.mts | 2 +- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/lib/event_processor/batch_event_processor.spec.ts b/lib/event_processor/batch_event_processor.spec.ts index 1555702d2..68bfb2902 100644 --- a/lib/event_processor/batch_event_processor.spec.ts +++ b/lib/event_processor/batch_event_processor.spec.ts @@ -26,7 +26,8 @@ import { advanceTimersByTime } from '../../tests/testUtils'; import { getMockLogger } from '../tests/mock/mock_logger'; import { getMockRepeater } from '../tests/mock/mock_repeater'; import * as retry from '../utils/executor/backoff_retry_runner'; -import { ServiceState } from '../service'; +import { ServiceState, StartupLog } from '../service'; +import { LogLevel } from '../modules/logging'; const getMockDispatcher = () => { return { @@ -50,6 +51,38 @@ describe('QueueingEventProcessor', async () => { }); describe('start', () => { + it.only('should log startupLogs on start', () => { + const startupLogs: StartupLog[] = [ + { + level: LogLevel.WARNING, + message: 'warn message', + params: [1, 2] + }, + { + level: LogLevel.ERROR, + message: 'error message', + params: [3, 4] + }, + ]; + + const logger = getMockLogger(); + + const processor = new BatchEventProcessor({ + eventDispatcher: getMockDispatcher(), + dispatchRepeater: getMockRepeater(), + batchSize: 1000, + startupLogs, + }); +
processor.setLogger(logger); + processor.start(); + + + expect(logger.log).toHaveBeenCalledTimes(2); + expect(logger.log).toHaveBeenNthCalledWith(1, LogLevel.WARNING, 'warn message', 1, 2); + expect(logger.log).toHaveBeenNthCalledWith(2, LogLevel.ERROR, 'error message', 3, 4); + }); + it('should resolve onRunning() when start() is called', async () => { const eventDispatcher = getMockDispatcher(); const processor = new BatchEventProcessor({ diff --git a/vitest.config.mts b/vitest.config.mts index 3cdb0691b..61452ea16 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/service.spec.ts'], + include: ['**/batch_event_processor.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From dbd3b59877130072a32252e8d9fbf865b4078ccf Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 15 Nov 2024 23:36:44 +0600 Subject: [PATCH 30/45] bak --- ...native.ts => batch_event_processor.react_native.ts} | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) rename lib/event_processor/{queueing_event_processor.react_native.ts => batch_event_processor.react_native.ts} (87%) diff --git a/lib/event_processor/queueing_event_processor.react_native.ts b/lib/event_processor/batch_event_processor.react_native.ts similarity index 87% rename from lib/event_processor/queueing_event_processor.react_native.ts rename to lib/event_processor/batch_event_processor.react_native.ts index 43242b3bb..fbf0b5dee 100644 --- a/lib/event_processor/queueing_event_processor.react_native.ts +++ b/lib/event_processor/batch_event_processor.react_native.ts @@ -22,19 +22,19 @@ class ReactNativeNetInfoEventProcessor extends BatchEventProcessor { if (!this.isInternetReachable && state.isInternetReachable) { this.isInternetReachable = true; - await this.retryFailedEvents() + this.retryFailedEvents(); } } start(): void { - this.unsubscribeNetInfo = addConnectionListener(this.connectionListener.bind(this)) - super.start() + super.start(); + this.unsubscribeNetInfo = addConnectionListener(this.connectionListener.bind(this)); } stop(): void { if (this.unsubscribeNetInfo) { - this.unsubscribeNetInfo() + this.unsubscribeNetInfo(); } - super.stop() + super.stop(); } } From 6a6416735c5f75c37b0f8a773082a28a078b75e8 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 19 Nov 2024 00:22:15 +0600 Subject: [PATCH 31/45] more tests --- ...batch_event_processor.react_native.spec.ts | 152 ++++++++++++++++++ .../batch_event_processor.react_native.ts | 8 +- vitest.config.mts | 2 +- 3 files changed, 157 insertions(+), 5 deletions(-) create mode 100644 lib/event_processor/batch_event_processor.react_native.spec.ts diff --git a/lib/event_processor/batch_event_processor.react_native.spec.ts b/lib/event_processor/batch_event_processor.react_native.spec.ts new file mode 100644 index 000000000..37cc8fa93 --- /dev/null +++ b/lib/event_processor/batch_event_processor.react_native.spec.ts @@ -0,0 +1,152 @@ +import { vi, describe, it, expect, beforeEach } from 'vitest'; + +const mockNetInfo = vi.hoisted(() => { + const netInfo = { + listeners: [], + unsubs: [], + addEventListener(fn: any) { + this.listeners.push(fn); + const unsub = vi.fn(); + this.unsubs.push(unsub); + return unsub; + }, + pushState(state: boolean) { + for (const listener of this.listeners) { + listener({ isInternetReachable: state }); + } + }, + clear() { + this.listeners = []; + this.unsubs = []; + } + }; + return netInfo; +}); + 
+vi.mock('@react-native-community/netinfo', () => { + return { + addEventListener: mockNetInfo.addEventListener.bind(mockNetInfo), + }; +}); + +import { ReactNativeNetInfoEventProcessor } from './batch_event_processor.react_native'; +import { getMockLogger } from '../tests/mock/mock_logger'; +import { getMockRepeater } from '../tests/mock/mock_repeater'; +import { getMockAsyncCache } from '../tests/mock/mock_cache'; +import { createImpressionEvent } from '../tests/mock/create_event'; +import { EventWithId } from './batch_event_processor'; +import { formatEvents, ProcessableEvent } from '.'; + +const getMockDispatcher = () => { + return { + dispatchEvent: vi.fn(), + }; +}; + +const exhaustMicrotasks = async (loop = 100) => { + for(let i = 0; i < loop; i++) { + await Promise.resolve(); + } +} + + +describe('ReactNativeNetInfoEventProcessor', () => { + beforeEach(() => { + mockNetInfo.clear(); + }); + + it('should not retry failed events when reachable state does not change', async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); + + const cache = getMockAsyncCache(); + const events: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + events.push(event); + await cache.set(id, { id, event }); + } + + const processor = new ReactNativeNetInfoEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + batchSize: 1000, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + mockNetInfo.pushState(true); + expect(eventDispatcher.dispatchEvent).not.toHaveBeenCalled(); + + mockNetInfo.pushState(true); + expect(eventDispatcher.dispatchEvent).not.toHaveBeenCalled(); + }); + + it('should retry failed events when network becomes reachable', async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); + + const cache = getMockAsyncCache(); + const events: ProcessableEvent[] = []; + + for(let i = 0; i < 5; i++) { + const id = `id-${i}`; + const event = createImpressionEvent(id); + events.push(event); + await cache.set(id, { id, event }); + } + + const processor = new ReactNativeNetInfoEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + batchSize: 1000, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + mockNetInfo.pushState(false); + expect(eventDispatcher.dispatchEvent).not.toHaveBeenCalled(); + + mockNetInfo.pushState(true); + + await exhaustMicrotasks(); + + expect(eventDispatcher.dispatchEvent).toHaveBeenCalledWith(formatEvents(events)); + }); + + it('should unsubscribe from netinfo listener when stopped', async () => { + const eventDispatcher = getMockDispatcher(); + const dispatchRepeater = getMockRepeater(); + const failedEventRepeater = getMockRepeater(); + + const cache = getMockAsyncCache(); + + const processor = new ReactNativeNetInfoEventProcessor({ + eventDispatcher, + dispatchRepeater, + failedEventRepeater, + batchSize: 1000, + eventStore: cache, + }); + + processor.start(); + await processor.onRunning(); + + mockNetInfo.pushState(false); + + processor.stop(); + await processor.onTerminated(); + + expect(mockNetInfo.unsubs[0]).toHaveBeenCalled(); + }); +}); diff --git a/lib/event_processor/batch_event_processor.react_native.ts b/lib/event_processor/batch_event_processor.react_native.ts index fbf0b5dee..5cb10a1f0 
100644 --- a/lib/event_processor/batch_event_processor.react_native.ts +++ b/lib/event_processor/batch_event_processor.react_native.ts @@ -1,12 +1,12 @@ import { NetInfoState, addEventListener as addConnectionListener, -} from "@react-native-community/netinfo" +} from '@react-native-community/netinfo'; -import { BatchEventProcessor, BatchEventProcessorConfig } from "./batch_event_processor"; -import { Fn } from "../utils/type"; +import { BatchEventProcessor, BatchEventProcessorConfig } from './batch_event_processor'; +import { Fn } from '../utils/type'; -class ReactNativeNetInfoEventProcessor extends BatchEventProcessor { +export class ReactNativeNetInfoEventProcessor extends BatchEventProcessor { private isInternetReachable = true; private unsubscribeNetInfo?: Fn; diff --git a/vitest.config.mts b/vitest.config.mts index 61452ea16..86e85d2cb 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/batch_event_processor.spec.ts'], + include: ['**/batch_event_processor.react_native.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 2c411ca92eb0c7991052f90e6848d3395237d26a Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 19 Nov 2024 04:08:00 +0600 Subject: [PATCH 32/45] new test --- .../event_processor_factory.browser.ts | 22 +- .../event_processor_factory.spec.ts | 124 +++ .../event_processor_factory.ts | 11 +- lib/event_processor/index.ts | 2 - .../pendingEventsDispatcher.ts | 86 -- lib/event_processor/pendingEventsStore.ts | 117 --- lib/event_processor/requestTracker.ts | 60 -- .../v1/v1EventProcessor.react_native.ts | 264 ------ lib/event_processor/v1/v1EventProcessor.ts | 133 --- lib/index.react_native.ts | 1 - .../event_processor/index.react_native.ts | 26 - lib/utils/cache/cache.ts | 36 +- tests/index.react_native.spec.ts | 1 - tests/pendingEventsDispatcher.spec.ts | 257 ----- tests/pendingEventsStore.spec.ts | 143 --- tests/reactNativeV1EventProcessor.spec.ts | 69 -- tests/requestTracker.spec.ts | 65 -- tests/v1EventProcessor.react_native.spec.ts | 891 ------------------ tests/v1EventProcessor.spec.ts | 582 ------------ vitest.config.mts | 2 +- 20 files changed, 167 insertions(+), 2725 deletions(-) create mode 100644 lib/event_processor/event_processor_factory.spec.ts delete mode 100644 lib/event_processor/pendingEventsDispatcher.ts delete mode 100644 lib/event_processor/pendingEventsStore.ts delete mode 100644 lib/event_processor/requestTracker.ts delete mode 100644 lib/event_processor/v1/v1EventProcessor.react_native.ts delete mode 100644 lib/event_processor/v1/v1EventProcessor.ts delete mode 100644 lib/plugins/event_processor/index.react_native.ts delete mode 100644 tests/pendingEventsDispatcher.spec.ts delete mode 100644 tests/pendingEventsStore.spec.ts delete mode 100644 tests/reactNativeV1EventProcessor.spec.ts delete mode 100644 tests/requestTracker.spec.ts delete mode 100644 tests/v1EventProcessor.react_native.spec.ts delete mode 100644 tests/v1EventProcessor.spec.ts diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index 9641eeddb..c47b00b06 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -17,12 +17,15 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import { EventDispatcher } from './eventDispatcher'; import { EventProcessor } from './eventProcessor'; 
-import { BatchEventProcessor, BatchEventProcessorConfig } from './batch_event_processor'; -import { getQueuingEventProcessor, QueueingEventProcessorOptions } from './event_processor_factory'; +import { BatchEventProcessor, BatchEventProcessorConfig, EventWithId } from './batch_event_processor'; +import { getBatchEventProcessor, QueueingEventProcessorOptions } from './event_processor_factory'; import defaultEventDispatcher from './default_dispatcher.browser'; import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; +import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; +import { SyncPrefixCache } from '../utils/cache/cache'; export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; +export const EVENT_STORE_PREFIX = 'fs_optly_pending_events'; export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, @@ -33,14 +36,23 @@ export const createForwardingEventProcessor = ( export const createQueueingEventProcessor = ( options: QueueingEventProcessorOptions ): EventProcessor => { - return getQueuingEventProcessor({ + const localStorageCache = new LocalStorageCache(); + const eventStore = new SyncPrefixCache( + localStorageCache, EVENT_STORE_PREFIX, + JSON.parse, + JSON.stringify + ); + + return getBatchEventProcessor({ eventDispatcher: options.eventDispatcher || defaultEventDispatcher, closingEventDispatcher: options.closingEventDispatcher || (options.eventDispatcher ? options.eventDispatcher : sendBeaconEventDispatcher), flushInterval: options.flushInterval, batchSize: options.batchSize, - retryOptions: {}, + retryOptions: { + maxRetries: 5, + }, failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, + eventStore, }); }; - diff --git a/lib/event_processor/event_processor_factory.spec.ts b/lib/event_processor/event_processor_factory.spec.ts new file mode 100644 index 000000000..a9683e7d6 --- /dev/null +++ b/lib/event_processor/event_processor_factory.spec.ts @@ -0,0 +1,124 @@ +import { describe, it, expect, beforeEach, vi, MockInstance } from 'vitest'; +import { DEFAULT_MAX_BACKOFF, DEFAULT_MIN_BACKOFF, getBatchEventProcessor } from './event_processor_factory'; +import { BatchEventProcessor, BatchEventProcessorConfig } from './batch_event_processor'; +import { ExponentialBackoff, IntervalRepeater } from '../utils/repeater/repeater'; + +vi.mock('./batch_event_processor'); +vi.mock('../utils/repeater/repeater'); + +type BatchEventProcessorConstructor = typeof BatchEventProcessor; + +const getMockEventDispatcher = () => { + return { + dispatchEvent: vi.fn(), + } +}; + +describe('getBatchEventProcessor', () => { + const MockBatchEventProcessor = vi.mocked(BatchEventProcessor); + const MockExponentialBackoff = vi.mocked(ExponentialBackoff); + + beforeEach(() => { + MockBatchEventProcessor.mockReset(); + MockExponentialBackoff.mockReset(); + }); + + it('returns an instane of BatchEventProcessor if no subclass constructor is provided', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + }; + + const processor = getBatchEventProcessor(options); + + expect(processor instanceof BatchEventProcessor).toBe(true); + }); + + it('returns an instane of the provided subclass constructor', () => { + class CustomEventProcessor extends BatchEventProcessor { + constructor(opts: BatchEventProcessorConfig) { + super(opts); + } + } + + const options = { + eventDispatcher: getMockEventDispatcher(), + }; + + const processor = getBatchEventProcessor(options, CustomEventProcessor); + + 
expect(processor instanceof CustomEventProcessor).toBe(true); + }); + + it('does not use retry if retryOptions is not provided', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + }; + + const processor = getBatchEventProcessor(options); + expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig).toBe(undefined); + }); + + it('uses retry when retryOptions is provided', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + retryOptions: {}, + }; + + let processor = getBatchEventProcessor(options); + + const usedRetryConfig = MockBatchEventProcessor.mock.calls[0][0].retryConfig; + expect(usedRetryConfig).not.toBe(undefined); + expect(usedRetryConfig?.backoffProvider).not.toBe(undefined); + }); + + it('uses the correct maxRetries value when retryOptions is provided', () => { + const options1 = { + eventDispatcher: getMockEventDispatcher(), + retryOptions: { + maxRetries: 10, + }, + }; + + let processor1 = getBatchEventProcessor(options1); + expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig?.maxRetries).toBe(10); + + const options2 = { + eventDispatcher: getMockEventDispatcher(), + retryOptions: {}, + }; + + let processor2 = getBatchEventProcessor(options2); + expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig).not.toBe(undefined); + expect(MockBatchEventProcessor.mock.calls[1][0].retryConfig?.maxRetries).toBe(undefined); + }); + + it('uses exponential backoff with default parameters when retryOptions is provided without backoff values', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + retryOptions: {}, + }; + + let processor = getBatchEventProcessor(options); + const backoffProvider = MockBatchEventProcessor.mock.calls[0][0].retryConfig?.backoffProvider; + + expect(backoffProvider).not.toBe(undefined); + const backoff = backoffProvider?.(); + expect(Object.is(backoff, MockExponentialBackoff.mock.instances[0])).toBe(true); + expect(MockExponentialBackoff).toHaveBeenNthCalledWith(1, DEFAULT_MIN_BACKOFF, DEFAULT_MAX_BACKOFF, 500); + }); + + it('uses exponential backoff with provided backoff values in retryOptions', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + retryOptions: { minBackoff: 1000, maxBackoff: 2000 }, + }; + + let processor = getBatchEventProcessor(options); + const backoffProvider = MockBatchEventProcessor.mock.calls[0][0].retryConfig?.backoffProvider; + + expect(backoffProvider).not.toBe(undefined); + const backoff = backoffProvider?.(); + expect(Object.is(backoff, MockExponentialBackoff.mock.instances[0])).toBe(true); + expect(MockExponentialBackoff).toHaveBeenNthCalledWith(1, 1000, 2000, 500); + }); +}); diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index 92b8db15a..5bd3dd0f6 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -3,7 +3,8 @@ import { StartupLog } from "../service"; import { ExponentialBackoff, IntervalRepeater } from "../utils/repeater/repeater"; import { EventDispatcher } from "./eventDispatcher"; import { EventProcessor } from "./eventProcessor"; -import { BatchEventProcessor, RetryConfig } from "./batch_event_processor"; +import { BatchEventProcessor, EventWithId, RetryConfig } from "./batch_event_processor"; +import { Cache } from "../utils/cache/cache"; export const DEFAULT_EVENT_BATCH_SIZE = 10; export const DEFAULT_EVENT_FLUSH_INTERVAL = 1000; @@ -21,6 +22,7 @@ export type QueueingEventProcessorOptions = { export 
type QueueingEventProcessorFactoryOptions = Omit & { eventDispatcher: EventDispatcher; failedEventRetryInterval?: number; + eventStore?: Cache; retryOptions?: { maxRetries?: number; minBackoff?: number; @@ -28,11 +30,11 @@ export type QueueingEventProcessorFactoryOptions = Omit { - const { eventDispatcher, closingEventDispatcher, retryOptions } = options; + const { eventDispatcher, closingEventDispatcher, retryOptions, eventStore } = options; const retryConfig: RetryConfig | undefined = retryOptions ? { maxRetries: retryOptions.maxRetries, @@ -78,6 +80,7 @@ export const getQueuingEventProcessor = ( failedEventRepeater, retryConfig, batchSize, - startupLogs + eventStore, + startupLogs, }); }; diff --git a/lib/event_processor/index.ts b/lib/event_processor/index.ts index 0a266c4ae..8a97a8b15 100644 --- a/lib/event_processor/index.ts +++ b/lib/event_processor/index.ts @@ -17,6 +17,4 @@ export * from './events' export * from './eventProcessor' export * from './eventDispatcher' -export * from './pendingEventsDispatcher' export * from './v1/buildEventV1' -export * from './v1/v1EventProcessor' diff --git a/lib/event_processor/pendingEventsDispatcher.ts b/lib/event_processor/pendingEventsDispatcher.ts deleted file mode 100644 index cfa2c3e80..000000000 --- a/lib/event_processor/pendingEventsDispatcher.ts +++ /dev/null @@ -1,86 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { getLogger } from '../modules/logging' -import { EventDispatcher, EventV1Request, EventDispatcherResponse } from './eventDispatcher' -import { PendingEventsStore, LocalStorageStore } from './pendingEventsStore' -import { uuid, getTimestamp } from '../utils/fns' - -const logger = getLogger('EventProcessor') - -export type DispatcherEntry = { - uuid: string - timestamp: number - request: EventV1Request -} - -export class PendingEventsDispatcher implements EventDispatcher { - protected dispatcher: EventDispatcher - protected store: PendingEventsStore - - constructor({ - eventDispatcher, - store, - }: { - eventDispatcher: EventDispatcher - store: PendingEventsStore - }) { - this.dispatcher = eventDispatcher - this.store = store - } - - dispatchEvent(request: EventV1Request): Promise { - return this.send( - { - uuid: uuid(), - timestamp: getTimestamp(), - request, - } - ) - } - - sendPendingEvents(): void { - const pendingEvents = this.store.values() - - logger.debug('Sending %s pending events from previous page', pendingEvents.length) - - pendingEvents.forEach(item => { - this.send(item).catch((e) => { - logger.debug(String(e)); - }); - }) - } - - protected async send(entry: DispatcherEntry): Promise { - this.store.set(entry.uuid, entry) - - const response = await this.dispatcher.dispatchEvent(entry.request); - this.store.remove(entry.uuid); - return response; - } -} - -export class LocalStoragePendingEventsDispatcher extends PendingEventsDispatcher { - constructor({ eventDispatcher }: { eventDispatcher: EventDispatcher }) { - super({ - eventDispatcher, - store: new LocalStorageStore({ - // TODO make this configurable - maxValues: 100, - key: 'fs_optly_pending_events', - }), - }) - } -} diff --git a/lib/event_processor/pendingEventsStore.ts b/lib/event_processor/pendingEventsStore.ts deleted file mode 100644 index ca8dbf0f7..000000000 --- a/lib/event_processor/pendingEventsStore.ts +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { objectValues } from '../utils/fns' -import { getLogger } from '../modules/logging'; - -const logger = getLogger('EventProcessor') - -export interface PendingEventsStore { - get(key: string): K | null - - set(key: string, value: K): void - - remove(key: string): void - - values(): K[] - - clear(): void - - replace(newMap: { [key: string]: K }): void -} - -interface StoreEntry { - uuid: string - timestamp: number -} - -export class LocalStorageStore implements PendingEventsStore { - protected LS_KEY: string - protected maxValues: number - - constructor({ key, maxValues = 1000 }: { key: string; maxValues?: number }) { - this.LS_KEY = key - this.maxValues = maxValues - } - - get(key: string): K | null { - return this.getMap()[key] || null - } - - set(key: string, value: K): void { - const map = this.getMap() - map[key] = value - this.replace(map) - } - - remove(key: string): void { - const map = this.getMap() - delete map[key] - this.replace(map) - } - - values(): K[] { - return objectValues(this.getMap()) - } - - clear(): void { - this.replace({}) - } - - replace(map: { [key: string]: K }): void { - try { - // This is a temporary fix to support React Native which does not have localStorage. - typeof window !== 'undefined' ? window && window.localStorage && localStorage.setItem(this.LS_KEY, JSON.stringify(map)) : localStorage.setItem(this.LS_KEY, JSON.stringify(map)) - this.clean() - } catch (e) { - logger.error(String(e)) - } - } - - private clean() { - const map = this.getMap() - const keys = Object.keys(map) - const toRemove = keys.length - this.maxValues - if (toRemove < 1) { - return - } - - const entries = keys.map(key => ({ - key, - value: map[key] - })) - - entries.sort((a, b) => a.value.timestamp - b.value.timestamp) - - for (let i = 0; i < toRemove; i++) { - delete map[entries[i].key] - } - - this.replace(map) - } - - private getMap(): { [key: string]: K } { - try { - // This is a temporary fix to support React Native which does not have localStorage. - const data = typeof window !== 'undefined' ? window && window.localStorage && localStorage.getItem(this.LS_KEY): localStorage.getItem(this.LS_KEY); - if (data) { - return (JSON.parse(data) as { [key: string]: K }) || {} - } - } catch (e: any) { - logger.error(e) - } - return {} - } -} diff --git a/lib/event_processor/requestTracker.ts b/lib/event_processor/requestTracker.ts deleted file mode 100644 index 192919884..000000000 --- a/lib/event_processor/requestTracker.ts +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * RequestTracker keeps track of in-flight requests for EventProcessor using - * an internal counter. It exposes methods for adding a new request to be - * tracked, and getting a Promise representing the completion of currently - * tracked requests. 
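// A brief usage sketch of the tracker described above (the class itself follows);
// the two setTimeout-based promises are stand-ins for in-flight dispatch requests.
async function exampleDrain(): Promise<void> {
  const tracker = new RequestTracker();
  tracker.trackRequest(new Promise<void>(resolve => setTimeout(resolve, 10)));
  tracker.trackRequest(new Promise<void>(resolve => setTimeout(resolve, 20)));
  // Resolves only after every tracked promise has settled (fulfilled or rejected).
  await tracker.onRequestsComplete();
}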
- */ -class RequestTracker { - private reqsInFlightCount = 0 - private reqsCompleteResolvers: Array<() => void> = [] - - /** - * Track the argument request (represented by a Promise). reqPromise will feed - * into the state of Promises returned by onRequestsComplete. - * @param {Promise} reqPromise - */ - public trackRequest(reqPromise: Promise): void { - this.reqsInFlightCount++ - const onReqComplete = () => { - this.reqsInFlightCount-- - if (this.reqsInFlightCount === 0) { - this.reqsCompleteResolvers.forEach(resolver => resolver()) - this.reqsCompleteResolvers = [] - } - } - reqPromise.then(onReqComplete, onReqComplete) - } - - /** - * Return a Promise that fulfills after all currently-tracked request promises - * are resolved. - * @return {Promise} - */ - public onRequestsComplete(): Promise { - return new Promise(resolve => { - if (this.reqsInFlightCount === 0) { - resolve() - } else { - this.reqsCompleteResolvers.push(resolve) - } - }) - } -} - -export default RequestTracker diff --git a/lib/event_processor/v1/v1EventProcessor.react_native.ts b/lib/event_processor/v1/v1EventProcessor.react_native.ts deleted file mode 100644 index 58592f0ec..000000000 --- a/lib/event_processor/v1/v1EventProcessor.react_native.ts +++ /dev/null @@ -1,264 +0,0 @@ -/** - * Copyright 2022-2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { - uuid as id, - objectEntries, -} from '../../utils/fns' -import { - NetInfoState, - addEventListener as addConnectionListener, -} from "@react-native-community/netinfo" -import { getLogger } from '../../modules/logging' -import { NotificationSender } from '../../core/notification_center' - -import { - getQueue, - EventProcessor, - ProcessableEvent, - sendEventNotification, - validateAndGetBatchSize, - validateAndGetFlushInterval, - DEFAULT_BATCH_SIZE, - DEFAULT_FLUSH_INTERVAL, -} from "../eventProcessor" -import { ReactNativeEventsStore } from '../reactNativeEventsStore' -import { Synchronizer } from '../synchronizer' -import { EventQueue } from '../eventQueue' -import RequestTracker from '../requestTracker' -import { areEventContextsEqual } from '../events' -import { formatEvents } from './buildEventV1' -import { - EventV1Request, - EventDispatcher, -} from '../eventDispatcher' -import { PersistentCacheProvider } from '../../shared_types' -import { ServiceState } from '../../service' -import { Consumer, Fn } from '../../utils/type' - -const logger = getLogger('ReactNativeEventProcessor') - -const DEFAULT_MAX_QUEUE_SIZE = 10000 -const PENDING_EVENTS_STORE_KEY = 'fs_optly_pending_events' -const EVENT_BUFFER_STORE_KEY = 'fs_optly_event_buffer' - -/** - * React Native Events Processor with Caching support for events when app is offline. 
- */ -export class LogTierV1EventProcessor implements EventProcessor { - private id = Math.random(); - private dispatcher: EventDispatcher - // expose for testing - public queue: EventQueue - private notificationSender?: NotificationSender - private requestTracker: RequestTracker - - /* eslint-disable */ - private unsubscribeNetInfo: Function | null = null - /* eslint-enable */ - private isInternetReachable = true - private pendingEventsPromise: Promise | null = null - private synchronizer: Synchronizer = new Synchronizer() - - // If a pending event fails to dispatch, this indicates skipping further events to preserve sequence in the next retry. - private shouldSkipDispatchToPreserveSequence = false - - /** - * This Stores Formatted events before dispatching. The events are removed after they are successfully dispatched. - * Stored events are retried on every new event dispatch, when connection becomes available again or when SDK initializes the next time. - */ - private pendingEventsStore: ReactNativeEventsStore - - /** - * This stores individual events generated from the SDK till they are part of the pending buffer. - * The store is cleared right before the event is formatted to be dispatched. - * This is to make sure that individual events are not lost when app closes before the buffer was flushed. - */ - private eventBufferStore: ReactNativeEventsStore - - constructor({ - dispatcher, - flushInterval = DEFAULT_FLUSH_INTERVAL, - batchSize = DEFAULT_BATCH_SIZE, - maxQueueSize = DEFAULT_MAX_QUEUE_SIZE, - notificationCenter, - persistentCacheProvider, - }: { - dispatcher: EventDispatcher - flushInterval?: number - batchSize?: number - maxQueueSize?: number - notificationCenter?: NotificationSender - persistentCacheProvider?: PersistentCacheProvider - }) { - this.dispatcher = dispatcher - this.notificationSender = notificationCenter - this.requestTracker = new RequestTracker() - - flushInterval = validateAndGetFlushInterval(flushInterval) - batchSize = validateAndGetBatchSize(batchSize) - this.queue = getQueue(batchSize, flushInterval, areEventContextsEqual, this.drainQueue.bind(this)) - this.pendingEventsStore = new ReactNativeEventsStore( - maxQueueSize, - PENDING_EVENTS_STORE_KEY, - persistentCacheProvider && persistentCacheProvider(), - ); - this.eventBufferStore = new ReactNativeEventsStore( - maxQueueSize, - EVENT_BUFFER_STORE_KEY, - persistentCacheProvider && persistentCacheProvider(), - ) - } - onDispatch(handler: Consumer): Fn { - throw new Error('Method not implemented.') - } - getState(): ServiceState { - throw new Error('Method not implemented.') - } - onRunning(): Promise { - throw new Error('Method not implemented.') - } - onTerminated(): Promise { - throw new Error('Method not implemented.') - } - - private async connectionListener(state: NetInfoState) { - if (this.isInternetReachable && !state.isInternetReachable) { - this.isInternetReachable = false - logger.debug('Internet connection lost') - return - } - if (!this.isInternetReachable && state.isInternetReachable) { - this.isInternetReachable = true - logger.debug('Internet connection is restored, attempting to dispatch pending events') - await this.processPendingEvents() - this.shouldSkipDispatchToPreserveSequence = false - } - } - - private isSuccessResponse(status: number): boolean { - return status >= 200 && status < 400 - } - - private async drainQueue(buffer: ProcessableEvent[]): Promise { - if (buffer.length === 0) { - return - } - - await this.synchronizer.getLock() - - // Retry pending failed events while draining 
queue - await this.processPendingEvents() - logger.debug('draining queue with %s events', buffer.length) - - const eventCacheKey = id() - const formattedEvent = formatEvents(buffer) - - // Store formatted event before dispatching to be retried later in case of failure. - await this.pendingEventsStore.set(eventCacheKey, formattedEvent) - - // Clear buffer because the buffer has become a formatted event and is already stored in pending cache. - for (const {uuid} of buffer) { - await this.eventBufferStore.remove(uuid) - } - - if (!this.shouldSkipDispatchToPreserveSequence) { - await this.dispatchEvent(eventCacheKey, formattedEvent) - } - - // Resetting skip flag because current sequence of events have all been processed - this.shouldSkipDispatchToPreserveSequence = false - - this.synchronizer.releaseLock() - } - - private async processPendingEvents(): Promise { - logger.debug('Processing pending events from offline storage') - if (!this.pendingEventsPromise) { - // Only process events if existing promise is not in progress - this.pendingEventsPromise = this.getPendingEventsPromise() - } else { - logger.debug('Already processing pending events, returning the existing promise') - } - await this.pendingEventsPromise - this.pendingEventsPromise = null - } - - private async getPendingEventsPromise(): Promise { - const formattedEvents: {[key: string]: any} = await this.pendingEventsStore.getEventsMap() - const eventEntries = objectEntries(formattedEvents) - logger.debug('Processing %s pending events', eventEntries.length) - // Using for loop to be able to wait for previous dispatch to finish before moving on to the new one - for (const [eventKey, event] of eventEntries) { - // If one event dispatch failed, skip subsequent events to preserve sequence - if (this.shouldSkipDispatchToPreserveSequence) { - return - } - await this.dispatchEvent(eventKey, event) - } - } - - private async dispatchEvent(eventCacheKey: string, event: EventV1Request): Promise { - const requestPromise = new Promise((resolve) => { - this.dispatcher.dispatchEvent(event).then((response) => { - if (!response.statusCode || this.isSuccessResponse(response.statusCode)) { - return this.pendingEventsStore.remove(eventCacheKey) - } else { - this.shouldSkipDispatchToPreserveSequence = true - logger.warn('Failed to dispatch event, Response status Code: %s', response.statusCode) - return Promise.resolve() - } - }).catch((e) => { - logger.warn('Failed to dispatch event, error: %s', e.message) - }).finally(() => resolve()) - - sendEventNotification(this.notificationSender, event) - }) - // Tracking all the requests to dispatch to make sure request is completed before fulfilling the `stop` promise - this.requestTracker.trackRequest(requestPromise) - return requestPromise - } - - public async start(): Promise { - await this.queue.start() - this.unsubscribeNetInfo = addConnectionListener(this.connectionListener.bind(this)) - - await this.processPendingEvents() - this.shouldSkipDispatchToPreserveSequence = false - - // Process individual events pending from the buffer. - const events: ProcessableEvent[] = await this.eventBufferStore.getEventsList() - await this.eventBufferStore.clear() - events.forEach(this.process.bind(this)) - } - - public async process(event: ProcessableEvent): Promise { - // Adding events to buffer store. 
If app closes before dispatch, we can reprocess next time the app initializes - this.eventBufferStore.set(event.uuid, event).then(() => { - this.queue.enqueue(event) - }) - } - - public async stop(): Promise { - // swallow - an error stopping this queue shouldn't prevent this from stopping - try { - this.unsubscribeNetInfo && this.unsubscribeNetInfo() - await this.queue.stop() - return this.requestTracker.onRequestsComplete() - } catch (e) { - logger.error('Error stopping EventProcessor: "%s"', Object(e).message, String(e)) - } - } -} diff --git a/lib/event_processor/v1/v1EventProcessor.ts b/lib/event_processor/v1/v1EventProcessor.ts deleted file mode 100644 index a8132311a..000000000 --- a/lib/event_processor/v1/v1EventProcessor.ts +++ /dev/null @@ -1,133 +0,0 @@ -/** - * Copyright 2022-2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { getLogger } from '../../modules/logging' -import { NotificationSender } from '../../core/notification_center' - -import { EventDispatcher, EventV1Request } from '../eventDispatcher' -import { - getQueue, - EventProcessor, - ProcessableEvent, - sendEventNotification, - validateAndGetBatchSize, - validateAndGetFlushInterval, - DEFAULT_BATCH_SIZE, - DEFAULT_FLUSH_INTERVAL, -} from '../eventProcessor' -import { EventQueue } from '../eventQueue' -import RequestTracker from '../requestTracker' -import { areEventContextsEqual } from '../events' -import { formatEvents } from './buildEventV1' -import { ServiceState } from '../../service' -import { Consumer, Fn } from '../../utils/type' - -const logger = getLogger('LogTierV1EventProcessor') - -export class LogTierV1EventProcessor implements EventProcessor { - private dispatcher: EventDispatcher - private closingDispatcher?: EventDispatcher - private queue: EventQueue - private notificationCenter?: NotificationSender - private requestTracker: RequestTracker - - constructor({ - dispatcher, - closingDispatcher, - flushInterval = DEFAULT_FLUSH_INTERVAL, - batchSize = DEFAULT_BATCH_SIZE, - notificationCenter, - }: { - dispatcher: EventDispatcher - closingDispatcher?: EventDispatcher - flushInterval?: number - batchSize?: number - notificationCenter?: NotificationSender - }) { - this.dispatcher = dispatcher - this.closingDispatcher = closingDispatcher - this.notificationCenter = notificationCenter - this.requestTracker = new RequestTracker() - - flushInterval = validateAndGetFlushInterval(flushInterval) - batchSize = validateAndGetBatchSize(batchSize) - this.queue = getQueue( - batchSize, - flushInterval, - areEventContextsEqual, - this.drainQueue.bind(this, false), - this.drainQueue.bind(this, true), - ); - } - - onDispatch(handler: Consumer): Fn { - return () => {} - } - - getState(): ServiceState { - throw new Error('Method not implemented.') - } - onRunning(): Promise { - throw new Error('Method not implemented.') - } - onTerminated(): Promise { - throw new Error('Method not implemented.') - } - - private drainQueue(useClosingDispatcher: boolean, buffer: ProcessableEvent[]): 
Promise { - const reqPromise = new Promise(resolve => { - logger.debug('draining queue with %s events', buffer.length) - - if (buffer.length === 0) { - resolve() - return - } - - const formattedEvent = formatEvents(buffer) - const dispatcher = useClosingDispatcher && this.closingDispatcher - ? this.closingDispatcher : this.dispatcher; - - // TODO: this does not do anything if the dispatcher fails - // to dispatch. What should be done in that case? - dispatcher.dispatchEvent(formattedEvent).finally(() => { - resolve() - }) - sendEventNotification(this.notificationCenter, formattedEvent) - }) - this.requestTracker.trackRequest(reqPromise) - return reqPromise - } - - async process(event: ProcessableEvent): Promise { - this.queue.enqueue(event) - } - - // TODO[OASIS-6649]: Don't use any type - // eslint-disable-next-line @typescript-eslint/no-explicit-any - stop(): Promise { - // swallow - an error stopping this queue shouldn't prevent this from stopping - try { - this.queue.stop() - return this.requestTracker.onRequestsComplete() - } catch (e) { - logger.error('Error stopping EventProcessor: "%s"', Object(e).message, String(e)) - } - return Promise.resolve() - } - - async start(): Promise { - await this.queue.start() - } -} diff --git a/lib/index.react_native.ts b/lib/index.react_native.ts index 7fa16c6ca..574cd4f38 100644 --- a/lib/index.react_native.ts +++ b/lib/index.react_native.ts @@ -22,7 +22,6 @@ import defaultErrorHandler from './plugins/error_handler'; import * as loggerPlugin from './plugins/logger/index.react_native'; import defaultEventDispatcher from './event_processor/default_dispatcher.browser'; import { createNotificationCenter } from './core/notification_center'; -import { createEventProcessor } from './plugins/event_processor/index.react_native'; import { OptimizelyDecideOption, Client, Config } from './shared_types'; import { BrowserOdpManager } from './plugins/odp_manager/index.browser'; import * as commonExports from './common_exports'; diff --git a/lib/plugins/event_processor/index.react_native.ts b/lib/plugins/event_processor/index.react_native.ts deleted file mode 100644 index 9481987cb..000000000 --- a/lib/plugins/event_processor/index.react_native.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright 2022-2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { LogTierV1EventProcessor, LocalStoragePendingEventsDispatcher } from '../../event_processor/index.react_native'; - -export function createEventProcessor( - ...args: ConstructorParameters -): LogTierV1EventProcessor { - return new LogTierV1EventProcessor(...args); -} - -export default { createEventProcessor, LocalStoragePendingEventsDispatcher }; - diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index 5daa5d0db..e0127f12d 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -21,21 +21,21 @@ export type Cache = SyncCache | AsyncCache; export class SyncPrefixCache implements SyncCache { private cache: SyncCache; private prefix: string; - private transformTo: Transformer; - private transformFrom: Transformer; + private transformGet: Transformer; + private transformSet: Transformer; public readonly operation = 'sync'; constructor( cache: SyncCache, prefix: string, - transformTo: Transformer, - transformFrom: Transformer + transformGet: Transformer, + transformSet: Transformer ) { this.cache = cache; this.prefix = prefix; - this.transformTo = transformTo; - this.transformFrom = transformFrom; + this.transformGet = transformGet; + this.transformSet = transformSet; } private addPrefix(key: string): string { @@ -47,12 +47,12 @@ export class SyncPrefixCache implements SyncCache { } set(key: string, value: V): unknown { - return this.cache.set(this.addPrefix(key), this.transformFrom(value)); + return this.cache.set(this.addPrefix(key), this.transformSet(value)); } get(key: string): V | undefined { const value = this.cache.get(this.addPrefix(key)); - return value ? this.transformTo(value) : undefined; + return value ? this.transformGet(value) : undefined; } remove(key: string): unknown { @@ -73,28 +73,28 @@ export class SyncPrefixCache implements SyncCache { getBatched(keys: string[]): Maybe[] { return this.cache.getBatched(keys.map((key) => this.addPrefix(key))) - .map((value) => value ? this.transformTo(value) : undefined); + .map((value) => value ? this.transformGet(value) : undefined); } } export class AsyncPrefixCache implements AsyncCache { private cache: AsyncCache; private prefix: string; - private transformTo: Transformer; - private transformFrom: Transformer; + private transformGet: Transformer; + private transformSet: Transformer; public readonly operation = 'async'; constructor( cache: AsyncCache, prefix: string, - transformTo: Transformer, - transformFrom: Transformer + transformGet: Transformer, + transformSet: Transformer ) { this.cache = cache; this.prefix = prefix; - this.transformTo = transformTo; - this.transformFrom = transformFrom; + this.transformGet = transformGet; + this.transformSet = transformSet; } private addPrefix(key: string): string { @@ -106,12 +106,12 @@ export class AsyncPrefixCache implements AsyncCache { } set(key: string, value: V): Promise { - return this.cache.set(this.addPrefix(key), this.transformFrom(value)); + return this.cache.set(this.addPrefix(key), this.transformSet(value)); } async get(key: string): Promise { const value = await this.cache.get(this.addPrefix(key)); - return value ? this.transformTo(value) : undefined; + return value ? this.transformGet(value) : undefined; } remove(key: string): Promise { @@ -133,6 +133,6 @@ export class AsyncPrefixCache implements AsyncCache { async getBatched(keys: string[]): Promise[]> { const values = await this.cache.getBatched(keys.map((key) => this.addPrefix(key))); - return values.map((value) => value ? 
this.transformTo(value) : undefined); + return values.map((value) => value ? this.transformGet(value) : undefined); } } diff --git a/tests/index.react_native.spec.ts b/tests/index.react_native.spec.ts index 0d8648403..a5fab6aff 100644 --- a/tests/index.react_native.spec.ts +++ b/tests/index.react_native.spec.ts @@ -16,7 +16,6 @@ import { describe, beforeEach, afterEach, it, expect, vi } from 'vitest'; import * as logging from '../lib/modules/logging/logger'; -import * as eventProcessor from '../lib//plugins/event_processor/index.react_native'; import Optimizely from '../lib/optimizely'; import testData from '../lib/tests/test_data'; diff --git a/tests/pendingEventsDispatcher.spec.ts b/tests/pendingEventsDispatcher.spec.ts deleted file mode 100644 index d39b58e22..000000000 --- a/tests/pendingEventsDispatcher.spec.ts +++ /dev/null @@ -1,257 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { describe, beforeEach, afterEach, it, expect, vi, MockInstance } from 'vitest'; - -vi.mock('../lib/utils/fns', async (importOriginal) => { - const actual: any = await importOriginal(); - return { - __esModule: true, - uuid: vi.fn(), - getTimestamp: vi.fn(), - objectValues: actual.objectValues, - } -}); - -import { - LocalStoragePendingEventsDispatcher, - PendingEventsDispatcher, - DispatcherEntry, -} from '../lib/event_processor/pendingEventsDispatcher' -import { EventDispatcher, EventDispatcherResponse, EventV1Request } from '../lib/event_processor/eventDispatcher' -import { EventV1 } from '../lib/event_processor/v1/buildEventV1' -import { PendingEventsStore, LocalStorageStore } from '../lib/event_processor/pendingEventsStore' -import { uuid, getTimestamp } from '../lib/utils/fns' -import { resolvablePromise, ResolvablePromise } from '../lib/utils/promise/resolvablePromise'; - -describe('LocalStoragePendingEventsDispatcher', () => { - let originalEventDispatcher: EventDispatcher - let pendingEventsDispatcher: PendingEventsDispatcher - let eventDispatcherResponses: Array> - - beforeEach(() => { - eventDispatcherResponses = []; - originalEventDispatcher = { - dispatchEvent: vi.fn().mockImplementation(() => { - const response = resolvablePromise() - eventDispatcherResponses.push(response) - return response.promise - }), - } - - pendingEventsDispatcher = new LocalStoragePendingEventsDispatcher({ - eventDispatcher: originalEventDispatcher, - }) - ;((getTimestamp as unknown) as MockInstance).mockReturnValue(1) - ;((uuid as unknown) as MockInstance).mockReturnValue('uuid') - }) - - afterEach(() => { - localStorage.clear() - }) - - it('should properly send the events to the passed in eventDispatcher, when callback statusCode=200', async () => { - const eventV1Request: EventV1Request = { - url: 'http://cdn.com', - httpVerb: 'POST', - params: ({ id: 'event' } as unknown) as EventV1, - } - - pendingEventsDispatcher.dispatchEvent(eventV1Request) - - eventDispatcherResponses[0].resolve({ statusCode: 200 }) - - const internalDispatchCall = 
((originalEventDispatcher.dispatchEvent as unknown) as MockInstance) - .mock.calls[0] - - // assert that the original dispatch function was called with the request - expect((originalEventDispatcher.dispatchEvent as unknown) as MockInstance).toBeCalledTimes(1) - expect(internalDispatchCall[0]).toEqual(eventV1Request) - }) - - it('should properly send the events to the passed in eventDispatcher, when callback statusCode=400', () => { - const eventV1Request: EventV1Request = { - url: 'http://cdn.com', - httpVerb: 'POST', - params: ({ id: 'event' } as unknown) as EventV1, - } - - pendingEventsDispatcher.dispatchEvent(eventV1Request) - - eventDispatcherResponses[0].resolve({ statusCode: 400 }) - - const internalDispatchCall = ((originalEventDispatcher.dispatchEvent as unknown) as MockInstance) - .mock.calls[0] - - eventDispatcherResponses[0].resolve({ statusCode: 400 }) - - // assert that the original dispatch function was called with the request - expect((originalEventDispatcher.dispatchEvent as unknown) as MockInstance).toBeCalledTimes(1) - expect(internalDispatchCall[0]).toEqual(eventV1Request) - }) -}) - -describe('PendingEventsDispatcher', () => { - let originalEventDispatcher: EventDispatcher - let pendingEventsDispatcher: PendingEventsDispatcher - let store: PendingEventsStore - let eventDispatcherResponses: Array> - - beforeEach(() => { - eventDispatcherResponses = []; - - originalEventDispatcher = { - dispatchEvent: vi.fn().mockImplementation(() => { - const response = resolvablePromise() - eventDispatcherResponses.push(response) - return response.promise - }), - } - - store = new LocalStorageStore({ - key: 'test', - maxValues: 3, - }) - pendingEventsDispatcher = new PendingEventsDispatcher({ - store, - eventDispatcher: originalEventDispatcher, - }); - ((getTimestamp as unknown) as MockInstance).mockReturnValue(1); - ((uuid as unknown) as MockInstance).mockReturnValue('uuid'); - }) - - afterEach(() => { - localStorage.clear() - }) - - describe('dispatch', () => { - describe('when the dispatch is successful', () => { - it('should save the pendingEvent to the store and remove it once dispatch is completed', async () => { - const eventV1Request: EventV1Request = { - url: 'http://cdn.com', - httpVerb: 'POST', - params: ({ id: 'event' } as unknown) as EventV1, - } - - pendingEventsDispatcher.dispatchEvent(eventV1Request) - - expect(store.values()).toHaveLength(1) - expect(store.get('uuid')).toEqual({ - uuid: 'uuid', - timestamp: 1, - request: eventV1Request, - }) - - eventDispatcherResponses[0].resolve({ statusCode: 200 }) - await eventDispatcherResponses[0].promise - - const internalDispatchCall = ((originalEventDispatcher.dispatchEvent as unknown) as MockInstance) - .mock.calls[0] - - // assert that the original dispatch function was called with the request - expect( - (originalEventDispatcher.dispatchEvent as unknown) as MockInstance, - ).toBeCalledTimes(1) - expect(internalDispatchCall[0]).toEqual(eventV1Request) - - expect(store.values()).toHaveLength(0) - }) - }) - - describe('when the dispatch is unsuccessful', () => { - it('should save the pendingEvent to the store and remove it once dispatch is completed', async () => { - const eventV1Request: EventV1Request = { - url: 'http://cdn.com', - httpVerb: 'POST', - params: ({ id: 'event' } as unknown) as EventV1, - } - - pendingEventsDispatcher.dispatchEvent(eventV1Request) - - expect(store.values()).toHaveLength(1) - expect(store.get('uuid')).toEqual({ - uuid: 'uuid', - timestamp: 1, - request: eventV1Request, - }) - - 
eventDispatcherResponses[0].resolve({ statusCode: 400 }) - await eventDispatcherResponses[0].promise - - // manually invoke original eventDispatcher callback - const internalDispatchCall = ((originalEventDispatcher.dispatchEvent as unknown) as MockInstance) - .mock.calls[0] - - // assert that the original dispatch function was called with the request - expect( - (originalEventDispatcher.dispatchEvent as unknown) as MockInstance, - ).toBeCalledTimes(1) - expect(internalDispatchCall[0]).toEqual(eventV1Request) - - expect(store.values()).toHaveLength(0) - }) - }) - }) - - describe('sendPendingEvents', () => { - describe('when no pending events are in the store', () => { - it('should not invoked dispatch', () => { - expect(store.values()).toHaveLength(0) - - pendingEventsDispatcher.sendPendingEvents() - expect(originalEventDispatcher.dispatchEvent).not.toHaveBeenCalled() - }) - }) - - describe('when there are multiple pending events in the store', () => { - it('should dispatch all of the pending events, and remove them from store', async () => { - expect(store.values()).toHaveLength(0) - - const eventV1Request1: EventV1Request = { - url: 'http://cdn.com', - httpVerb: 'POST', - params: ({ id: 'event1' } as unknown) as EventV1, - } - - const eventV1Request2: EventV1Request = { - url: 'http://cdn.com', - httpVerb: 'POST', - params: ({ id: 'event2' } as unknown) as EventV1, - } - - store.set('uuid1', { - uuid: 'uuid1', - timestamp: 1, - request: eventV1Request1, - }) - store.set('uuid2', { - uuid: 'uuid2', - timestamp: 2, - request: eventV1Request2, - }) - - expect(store.values()).toHaveLength(2) - - pendingEventsDispatcher.sendPendingEvents() - expect(originalEventDispatcher.dispatchEvent).toHaveBeenCalledTimes(2) - - eventDispatcherResponses[0].resolve({ statusCode: 200 }) - eventDispatcherResponses[1].resolve({ statusCode: 200 }) - await Promise.all([eventDispatcherResponses[0].promise, eventDispatcherResponses[1].promise]) - expect(store.values()).toHaveLength(0) - }) - }) - }) -}) diff --git a/tests/pendingEventsStore.spec.ts b/tests/pendingEventsStore.spec.ts deleted file mode 100644 index 9c255b118..000000000 --- a/tests/pendingEventsStore.spec.ts +++ /dev/null @@ -1,143 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { describe, beforeEach, afterEach, it, expect, vi, MockInstance } from 'vitest'; - -import { LocalStorageStore } from '../lib/event_processor/pendingEventsStore' - -type TestEntry = { - uuid: string - timestamp: number - value: string -} - -describe('LocalStorageStore', () => { - let store: LocalStorageStore - beforeEach(() => { - store = new LocalStorageStore({ - key: 'test_key', - maxValues: 3, - }) - }) - - afterEach(() => { - localStorage.clear() - }) - - it('should get, set and remove items', () => { - store.set('1', { - uuid: '1', - timestamp: 1, - value: 'first', - }) - - expect(store.get('1')).toEqual({ - uuid: '1', - timestamp: 1, - value: 'first', - }) - - store.set('1', { - uuid: '1', - timestamp: 2, - value: 'second', - }) - - expect(store.get('1')).toEqual({ - uuid: '1', - timestamp: 2, - value: 'second', - }) - - expect(store.values()).toHaveLength(1) - - store.remove('1') - - expect(store.values()).toHaveLength(0) - }) - - it('should allow replacement of the entire map', () => { - store.set('1', { - uuid: '1', - timestamp: 1, - value: 'first', - }) - - store.set('2', { - uuid: '2', - timestamp: 2, - value: 'second', - }) - - store.set('3', { - uuid: '3', - timestamp: 3, - value: 'third', - }) - - expect(store.values()).toEqual([ - { uuid: '1', timestamp: 1, value: 'first' }, - { uuid: '2', timestamp: 2, value: 'second' }, - { uuid: '3', timestamp: 3, value: 'third' }, - ]) - - const newMap: { [key: string]: TestEntry } = {} - store.values().forEach(item => { - newMap[item.uuid] = { - ...item, - value: 'new', - } - }) - store.replace(newMap) - - expect(store.values()).toEqual([ - { uuid: '1', timestamp: 1, value: 'new' }, - { uuid: '2', timestamp: 2, value: 'new' }, - { uuid: '3', timestamp: 3, value: 'new' }, - ]) - }) - - it(`shouldn't allow more than the configured maxValues, using timestamp to remove the oldest entries`, () => { - store.set('2', { - uuid: '2', - timestamp: 2, - value: 'second', - }) - - store.set('3', { - uuid: '3', - timestamp: 3, - value: 'third', - }) - - store.set('1', { - uuid: '1', - timestamp: 1, - value: 'first', - }) - - store.set('4', { - uuid: '4', - timestamp: 4, - value: 'fourth', - }) - - expect(store.values()).toEqual([ - { uuid: '2', timestamp: 2, value: 'second' }, - { uuid: '3', timestamp: 3, value: 'third' }, - { uuid: '4', timestamp: 4, value: 'fourth' }, - ]) - }) -}) diff --git a/tests/reactNativeV1EventProcessor.spec.ts b/tests/reactNativeV1EventProcessor.spec.ts deleted file mode 100644 index 995dd6024..000000000 --- a/tests/reactNativeV1EventProcessor.spec.ts +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Copyright 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { describe, beforeEach, it, vi, expect } from 'vitest'; - -vi.mock('@react-native-community/netinfo'); - -vi.mock('../lib/event_processor/reactNativeEventsStore'); - -import { ReactNativeEventsStore } from '../lib/event_processor/reactNativeEventsStore'; -import PersistentKeyValueCache from '../lib/plugins/key_value_cache/persistentKeyValueCache'; -import { LogTierV1EventProcessor } from '../lib/event_processor/index.react_native'; -import { PersistentCacheProvider } from '../lib/shared_types'; - -describe('LogTierV1EventProcessor', () => { - const MockedReactNativeEventsStore = vi.mocked(ReactNativeEventsStore); - - beforeEach(() => { - MockedReactNativeEventsStore.mockClear(); - }); - - it('calls the provided persistentCacheFactory and passes it to the ReactNativeEventStore constructor twice', async () => { - const getFakePersistentCache = () : PersistentKeyValueCache => { - return { - contains(k: string): Promise { - return Promise.resolve(false); - }, - get(key: string): Promise { - return Promise.resolve(undefined); - }, - remove(key: string): Promise { - return Promise.resolve(false); - }, - set(key: string, val: string): Promise { - return Promise.resolve() - } - }; - } - - let call = 0; - const fakeCaches = [getFakePersistentCache(), getFakePersistentCache()]; - const fakePersistentCacheProvider = vi.fn().mockImplementation(() => { - return fakeCaches[call++]; - }); - - const noop = () => {}; - - new LogTierV1EventProcessor({ - dispatcher: { dispatchEvent: () => Promise.resolve({}) }, - persistentCacheProvider: fakePersistentCacheProvider, - }) - - expect(fakePersistentCacheProvider).toHaveBeenCalledTimes(2); - expect(MockedReactNativeEventsStore.mock.calls[0][2] === fakeCaches[0]).toBeTruthy(); - expect(MockedReactNativeEventsStore.mock.calls[1][2] === fakeCaches[1]).toBeTruthy(); - }); -}); diff --git a/tests/requestTracker.spec.ts b/tests/requestTracker.spec.ts deleted file mode 100644 index 10c042a66..000000000 --- a/tests/requestTracker.spec.ts +++ /dev/null @@ -1,65 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { describe, it, expect } from 'vitest'; - -import RequestTracker from '../lib/event_processor/requestTracker' - -describe('requestTracker', () => { - describe('onRequestsComplete', () => { - it('returns an immediately-fulfilled promise when no requests are in flight', async () => { - const tracker = new RequestTracker() - await tracker.onRequestsComplete() - }) - - it('returns a promise that fulfills after in-flight requests are complete', async () => { - let resolveReq1: () => void - const req1 = new Promise(resolve => { - resolveReq1 = resolve - }) - let resolveReq2: () => void - const req2 = new Promise(resolve => { - resolveReq2 = resolve - }) - let resolveReq3: () => void - const req3 = new Promise(resolve => { - resolveReq3 = resolve - }) - - const tracker = new RequestTracker() - tracker.trackRequest(req1) - tracker.trackRequest(req2) - tracker.trackRequest(req3) - - let reqsComplete = false - const reqsCompletePromise = tracker.onRequestsComplete().then(() => { - reqsComplete = true - }) - - resolveReq1!() - await req1 - expect(reqsComplete).toBe(false) - - resolveReq2!() - await req2 - expect(reqsComplete).toBe(false) - - resolveReq3!() - await req3 - await reqsCompletePromise - expect(reqsComplete).toBe(true) - }) - }) -}) diff --git a/tests/v1EventProcessor.react_native.spec.ts b/tests/v1EventProcessor.react_native.spec.ts deleted file mode 100644 index d0fccc4b0..000000000 --- a/tests/v1EventProcessor.react_native.spec.ts +++ /dev/null @@ -1,891 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { describe, beforeEach, afterEach, it, vi, expect, Mock } from 'vitest'; - -vi.mock('@react-native-community/netinfo'); -vi.mock('@react-native-async-storage/async-storage'); - -import { NotificationSender } from '../lib/core/notification_center' -import { NOTIFICATION_TYPES } from '../lib/utils/enums' - -import { LogTierV1EventProcessor } from '../lib/event_processor/v1/v1EventProcessor.react_native' -import { - EventDispatcher, - EventV1Request, - EventDispatcherResponse, -} from '../lib/event_processor/eventDispatcher' -import { EventProcessor, ProcessableEvent } from '../lib/event_processor/eventProcessor' -import { buildImpressionEventV1, makeBatchedEventV1 } from '../lib/event_processor/v1/buildEventV1' -import AsyncStorage from '../__mocks__/@react-native-async-storage/async-storage' -import { triggerInternetState } from '../__mocks__/@react-native-community/netinfo' -import { DefaultEventQueue } from '../lib/event_processor/eventQueue' -import { resolvablePromise, ResolvablePromise } from '../lib/utils/promise/resolvablePromise'; - -function createImpressionEvent() { - return { - type: 'impression' as 'impression', - timestamp: 69, - uuid: 'uuid', - - context: { - accountId: 'accountId', - projectId: 'projectId', - clientName: 'node-sdk', - clientVersion: '3.0.0', - revision: '1', - botFiltering: true, - anonymizeIP: true, - }, - - user: { - id: 'userId', - attributes: [{ entityId: 'attr1-id', key: 'attr1-key', value: 'attr1-value' }], - }, - - layer: { - id: 'layerId', - }, - - experiment: { - id: 'expId', - key: 'expKey', - }, - - variation: { - id: 'varId', - key: 'varKey', - }, - - ruleKey: 'expKey', - flagKey: 'flagKey1', - ruleType: 'experiment', - enabled: false, - } -} - -function createConversionEvent() { - return { - type: 'conversion' as 'conversion', - timestamp: 69, - uuid: 'uuid', - - context: { - accountId: 'accountId', - projectId: 'projectId', - clientName: 'node-sdk', - clientVersion: '3.0.0', - revision: '1', - botFiltering: true, - anonymizeIP: true, - }, - - user: { - id: 'userId', - attributes: [{ entityId: 'attr1-id', key: 'attr1-key', value: 'attr1-value' }], - }, - - event: { - id: 'event-id', - key: 'event-key', - }, - - tags: { - foo: 'bar', - value: '123', - revenue: '1000', - }, - - revenue: 1000, - value: 123, - } -} - -describe('LogTierV1EventProcessorReactNative', () => { - describe('New Events', () => { - let stubDispatcher: EventDispatcher - let dispatchStub: Mock - - beforeEach(() => { - dispatchStub = vi.fn().mockResolvedValue({ statusCode: 200 }) - - stubDispatcher = { - dispatchEvent: dispatchStub, - } - }) - - afterEach(() => { - vi.resetAllMocks() - AsyncStorage.clearStore() - }) - - describe('stop()', () => { - let resolvableResponse: ResolvablePromise - beforeEach(async () => { - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - resolvableResponse = resolvablePromise() - return resolvableResponse.promise - }, - } - }) - - it('should return a resolved promise when there is nothing in queue', async () => { - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - - await processor.start() - - await processor.stop() - }) - - it('should return a promise that is resolved when the dispatcher callback returns a 200 response', async () => { - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - await processor.start() - const impressionEvent = 
createImpressionEvent() - processor.process(impressionEvent) - - await new Promise(resolve => setTimeout(resolve, 150)) - - resolvableResponse.resolve({ statusCode: 200 }) - }) - - it('should return a promise that is resolved when the dispatcher callback returns a 400 response', async () => { - // This test is saying that even if the request fails to send but - // the `dispatcher` yielded control back, then the `.stop()` promise should be resolved - let responsePromise: ResolvablePromise - stubDispatcher = { - dispatchEvent(event: EventV1Request): Promise { - dispatchStub(event) - responsePromise = resolvablePromise() - return responsePromise.promise; - }, - } - - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - await processor.start() - - const impressionEvent = createImpressionEvent() - processor.process(impressionEvent) - - await new Promise(resolve => setTimeout(resolve, 150)) - - resolvableResponse.resolve({ statusCode: 400 }) - }) - - it('should return a promise when multiple event batches are sent', async () => { - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - return Promise.resolve({ statusCode: 200 }) - }, - } - - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - - await processor.start() - - const impressionEvent1 = createImpressionEvent() - const impressionEvent2 = createImpressionEvent() - impressionEvent2.context.revision = '2' - processor.process(impressionEvent1) - processor.process(impressionEvent2) - - await new Promise(resolve => setTimeout(resolve, 150)) - await processor.stop() - expect(dispatchStub).toBeCalledTimes(2) - }) - - it('should stop accepting events after stop is called', async () => { - const dispatcher = { - dispatchEvent: vi.fn((event: EventV1Request) => { - return new Promise(resolve => { - setTimeout(() => resolve({ statusCode: 204 }), 0) - }) - }) - } - const processor = new LogTierV1EventProcessor({ - dispatcher, - flushInterval: 100, - batchSize: 3, - }) - await processor.start() - - const impressionEvent1 = createImpressionEvent() - processor.process(impressionEvent1) - await new Promise(resolve => setTimeout(resolve, 150)) - - await processor.stop() - // calling stop should haver flushed the current batch of size 1 - expect(dispatcher.dispatchEvent).toBeCalledTimes(1) - - dispatcher.dispatchEvent.mockClear(); - - // From now on, subsequent events should be ignored. - // Process 3 more, which ordinarily would have triggered - // a flush due to the batch size. - const impressionEvent2 = createImpressionEvent() - processor.process(impressionEvent2) - const impressionEvent3 = createImpressionEvent() - processor.process(impressionEvent3) - const impressionEvent4 = createImpressionEvent() - processor.process(impressionEvent4) - // Since we already stopped the processor, the dispatcher should - // not have been called again. 
- await new Promise(resolve => setTimeout(resolve, 150)) - expect(dispatcher.dispatchEvent).toBeCalledTimes(0) - }) - }) - - describe('when batchSize = 1', () => { - let processor: EventProcessor - beforeEach(async () => { - processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - await processor.start() - }) - - afterEach(async () => { - await processor.stop() - }) - - it('should immediately flush events as they are processed', async () => { - const impressionEvent = createImpressionEvent() - processor.process(impressionEvent) - - await new Promise(resolve => setTimeout(resolve, 50)) - - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: buildImpressionEventV1(impressionEvent), - }) - }) - }) - - describe('when batchSize = 3, flushInterval = 300', () => { - let processor: EventProcessor - beforeEach(async () => { - processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 300, - batchSize: 3, - }) - await processor.start() - }) - - afterEach(async () => { - await processor.stop() - }) - - it('should wait until 3 events to be in the queue before it flushes', async () => { - const impressionEvent1 = createImpressionEvent() - const impressionEvent2 = createImpressionEvent() - const impressionEvent3 = createImpressionEvent() - - processor.process(impressionEvent1) - processor.process(impressionEvent2) - - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toHaveBeenCalledTimes(0) - - processor.process(impressionEvent3) - - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([ - impressionEvent1, - impressionEvent2, - impressionEvent3, - ]), - }) - }) - - it('should flush the current batch when it receives an event with a different context revision than the current batch', async () => { - const impressionEvent1 = createImpressionEvent() - const conversionEvent = createConversionEvent() - const impressionEvent2 = createImpressionEvent() - - // createImpressionEvent and createConversionEvent create events with revision '1' - // We modify this one's revision to '2' in order to test that the queue is flushed - // when an event with a different revision is processed. 
- impressionEvent2.context.revision = '2' - - processor.process(impressionEvent1) - processor.process(conversionEvent) - - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toHaveBeenCalledTimes(0) - - processor.process(impressionEvent2) - - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent1, conversionEvent]), - }) - }) - - it('should flush the current batch when it receives an event with a different context projectId than the current batch', async () => { - const impressionEvent1 = createImpressionEvent() - const conversionEvent = createConversionEvent() - const impressionEvent2 = createImpressionEvent() - - impressionEvent2.context.projectId = 'projectId2' - - processor.process(impressionEvent1) - processor.process(conversionEvent) - - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toHaveBeenCalledTimes(0) - - processor.process(impressionEvent2) - - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent1, conversionEvent]), - }) - }) - - it('should flush the queue when the flush interval happens', async () => { - const impressionEvent1 = createImpressionEvent() - - processor.process(impressionEvent1) - - expect(dispatchStub).toHaveBeenCalledTimes(0) - - await new Promise(resolve => setTimeout(resolve, 350)) - - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent1]), - }) - - processor.process(createImpressionEvent()) - processor.process(createImpressionEvent()) - // flushing should reset queue, at this point only has two events - expect(dispatchStub).toHaveBeenCalledTimes(1) - - // clear the async storate cache to ensure next tests - // works correctly - await new Promise(resolve => setTimeout(resolve, 400)) - }) - }) - - describe('when a notification center is provided', () => { - it('should trigger a notification when the event dispatcher dispatches an event', async () => { - const dispatcher: EventDispatcher = { - dispatchEvent: vi.fn().mockResolvedValue({ statusCode: 200 }) - } - - const notificationCenter: NotificationSender = { - sendNotifications: vi.fn() - } - - const processor = new LogTierV1EventProcessor({ - dispatcher, - notificationCenter, - batchSize: 1, - }) - await processor.start() - - const impressionEvent = createImpressionEvent() - processor.process(impressionEvent) - - await new Promise(resolve => setTimeout(resolve, 150)) - expect(notificationCenter.sendNotifications).toBeCalledTimes(1) - const event = (dispatcher.dispatchEvent as Mock).mock.calls[0][0] - expect(notificationCenter.sendNotifications).toBeCalledWith(NOTIFICATION_TYPES.LOG_EVENT, event) - }) - }) - - describe('invalid batchSize', () => { - it('should ignore a batchSize of 0 and use the default', async () => { - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 30000, - batchSize: 0, - }) - await processor.start() - - const impressionEvent1 = createImpressionEvent() - processor.process(impressionEvent1) - - await new Promise(resolve => 
setTimeout(resolve, 150)) - expect(dispatchStub).toHaveBeenCalledTimes(0) - const impressionEvents = [impressionEvent1] - for (let i = 0; i < 9; i++) { - const evt = createImpressionEvent() - processor.process(evt) - impressionEvents.push(evt) - } - - await new Promise(resolve => setTimeout(resolve, 150)) - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1(impressionEvents), - }) - }) - }) - }) - - describe('Pending Events', () => { - let stubDispatcher: EventDispatcher - let dispatchStub: Mock - - beforeEach(() => { - dispatchStub = vi.fn() - }) - - afterEach(() => { - vi.clearAllMocks() - AsyncStorage.clearStore() - }) - - describe('Retry Pending Events', () => { - describe('App start', () => { - it('should dispatch all the pending events in correct order', async () => { - let receivedEvents: EventV1Request[] = [] - - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - return Promise.resolve({ statusCode: 400 }) - }, - } - - let processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - - await processor.start() - let event1 = createConversionEvent() - event1.user.id = 'user1' - let event2 = createConversionEvent() - event2.user.id = 'user2' - let event3 = createConversionEvent() - event3.user.id = 'user3' - let event4 = createConversionEvent() - event4.user.id = 'user4' - - processor.process(event1) - processor.process(event2) - processor.process(event3) - processor.process(event4) - - await new Promise(resolve => setTimeout(resolve, 100)) - - expect(dispatchStub).toBeCalledTimes(4) - - await processor.stop() - - vi.clearAllMocks() - - receivedEvents = [] - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - receivedEvents.push(event) - dispatchStub(event) - return Promise.resolve({ statusCode: 200 }) - }, - } - - processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - - await processor.start() - - receivedEvents.forEach((e, i) => { - expect(e.params.visitors[0].visitor_id).toEqual(`user${i+1}`) - }) - - expect(dispatchStub).toBeCalledTimes(4) - - await processor.stop() - }) - - it('should process all the events left in buffer when the app closed last time', async () => { - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - return Promise.resolve({ statusCode: 200 }) - }, - } - - let processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 1000, - batchSize: 4, - }) - - await processor.start() - let event1 = createConversionEvent() - event1.user.id = 'user1' - event1.uuid = 'user1' - let event2 = createConversionEvent() - event2.user.id = 'user2' - event2.uuid = 'user2' - - processor.process(event1) - processor.process(event2) - - await new Promise(resolve => setTimeout(resolve, 100)) - - // Explicitly stopping the timer to simulate app close - ;(processor.queue as DefaultEventQueue).timer.stop() - - let receivedEvents: EventV1Request[] = [] - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - receivedEvents.push(event) - dispatchStub(event) - return Promise.resolve({ statusCode: 200 }) - }, - } - - processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 4, - }) - - await processor.start() - - await new Promise(resolve => setTimeout(resolve, 150)) - 
expect(dispatchStub).toBeCalledTimes(1) - expect(receivedEvents.length).toEqual(1) - const receivedEvent = receivedEvents[0] - - receivedEvent.params.visitors.forEach((v, i) => { - expect(v.visitor_id).toEqual(`user${i+1}`) - }) - - await processor.stop() - }) - - it('should dispatch pending events first and then process events in buffer store', async () => { - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - return Promise.resolve({ statusCode: 400 }) - }, - } - - let processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 300, - batchSize: 3, - }) - - await processor.start() - - for (let i = 0; i < 8; i++) { - let event = createConversionEvent() - event.user.id = `user${i}` - event.uuid = `user${i}` - processor.process(event) - } - - await new Promise(resolve => setTimeout(resolve, 50)) - - expect(dispatchStub).toBeCalledTimes(2) - - ;(processor.queue as DefaultEventQueue).timer.stop() - - vi.clearAllMocks() - - const visitorIds: string[] = [] - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - event.params.visitors.forEach(visitor => visitorIds.push(visitor.visitor_id)) - return Promise.resolve({ statusCode: 200 }) - }, - } - - processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 200, - batchSize: 3, - }) - - await processor.start() - - expect(dispatchStub).toBeCalledTimes(2) - - await new Promise(resolve => setTimeout(resolve, 250)) - expect(visitorIds.length).toEqual(8) - expect(visitorIds).toEqual(['user0', 'user1', 'user2', 'user3', 'user4', 'user5', 'user6', 'user7']) - }) - }) - - describe('When a new event is dispatched', () => { - it('should dispatch all the pending events first and then new event in correct order', async () => { - let receivedVisitorIds: string[] = [] - let dispatchCount = 0 - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - dispatchCount++ - if (dispatchCount > 4) { - event.params.visitors.forEach(visitor => receivedVisitorIds.push(visitor.visitor_id)) - return Promise.resolve({ statusCode: 200 }) - } else { - return Promise.resolve({ statusCode: 400 }) - } - }, - } - - let processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - - await processor.start() - let event1 = createConversionEvent() - event1.user.id = event1.uuid = 'user1' - let event2 = createConversionEvent() - event2.user.id = event2.uuid = 'user2' - let event3 = createConversionEvent() - event3.user.id = event3.uuid = 'user3' - let event4 = createConversionEvent() - event4.user.id = event4.uuid = 'user4' - - processor.process(event1) - processor.process(event2) - processor.process(event3) - processor.process(event4) - - await new Promise(resolve => setTimeout(resolve, 100)) - - // Four events will return response code 400 which means only the first pending event will be tried each time and rest will be skipped - expect(dispatchStub).toBeCalledTimes(4) - - vi.resetAllMocks() - - let event5 = createConversionEvent() - event5.user.id = event5.uuid = 'user5' - - processor.process(event5) - - await new Promise(resolve => setTimeout(resolve, 100)) - expect(dispatchStub).toBeCalledTimes(5) - expect(receivedVisitorIds).toEqual(['user1', 'user2', 'user3', 'user4', 'user5']) - await processor.stop() - }) - - it('should skip dispatching subsequent events if an event fails to dispatch', async () => { - let receivedVisitorIds: string[] = [] - let dispatchCount = 0 - 
stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - dispatchCount++ - event.params.visitors.forEach(visitor => receivedVisitorIds.push(visitor.visitor_id)) - return Promise.resolve({ statusCode: 400 }) - }, - } - - let processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - - await processor.start() - let event1 = createConversionEvent() - event1.user.id = event1.uuid = 'user1' - let event2 = createConversionEvent() - event2.user.id = event2.uuid = 'user2' - let event3 = createConversionEvent() - event3.user.id = event3.uuid = 'user3' - let event4 = createConversionEvent() - event4.user.id = event4.uuid = 'user4' - - processor.process(event1) - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toBeCalledTimes(1) - - processor.process(event2) - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toBeCalledTimes(2) - - processor.process(event3) - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toBeCalledTimes(3) - - processor.process(event4) - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toBeCalledTimes(4) - - expect(dispatchCount).toEqual(4) - - // subsequent events were skipped with each attempt because of request failure - expect(receivedVisitorIds).toEqual(['user1', 'user1', 'user1', 'user1']) - await processor.stop() - }) - }) - - describe('When internet connection is restored', () => { - it('should dispatch all the pending events in correct order when internet connection is restored', async () => { - let receivedVisitorIds: string[] = [] - let dispatchCount = 0 - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - dispatchCount++ - if (dispatchCount > 4) { - event.params.visitors.forEach(visitor => receivedVisitorIds.push(visitor.visitor_id)) - return Promise.resolve({ statusCode: 200 }) - } else { - return Promise.resolve({ statusCode: 400 }) - } - }, - } - - let processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - - await processor.start() - triggerInternetState(false) - let event1 = createConversionEvent() - event1.user.id = event1.uuid = 'user1' - let event2 = createConversionEvent() - event2.user.id = event2.uuid = 'user2' - let event3 = createConversionEvent() - event3.user.id = event3.uuid = 'user3' - let event4 = createConversionEvent() - event4.user.id = event4.uuid = 'user4' - - processor.process(event1) - processor.process(event2) - processor.process(event3) - processor.process(event4) - - await new Promise(resolve => setTimeout(resolve, 50)) - - // Four events will return response code 400 which means only the first pending event will be tried each time and rest will be skipped - expect(dispatchStub).toBeCalledTimes(4) - - vi.resetAllMocks() - - triggerInternetState(true) - await new Promise(resolve => setTimeout(resolve, 50)) - expect(dispatchStub).toBeCalledTimes(4) - expect(receivedVisitorIds).toEqual(['user1', 'user2', 'user3', 'user4']) - await processor.stop() - }) - - it('should not dispatch duplicate events if internet is lost and restored twice in a short interval', async () => { - let receivedVisitorIds: string[] = [] - let dispatchCount = 0 - stubDispatcher = { - dispatchEvent(event: EventV1Request) { - dispatchStub(event) - dispatchCount++ - if (dispatchCount > 4) { - event.params.visitors.forEach(visitor => receivedVisitorIds.push(visitor.visitor_id)) - return 
Promise.resolve({ statusCode: 200 }) - } else { - return Promise.resolve({ statusCode: 400 }) - } - }, - } - - let processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - - await processor.start() - triggerInternetState(false) - let event1 = createConversionEvent() - event1.user.id = event1.uuid = 'user1' - let event2 = createConversionEvent() - event2.user.id = event2.uuid = 'user2' - let event3 = createConversionEvent() - event3.user.id = event3.uuid = 'user3' - let event4 = createConversionEvent() - event4.user.id = event4.uuid = 'user4' - - processor.process(event1) - processor.process(event2) - processor.process(event3) - processor.process(event4) - - await new Promise(resolve => setTimeout(resolve, 100)) - - // Four events will return response code 400 which means only the first pending event will be tried each time and rest will be skipped - expect(dispatchStub).toBeCalledTimes(4) - - vi.resetAllMocks() - - triggerInternetState(true) - triggerInternetState(false) - triggerInternetState(true) - triggerInternetState(false) - triggerInternetState(true) - - await new Promise(resolve => setTimeout(resolve, 100)) - expect(dispatchStub).toBeCalledTimes(4) - expect(receivedVisitorIds).toEqual(['user1', 'user2', 'user3', 'user4']) - await processor.stop() - }) - }) - }) - }) -}) diff --git a/tests/v1EventProcessor.spec.ts b/tests/v1EventProcessor.spec.ts deleted file mode 100644 index bd7333bee..000000000 --- a/tests/v1EventProcessor.spec.ts +++ /dev/null @@ -1,582 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { describe, beforeEach, afterEach, it, vi, expect, Mock } from 'vitest'; - -import { LogTierV1EventProcessor } from '../lib/event_processor/v1/v1EventProcessor' -import { - EventDispatcher, - EventV1Request, - EventDispatcherResponse, -} from '../lib/event_processor/eventDispatcher' -import { EventProcessor } from '../lib/event_processor/eventProcessor' -import { buildImpressionEventV1, makeBatchedEventV1 } from '../lib/event_processor/v1/buildEventV1' -import { NotificationCenter, NotificationSender } from '../lib/core/notification_center' -import { NOTIFICATION_TYPES } from '../lib/utils/enums' -import { resolvablePromise, ResolvablePromise } from '../lib/utils/promise/resolvablePromise'; - -function createImpressionEvent() { - return { - type: 'impression' as 'impression', - timestamp: 69, - uuid: 'uuid', - - context: { - accountId: 'accountId', - projectId: 'projectId', - clientName: 'node-sdk', - clientVersion: '3.0.0', - revision: '1', - botFiltering: true, - anonymizeIP: true, - }, - - user: { - id: 'userId', - attributes: [{ entityId: 'attr1-id', key: 'attr1-key', value: 'attr1-value' }], - }, - - layer: { - id: 'layerId', - }, - - experiment: { - id: 'expId', - key: 'expKey', - }, - - variation: { - id: 'varId', - key: 'varKey', - }, - - ruleKey: 'expKey', - flagKey: 'flagKey1', - ruleType: 'experiment', - enabled: true, - } -} - -function createConversionEvent() { - return { - type: 'conversion' as 'conversion', - timestamp: 69, - uuid: 'uuid', - - context: { - accountId: 'accountId', - projectId: 'projectId', - clientName: 'node-sdk', - clientVersion: '3.0.0', - revision: '1', - botFiltering: true, - anonymizeIP: true, - }, - - user: { - id: 'userId', - attributes: [{ entityId: 'attr1-id', key: 'attr1-key', value: 'attr1-value' }], - }, - - event: { - id: 'event-id', - key: 'event-key', - }, - - tags: { - foo: 'bar', - value: '123', - revenue: '1000', - }, - - revenue: 1000, - value: 123, - } -} - -describe('LogTierV1EventProcessor', () => { - let stubDispatcher: EventDispatcher - let dispatchStub: Mock - // TODO change this to ProjectConfig when js-sdk-models is available - let testProjectConfig: any - - beforeEach(() => { - vi.useFakeTimers() - - testProjectConfig = {} - dispatchStub = vi.fn() - - stubDispatcher = { - dispatchEvent(event: EventV1Request): Promise { - dispatchStub(event) - return Promise.resolve({ statusCode: 200 }) - }, - } - }) - - afterEach(() => { - vi.resetAllMocks() - }) - - describe('stop()', () => { - let resposePromise: ResolvablePromise - beforeEach(() => { - stubDispatcher = { - dispatchEvent(event: EventV1Request): Promise { - dispatchStub(event) - return Promise.resolve({ statusCode: 200 }) - }, - } - stubDispatcher = { - dispatchEvent(event: EventV1Request): Promise { - dispatchStub(event) - resposePromise = resolvablePromise() - return resposePromise.promise - }, - } - }) - - it('should return a resolved promise when there is nothing in queue', () => - new Promise((done) => { - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - - processor.stop().then(() => { - done() - }) - }) - ) - - it('should return a promise that is resolved when the dispatcher callback returns a 200 response', () => - new Promise((done) => { - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - processor.start() - - const impressionEvent = createImpressionEvent() - processor.process(impressionEvent) - - 
processor.stop().then(() => { - done() - }) - - resposePromise.resolve({ statusCode: 200 }) - }) - ) - - it('should return a promise that is resolved when the dispatcher callback returns a 400 response', () => - new Promise((done) => { - // This test is saying that even if the request fails to send but - // the `dispatcher` yielded control back, then the `.stop()` promise should be resolved - stubDispatcher = { - dispatchEvent(event: EventV1Request): Promise { - dispatchStub(event) - resposePromise = resolvablePromise() - return Promise.resolve({statusCode: 400}) - }, - } - - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - processor.start() - - const impressionEvent = createImpressionEvent() - processor.process(impressionEvent) - - processor.stop().then(() => { - done() - }) - }) - ) - - it('should return a promise when multiple event batches are sent', () => - new Promise((done) => { - stubDispatcher = { - dispatchEvent(event: EventV1Request): Promise { - dispatchStub(event) - return Promise.resolve({ statusCode: 200 }) - }, - } - - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 100, - }) - processor.start() - - const impressionEvent1 = createImpressionEvent() - const impressionEvent2 = createImpressionEvent() - impressionEvent2.context.revision = '2' - processor.process(impressionEvent1) - processor.process(impressionEvent2) - - processor.stop().then(() => { - expect(dispatchStub).toBeCalledTimes(2) - done() - }) - }) - ) - - it('should stop accepting events after stop is called', () => { - const dispatcher = { - dispatchEvent: vi.fn((event: EventV1Request) => { - return new Promise((resolve) => { - setTimeout(() => resolve({ statusCode: 204 }), 0) - }) - }) - } - const processor = new LogTierV1EventProcessor({ - dispatcher, - flushInterval: 100, - batchSize: 3, - }) - processor.start() - - const impressionEvent1 = createImpressionEvent() - processor.process(impressionEvent1) - processor.stop() - // calling stop should haver flushed the current batch of size 1 - expect(dispatcher.dispatchEvent).toBeCalledTimes(1) - - dispatcher.dispatchEvent.mockClear(); - - // From now on, subsequent events should be ignored. - // Process 3 more, which ordinarily would have triggered - // a flush due to the batch size. - const impressionEvent2 = createImpressionEvent() - processor.process(impressionEvent2) - const impressionEvent3 = createImpressionEvent() - processor.process(impressionEvent3) - const impressionEvent4 = createImpressionEvent() - processor.process(impressionEvent4) - // Since we already stopped the processor, the dispatcher should - // not have been called again. 
- expect(dispatcher.dispatchEvent).toBeCalledTimes(0) - }) - - it('should resolve the stop promise after all dispatcher requests are done', async () => { - const dispatchPromises: Array> = [] - const dispatcher = { - dispatchEvent: vi.fn((event: EventV1Request) => { - const response = resolvablePromise(); - dispatchPromises.push(response); - return response.promise; - }) - } - - const processor = new LogTierV1EventProcessor({ - dispatcher, - flushInterval: 100, - batchSize: 2, - }) - processor.start() - - for (let i = 0; i < 4; i++) { - processor.process(createImpressionEvent()) - } - expect(dispatchPromises.length).toBe(2) - - let stopPromiseResolved = false - const stopPromise = processor.stop().then(() => { - stopPromiseResolved = true - }) - expect(stopPromiseResolved).toBe(false) - - dispatchPromises[0].resolve({ statusCode: 204 }) - vi.advanceTimersByTime(100) - expect(stopPromiseResolved).toBe(false) - dispatchPromises[1].resolve({ statusCode: 204 }) - await stopPromise - expect(stopPromiseResolved).toBe(true) - }) - - it('should use the provided closingDispatcher to dispatch events on stop', async () => { - const dispatcher = { - dispatchEvent: vi.fn(), - } - - const closingDispatcher = { - dispatchEvent: vi.fn(), - } - - const processor = new LogTierV1EventProcessor({ - dispatcher, - closingDispatcher, - flushInterval: 100000, - batchSize: 20, - }); - - processor.start() - - const events : any = []; - - for (let i = 0; i < 4; i++) { - const event = createImpressionEvent(); - processor.process(event); - events.push(event); - } - - processor.stop(); - vi.runAllTimers(); - - expect(dispatcher.dispatchEvent).not.toHaveBeenCalled(); - expect(closingDispatcher.dispatchEvent).toHaveBeenCalledTimes(1); - - const [data] = closingDispatcher.dispatchEvent.mock.calls[0]; - expect(data.params).toEqual(makeBatchedEventV1(events)); - }) - }) - - describe('when batchSize = 1', () => { - let processor: EventProcessor - beforeEach(() => { - processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 1, - }) - processor.start() - }) - - afterEach(() => { - processor.stop() - }) - - it('should immediately flush events as they are processed', () => { - const impressionEvent = createImpressionEvent() - processor.process(impressionEvent) - - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: buildImpressionEventV1(impressionEvent), - }) - }) - }) - - describe('when batchSize = 3, flushInterval = 100', () => { - let processor: EventProcessor - beforeEach(() => { - processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 100, - batchSize: 3, - }) - processor.start() - }) - - afterEach(() => { - processor.stop() - }) - - it('should wait until 3 events to be in the queue before it flushes', () => { - const impressionEvent1 = createImpressionEvent() - const impressionEvent2 = createImpressionEvent() - const impressionEvent3 = createImpressionEvent() - - processor.process(impressionEvent1) - processor.process(impressionEvent2) - - expect(dispatchStub).toHaveBeenCalledTimes(0) - - processor.process(impressionEvent3) - - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([ - impressionEvent1, - impressionEvent2, - impressionEvent3, - ]), - }) - }) - - it('should flush the current 
batch when it receives an event with a different context revision than the current batch', async () => { - const impressionEvent1 = createImpressionEvent() - const conversionEvent = createConversionEvent() - const impressionEvent2 = createImpressionEvent() - - // createImpressionEvent and createConversionEvent create events with revision '1' - // We modify this one's revision to '2' in order to test that the queue is flushed - // when an event with a different revision is processed. - impressionEvent2.context.revision = '2' - - processor.process(impressionEvent1) - processor.process(conversionEvent) - - expect(dispatchStub).toHaveBeenCalledTimes(0) - - processor.process(impressionEvent2) - - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent1, conversionEvent]), - }) - - await processor.stop() - - expect(dispatchStub).toHaveBeenCalledTimes(2) - - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent2]), - }) - }) - - it('should flush the current batch when it receives an event with a different context projectId than the current batch', async () => { - const impressionEvent1 = createImpressionEvent() - const conversionEvent = createConversionEvent() - const impressionEvent2 = createImpressionEvent() - - impressionEvent2.context.projectId = 'projectId2' - - processor.process(impressionEvent1) - processor.process(conversionEvent) - - expect(dispatchStub).toHaveBeenCalledTimes(0) - - processor.process(impressionEvent2) - - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent1, conversionEvent]), - }) - - await processor.stop() - - expect(dispatchStub).toHaveBeenCalledTimes(2) - - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent2]), - }) - }) - - it('should flush the queue when the flush interval happens', () => { - const impressionEvent1 = createImpressionEvent() - - processor.process(impressionEvent1) - - expect(dispatchStub).toHaveBeenCalledTimes(0) - - vi.advanceTimersByTime(100) - - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent1]), - }) - - processor.process(createImpressionEvent()) - processor.process(createImpressionEvent()) - // flushing should reset queue, at this point only has two events - expect(dispatchStub).toHaveBeenCalledTimes(1) - }) - - }) - - describe('when a notification center is provided', () => { - it('should trigger a notification when the event dispatcher dispatches an event', async () => { - const dispatcher: EventDispatcher = { - dispatchEvent: vi.fn().mockResolvedValue({ statusCode: 200 }) - } - - const notificationCenter: NotificationSender = { - sendNotifications: vi.fn() - } - - const processor = new LogTierV1EventProcessor({ - dispatcher, - notificationCenter, - batchSize: 1, - }) - await processor.start() - - const impressionEvent1 = createImpressionEvent() - processor.process(impressionEvent1) - - expect(notificationCenter.sendNotifications).toBeCalledTimes(1) - const event = 
(dispatcher.dispatchEvent as Mock).mock.calls[0][0] - expect(notificationCenter.sendNotifications).toBeCalledWith(NOTIFICATION_TYPES.LOG_EVENT, event) - }) - }) - - describe('invalid flushInterval or batchSize', () => { - it('should ignore a flushInterval of 0 and use the default', () => { - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 0, - batchSize: 10, - }) - processor.start() - - const impressionEvent1 = createImpressionEvent() - processor.process(impressionEvent1) - expect(dispatchStub).toHaveBeenCalledTimes(0) - vi.advanceTimersByTime(30000) - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1([impressionEvent1]), - }) - }) - - it('should ignore a batchSize of 0 and use the default', () => { - const processor = new LogTierV1EventProcessor({ - dispatcher: stubDispatcher, - flushInterval: 30000, - batchSize: 0, - }) - processor.start() - - const impressionEvent1 = createImpressionEvent() - processor.process(impressionEvent1) - expect(dispatchStub).toHaveBeenCalledTimes(0) - const impressionEvents = [impressionEvent1] - for (let i = 0; i < 9; i++) { - const evt = createImpressionEvent() - processor.process(evt) - impressionEvents.push(evt) - } - expect(dispatchStub).toHaveBeenCalledTimes(1) - expect(dispatchStub).toHaveBeenCalledWith({ - url: 'https://logx.optimizely.com/v1/events', - httpVerb: 'POST', - params: makeBatchedEventV1(impressionEvents), - }) - }) - }) -}) diff --git a/vitest.config.mts b/vitest.config.mts index 86e85d2cb..7aa0cc1a8 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/batch_event_processor.react_native.spec.ts'], + include: ['**/event_processor_factory.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 6032ba0375aae6889f9e05749b395d1cf4681ba9 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 19 Nov 2024 20:05:02 +0600 Subject: [PATCH 33/45] factory test --- .../event_processor_factory.spec.ts | 185 +++++++++++++++++- .../event_processor_factory.ts | 4 +- 2 files changed, 184 insertions(+), 5 deletions(-) diff --git a/lib/event_processor/event_processor_factory.spec.ts b/lib/event_processor/event_processor_factory.spec.ts index a9683e7d6..ec79c6b45 100644 --- a/lib/event_processor/event_processor_factory.spec.ts +++ b/lib/event_processor/event_processor_factory.spec.ts @@ -1,7 +1,9 @@ import { describe, it, expect, beforeEach, vi, MockInstance } from 'vitest'; -import { DEFAULT_MAX_BACKOFF, DEFAULT_MIN_BACKOFF, getBatchEventProcessor } from './event_processor_factory'; -import { BatchEventProcessor, BatchEventProcessorConfig } from './batch_event_processor'; +import { DEFAULT_EVENT_BATCH_SIZE, DEFAULT_EVENT_FLUSH_INTERVAL, DEFAULT_MAX_BACKOFF, DEFAULT_MIN_BACKOFF, getBatchEventProcessor } from './event_processor_factory'; +import { BatchEventProcessor, BatchEventProcessorConfig, EventWithId } from './batch_event_processor'; import { ExponentialBackoff, IntervalRepeater } from '../utils/repeater/repeater'; +import { getMockSyncCache } from '../tests/mock/mock_cache'; +import { LogLevel } from '../modules/logging'; vi.mock('./batch_event_processor'); vi.mock('../utils/repeater/repeater'); @@ -17,10 +19,12 @@ const getMockEventDispatcher = () => { describe('getBatchEventProcessor', () => { const MockBatchEventProcessor = 
vi.mocked(BatchEventProcessor); const MockExponentialBackoff = vi.mocked(ExponentialBackoff); + const MockIntervalRepeater = vi.mocked(IntervalRepeater); beforeEach(() => { MockBatchEventProcessor.mockReset(); MockExponentialBackoff.mockReset(); + MockIntervalRepeater.mockReset(); }); it('returns an instance of BatchEventProcessor if no subclass constructor is provided', () => { @@ -55,6 +59,7 @@ describe('getBatchEventProcessor', () => { }; const processor = getBatchEventProcessor(options); + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig).toBe(undefined); }); @@ -66,6 +71,7 @@ describe('getBatchEventProcessor', () => { let processor = getBatchEventProcessor(options); + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); const usedRetryConfig = MockBatchEventProcessor.mock.calls[0][0].retryConfig; expect(usedRetryConfig).not.toBe(undefined); expect(usedRetryConfig?.backoffProvider).not.toBe(undefined); @@ -80,6 +86,7 @@ describe('getBatchEventProcessor', () => { }; let processor1 = getBatchEventProcessor(options1); + expect(Object.is(processor1, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig?.maxRetries).toBe(10); const options2 = { @@ -88,6 +95,7 @@ describe('getBatchEventProcessor', () => { }; let processor2 = getBatchEventProcessor(options2); + expect(Object.is(processor2, MockBatchEventProcessor.mock.instances[1])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig).not.toBe(undefined); expect(MockBatchEventProcessor.mock.calls[1][0].retryConfig?.maxRetries).toBe(undefined); }); @@ -99,8 +107,9 @@ describe('getBatchEventProcessor', () => { }; let processor = getBatchEventProcessor(options); - const backoffProvider = MockBatchEventProcessor.mock.calls[0][0].retryConfig?.backoffProvider; + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + const backoffProvider = MockBatchEventProcessor.mock.calls[0][0].retryConfig?.backoffProvider; expect(backoffProvider).not.toBe(undefined); const backoff = backoffProvider?.(); expect(Object.is(backoff, MockExponentialBackoff.mock.instances[0])).toBe(true); @@ -114,6 +123,7 @@ describe('getBatchEventProcessor', () => { }; let processor = getBatchEventProcessor(options); + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); const backoffProvider = MockBatchEventProcessor.mock.calls[0][0].retryConfig?.backoffProvider; expect(backoffProvider).not.toBe(undefined); const backoff = backoffProvider?.(); expect(Object.is(backoff, MockExponentialBackoff.mock.instances[0])).toBe(true); expect(MockExponentialBackoff).toHaveBeenNthCalledWith(1, 1000, 2000, 500); }); + + it('uses a IntervalRepeater with default flush interval and adds a startup log if flushInterval is not provided', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + const usedRepeater = MockBatchEventProcessor.mock.calls[0][0].dispatchRepeater; + expect(Object.is(usedRepeater, MockIntervalRepeater.mock.instances[0])).toBe(true); + expect(MockIntervalRepeater).toHaveBeenNthCalledWith(1, DEFAULT_EVENT_FLUSH_INTERVAL); + + const startupLogs = MockBatchEventProcessor.mock.calls[0][0].startupLogs; +
expect(startupLogs).toEqual(expect.arrayContaining([{ + level: LogLevel.WARNING, + message: 'Invalid flushInterval %s, defaulting to %s', + params: [undefined, DEFAULT_EVENT_FLUSH_INTERVAL], + }])); + }); + + it('uses default flush interval and adds a startup log if flushInterval is less than 1', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + flushInterval: -1, + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + const usedRepeater = MockBatchEventProcessor.mock.calls[0][0].dispatchRepeater; + expect(Object.is(usedRepeater, MockIntervalRepeater.mock.instances[0])).toBe(true); + expect(MockIntervalRepeater).toHaveBeenNthCalledWith(1, DEFAULT_EVENT_FLUSH_INTERVAL); + + const startupLogs = MockBatchEventProcessor.mock.calls[0][0].startupLogs; + expect(startupLogs).toEqual(expect.arrayContaining([{ + level: LogLevel.WARNING, + message: 'Invalid flushInterval %s, defaulting to %s', + params: [-1, DEFAULT_EVENT_FLUSH_INTERVAL], + }])); + }); + + it('uses a IntervalRepeater with provided flushInterval and adds no startup log if provided flushInterval is valid', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + flushInterval: 12345, + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + const usedRepeater = MockBatchEventProcessor.mock.calls[0][0].dispatchRepeater; + expect(Object.is(usedRepeater, MockIntervalRepeater.mock.instances[0])).toBe(true); + expect(MockIntervalRepeater).toHaveBeenNthCalledWith(1, 12345); + + const startupLogs = MockBatchEventProcessor.mock.calls[0][0].startupLogs; + expect(startupLogs?.find((log) => log.message === 'Invalid flushInterval %s, defaulting to %s')).toBe(undefined); + }); + + + it('uses default batch size and adds a startup log if batchSize is not provided', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(MockBatchEventProcessor.mock.calls[0][0].batchSize).toBe(DEFAULT_EVENT_BATCH_SIZE); + + const startupLogs = MockBatchEventProcessor.mock.calls[0][0].startupLogs; + expect(startupLogs).toEqual(expect.arrayContaining([{ + level: LogLevel.WARNING, + message: 'Invalid batchSize %s, defaulting to %s', + params: [undefined, DEFAULT_EVENT_BATCH_SIZE], + }])); + }); + + it('uses default size and adds a startup log if provided batchSize is less than 1', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + batchSize: -1, + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(MockBatchEventProcessor.mock.calls[0][0].batchSize).toBe(DEFAULT_EVENT_BATCH_SIZE); + + const startupLogs = MockBatchEventProcessor.mock.calls[0][0].startupLogs; + expect(startupLogs).toEqual(expect.arrayContaining([{ + level: LogLevel.WARNING, + message: 'Invalid batchSize %s, defaulting to %s', + params: [-1, DEFAULT_EVENT_BATCH_SIZE], + }])); + }); + + it('does not use a failedEventRepeater if failedEventRetryInterval is not provided', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor,
MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(MockBatchEventProcessor.mock.calls[0][0].failedEventRepeater).toBe(undefined); + }); + + it('uses a IntervalRepeater with provided failedEventRetryInterval as failedEventRepeater', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + failedEventRetryInterval: 12345, + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(Object.is(MockBatchEventProcessor.mock.calls[0][0].failedEventRepeater, MockIntervalRepeater.mock.instances[1])).toBe(true); + expect(MockIntervalRepeater).toHaveBeenNthCalledWith(2, 12345); + }); + + it('uses the provided eventDispatcher', () => { + const eventDispatcher = getMockEventDispatcher(); + const options = { + eventDispatcher, + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(MockBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); + }); + + it('does not use any closingEventDispatcher if not provided', () => { + const options = { + eventDispatcher: getMockEventDispatcher(), + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(MockBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(undefined); + }); + + it('uses the provided closingEventDispatcher', () => { + const closingEventDispatcher = getMockEventDispatcher(); + const options = { + eventDispatcher: getMockEventDispatcher(), + closingEventDispatcher, + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(MockBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(closingEventDispatcher); + }); + + it('uses the provided eventStore', () => { + const eventStore = getMockSyncCache(); + const options = { + eventDispatcher: getMockEventDispatcher(), + eventStore, + }; + + let processor = getBatchEventProcessor(options); + + expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); + expect(MockBatchEventProcessor.mock.calls[0][0].eventStore).toBe(eventStore); + }); }); diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index 5bd3dd0f6..ca097c66d 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -51,7 +51,7 @@ export const getBatchEventProcessor = ( if (options.flushInterval === undefined || options.flushInterval <= 0) { startupLogs.push({ level: LogLevel.WARNING, - message: 'Invalid eventFlushInterval %s, defaulting to %s', + message: 'Invalid flushInterval %s, defaulting to %s', params: [options.flushInterval, DEFAULT_EVENT_FLUSH_INTERVAL], }); } else { @@ -62,7 +62,7 @@ export const getBatchEventProcessor = ( if (options.batchSize === undefined || options.batchSize <= 0) { startupLogs.push({ level: LogLevel.WARNING, - message: 'Invalid eventBatchSize %s, defaulting to %s', + message: 'Invalid batchSize %s, defaulting to %s', params: [options.batchSize, DEFAULT_EVENT_BATCH_SIZE], }); } else { From 5b0447679d4e4aefc663e4d8482b473968042a32 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Tue, 19 Nov 2024 22:53:12 +0600 Subject: [PATCH 34/45] cleanup --- lib/core/event_builder/build_event_v1.ts | 2 +- lib/core/event_builder/index.ts | 2 +- 
.../batch_event_processor.spec.ts | 8 +- .../default_dispatcher.browser.ts | 2 +- .../default_dispatcher.node.ts | 2 +- lib/event_processor/default_dispatcher.ts | 2 +- lib/event_processor/eventProcessor.ts | 54 --- lib/event_processor/eventQueue.ts | 162 -------- .../event_processor_factory.spec.ts | 34 +- .../forwarding_event_processor.ts | 10 +- lib/event_processor/index.react_native.ts | 23 -- lib/event_processor/index.ts | 20 - lib/event_processor/managed.ts | 4 - lib/event_processor/reactNativeEventsStore.ts | 84 ----- lib/event_processor/synchronizer.ts | 42 --- lib/index.browser.tests.js | 8 - lib/index.browser.ts | 2 - lib/index.node.tests.js | 23 +- lib/index.node.ts | 1 - lib/optimizely/index.tests.js | 1 - lib/optimizely/index.ts | 2 +- lib/optimizely_user_context/index.tests.js | 1 - .../send_beacon_dispatcher.ts | 2 +- lib/plugins/event_processor/index.ts | 25 -- lib/shared_types.ts | 7 +- lib/utils/event_tag_utils/index.ts | 2 +- tests/eventQueue.spec.ts | 290 --------------- tests/reactNativeEventsStore.spec.ts | 351 ------------------ 28 files changed, 46 insertions(+), 1120 deletions(-) delete mode 100644 lib/event_processor/eventQueue.ts delete mode 100644 lib/event_processor/index.react_native.ts delete mode 100644 lib/event_processor/index.ts delete mode 100644 lib/event_processor/managed.ts delete mode 100644 lib/event_processor/reactNativeEventsStore.ts delete mode 100644 lib/event_processor/synchronizer.ts delete mode 100644 lib/plugins/event_processor/index.ts delete mode 100644 tests/eventQueue.spec.ts delete mode 100644 tests/reactNativeEventsStore.spec.ts diff --git a/lib/core/event_builder/build_event_v1.ts b/lib/core/event_builder/build_event_v1.ts index 1ca9c63ea..0479dc79a 100644 --- a/lib/core/event_builder/build_event_v1.ts +++ b/lib/core/event_builder/build_event_v1.ts @@ -17,7 +17,7 @@ import { EventTags, ConversionEvent, ImpressionEvent, -} from '../../event_processor'; +} from '../../event_processor/events'; import { Event } from '../../shared_types'; diff --git a/lib/core/event_builder/index.ts b/lib/core/event_builder/index.ts index 707cb178c..20efd53c7 100644 --- a/lib/core/event_builder/index.ts +++ b/lib/core/event_builder/index.ts @@ -14,7 +14,7 @@ * limitations under the License. */ import { LoggerFacade } from '../../modules/logging'; -import { EventV1 as CommonEventParams } from '../../event_processor'; +import { EventV1 as CommonEventParams } from '../../event_processor/v1/buildEventV1'; import fns from '../../utils/fns'; import { CONTROL_ATTRIBUTES, RESERVED_EVENT_KEYWORDS } from '../../utils/enums'; diff --git a/lib/event_processor/batch_event_processor.spec.ts b/lib/event_processor/batch_event_processor.spec.ts index 68bfb2902..beeeb2f63 100644 --- a/lib/event_processor/batch_event_processor.spec.ts +++ b/lib/event_processor/batch_event_processor.spec.ts @@ -726,7 +726,7 @@ describe('QueueingEventProcessor', async () => { expect(mockDispatch).toHaveBeenCalledTimes(1); expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents(failedEvents)); - let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + const eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? 
-1 : 1).map(e => e.event); expect(eventsInStore).toEqual(expect.arrayContaining([ expect.objectContaining(eventA), expect.objectContaining(eventB), @@ -782,7 +782,7 @@ describe('QueueingEventProcessor', async () => { mockResult2.resolve({}); await exhaustMicrotasks(); - let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + const eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); expect(eventsInStore).toEqual(expect.arrayContaining([ expect.objectContaining(eventA), expect.objectContaining(eventB), @@ -876,7 +876,7 @@ describe('QueueingEventProcessor', async () => { expect(mockDispatch).toHaveBeenCalledTimes(1); expect(mockDispatch.mock.calls[0][0]).toEqual(formatEvents(failedEvents)); - let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + const eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); expect(eventsInStore).toEqual(expect.arrayContaining([ expect.objectContaining(eventA), expect.objectContaining(eventB), @@ -934,7 +934,7 @@ describe('QueueingEventProcessor', async () => { mockResult2.resolve({}); await exhaustMicrotasks(); - let eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); + const eventsInStore = [...cache.getAll().values()].sort((a, b) => a.id < b.id ? -1 : 1).map(e => e.event); expect(eventsInStore).toEqual(expect.arrayContaining([ expect.objectContaining(eventA), expect.objectContaining(eventB), diff --git a/lib/event_processor/default_dispatcher.browser.ts b/lib/event_processor/default_dispatcher.browser.ts index 12cdf5a3e..d4601700c 100644 --- a/lib/event_processor/default_dispatcher.browser.ts +++ b/lib/event_processor/default_dispatcher.browser.ts @@ -15,7 +15,7 @@ */ import { BrowserRequestHandler } from "../utils/http_request_handler/browser_request_handler"; -import { EventDispatcher } from '../event_processor'; +import { EventDispatcher } from '../event_processor/eventDispatcher'; import { DefaultEventDispatcher } from './default_dispatcher'; const eventDispatcher: EventDispatcher = new DefaultEventDispatcher(new BrowserRequestHandler()); diff --git a/lib/event_processor/default_dispatcher.node.ts b/lib/event_processor/default_dispatcher.node.ts index 8d2cd852c..75e00aff3 100644 --- a/lib/event_processor/default_dispatcher.node.ts +++ b/lib/event_processor/default_dispatcher.node.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { EventDispatcher } from '../event_processor'; +import { EventDispatcher } from '../event_processor/eventDispatcher'; import { NodeRequestHandler } from '../utils/http_request_handler/node_request_handler'; import { DefaultEventDispatcher } from './default_dispatcher'; diff --git a/lib/event_processor/default_dispatcher.ts b/lib/event_processor/default_dispatcher.ts index 2097cb82c..ce8dd5b59 100644 --- a/lib/event_processor/default_dispatcher.ts +++ b/lib/event_processor/default_dispatcher.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ import { RequestHandler } from '../utils/http_request_handler/http'; -import { EventDispatcher, EventDispatcherResponse, EventV1Request } from '../event_processor'; +import { EventDispatcher, EventDispatcherResponse, EventV1Request } from '../event_processor/eventDispatcher'; export class DefaultEventDispatcher implements EventDispatcher { private requestHandler: RequestHandler; diff --git a/lib/event_processor/eventProcessor.ts b/lib/event_processor/eventProcessor.ts index b8e43ce89..a9af38163 100644 --- a/lib/event_processor/eventProcessor.ts +++ b/lib/event_processor/eventProcessor.ts @@ -15,7 +15,6 @@ */ import { ConversionEvent, ImpressionEvent } from './events' import { EventV1Request } from './eventDispatcher' -import { EventQueue, DefaultEventQueue, SingleEventQueue, EventQueueSink } from './eventQueue' import { getLogger } from '../modules/logging' import { NOTIFICATION_TYPES } from '../utils/enums' import { NotificationSender } from '../core/notification_center' @@ -33,56 +32,3 @@ export interface EventProcessor extends Service { process(event: ProcessableEvent): Promise; onDispatch(handler: Consumer): Fn; } - -export function validateAndGetFlushInterval(flushInterval: number): number { - if (flushInterval <= 0) { - logger.warn( - `Invalid flushInterval ${flushInterval}, defaulting to ${DEFAULT_FLUSH_INTERVAL}`, - ) - flushInterval = DEFAULT_FLUSH_INTERVAL - } - return flushInterval -} - -export function validateAndGetBatchSize(batchSize: number): number { - batchSize = Math.floor(batchSize) - if (batchSize < 1) { - logger.warn( - `Invalid batchSize ${batchSize}, defaulting to ${DEFAULT_BATCH_SIZE}`, - ) - batchSize = DEFAULT_BATCH_SIZE - } - batchSize = Math.max(1, batchSize) - return batchSize -} - -export function getQueue( - batchSize: number, - flushInterval: number, - batchComparator: (eventA: ProcessableEvent, eventB: ProcessableEvent) => boolean, - sink: EventQueueSink, - closingSink?: EventQueueSink -): EventQueue { - let queue: EventQueue - if (batchSize > 1) { - queue = new DefaultEventQueue({ - flushInterval, - maxQueueSize: batchSize, - sink, - closingSink, - batchComparator, - }) - } else { - queue = new SingleEventQueue({ sink }) - } - return queue -} - -export function sendEventNotification(notificationSender: NotificationSender | undefined, event: EventV1Request): void { - if (notificationSender) { - notificationSender.sendNotifications( - NOTIFICATION_TYPES.LOG_EVENT, - event, - ) - } -} diff --git a/lib/event_processor/eventQueue.ts b/lib/event_processor/eventQueue.ts deleted file mode 100644 index 3b8a71966..000000000 --- a/lib/event_processor/eventQueue.ts +++ /dev/null @@ -1,162 +0,0 @@ -/** - * Copyright 2022-2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { getLogger } from '../modules/logging'; -// TODO change this to use Managed from js-sdk-models when available -import { Managed } from './managed'; - -const logger = getLogger('EventProcessor'); - -export type EventQueueSink = (buffer: K[]) => Promise; - -export interface EventQueue extends Managed { - enqueue(event: K): void; -} - -export interface EventQueueFactory { - createEventQueue(config: { sink: EventQueueSink, flushInterval: number, maxQueueSize: number }): EventQueue; -} - -class Timer { - private timeout: number; - private callback: () => void; - private timeoutId?: number; - - constructor({ timeout, callback }: { timeout: number; callback: () => void }) { - this.timeout = Math.max(timeout, 0); - this.callback = callback; - } - - start(): void { - this.timeoutId = setTimeout(this.callback, this.timeout) as any; - } - - refresh(): void { - this.stop(); - this.start(); - } - - stop(): void { - if (this.timeoutId) { - clearTimeout(this.timeoutId as any); - } - } -} - -export class SingleEventQueue implements EventQueue { - private sink: EventQueueSink; - - constructor({ sink }: { sink: EventQueueSink }) { - this.sink = sink; - } - - start(): Promise { - // no-op - return Promise.resolve(); - } - - stop(): Promise { - // no-op - return Promise.resolve(); - } - - enqueue(event: K): void { - this.sink([event]); - } -} - -export class DefaultEventQueue implements EventQueue { - // expose for testing - public timer: Timer; - private buffer: K[]; - private maxQueueSize: number; - private sink: EventQueueSink; - private closingSink?: EventQueueSink; - // batchComparator is called to determine whether two events can be included - // together in the same batch - private batchComparator: (eventA: K, eventB: K) => boolean; - private started: boolean; - - constructor({ - flushInterval, - maxQueueSize, - sink, - closingSink, - batchComparator, - }: { - flushInterval: number; - maxQueueSize: number; - sink: EventQueueSink; - closingSink?: EventQueueSink; - batchComparator: (eventA: K, eventB: K) => boolean; - }) { - this.buffer = []; - this.maxQueueSize = Math.max(maxQueueSize, 1); - this.sink = sink; - this.closingSink = closingSink; - this.batchComparator = batchComparator; - this.timer = new Timer({ - callback: this.flush.bind(this), - timeout: flushInterval, - }); - this.started = false; - } - - start(): Promise { - this.started = true; - // dont start the timer until the first event is enqueued - - return Promise.resolve(); - } - - stop(): Promise { - this.started = false; - const result = this.closingSink ? this.closingSink(this.buffer) : this.sink(this.buffer); - this.buffer = []; - this.timer.stop(); - return result; - } - - enqueue(event: K): void { - if (!this.started) { - logger.warn('Queue is stopped, not accepting event'); - return; - } - - // If new event cannot be included into the current batch, flush so it can - // be in its own new batch. 
- const bufferedEvent: K | undefined = this.buffer[0]; - if (bufferedEvent && !this.batchComparator(bufferedEvent, event)) { - this.flush(); - } - - // start the timer when the first event is put in - if (this.buffer.length === 0) { - this.timer.refresh(); - } - this.buffer.push(event); - - if (this.buffer.length >= this.maxQueueSize) { - this.flush(); - } - } - - flush(): void { - this.sink(this.buffer); - this.buffer = []; - this.timer.stop(); - } -} diff --git a/lib/event_processor/event_processor_factory.spec.ts b/lib/event_processor/event_processor_factory.spec.ts index ec79c6b45..618c04d79 100644 --- a/lib/event_processor/event_processor_factory.spec.ts +++ b/lib/event_processor/event_processor_factory.spec.ts @@ -8,8 +8,6 @@ import { LogLevel } from '../modules/logging'; vi.mock('./batch_event_processor'); vi.mock('../utils/repeater/repeater'); -type BatchEventProcessorConstructor = typeof BatchEventProcessor; - const getMockEventDispatcher = () => { return { dispatchEvent: vi.fn(), @@ -69,7 +67,7 @@ describe('getBatchEventProcessor', () => { retryOptions: {}, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); const usedRetryConfig = MockBatchEventProcessor.mock.calls[0][0].retryConfig; @@ -85,7 +83,7 @@ describe('getBatchEventProcessor', () => { }, }; - let processor1 = getBatchEventProcessor(options1); + const processor1 = getBatchEventProcessor(options1); expect(Object.is(processor1, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig?.maxRetries).toBe(10); @@ -94,7 +92,7 @@ describe('getBatchEventProcessor', () => { retryOptions: {}, }; - let processor2 = getBatchEventProcessor(options2); + const processor2 = getBatchEventProcessor(options2); expect(Object.is(processor2, MockBatchEventProcessor.mock.instances[1])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].retryConfig).not.toBe(undefined); expect(MockBatchEventProcessor.mock.calls[1][0].retryConfig?.maxRetries).toBe(undefined); @@ -106,7 +104,7 @@ describe('getBatchEventProcessor', () => { retryOptions: {}, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); const backoffProvider = MockBatchEventProcessor.mock.calls[0][0].retryConfig?.backoffProvider; @@ -122,7 +120,7 @@ describe('getBatchEventProcessor', () => { retryOptions: { minBackoff: 1000, maxBackoff: 2000 }, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); const backoffProvider = MockBatchEventProcessor.mock.calls[0][0].retryConfig?.backoffProvider; @@ -137,7 +135,7 @@ describe('getBatchEventProcessor', () => { eventDispatcher: getMockEventDispatcher(), }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); const usedRepeater = MockBatchEventProcessor.mock.calls[0][0].dispatchRepeater; @@ -158,7 +156,7 @@ describe('getBatchEventProcessor', () => { flushInterval: -1, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, 
MockBatchEventProcessor.mock.instances[0])).toBe(true); const usedRepeater = MockBatchEventProcessor.mock.calls[0][0].dispatchRepeater; @@ -179,7 +177,7 @@ describe('getBatchEventProcessor', () => { flushInterval: 12345, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); const usedRepeater = MockBatchEventProcessor.mock.calls[0][0].dispatchRepeater; @@ -196,7 +194,7 @@ describe('getBatchEventProcessor', () => { eventDispatcher: getMockEventDispatcher(), }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].batchSize).toBe(DEFAULT_EVENT_BATCH_SIZE); @@ -215,7 +213,7 @@ describe('getBatchEventProcessor', () => { batchSize: -1, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].batchSize).toBe(DEFAULT_EVENT_BATCH_SIZE); @@ -233,7 +231,7 @@ describe('getBatchEventProcessor', () => { eventDispatcher: getMockEventDispatcher(), }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].failedEventRepeater).toBe(undefined); @@ -245,7 +243,7 @@ describe('getBatchEventProcessor', () => { failedEventRetryInterval: 12345, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(Object.is(MockBatchEventProcessor.mock.calls[0][0].failedEventRepeater, MockIntervalRepeater.mock.instances[1])).toBe(true); @@ -258,7 +256,7 @@ describe('getBatchEventProcessor', () => { eventDispatcher, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); @@ -269,7 +267,7 @@ describe('getBatchEventProcessor', () => { eventDispatcher: getMockEventDispatcher(), }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(undefined); @@ -282,7 +280,7 @@ describe('getBatchEventProcessor', () => { closingEventDispatcher, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(closingEventDispatcher); @@ -295,7 +293,7 @@ describe('getBatchEventProcessor', () => { eventStore, }; - let processor = getBatchEventProcessor(options); + const processor = getBatchEventProcessor(options); expect(Object.is(processor, MockBatchEventProcessor.mock.instances[0])).toBe(true); expect(MockBatchEventProcessor.mock.calls[0][0].eventStore).toBe(eventStore); diff --git 
a/lib/event_processor/forwarding_event_processor.ts b/lib/event_processor/forwarding_event_processor.ts index 83acb2b33..1fc06ebc9 100644 --- a/lib/event_processor/forwarding_event_processor.ts +++ b/lib/event_processor/forwarding_event_processor.ts @@ -14,15 +14,11 @@ * limitations under the License. */ -import { - EventProcessor, - EventV1Request, - ProcessableEvent, -} from '.'; -import { NotificationSender } from '../core/notification_center'; + +import { EventV1Request } from './eventDispatcher'; +import { EventProcessor, ProcessableEvent } from './eventProcessor'; import { EventDispatcher } from '../shared_types'; -import { NOTIFICATION_TYPES } from '../utils/enums'; import { formatEvents } from '../core/event_builder/build_event_v1'; import { BaseService, ServiceState } from '../service'; import { EventEmitter } from '../utils/event_emitter/event_emitter'; diff --git a/lib/event_processor/index.react_native.ts b/lib/event_processor/index.react_native.ts deleted file mode 100644 index 27a6f3a3a..000000000 --- a/lib/event_processor/index.react_native.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export * from './events' -export * from './eventProcessor' -export * from './eventDispatcher' -export * from './managed' -export * from './pendingEventsDispatcher' -export * from './v1/buildEventV1' -export * from './v1/v1EventProcessor.react_native' diff --git a/lib/event_processor/index.ts b/lib/event_processor/index.ts deleted file mode 100644 index 8a97a8b15..000000000 --- a/lib/event_processor/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -export * from './events' -export * from './eventProcessor' -export * from './eventDispatcher' -export * from './v1/buildEventV1' diff --git a/lib/event_processor/managed.ts b/lib/event_processor/managed.ts deleted file mode 100644 index 03f30d179..000000000 --- a/lib/event_processor/managed.ts +++ /dev/null @@ -1,4 +0,0 @@ -export interface Managed { - start(): Promise; - stop(): Promise; -} diff --git a/lib/event_processor/reactNativeEventsStore.ts b/lib/event_processor/reactNativeEventsStore.ts deleted file mode 100644 index cf7dce9c8..000000000 --- a/lib/event_processor/reactNativeEventsStore.ts +++ /dev/null @@ -1,84 +0,0 @@ - -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { getLogger } from '../modules/logging' -import { objectValues } from '../utils/fns' - -import { Synchronizer } from './synchronizer' -import ReactNativeAsyncStorageCache from '../plugins/key_value_cache/reactNativeAsyncStorageCache'; -import PersistentKeyValueCache from '../plugins/key_value_cache/persistentKeyValueCache'; - -const logger = getLogger('ReactNativeEventsStore') - -/** - * A key value store which stores objects of type T with string keys - */ -export class ReactNativeEventsStore { - private maxSize: number - private storeKey: string - private synchronizer: Synchronizer = new Synchronizer() - private cache: PersistentKeyValueCache; - - constructor(maxSize: number, storeKey: string, cache?: PersistentKeyValueCache) { - this.maxSize = maxSize - this.storeKey = storeKey - this.cache = cache || new ReactNativeAsyncStorageCache() - } - - public async set(key: string, event: T): Promise { - await this.synchronizer.getLock() - const eventsMap: {[key: string]: T} = await this.getEventsMap(); - if (Object.keys(eventsMap).length < this.maxSize) { - eventsMap[key] = event - await this.cache.set(this.storeKey, JSON.stringify(eventsMap)) - } else { - logger.warn('React native events store is full. 
Store key: %s', this.storeKey) - } - this.synchronizer.releaseLock() - return key - } - - public async get(key: string): Promise { - await this.synchronizer.getLock() - const eventsMap: {[key: string]: T} = await this.getEventsMap() - this.synchronizer.releaseLock() - return eventsMap[key] - } - - public async getEventsMap(): Promise<{[key: string]: T}> { - const cachedValue = await this.cache.get(this.storeKey) || '{}'; - return JSON.parse(cachedValue) - } - - public async getEventsList(): Promise { - await this.synchronizer.getLock() - const eventsMap: {[key: string]: T} = await this.getEventsMap() - this.synchronizer.releaseLock() - return objectValues(eventsMap) - } - - public async remove(key: string): Promise { - await this.synchronizer.getLock() - const eventsMap: {[key: string]: T} = await this.getEventsMap() - eventsMap[key] && delete eventsMap[key] - await this.cache.set(this.storeKey, JSON.stringify(eventsMap)) - this.synchronizer.releaseLock() - } - - public async clear(): Promise { - await this.cache.remove(this.storeKey) - } -} diff --git a/lib/event_processor/synchronizer.ts b/lib/event_processor/synchronizer.ts deleted file mode 100644 index f0659d7af..000000000 --- a/lib/event_processor/synchronizer.ts +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * This synchronizer makes sure the operations are atomic using promises. 
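The Synchronizer removed below implements a simple promise-chain lock: each caller awaits the promise queued by the previous caller, which is what makes the store's read-modify-write sequences atomic. A rough usage sketch with placeholder read/write callbacks; the helper and its signatures are illustrative assumptions, not part of the SDK.

// Sketch only: `readMap`/`writeMap` stand in for the async storage calls above.
interface PromiseLock {
  getLock(): Promise<void>;
  releaseLock(): void;
}

async function setAtomically<T>(
  lock: PromiseLock,
  readMap: () => Promise<Record<string, T>>,
  writeMap: (map: Record<string, T>) => Promise<void>,
  key: string,
  value: T,
): Promise<void> {
  await lock.getLock(); // wait until earlier callers have released the lock
  try {
    const map = await readMap();  // read current state
    map[key] = value;             // modify
    await writeMap(map);          // write back before the next caller reads
  } finally {
    lock.releaseLock();           // unblock the next queued caller
  }
}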
- */ -export class Synchronizer { - private lockPromises: Promise[] = [] - private resolvers: any[] = [] - - // Adds a promise to the existing list and returns the promise so that the code block can wait for its turn - public async getLock(): Promise { - this.lockPromises.push(new Promise(resolve => this.resolvers.push(resolve))) - if (this.lockPromises.length === 1) { - return - } - await this.lockPromises[this.lockPromises.length - 2] - } - - // Resolves first promise in the array so that the code block waiting on the first promise can continue execution - public releaseLock(): void { - if (this.lockPromises.length > 0) { - this.lockPromises.shift() - const resolver = this.resolvers.shift() - resolver() - return - } - } -} diff --git a/lib/index.browser.tests.js b/lib/index.browser.tests.js index 3d3952189..3d38655ed 100644 --- a/lib/index.browser.tests.js +++ b/lib/index.browser.tests.js @@ -18,13 +18,11 @@ import logging, { getLogger } from './modules/logging/logger'; import { assert } from 'chai'; import sinon from 'sinon'; -import { default as eventProcessor } from './plugins/event_processor'; import Optimizely from './optimizely'; import testData from './tests/test_data'; import packageJSON from '../package.json'; import optimizelyFactory from './index.browser'; import configValidator from './utils/config_validator'; -import eventProcessorConfigValidator from './utils/event_processor_config_validator'; import OptimizelyUserContext from './optimizely_user_context'; import { LOG_MESSAGES, ODP_EVENT_ACTION } from './utils/enums'; @@ -36,7 +34,6 @@ import { OdpEvent } from './core/odp/odp_event'; import { getMockProjectConfigManager } from './tests/mock/mock_project_config_manager'; import { createProjectConfig } from './project_config/project_config'; -var LocalStoragePendingEventsDispatcher = eventProcessor.LocalStoragePendingEventsDispatcher; class MockLocalStorage { store = {}; @@ -110,12 +107,9 @@ describe('javascript-sdk (Browser)', function() { sinon.stub(configValidator, 'validate'); global.XMLHttpRequest = sinon.useFakeXMLHttpRequest(); - - sinon.stub(LocalStoragePendingEventsDispatcher.prototype, 'sendPendingEvents'); }); afterEach(function() { - LocalStoragePendingEventsDispatcher.prototype.sendPendingEvents.restore(); optimizelyFactory.__internalResetRetryState(); console.error.restore(); configValidator.validate.restore(); @@ -143,8 +137,6 @@ describe('javascript-sdk (Browser)', function() { eventDispatcher: fakeEventDispatcher, logger: silentLogger, }); - - sinon.assert.notCalled(LocalStoragePendingEventsDispatcher.prototype.sendPendingEvents); }); }); diff --git a/lib/index.browser.ts b/lib/index.browser.ts index 68c276e3b..48fbd9ef7 100644 --- a/lib/index.browser.ts +++ b/lib/index.browser.ts @@ -16,7 +16,6 @@ import logHelper from './modules/logging/logger'; import { getLogger, setErrorHandler, getErrorHandler, LogLevel } from './modules/logging'; -import { LocalStoragePendingEventsDispatcher } from './event_processor'; import configValidator from './utils/config_validator'; import defaultErrorHandler from './plugins/error_handler'; import defaultEventDispatcher from './event_processor/default_dispatcher.browser'; @@ -24,7 +23,6 @@ import sendBeaconEventDispatcher from './plugins/event_dispatcher/send_beacon_di import * as enums from './utils/enums'; import * as loggerPlugin from './plugins/logger'; import { createNotificationCenter } from './core/notification_center'; -import { default as eventProcessor } from './plugins/event_processor'; import { 
OptimizelyDecideOption, Client, Config, OptimizelyOptions } from './shared_types'; import { BrowserOdpManager } from './plugins/odp_manager/index.browser'; import Optimizely from './optimizely'; diff --git a/lib/index.node.tests.js b/lib/index.node.tests.js index 8ff0edeff..aa0f8743e 100644 --- a/lib/index.node.tests.js +++ b/lib/index.node.tests.js @@ -15,7 +15,6 @@ */ import { assert } from 'chai'; import sinon from 'sinon'; -import * as eventProcessor from './plugins/event_processor'; import * as enums from './utils/enums'; import Optimizely from './optimizely'; @@ -54,17 +53,17 @@ describe('optimizelyFactory', function() { console.error.restore(); }); - it('should not throw if the provided config is not valid and log an error if logger is passed in', function() { - configValidator.validate.throws(new Error('Invalid config or something')); - var localLogger = loggerPlugin.createLogger({ logLevel: enums.LOG_LEVEL.INFO }); - assert.doesNotThrow(function() { - var optlyInstance = optimizelyFactory.createInstance({ - projectConfigManager: getMockProjectConfigManager(), - logger: localLogger, - }); - }); - sinon.assert.calledWith(localLogger.log, enums.LOG_LEVEL.ERROR); - }); + // it('should not throw if the provided config is not valid and log an error if logger is passed in', function() { + // configValidator.validate.throws(new Error('Invalid config or something')); + // var localLogger = loggerPlugin.createLogger({ logLevel: enums.LOG_LEVEL.INFO }); + // assert.doesNotThrow(function() { + // var optlyInstance = optimizelyFactory.createInstance({ + // projectConfigManager: getMockProjectConfigManager(), + // logger: localLogger, + // }); + // }); + // sinon.assert.calledWith(localLogger.log, enums.LOG_LEVEL.ERROR); + // }); it('should not throw if the provided config is not valid and log an error if no logger is provided', function() { configValidator.validate.throws(new Error('Invalid config or something')); diff --git a/lib/index.node.ts b/lib/index.node.ts index 554f8b9c1..bc68461cc 100644 --- a/lib/index.node.ts +++ b/lib/index.node.ts @@ -22,7 +22,6 @@ import configValidator from './utils/config_validator'; import defaultErrorHandler from './plugins/error_handler'; import defaultEventDispatcher from './event_processor/default_dispatcher.node'; import { createNotificationCenter } from './core/notification_center'; -import { createEventProcessor } from './plugins/event_processor'; import { OptimizelyDecideOption, Client, Config } from './shared_types'; import { NodeOdpManager } from './plugins/odp_manager/index.node'; import * as commonExports from './common_exports'; diff --git a/lib/optimizely/index.tests.js b/lib/optimizely/index.tests.js index 7c233c74a..f0dd8e00e 100644 --- a/lib/optimizely/index.tests.js +++ b/lib/optimizely/index.tests.js @@ -34,7 +34,6 @@ import * as jsonSchemaValidator from '../utils/json_schema_validator'; import * as projectConfig from '../project_config/project_config'; import testData from '../tests/test_data'; import { getForwardingEventProcessor } from '../event_processor/forwarding_event_processor'; -import { createEventProcessor } from '../plugins/event_processor'; import { createNotificationCenter } from '../core/notification_center'; import { createProjectConfig } from '../project_config/project_config'; import { getMockProjectConfigManager } from '../tests/mock/mock_project_config_manager'; diff --git a/lib/optimizely/index.ts b/lib/optimizely/index.ts index 023c68d40..c59dfb72e 100644 --- a/lib/optimizely/index.ts +++ b/lib/optimizely/index.ts @@ 
-17,7 +17,7 @@ import { LoggerFacade, ErrorHandler } from '../modules/logging'; import { sprintf, objectValues } from '../utils/fns'; import { NotificationCenter } from '../core/notification_center'; -import { EventProcessor } from '../event_processor'; +import { EventProcessor } from '../event_processor/eventProcessor'; import { IOdpManager } from '../core/odp/odp_manager'; import { OdpConfig } from '../core/odp/odp_config'; diff --git a/lib/optimizely_user_context/index.tests.js b/lib/optimizely_user_context/index.tests.js index 7e1d4ed1d..0d7a66f2a 100644 --- a/lib/optimizely_user_context/index.tests.js +++ b/lib/optimizely_user_context/index.tests.js @@ -23,7 +23,6 @@ import { NOTIFICATION_TYPES } from '../utils/enums'; import OptimizelyUserContext from './'; import { createLogger } from '../plugins/logger'; -import { createEventProcessor } from '../plugins/event_processor'; import { createNotificationCenter } from '../core/notification_center'; import Optimizely from '../optimizely'; import errorHandler from '../plugins/error_handler'; diff --git a/lib/plugins/event_dispatcher/send_beacon_dispatcher.ts b/lib/plugins/event_dispatcher/send_beacon_dispatcher.ts index 3dabf0401..1e8c04577 100644 --- a/lib/plugins/event_dispatcher/send_beacon_dispatcher.ts +++ b/lib/plugins/event_dispatcher/send_beacon_dispatcher.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { EventDispatcher, EventDispatcherResponse } from '../../event_processor'; +import { EventDispatcher, EventDispatcherResponse } from '../../event_processor/eventDispatcher'; export type Event = { url: string; diff --git a/lib/plugins/event_processor/index.ts b/lib/plugins/event_processor/index.ts deleted file mode 100644 index 3fc0c3cad..000000000 --- a/lib/plugins/event_processor/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Copyright 2020, 2022-2023, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { LogTierV1EventProcessor, LocalStoragePendingEventsDispatcher } from '../../event_processor'; - -export function createEventProcessor( - ...args: ConstructorParameters -): LogTierV1EventProcessor { - return new LogTierV1EventProcessor(...args); -} - -export default { createEventProcessor, LocalStoragePendingEventsDispatcher }; diff --git a/lib/shared_types.ts b/lib/shared_types.ts index 8902820eb..f27657378 100644 --- a/lib/shared_types.ts +++ b/lib/shared_types.ts @@ -20,7 +20,6 @@ */ import { ErrorHandler, LogHandler, LogLevel, LoggerFacade } from './modules/logging'; -import { EventProcessor, EventDispatcher } from './event_processor'; import { NotificationCenter as NotificationCenterImpl } from './core/notification_center'; import { NOTIFICATION_TYPES } from './utils/enums'; @@ -39,9 +38,11 @@ import { IUserAgentParser } from './core/odp/user_agent_parser'; import PersistentCache from './plugins/key_value_cache/persistentKeyValueCache'; import { ProjectConfig } from './project_config/project_config'; import { ProjectConfigManager } from './project_config/project_config_manager'; +import { EventDispatcher } from './event_processor/eventDispatcher'; +import { EventProcessor } from './event_processor/eventProcessor'; -export { EventDispatcher, EventProcessor } from './event_processor'; - +export { EventDispatcher } from './event_processor/eventDispatcher'; +export { EventProcessor } from './event_processor/eventProcessor'; export interface BucketerParams { experimentId: string; experimentKey: string; diff --git a/lib/utils/event_tag_utils/index.ts b/lib/utils/event_tag_utils/index.ts index aa256ef1b..1be540540 100644 --- a/lib/utils/event_tag_utils/index.ts +++ b/lib/utils/event_tag_utils/index.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { EventTags } from '../../event_processor'; +import { EventTags } from '../../event_processor/events'; import { LoggerFacade } from '../../modules/logging'; import { diff --git a/tests/eventQueue.spec.ts b/tests/eventQueue.spec.ts deleted file mode 100644 index f794248dd..000000000 --- a/tests/eventQueue.spec.ts +++ /dev/null @@ -1,290 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { describe, beforeEach, afterEach, it, expect, vi } from 'vitest'; - -import { DefaultEventQueue, SingleEventQueue } from '../lib/event_processor/eventQueue' - -describe('eventQueue', () => { - beforeEach(() => { - vi.useFakeTimers() - }) - - afterEach(() => { - vi.useRealTimers() - vi.resetAllMocks() - }) - - describe('SingleEventQueue', () => { - it('should immediately invoke the sink function when items are enqueued', () => { - const sinkFn = vi.fn() - const queue = new SingleEventQueue({ - sink: sinkFn, - }) - - queue.start() - - queue.enqueue(1) - - expect(sinkFn).toBeCalledTimes(1) - expect(sinkFn).toHaveBeenLastCalledWith([1]) - - queue.enqueue(2) - expect(sinkFn).toBeCalledTimes(2) - expect(sinkFn).toHaveBeenLastCalledWith([2]) - - queue.stop() - }) - }) - - describe('DefaultEventQueue', () => { - it('should treat maxQueueSize = -1 as 1', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: -1, - sink: sinkFn, - batchComparator: () => true - }) - - queue.start() - - queue.enqueue(1) - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1]) - queue.enqueue(2) - expect(sinkFn).toHaveBeenCalledTimes(2) - expect(sinkFn).toHaveBeenCalledWith([2]) - - queue.stop() - }) - - it('should treat maxQueueSize = 0 as 1', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 0, - sink: sinkFn, - batchComparator: () => true - }) - - queue.start() - - queue.enqueue(1) - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1]) - queue.enqueue(2) - expect(sinkFn).toHaveBeenCalledTimes(2) - expect(sinkFn).toHaveBeenCalledWith([2]) - - queue.stop() - }) - - it('should invoke the sink function when maxQueueSize is reached', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 3, - sink: sinkFn, - batchComparator: () => true - }) - - queue.start() - - queue.enqueue(1) - queue.enqueue(2) - expect(sinkFn).not.toHaveBeenCalled() - - queue.enqueue(3) - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1, 2, 3]) - - queue.enqueue(4) - queue.enqueue(5) - queue.enqueue(6) - expect(sinkFn).toHaveBeenCalledTimes(2) - expect(sinkFn).toHaveBeenCalledWith([4, 5, 6]) - - queue.stop() - }) - - it('should invoke the sink function when the interval has expired', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 100, - sink: sinkFn, - batchComparator: () => true - }) - - queue.start() - - queue.enqueue(1) - queue.enqueue(2) - expect(sinkFn).not.toHaveBeenCalled() - - vi.advanceTimersByTime(100) - - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1, 2]) - - queue.enqueue(3) - vi.advanceTimersByTime(100) - - expect(sinkFn).toHaveBeenCalledTimes(2) - expect(sinkFn).toHaveBeenCalledWith([3]) - - queue.stop() - }) - - it('should invoke the sink function when an item incompatable with the current batch (according to batchComparator) is received', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 100, - sink: sinkFn, - // This batchComparator returns true when the argument strings start with the same letter - batchComparator: (s1, s2) => s1[0] === s2[0] - }) - - queue.start() - - queue.enqueue('a1') - queue.enqueue('a2') - // After enqueuing these strings, both starting with 'a', the sinkFn should not yet be 
called. Thus far all the items enqueued are - // compatible according to the batchComparator. - expect(sinkFn).not.toHaveBeenCalled() - - // Enqueuing a string starting with 'b' should cause the sinkFn to be called - queue.enqueue('b1') - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith(['a1', 'a2']) - }) - - it('stop() should flush the existing queue and call timer.stop()', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 100, - sink: sinkFn, - batchComparator: () => true - }) - - vi.spyOn(queue.timer, 'stop') - - queue.start() - queue.enqueue(1) - - // stop + start is called when the first item is enqueued - expect(queue.timer.stop).toHaveBeenCalledTimes(1) - - queue.stop() - - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1]) - expect(queue.timer.stop).toHaveBeenCalledTimes(2) - }) - - it('flush() should clear the current batch', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 100, - sink: sinkFn, - batchComparator: () => true - }) - - vi.spyOn(queue.timer, 'refresh') - - queue.start() - queue.enqueue(1) - queue.flush() - - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1]) - expect(queue.timer.refresh).toBeCalledTimes(1) - - queue.stop() - }) - - it('stop() should return a promise', () => { - const promise = Promise.resolve() - const sinkFn = vi.fn().mockReturnValue(promise) - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 100, - sink: sinkFn, - batchComparator: () => true - }) - - expect(queue.stop()).toBe(promise) - }) - - it('should start the timer when the first event is put into the queue', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 100, - maxQueueSize: 100, - sink: sinkFn, - batchComparator: () => true - }) - - queue.start() - vi.advanceTimersByTime(99) - queue.enqueue(1) - - vi.advanceTimersByTime(2) - expect(sinkFn).toHaveBeenCalledTimes(0) - vi.advanceTimersByTime(98) - - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1]) - - vi.advanceTimersByTime(500) - // ensure sink function wasnt called again since no events have - // been added - expect(sinkFn).toHaveBeenCalledTimes(1) - - queue.enqueue(2) - - vi.advanceTimersByTime(100) - expect(sinkFn).toHaveBeenCalledTimes(2) - expect(sinkFn).toHaveBeenLastCalledWith([2]) - - queue.stop() - - }) - - it('should not enqueue additional events after stop() is called', () => { - const sinkFn = vi.fn() - const queue = new DefaultEventQueue({ - flushInterval: 30000, - maxQueueSize: 3, - sink: sinkFn, - batchComparator: () => true - }) - queue.start() - queue.enqueue(1) - queue.stop() - expect(sinkFn).toHaveBeenCalledTimes(1) - expect(sinkFn).toHaveBeenCalledWith([1]) - sinkFn.mockClear() - queue.enqueue(2) - queue.enqueue(3) - queue.enqueue(4) - expect(sinkFn).toBeCalledTimes(0) - }) - }) -}) diff --git a/tests/reactNativeEventsStore.spec.ts b/tests/reactNativeEventsStore.spec.ts deleted file mode 100644 index d7155a629..000000000 --- a/tests/reactNativeEventsStore.spec.ts +++ /dev/null @@ -1,351 +0,0 @@ -/** - * Copyright 2022, 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { describe, beforeEach, it, vi, expect } from 'vitest'; - - -const { mockMap, mockGet, mockSet, mockRemove, mockContains } = vi.hoisted(() => { - const mockMap = new Map(); - - const mockGet = vi.fn().mockImplementation((key) => { - return Promise.resolve(mockMap.get(key)); - }); - - const mockSet = vi.fn().mockImplementation((key, value) => { - mockMap.set(key, value); - return Promise.resolve(); - }); - - const mockRemove = vi.fn().mockImplementation((key) => { - if (mockMap.has(key)) { - mockMap.delete(key); - return Promise.resolve(true); - } - return Promise.resolve(false); - }); - - const mockContains = vi.fn().mockImplementation((key) => { - return Promise.resolve(mockMap.has(key)); - }); - - return { mockMap, mockGet, mockSet, mockRemove, mockContains }; -}); - -vi.mock('../lib/plugins/key_value_cache/reactNativeAsyncStorageCache', () => { - const MockReactNativeAsyncStorageCache = vi.fn(); - MockReactNativeAsyncStorageCache.prototype.get = mockGet; - MockReactNativeAsyncStorageCache.prototype.set = mockSet; - MockReactNativeAsyncStorageCache.prototype.contains = mockContains; - MockReactNativeAsyncStorageCache.prototype.remove = mockRemove; - return { 'default': MockReactNativeAsyncStorageCache }; -}); - -import ReactNativeAsyncStorageCache from '../lib/plugins/key_value_cache/reactNativeAsyncStorageCache'; - -import { ReactNativeEventsStore } from '../lib/event_processor/reactNativeEventsStore' - -const STORE_KEY = 'test-store' - -describe('ReactNativeEventsStore', () => { - const MockedReactNativeAsyncStorageCache = vi.mocked(ReactNativeAsyncStorageCache); - let store: ReactNativeEventsStore - - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - store = new ReactNativeEventsStore(5, STORE_KEY) - }) - - describe('constructor', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('uses the user provided cache', () => { - const cache = { - get: vi.fn(), - contains: vi.fn(), - set: vi.fn(), - remove: vi.fn(), - }; - - const store = new ReactNativeEventsStore(5, STORE_KEY, cache); - store.clear(); - expect(cache.remove).toHaveBeenCalled(); - }); - - it('uses ReactNativeAsyncStorageCache if no cache is provided', () => { - const store = new ReactNativeEventsStore(5, STORE_KEY); - store.clear(); - expect(MockedReactNativeAsyncStorageCache).toHaveBeenCalledTimes(1); - expect(mockRemove).toHaveBeenCalled(); - }); - }); - - describe('set', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('should store all the events correctly in the store', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await 
store.set('event4', {'name': 'event4'}) - const storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - }) - - it('should store all the events when set asynchronously', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - const storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - }) - }) - - describe('get', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('should correctly get items', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - expect(await store.get('event1')).toEqual({'name': 'event1'}) - expect(await store.get('event2')).toEqual({'name': 'event2'}) - expect(await store.get('event3')).toEqual({'name': 'event3'}) - expect(await store.get('event4')).toEqual({'name': 'event4'}) - }) - }) - - describe('getEventsMap', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('should get the whole map correctly', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - const mapResult = await store.getEventsMap() - expect(mapResult).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - }) - }) - - describe('getEventsList', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('should get all the events as a list', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - const listResult = await store.getEventsList() - expect(listResult).toEqual([ - { "name": "event1" }, - { "name": "event2" }, - { "name": "event3" }, - { "name": "event4" }, - ]) - }) - }) - - describe('remove', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('should correctly remove items from the store', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - let storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": 
"event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - - await store.remove('event1') - storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - - await store.remove('event2') - storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - }) - - it('should correctly remove items from the store when removed asynchronously', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - let storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - - const promises = [] - await store.remove('event1') - await store.remove('event2') - await store.remove('event3') - storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ "event4": { "name": "event4" }}) - }) - }) - - describe('clear', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('should clear the whole store',async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - let storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - await store.clear() - storedPendingEvents = storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY) || '{}'); - expect(storedPendingEvents).toEqual({}) - }) - }) - - describe('maxSize', () => { - beforeEach(() => { - MockedReactNativeAsyncStorageCache.mockClear(); - mockGet.mockClear(); - mockContains.mockClear(); - mockSet.mockClear(); - mockRemove.mockClear(); - mockMap.clear(); - }); - - it('should not add anymore events if the store if full', async () => { - await store.set('event1', {'name': 'event1'}) - await store.set('event2', {'name': 'event2'}) - await store.set('event3', {'name': 'event3'}) - await store.set('event4', {'name': 'event4'}) - - let storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - }) - await store.set('event5', {'name': 'event5'}) - - storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": "event3" }, - "event4": { "name": "event4" }, - "event5": { "name": "event5" }, - }) - - await store.set('event6', {'name': 'event6'}) - storedPendingEvents = JSON.parse(mockMap.get(STORE_KEY)); - expect(storedPendingEvents).toEqual({ - "event1": { "name": "event1" }, - "event2": { "name": "event2" }, - "event3": { "name": 
"event3" }, - "event4": { "name": "event4" }, - "event5": { "name": "event5" }, - }) - }) - }) -}) From 8727c9b214f3e7f17e9bbf3de181064fc0833182 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Wed, 20 Nov 2024 02:10:16 +0600 Subject: [PATCH 35/45] factory test --- .../event_processor_factory.browser.spec.ts | 32 ++++++++++++++++++- .../event_processor_factory.browser.ts | 20 ++++++------ .../event_processor_factory.ts | 7 ++-- vitest.config.mts | 2 +- 4 files changed, 47 insertions(+), 14 deletions(-) diff --git a/lib/event_processor/event_processor_factory.browser.spec.ts b/lib/event_processor/event_processor_factory.browser.spec.ts index b63471a29..7ff7e28a8 100644 --- a/lib/event_processor/event_processor_factory.browser.spec.ts +++ b/lib/event_processor/event_processor_factory.browser.spec.ts @@ -24,9 +24,15 @@ vi.mock('./forwarding_event_processor', () => { return { getForwardingEventProcessor }; }); -import { createForwardingEventProcessor } from './event_processor_factory.browser'; +vi.mock('./event_processor_factory', () => { + const getBatchEventProcessor = vi.fn().mockReturnValue({}); + return { getBatchEventProcessor }; +}); + +import { createForwardingEventProcessor, createBatchEventProcessor } from './event_processor_factory.browser'; import { getForwardingEventProcessor } from './forwarding_event_processor'; import browserDefaultEventDispatcher from './default_dispatcher.browser'; +import { getBatchEventProcessor } from './event_processor_factory'; describe('createForwardingEventProcessor', () => { const mockGetForwardingEventProcessor = vi.mocked(getForwardingEventProcessor); @@ -53,3 +59,27 @@ describe('createForwardingEventProcessor', () => { expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, browserDefaultEventDispatcher); }); }); + + +describe('createBatchEventProcessor', () => { + const mockGetBatchEventProcessor = vi.mocked(getBatchEventProcessor); + + beforeEach(() => { + mockGetBatchEventProcessor.mockClear(); + }); + + it('uses localStorageCache and SyncPrefixCache to create eventStore', () => { + const options = { + eventDispatcher: { + dispatchEvent: vi.fn(), + }, + flushInterval: 1000, + batchSize: 10, + }; + + const processor = createBatchEventProcessor(options); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + const eventStore = mockGetBatchEventProcessor.mock.calls[0][0].eventStore; + expect + }); +}); diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index c47b00b06..6f45c314c 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -17,12 +17,12 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import { EventDispatcher } from './eventDispatcher'; import { EventProcessor } from './eventProcessor'; -import { BatchEventProcessor, BatchEventProcessorConfig, EventWithId } from './batch_event_processor'; -import { getBatchEventProcessor, QueueingEventProcessorOptions } from './event_processor_factory'; +import { EventWithId } from './batch_event_processor'; +import { getBatchEventProcessor, BatchEventProcessorOptions } from './event_processor_factory'; import defaultEventDispatcher from './default_dispatcher.browser'; import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; -import { SyncPrefixCache } from 
'../utils/cache/cache'; +import { SyncPrefixCache, AsyncPrefixCache, Cache } from '../utils/cache/cache'; export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; export const EVENT_STORE_PREFIX = 'fs_optly_pending_events'; @@ -33,14 +33,16 @@ export const createForwardingEventProcessor = ( return getForwardingEventProcessor(eventDispatcher); }; -export const createQueueingEventProcessor = ( - options: QueueingEventProcessorOptions +const identity = (v: T): T => v; + +export const createBatchEventProcessor = ( + options: BatchEventProcessorOptions ): EventProcessor => { - const localStorageCache = new LocalStorageCache(); - const eventStore = new SyncPrefixCache( + const localStorageCache = new LocalStorageCache(); + const eventStore = new SyncPrefixCache( localStorageCache, EVENT_STORE_PREFIX, - JSON.parse, - JSON.stringify + identity, + identity, ); return getBatchEventProcessor({ diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index ca097c66d..1f1443955 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -12,14 +12,15 @@ export const DEFAULT_EVENT_MAX_QUEUE_SIZE = 10000; export const DEFAULT_MIN_BACKOFF = 1000; export const DEFAULT_MAX_BACKOFF = 32000; -export type QueueingEventProcessorOptions = { +export type BatchEventProcessorOptions = { eventDispatcher?: EventDispatcher; closingEventDispatcher?: EventDispatcher; flushInterval?: number; batchSize?: number; + eventStore?: Cache; }; -export type QueueingEventProcessorFactoryOptions = Omit & { +export type BatchEventProcessorFactoryOptions = Omit & { eventDispatcher: EventDispatcher; failedEventRetryInterval?: number; eventStore?: Cache; @@ -31,7 +32,7 @@ export type QueueingEventProcessorFactoryOptions = Omit { const { eventDispatcher, closingEventDispatcher, retryOptions, eventStore } = options; diff --git a/vitest.config.mts b/vitest.config.mts index 7aa0cc1a8..3b07cdc13 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/event_processor_factory.spec.ts'], + include: ['**/event_processor_factory.browser.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 851da33f96c6c0bbc830f9c4f523a266604fd273 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Wed, 20 Nov 2024 22:26:24 +0600 Subject: [PATCH 36/45] test --- .../event_processor_factory.browser.spec.ts | 112 +++++++++++++++++- .../event_processor_factory.browser.ts | 6 +- 2 files changed, 110 insertions(+), 8 deletions(-) diff --git a/lib/event_processor/event_processor_factory.browser.spec.ts b/lib/event_processor/event_processor_factory.browser.spec.ts index 7ff7e28a8..29723ed8a 100644 --- a/lib/event_processor/event_processor_factory.browser.spec.ts +++ b/lib/event_processor/event_processor_factory.browser.spec.ts @@ -20,16 +20,33 @@ vi.mock('./default_dispatcher.browser', () => { }); vi.mock('./forwarding_event_processor', () => { - const getForwardingEventProcessor = vi.fn().mockReturnValue({}); + const getForwardingEventProcessor = vi.fn().mockImplementation(() => { + return {}; + }); return { getForwardingEventProcessor }; }); vi.mock('./event_processor_factory', () => { - const getBatchEventProcessor = vi.fn().mockReturnValue({}); + const getBatchEventProcessor = vi.fn().mockImplementation(() => { + return {}; + }); return { getBatchEventProcessor }; }); -import { createForwardingEventProcessor, createBatchEventProcessor } 
from './event_processor_factory.browser'; +vi.mock('../utils/cache/local_storage_cache.browser', () => { + return { LocalStorageCache: vi.fn() }; +}); + +vi.mock('../utils/cache/cache', () => { + return { SyncPrefixCache: vi.fn() }; +}); + + +import defaultEventDispatcher from './default_dispatcher.browser'; +import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; +import { SyncPrefixCache } from '../utils/cache/cache'; +import { createForwardingEventProcessor, createBatchEventProcessor, EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory.browser'; +import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; import { getForwardingEventProcessor } from './forwarding_event_processor'; import browserDefaultEventDispatcher from './default_dispatcher.browser'; import { getBatchEventProcessor } from './event_processor_factory'; @@ -60,12 +77,15 @@ describe('createForwardingEventProcessor', () => { }); }); - describe('createBatchEventProcessor', () => { const mockGetBatchEventProcessor = vi.mocked(getBatchEventProcessor); + const MockLocalStorageCache = vi.mocked(LocalStorageCache); + const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); beforeEach(() => { mockGetBatchEventProcessor.mockClear(); + MockLocalStorageCache.mockClear(); + MockSyncPrefixCache.mockClear(); }); it('uses localStorageCache and SyncPrefixCache to create eventStore', () => { @@ -80,6 +100,88 @@ describe('createBatchEventProcessor', () => { const processor = createBatchEventProcessor(options); expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); const eventStore = mockGetBatchEventProcessor.mock.calls[0][0].eventStore; - expect + expect(Object.is(eventStore, MockSyncPrefixCache.mock.results[0].value)).toBe(true); + + const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; + expect(Object.is(cache, MockLocalStorageCache.mock.results[0].value)).toBe(true); + expect(prefix).toBe(EVENT_STORE_PREFIX); + + // transformGet and transformSet should be identity functions + expect(transformGet('value')).toBe('value'); + expect(transformSet('value')).toBe('value'); + }); + + it('uses the provided eventDispatcher', () => { + const eventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ eventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); + }); + + it('uses the default broser event dispatcher if none is provided', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(defaultEventDispatcher); + }); + + it('uses the provided closingEventDispatcher', () => { + const closingEventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ closingEventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(closingEventDispatcher); + }); + + it('does not use any closingEventDispatcher if eventDispatcher is provided but closingEventDispatcher is not', () => { + const eventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = 
createBatchEventProcessor({ eventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(undefined); + }); + + it('uses the default sendBeacon event dispatcher if neither eventDispatcher nor closingEventDispatcher is provided', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(sendBeaconEventDispatcher); + }); + + it('uses the provided flushInterval', () => { + const processor1 = createBatchEventProcessor({ flushInterval: 2000 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].flushInterval).toBe(2000); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].flushInterval).toBe(undefined); + }); + + it('uses the provided batchSize', () => { + const processor1 = createBatchEventProcessor({ batchSize: 20 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].batchSize).toBe(20); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].batchSize).toBe(undefined); + }); + + it('uses maxRetries value of 5', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].retryOptions?.maxRetries).toBe(5); + }); + + it('uses the default failedEventRetryInterval', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].failedEventRetryInterval).toBe(FAILED_EVENT_RETRY_INTERVAL); }); }); diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index 6f45c314c..b471efc82 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -22,10 +22,10 @@ import { getBatchEventProcessor, BatchEventProcessorOptions } from './event_proc import defaultEventDispatcher from './default_dispatcher.browser'; import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; -import { SyncPrefixCache, AsyncPrefixCache, Cache } from '../utils/cache/cache'; +import { SyncPrefixCache } from '../utils/cache/cache'; export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; -export const EVENT_STORE_PREFIX = 'fs_optly_pending_events'; +export const EVENT_STORE_PREFIX = 'optly_event:'; export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, @@ -48,7 +48,7 @@ export const createBatchEventProcessor = ( return getBatchEventProcessor({ eventDispatcher: options.eventDispatcher || defaultEventDispatcher, closingEventDispatcher: options.closingEventDispatcher || - 
(options.eventDispatcher ? options.eventDispatcher : sendBeaconEventDispatcher), + (options.eventDispatcher ? undefined : sendBeaconEventDispatcher), flushInterval: options.flushInterval, batchSize: options.batchSize, retryOptions: { From 7bb5c64ddec04ae47dc2f66eff5fe3dfacebc938 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Wed, 20 Nov 2024 22:47:37 +0600 Subject: [PATCH 37/45] up --- .../event_processor_factory.browser.spec.ts | 3 +- .../event_processor_factory.browser.ts | 4 +- .../event_processor_factory.react_native.ts | 56 +++++++++++++++++++ .../event_processor_factory.ts | 2 + 4 files changed, 61 insertions(+), 4 deletions(-) diff --git a/lib/event_processor/event_processor_factory.browser.spec.ts b/lib/event_processor/event_processor_factory.browser.spec.ts index 29723ed8a..20da2ce7e 100644 --- a/lib/event_processor/event_processor_factory.browser.spec.ts +++ b/lib/event_processor/event_processor_factory.browser.spec.ts @@ -45,7 +45,8 @@ vi.mock('../utils/cache/cache', () => { import defaultEventDispatcher from './default_dispatcher.browser'; import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; import { SyncPrefixCache } from '../utils/cache/cache'; -import { createForwardingEventProcessor, createBatchEventProcessor, EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory.browser'; +import { createForwardingEventProcessor, createBatchEventProcessor } from './event_processor_factory.browser'; +import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; import { getForwardingEventProcessor } from './forwarding_event_processor'; import browserDefaultEventDispatcher from './default_dispatcher.browser'; diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index b471efc82..476186030 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -23,9 +23,7 @@ import defaultEventDispatcher from './default_dispatcher.browser'; import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; import { SyncPrefixCache } from '../utils/cache/cache'; - -export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; -export const EVENT_STORE_PREFIX = 'optly_event:'; +import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, diff --git a/lib/event_processor/event_processor_factory.react_native.ts b/lib/event_processor/event_processor_factory.react_native.ts index 3763a15c1..740cbff21 100644 --- a/lib/event_processor/event_processor_factory.react_native.ts +++ b/lib/event_processor/event_processor_factory.react_native.ts @@ -17,9 +17,65 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import { EventDispatcher } from './eventDispatcher'; import { EventProcessor } from './eventProcessor'; import defaultEventDispatcher from './default_dispatcher.browser'; +import { BatchEventProcessorOptions, getBatchEventProcessor } from './event_processor_factory'; +import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; +import { AsyncPrefixCache, Cache, SyncPrefixCache } from 
'../utils/cache/cache';
+import { EventWithId } from './batch_event_processor';
 
 export const createForwardingEventProcessor = (
   eventDispatcher: EventDispatcher = defaultEventDispatcher,
 ): EventProcessor => {
   return getForwardingEventProcessor(eventDispatcher);
 };
+
+const identity = (v: T): T => v;
+
+const getDefaultEventStore = () => {
+  const AsyncStorageCache = require('../utils/cache/async_storage_cache.react_native').AsyncStorageCache;
+
+  const asyncStorageCache = new AsyncStorageCache();
+
+  const eventStore = new AsyncPrefixCache(
+    asyncStorageCache,
+    EVENT_STORE_PREFIX,
+    identity,
+    identity,
+  );
+
+  return eventStore;
+}
+
+const getPrefixEventStore = (cache: Cache): Cache => {
+  if (cache.operation === 'async') {
+    return new AsyncPrefixCache(
+      cache,
+      EVENT_STORE_PREFIX,
+      JSON.parse,
+      JSON.stringify,
+    );
+  } else {
+    return new SyncPrefixCache(
+      cache,
+      EVENT_STORE_PREFIX,
+      JSON.parse,
+      JSON.stringify,
+    );
+  }
+};
+
+export const createBatchEventProcessor = (
+  options: BatchEventProcessorOptions
+): EventProcessor => {
+  const eventStore = options.eventStore ? getPrefixEventStore(options.eventStore) : getDefaultEventStore();
+
+  return getBatchEventProcessor({
+    eventDispatcher: options.eventDispatcher || defaultEventDispatcher,
+    flushInterval: options.flushInterval,
+    batchSize: options.batchSize,
+    retryOptions: {
+      maxRetries: 5,
+    },
+    failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL,
+    eventStore,
+  });
+};
diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts
index 1f1443955..62ab49656 100644
--- a/lib/event_processor/event_processor_factory.ts
+++ b/lib/event_processor/event_processor_factory.ts
@@ -11,6 +11,8 @@ export const DEFAULT_EVENT_FLUSH_INTERVAL = 1000;
 export const DEFAULT_EVENT_MAX_QUEUE_SIZE = 10000;
 export const DEFAULT_MIN_BACKOFF = 1000;
 export const DEFAULT_MAX_BACKOFF = 32000;
+export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000;
+export const EVENT_STORE_PREFIX = 'optly_event:';
 
 export type BatchEventProcessorOptions = {
   eventDispatcher?: EventDispatcher;

From 8d73fad7902e05ac59a8ac420fbb05da3d95d235 Mon Sep 17 00:00:00 2001
From: Raju Ahmed
Date: Thu, 21 Nov 2024 00:13:46 +0600
Subject: [PATCH 38/45] react native

---
 .../event_processor_factory.browser copy.ts   |  60 ++++++
 ...ent_processor_factory.browser.spec copy.ts | 187 ++++++++++++++++++
 .../event_processor_factory.browser.spec.ts   |  16 +-
 ...ent_processor_factory.react_native.spec.ts | 156 ++++++++++++++-
 .../event_processor_factory.react_native.ts   |   6 +-
 vitest.config.mts                             |   2 +-
 6 files changed, 408 insertions(+), 19 deletions(-)
 create mode 100644 lib/event_processor/event_processor_factory.browser copy.ts
 create mode 100644 lib/event_processor/event_processor_factory.browser.spec copy.ts

diff --git a/lib/event_processor/event_processor_factory.browser copy.ts b/lib/event_processor/event_processor_factory.browser copy.ts
new file mode 100644
index 000000000..b471efc82
--- /dev/null
+++ b/lib/event_processor/event_processor_factory.browser copy.ts
@@ -0,0 +1,60 @@
+/**
+ * Copyright 2024, Optimizely
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { getForwardingEventProcessor } from './forwarding_event_processor'; +import { EventDispatcher } from './eventDispatcher'; +import { EventProcessor } from './eventProcessor'; +import { EventWithId } from './batch_event_processor'; +import { getBatchEventProcessor, BatchEventProcessorOptions } from './event_processor_factory'; +import defaultEventDispatcher from './default_dispatcher.browser'; +import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; +import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; +import { SyncPrefixCache } from '../utils/cache/cache'; + +export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; +export const EVENT_STORE_PREFIX = 'optly_event:'; + +export const createForwardingEventProcessor = ( + eventDispatcher: EventDispatcher = defaultEventDispatcher, +): EventProcessor => { + return getForwardingEventProcessor(eventDispatcher); +}; + +const identity = (v: T): T => v; + +export const createBatchEventProcessor = ( + options: BatchEventProcessorOptions +): EventProcessor => { + const localStorageCache = new LocalStorageCache(); + const eventStore = new SyncPrefixCache( + localStorageCache, EVENT_STORE_PREFIX, + identity, + identity, + ); + + return getBatchEventProcessor({ + eventDispatcher: options.eventDispatcher || defaultEventDispatcher, + closingEventDispatcher: options.closingEventDispatcher || + (options.eventDispatcher ? undefined : sendBeaconEventDispatcher), + flushInterval: options.flushInterval, + batchSize: options.batchSize, + retryOptions: { + maxRetries: 5, + }, + failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, + eventStore, + }); +}; diff --git a/lib/event_processor/event_processor_factory.browser.spec copy.ts b/lib/event_processor/event_processor_factory.browser.spec copy.ts new file mode 100644 index 000000000..29723ed8a --- /dev/null +++ b/lib/event_processor/event_processor_factory.browser.spec copy.ts @@ -0,0 +1,187 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { vi, describe, it, expect, beforeEach } from 'vitest'; + +vi.mock('./default_dispatcher.browser', () => { + return { default: {} }; +}); + +vi.mock('./forwarding_event_processor', () => { + const getForwardingEventProcessor = vi.fn().mockImplementation(() => { + return {}; + }); + return { getForwardingEventProcessor }; +}); + +vi.mock('./event_processor_factory', () => { + const getBatchEventProcessor = vi.fn().mockImplementation(() => { + return {}; + }); + return { getBatchEventProcessor }; +}); + +vi.mock('../utils/cache/local_storage_cache.browser', () => { + return { LocalStorageCache: vi.fn() }; +}); + +vi.mock('../utils/cache/cache', () => { + return { SyncPrefixCache: vi.fn() }; +}); + + +import defaultEventDispatcher from './default_dispatcher.browser'; +import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; +import { SyncPrefixCache } from '../utils/cache/cache'; +import { createForwardingEventProcessor, createBatchEventProcessor, EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory.browser'; +import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; +import { getForwardingEventProcessor } from './forwarding_event_processor'; +import browserDefaultEventDispatcher from './default_dispatcher.browser'; +import { getBatchEventProcessor } from './event_processor_factory'; + +describe('createForwardingEventProcessor', () => { + const mockGetForwardingEventProcessor = vi.mocked(getForwardingEventProcessor); + + beforeEach(() => { + mockGetForwardingEventProcessor.mockClear(); + }); + + it('returns forwarding event processor by calling getForwardingEventProcessor with the provided dispatcher', () => { + const eventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createForwardingEventProcessor(eventDispatcher); + + expect(Object.is(processor, mockGetForwardingEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, eventDispatcher); + }); + + it('uses the browser default event dispatcher if none is provided', () => { + const processor = createForwardingEventProcessor(); + + expect(Object.is(processor, mockGetForwardingEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, browserDefaultEventDispatcher); + }); +}); + +describe('createBatchEventProcessor', () => { + const mockGetBatchEventProcessor = vi.mocked(getBatchEventProcessor); + const MockLocalStorageCache = vi.mocked(LocalStorageCache); + const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); + + beforeEach(() => { + mockGetBatchEventProcessor.mockClear(); + MockLocalStorageCache.mockClear(); + MockSyncPrefixCache.mockClear(); + }); + + it('uses localStorageCache and SyncPrefixCache to create eventStore', () => { + const options = { + eventDispatcher: { + dispatchEvent: vi.fn(), + }, + flushInterval: 1000, + batchSize: 10, + }; + + const processor = createBatchEventProcessor(options); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + const eventStore = mockGetBatchEventProcessor.mock.calls[0][0].eventStore; + expect(Object.is(eventStore, MockSyncPrefixCache.mock.results[0].value)).toBe(true); + + const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; + expect(Object.is(cache, MockLocalStorageCache.mock.results[0].value)).toBe(true); + expect(prefix).toBe(EVENT_STORE_PREFIX); + + // transformGet and 
transformSet should be identity functions + expect(transformGet('value')).toBe('value'); + expect(transformSet('value')).toBe('value'); + }); + + it('uses the provided eventDispatcher', () => { + const eventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ eventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); + }); + + it('uses the default broser event dispatcher if none is provided', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(defaultEventDispatcher); + }); + + it('uses the provided closingEventDispatcher', () => { + const closingEventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ closingEventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(closingEventDispatcher); + }); + + it('does not use any closingEventDispatcher if eventDispatcher is provided but closingEventDispatcher is not', () => { + const eventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ eventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(undefined); + }); + + it('uses the default sendBeacon event dispatcher if neither eventDispatcher nor closingEventDispatcher is provided', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(sendBeaconEventDispatcher); + }); + + it('uses the provided flushInterval', () => { + const processor1 = createBatchEventProcessor({ flushInterval: 2000 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].flushInterval).toBe(2000); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].flushInterval).toBe(undefined); + }); + + it('uses the provided batchSize', () => { + const processor1 = createBatchEventProcessor({ batchSize: 20 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].batchSize).toBe(20); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].batchSize).toBe(undefined); + }); + + it('uses maxRetries value of 5', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].retryOptions?.maxRetries).toBe(5); + }); + + it('uses the default failedEventRetryInterval', () => { + const processor = createBatchEventProcessor({ }); + 
expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].failedEventRetryInterval).toBe(FAILED_EVENT_RETRY_INTERVAL); + }); +}); diff --git a/lib/event_processor/event_processor_factory.browser.spec.ts b/lib/event_processor/event_processor_factory.browser.spec.ts index 20da2ce7e..07aeae0c6 100644 --- a/lib/event_processor/event_processor_factory.browser.spec.ts +++ b/lib/event_processor/event_processor_factory.browser.spec.ts @@ -30,7 +30,7 @@ vi.mock('./event_processor_factory', () => { const getBatchEventProcessor = vi.fn().mockImplementation(() => { return {}; }); - return { getBatchEventProcessor }; + return { getBatchEventProcessor, EVENT_STORE_PREFIX: 'test_prefix', FAILED_EVENT_RETRY_INTERVAL: 1000 }; }); vi.mock('../utils/cache/local_storage_cache.browser', () => { @@ -89,16 +89,8 @@ describe('createBatchEventProcessor', () => { MockSyncPrefixCache.mockClear(); }); - it('uses localStorageCache and SyncPrefixCache to create eventStore', () => { - const options = { - eventDispatcher: { - dispatchEvent: vi.fn(), - }, - flushInterval: 1000, - batchSize: 10, - }; - - const processor = createBatchEventProcessor(options); + it('uses LocalStorageCache and SyncPrefixCache to create eventStore', () => { + const processor = createBatchEventProcessor({}); expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); const eventStore = mockGetBatchEventProcessor.mock.calls[0][0].eventStore; expect(Object.is(eventStore, MockSyncPrefixCache.mock.results[0].value)).toBe(true); @@ -122,7 +114,7 @@ describe('createBatchEventProcessor', () => { expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); }); - it('uses the default broser event dispatcher if none is provided', () => { + it('uses the default browser event dispatcher if none is provided', () => { const processor = createBatchEventProcessor({ }); expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(defaultEventDispatcher); diff --git a/lib/event_processor/event_processor_factory.react_native.spec.ts b/lib/event_processor/event_processor_factory.react_native.spec.ts index 6de989534..398701818 100644 --- a/lib/event_processor/event_processor_factory.react_native.spec.ts +++ b/lib/event_processor/event_processor_factory.react_native.spec.ts @@ -25,9 +25,28 @@ vi.mock('./forwarding_event_processor', () => { return { getForwardingEventProcessor }; }); -import { createForwardingEventProcessor } from './event_processor_factory.react_native'; +vi.mock('./event_processor_factory', () => { + const getBatchEventProcessor = vi.fn().mockImplementation(() => { + return {}; + }); + return { getBatchEventProcessor, EVENT_STORE_PREFIX: 'test_prefix', FAILED_EVENT_RETRY_INTERVAL: 1000 }; +}); + +vi.mock('../utils/cache/async_storage_cache.react_native', () => { + return { AsyncStorageCache: vi.fn() }; +}); + +vi.mock('../utils/cache/cache', () => { + return { SyncPrefixCache: vi.fn(), AsyncPrefixCache: vi.fn() }; +}); + +import { createForwardingEventProcessor, createBatchEventProcessor } from './event_processor_factory.react_native'; import { getForwardingEventProcessor } from './forwarding_event_processor'; -import browserDefaultEventDispatcher from './default_dispatcher.browser'; +import defaultEventDispatcher from './default_dispatcher.browser'; +import { EVENT_STORE_PREFIX, 
FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; +import { getBatchEventProcessor } from './event_processor_factory'; +import { AsyncCache, AsyncPrefixCache, SyncCache, SyncPrefixCache } from '../utils/cache/cache'; +import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; describe('createForwardingEventProcessor', () => { const mockGetForwardingEventProcessor = vi.mocked(getForwardingEventProcessor); @@ -51,6 +70,137 @@ describe('createForwardingEventProcessor', () => { const processor = createForwardingEventProcessor(); expect(Object.is(processor, mockGetForwardingEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, browserDefaultEventDispatcher); + expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, defaultEventDispatcher); + }); +}); + +describe('createBatchEventProcessor', () => { + const mockGetBatchEventProcessor = vi.mocked(getBatchEventProcessor); + const MockAsyncStorageCache = vi.mocked(AsyncStorageCache); + const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); + const MockAsyncPrefixCache = vi.mocked(AsyncPrefixCache); + + beforeEach(() => { + mockGetBatchEventProcessor.mockClear(); + MockAsyncStorageCache.mockClear(); + MockSyncPrefixCache.mockClear(); + MockAsyncPrefixCache.mockClear(); + }); + + it('uses AsyncStorageCache and AsyncPrefixCache to create eventStore if no eventStore is provided', () => { + const processor = createBatchEventProcessor({}); + + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + const eventStore = mockGetBatchEventProcessor.mock.calls[0][0].eventStore; + expect(Object.is(eventStore, MockAsyncPrefixCache.mock.results[0].value)).toBe(true); + + const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixCache.mock.calls[0]; + expect(Object.is(cache, MockAsyncStorageCache.mock.results[0].value)).toBe(true); + expect(prefix).toBe(EVENT_STORE_PREFIX); + + // transformGet and transformSet should be identity functions + expect(transformGet('value')).toBe('value'); + expect(transformSet('value')).toBe('value'); + }); + + it('wraps the provided eventStore in a SyncPrefixCache if a SyncCache is provided as eventStore', () => { + const eventStore = { + operation: 'sync', + } as SyncCache; + + const processor = createBatchEventProcessor({ eventStore }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockSyncPrefixCache.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; + + expect(cache).toBe(eventStore); + expect(prefix).toBe(EVENT_STORE_PREFIX); + + // transformGet and transformSet should be JSON.parse and JSON.stringify + expect(transformGet('{"value": 1}')).toEqual({ value: 1 }); + expect(transformSet({ value: 1 })).toBe('{"value":1}'); + }); + + it('wraps the provided eventStore in a AsyncPrefixCache if a AsyncCache is provided as eventStore', () => { + const eventStore = { + operation: 'async', + } as AsyncCache; + + const processor = createBatchEventProcessor({ eventStore }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockAsyncPrefixCache.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixCache.mock.calls[0]; + + expect(cache).toBe(eventStore); 
+ expect(prefix).toBe(EVENT_STORE_PREFIX); + + // transformGet and transformSet should be JSON.parse and JSON.stringify + expect(transformGet('{"value": 1}')).toEqual({ value: 1 }); + expect(transformSet({ value: 1 })).toBe('{"value":1}'); + }); + + + it('uses the provided eventDispatcher', () => { + const eventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ eventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); + }); + + it('uses the default browser event dispatcher if none is provided', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(defaultEventDispatcher); + }); + + it('uses the provided closingEventDispatcher', () => { + const closingEventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ closingEventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(closingEventDispatcher); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].closingEventDispatcher).toBe(undefined); + }); + + it('uses the provided flushInterval', () => { + const processor1 = createBatchEventProcessor({ flushInterval: 2000 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].flushInterval).toBe(2000); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].flushInterval).toBe(undefined); + }); + + it('uses the provided batchSize', () => { + const processor1 = createBatchEventProcessor({ batchSize: 20 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].batchSize).toBe(20); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].batchSize).toBe(undefined); + }); + + it('uses maxRetries value of 5', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].retryOptions?.maxRetries).toBe(5); + }); + + it('uses the default failedEventRetryInterval', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].failedEventRetryInterval).toBe(FAILED_EVENT_RETRY_INTERVAL); }); }); diff --git a/lib/event_processor/event_processor_factory.react_native.ts b/lib/event_processor/event_processor_factory.react_native.ts index 740cbff21..97b27a8e9 100644 --- a/lib/event_processor/event_processor_factory.react_native.ts +++ 
b/lib/event_processor/event_processor_factory.react_native.ts @@ -21,6 +21,7 @@ import { BatchEventProcessorOptions, getBatchEventProcessor } from './event_proc import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; import { AsyncPrefixCache, Cache, SyncPrefixCache } from '../utils/cache/cache'; import { EventWithId } from './batch_event_processor'; +import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, @@ -31,9 +32,7 @@ export const createForwardingEventProcessor = ( const identity = (v: T): T => v; const getDefaultEventStore = () => { - const AsyncStorageCache = require('../utils/cache/async_storage_cache.react_native').AsyncStorageCache; - - const asyncStorageCache = new AsyncStorageCache(); + const asyncStorageCache = new AsyncStorageCache(); const eventStore = new AsyncPrefixCache( asyncStorageCache, @@ -70,6 +69,7 @@ export const createBatchEventProcessor = ( return getBatchEventProcessor({ eventDispatcher: options.eventDispatcher || defaultEventDispatcher, + closingEventDispatcher: options.closingEventDispatcher, flushInterval: options.flushInterval, batchSize: options.batchSize, retryOptions: { diff --git a/vitest.config.mts b/vitest.config.mts index 3b07cdc13..a97ad1788 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/event_processor_factory.browser.spec.ts'], + include: ['**/event_processor_factory.react_native.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 59b678b5a87d5ea225613ef11d0090c04ba1d38c Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 21 Nov 2024 00:41:10 +0600 Subject: [PATCH 39/45] node test --- .../event_processor_factory.node.spec.ts | 145 +++++++++++++++++- .../event_processor_factory.node.ts | 20 +++ .../event_processor_factory.react_native.ts | 22 +-- .../event_processor_factory.ts | 20 ++- vitest.config.mts | 2 +- 5 files changed, 186 insertions(+), 23 deletions(-) diff --git a/lib/event_processor/event_processor_factory.node.spec.ts b/lib/event_processor/event_processor_factory.node.spec.ts index 36d4ea1fa..b359c7a05 100644 --- a/lib/event_processor/event_processor_factory.node.spec.ts +++ b/lib/event_processor/event_processor_factory.node.spec.ts @@ -24,9 +24,29 @@ vi.mock('./forwarding_event_processor', () => { return { getForwardingEventProcessor }; }); -import { createForwardingEventProcessor } from './event_processor_factory.node'; +vi.mock('./event_processor_factory', async (importOriginal) => { + const getBatchEventProcessor = vi.fn().mockImplementation(() => { + return {}; + }); + const original: any = await importOriginal(); + return { ...original, getBatchEventProcessor }; +}); + +vi.mock('../utils/cache/async_storage_cache.react_native', () => { + return { AsyncStorageCache: vi.fn() }; +}); + +vi.mock('../utils/cache/cache', () => { + return { SyncPrefixCache: vi.fn(), AsyncPrefixCache: vi.fn() }; +}); + +import { createBatchEventProcessor, createForwardingEventProcessor } from './event_processor_factory.node'; import { getForwardingEventProcessor } from './forwarding_event_processor'; import nodeDefaultEventDispatcher from './default_dispatcher.node'; +import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; +import { getBatchEventProcessor } from './event_processor_factory'; +import { 
AsyncCache, AsyncPrefixCache, SyncCache, SyncPrefixCache } from '../utils/cache/cache'; +import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; describe('createForwardingEventProcessor', () => { const mockGetForwardingEventProcessor = vi.mocked(getForwardingEventProcessor); @@ -53,3 +73,126 @@ describe('createForwardingEventProcessor', () => { expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, nodeDefaultEventDispatcher); }); }); + +describe('createBatchEventProcessor', () => { + const mockGetBatchEventProcessor = vi.mocked(getBatchEventProcessor); + const MockAsyncStorageCache = vi.mocked(AsyncStorageCache); + const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); + const MockAsyncPrefixCache = vi.mocked(AsyncPrefixCache); + + beforeEach(() => { + mockGetBatchEventProcessor.mockClear(); + MockAsyncStorageCache.mockClear(); + MockSyncPrefixCache.mockClear(); + MockAsyncPrefixCache.mockClear(); + }); + + it('uses no default event store if no eventStore is provided', () => { + const processor = createBatchEventProcessor({}); + + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + const eventStore = mockGetBatchEventProcessor.mock.calls[0][0].eventStore; + expect(eventStore).toBe(undefined); + }); + + it('wraps the provided eventStore in a SyncPrefixCache if a SyncCache is provided as eventStore', () => { + const eventStore = { + operation: 'sync', + } as SyncCache; + + const processor = createBatchEventProcessor({ eventStore }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockSyncPrefixCache.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; + + expect(cache).toBe(eventStore); + expect(prefix).toBe(EVENT_STORE_PREFIX); + + // transformGet and transformSet should be JSON.parse and JSON.stringify + expect(transformGet('{"value": 1}')).toEqual({ value: 1 }); + expect(transformSet({ value: 1 })).toBe('{"value":1}'); + }); + + it('wraps the provided eventStore in a AsyncPrefixCache if a AsyncCache is provided as eventStore', () => { + const eventStore = { + operation: 'async', + } as AsyncCache; + + const processor = createBatchEventProcessor({ eventStore }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockAsyncPrefixCache.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixCache.mock.calls[0]; + + expect(cache).toBe(eventStore); + expect(prefix).toBe(EVENT_STORE_PREFIX); + + // transformGet and transformSet should be JSON.parse and JSON.stringify + expect(transformGet('{"value": 1}')).toEqual({ value: 1 }); + expect(transformSet({ value: 1 })).toBe('{"value":1}'); + }); + + + it('uses the provided eventDispatcher', () => { + const eventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ eventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); + }); + + it('uses the default node event dispatcher if none is provided', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + 
expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(nodeDefaultEventDispatcher); + }); + + it('uses the provided closingEventDispatcher', () => { + const closingEventDispatcher = { + dispatchEvent: vi.fn(), + }; + + const processor = createBatchEventProcessor({ closingEventDispatcher }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(closingEventDispatcher); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].closingEventDispatcher).toBe(undefined); + }); + + it('uses the provided flushInterval', () => { + const processor1 = createBatchEventProcessor({ flushInterval: 2000 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].flushInterval).toBe(2000); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].flushInterval).toBe(undefined); + }); + + it('uses the provided batchSize', () => { + const processor1 = createBatchEventProcessor({ batchSize: 20 }); + expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].batchSize).toBe(20); + + const processor2 = createBatchEventProcessor({ }); + expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[1][0].batchSize).toBe(undefined); + }); + + it('uses maxRetries value of 10', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].retryOptions?.maxRetries).toBe(10); + }); + + it('uses the default failedEventRetryInterval', () => { + const processor = createBatchEventProcessor({ }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].failedEventRetryInterval).toBe(FAILED_EVENT_RETRY_INTERVAL); + }); +}); diff --git a/lib/event_processor/event_processor_factory.node.ts b/lib/event_processor/event_processor_factory.node.ts index ae793ce4f..d82717780 100644 --- a/lib/event_processor/event_processor_factory.node.ts +++ b/lib/event_processor/event_processor_factory.node.ts @@ -17,9 +17,29 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import { EventDispatcher } from './eventDispatcher'; import { EventProcessor } from './eventProcessor'; import defaultEventDispatcher from './default_dispatcher.node'; +import { BatchEventProcessorOptions, FAILED_EVENT_RETRY_INTERVAL, getBatchEventProcessor, getPrefixEventStore } from './event_processor_factory'; export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, ): EventProcessor => { return getForwardingEventProcessor(eventDispatcher); }; + + +export const createBatchEventProcessor = ( + options: BatchEventProcessorOptions +): EventProcessor => { + const eventStore = options.eventStore ? 
getPrefixEventStore(options.eventStore) : undefined; + + return getBatchEventProcessor({ + eventDispatcher: options.eventDispatcher || defaultEventDispatcher, + closingEventDispatcher: options.closingEventDispatcher, + flushInterval: options.flushInterval, + batchSize: options.batchSize, + retryOptions: { + maxRetries: 10, + }, + failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, + eventStore, + }); +}; diff --git a/lib/event_processor/event_processor_factory.react_native.ts b/lib/event_processor/event_processor_factory.react_native.ts index 97b27a8e9..e18a3033e 100644 --- a/lib/event_processor/event_processor_factory.react_native.ts +++ b/lib/event_processor/event_processor_factory.react_native.ts @@ -17,9 +17,9 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import { EventDispatcher } from './eventDispatcher'; import { EventProcessor } from './eventProcessor'; import defaultEventDispatcher from './default_dispatcher.browser'; -import { BatchEventProcessorOptions, getBatchEventProcessor } from './event_processor_factory'; +import { BatchEventProcessorOptions, getBatchEventProcessor, getPrefixEventStore } from './event_processor_factory'; import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; -import { AsyncPrefixCache, Cache, SyncPrefixCache } from '../utils/cache/cache'; +import { AsyncPrefixCache } from '../utils/cache/cache'; import { EventWithId } from './batch_event_processor'; import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; @@ -44,24 +44,6 @@ const getDefaultEventStore = () => { return eventStore; } -const getPrefixEventStore = (cache: Cache): Cache => { - if (cache.operation === 'async') { - return new AsyncPrefixCache( - cache, - EVENT_STORE_PREFIX, - JSON.parse, - JSON.stringify, - ); - } else { - return new SyncPrefixCache( - cache, - EVENT_STORE_PREFIX, - JSON.parse, - JSON.stringify, - ); - } -}; - export const createBatchEventProcessor = ( options: BatchEventProcessorOptions ): EventProcessor => { diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index 62ab49656..4e1e62f3c 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -4,7 +4,7 @@ import { ExponentialBackoff, IntervalRepeater } from "../utils/repeater/repeater import { EventDispatcher } from "./eventDispatcher"; import { EventProcessor } from "./eventProcessor"; import { BatchEventProcessor, EventWithId, RetryConfig } from "./batch_event_processor"; -import { Cache } from "../utils/cache/cache"; +import { AsyncPrefixCache, Cache, SyncPrefixCache } from "../utils/cache/cache"; export const DEFAULT_EVENT_BATCH_SIZE = 10; export const DEFAULT_EVENT_FLUSH_INTERVAL = 1000; @@ -14,6 +14,24 @@ export const DEFAULT_MAX_BACKOFF = 32000; export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; export const EVENT_STORE_PREFIX = 'optly_event:'; +export const getPrefixEventStore = (cache: Cache): Cache => { + if (cache.operation === 'async') { + return new AsyncPrefixCache( + cache, + EVENT_STORE_PREFIX, + JSON.parse, + JSON.stringify, + ); + } else { + return new SyncPrefixCache( + cache, + EVENT_STORE_PREFIX, + JSON.parse, + JSON.stringify, + ); + } +}; + export type BatchEventProcessorOptions = { eventDispatcher?: EventDispatcher; closingEventDispatcher?: EventDispatcher; diff --git a/vitest.config.mts b/vitest.config.mts index a97ad1788..dd2c0addc 100644 --- a/vitest.config.mts +++ b/vitest.config.mts 
@@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/event_processor_factory.react_native.spec.ts'], + include: ['**/event_processor_factory.node.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From 9d684fe735d449c01681fbd5b6afed85b86d94cc Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 21 Nov 2024 00:46:06 +0600 Subject: [PATCH 40/45] more test --- lib/event_processor/event_processor_factory.node.spec.ts | 8 +++++++- lib/event_processor/event_processor_factory.node.ts | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/event_processor/event_processor_factory.node.spec.ts b/lib/event_processor/event_processor_factory.node.spec.ts index b359c7a05..a511e2e06 100644 --- a/lib/event_processor/event_processor_factory.node.spec.ts +++ b/lib/event_processor/event_processor_factory.node.spec.ts @@ -190,9 +190,15 @@ describe('createBatchEventProcessor', () => { expect(mockGetBatchEventProcessor.mock.calls[0][0].retryOptions?.maxRetries).toBe(10); }); - it('uses the default failedEventRetryInterval', () => { + it('uses no failed event retry if an eventStore is not provided', () => { const processor = createBatchEventProcessor({ }); expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][0].failedEventRetryInterval).toBe(undefined); + }); + + it('uses the default failedEventRetryInterval if an eventStore is provided', () => { + const processor = createBatchEventProcessor({ eventStore: {} as any }); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); expect(mockGetBatchEventProcessor.mock.calls[0][0].failedEventRetryInterval).toBe(FAILED_EVENT_RETRY_INTERVAL); }); }); diff --git a/lib/event_processor/event_processor_factory.node.ts b/lib/event_processor/event_processor_factory.node.ts index d82717780..7bfd43c6a 100644 --- a/lib/event_processor/event_processor_factory.node.ts +++ b/lib/event_processor/event_processor_factory.node.ts @@ -39,7 +39,7 @@ export const createBatchEventProcessor = ( retryOptions: { maxRetries: 10, }, - failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, + failedEventRetryInterval: eventStore ? 
FAILED_EVENT_RETRY_INTERVAL : undefined, eventStore, }); }; From 96400ee8644e5c3f862d0dd8872464083a712359 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 21 Nov 2024 00:56:54 +0600 Subject: [PATCH 41/45] up --- ...batch_event_processor.react_native.spec.ts | 7 +- .../event_processor_factory.browser copy.ts | 60 ------ ...ent_processor_factory.browser.spec copy.ts | 187 ------------------ .../event_processor_factory.browser.spec.ts | 5 +- ...ent_processor_factory.react_native.spec.ts | 5 +- .../forwarding_event_processor.spec.ts | 5 +- lib/index.browser.ts | 4 +- lib/index.node.ts | 5 +- lib/index.react_native.ts | 4 +- vitest.config.mts | 2 +- 10 files changed, 24 insertions(+), 260 deletions(-) delete mode 100644 lib/event_processor/event_processor_factory.browser copy.ts delete mode 100644 lib/event_processor/event_processor_factory.browser.spec copy.ts diff --git a/lib/event_processor/batch_event_processor.react_native.spec.ts b/lib/event_processor/batch_event_processor.react_native.spec.ts index 37cc8fa93..d592e17f3 100644 --- a/lib/event_processor/batch_event_processor.react_native.spec.ts +++ b/lib/event_processor/batch_event_processor.react_native.spec.ts @@ -33,9 +33,12 @@ import { ReactNativeNetInfoEventProcessor } from './batch_event_processor.react_ import { getMockLogger } from '../tests/mock/mock_logger'; import { getMockRepeater } from '../tests/mock/mock_repeater'; import { getMockAsyncCache } from '../tests/mock/mock_cache'; -import { createImpressionEvent } from '../tests/mock/create_event'; + import { EventWithId } from './batch_event_processor'; -import { formatEvents, ProcessableEvent } from '.'; +import { EventDispatcher } from './eventDispatcher'; +import { formatEvents } from './v1/buildEventV1'; +import { createImpressionEvent } from '../tests/mock/create_event'; +import { ProcessableEvent } from './eventProcessor'; const getMockDispatcher = () => { return { diff --git a/lib/event_processor/event_processor_factory.browser copy.ts b/lib/event_processor/event_processor_factory.browser copy.ts deleted file mode 100644 index b471efc82..000000000 --- a/lib/event_processor/event_processor_factory.browser copy.ts +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Copyright 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { getForwardingEventProcessor } from './forwarding_event_processor'; -import { EventDispatcher } from './eventDispatcher'; -import { EventProcessor } from './eventProcessor'; -import { EventWithId } from './batch_event_processor'; -import { getBatchEventProcessor, BatchEventProcessorOptions } from './event_processor_factory'; -import defaultEventDispatcher from './default_dispatcher.browser'; -import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; -import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; -import { SyncPrefixCache } from '../utils/cache/cache'; - -export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; -export const EVENT_STORE_PREFIX = 'optly_event:'; - -export const createForwardingEventProcessor = ( - eventDispatcher: EventDispatcher = defaultEventDispatcher, -): EventProcessor => { - return getForwardingEventProcessor(eventDispatcher); -}; - -const identity = (v: T): T => v; - -export const createBatchEventProcessor = ( - options: BatchEventProcessorOptions -): EventProcessor => { - const localStorageCache = new LocalStorageCache(); - const eventStore = new SyncPrefixCache( - localStorageCache, EVENT_STORE_PREFIX, - identity, - identity, - ); - - return getBatchEventProcessor({ - eventDispatcher: options.eventDispatcher || defaultEventDispatcher, - closingEventDispatcher: options.closingEventDispatcher || - (options.eventDispatcher ? undefined : sendBeaconEventDispatcher), - flushInterval: options.flushInterval, - batchSize: options.batchSize, - retryOptions: { - maxRetries: 5, - }, - failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, - eventStore, - }); -}; diff --git a/lib/event_processor/event_processor_factory.browser.spec copy.ts b/lib/event_processor/event_processor_factory.browser.spec copy.ts deleted file mode 100644 index 29723ed8a..000000000 --- a/lib/event_processor/event_processor_factory.browser.spec copy.ts +++ /dev/null @@ -1,187 +0,0 @@ -/** - * Copyright 2024, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { vi, describe, it, expect, beforeEach } from 'vitest'; - -vi.mock('./default_dispatcher.browser', () => { - return { default: {} }; -}); - -vi.mock('./forwarding_event_processor', () => { - const getForwardingEventProcessor = vi.fn().mockImplementation(() => { - return {}; - }); - return { getForwardingEventProcessor }; -}); - -vi.mock('./event_processor_factory', () => { - const getBatchEventProcessor = vi.fn().mockImplementation(() => { - return {}; - }); - return { getBatchEventProcessor }; -}); - -vi.mock('../utils/cache/local_storage_cache.browser', () => { - return { LocalStorageCache: vi.fn() }; -}); - -vi.mock('../utils/cache/cache', () => { - return { SyncPrefixCache: vi.fn() }; -}); - - -import defaultEventDispatcher from './default_dispatcher.browser'; -import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; -import { SyncPrefixCache } from '../utils/cache/cache'; -import { createForwardingEventProcessor, createBatchEventProcessor, EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory.browser'; -import sendBeaconEventDispatcher from '../plugins/event_dispatcher/send_beacon_dispatcher'; -import { getForwardingEventProcessor } from './forwarding_event_processor'; -import browserDefaultEventDispatcher from './default_dispatcher.browser'; -import { getBatchEventProcessor } from './event_processor_factory'; - -describe('createForwardingEventProcessor', () => { - const mockGetForwardingEventProcessor = vi.mocked(getForwardingEventProcessor); - - beforeEach(() => { - mockGetForwardingEventProcessor.mockClear(); - }); - - it('returns forwarding event processor by calling getForwardingEventProcessor with the provided dispatcher', () => { - const eventDispatcher = { - dispatchEvent: vi.fn(), - }; - - const processor = createForwardingEventProcessor(eventDispatcher); - - expect(Object.is(processor, mockGetForwardingEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, eventDispatcher); - }); - - it('uses the browser default event dispatcher if none is provided', () => { - const processor = createForwardingEventProcessor(); - - expect(Object.is(processor, mockGetForwardingEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetForwardingEventProcessor).toHaveBeenNthCalledWith(1, browserDefaultEventDispatcher); - }); -}); - -describe('createBatchEventProcessor', () => { - const mockGetBatchEventProcessor = vi.mocked(getBatchEventProcessor); - const MockLocalStorageCache = vi.mocked(LocalStorageCache); - const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); - - beforeEach(() => { - mockGetBatchEventProcessor.mockClear(); - MockLocalStorageCache.mockClear(); - MockSyncPrefixCache.mockClear(); - }); - - it('uses localStorageCache and SyncPrefixCache to create eventStore', () => { - const options = { - eventDispatcher: { - dispatchEvent: vi.fn(), - }, - flushInterval: 1000, - batchSize: 10, - }; - - const processor = createBatchEventProcessor(options); - expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - const eventStore = mockGetBatchEventProcessor.mock.calls[0][0].eventStore; - expect(Object.is(eventStore, MockSyncPrefixCache.mock.results[0].value)).toBe(true); - - const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; - expect(Object.is(cache, MockLocalStorageCache.mock.results[0].value)).toBe(true); - expect(prefix).toBe(EVENT_STORE_PREFIX); - - // transformGet and 
transformSet should be identity functions - expect(transformGet('value')).toBe('value'); - expect(transformSet('value')).toBe('value'); - }); - - it('uses the provided eventDispatcher', () => { - const eventDispatcher = { - dispatchEvent: vi.fn(), - }; - - const processor = createBatchEventProcessor({ eventDispatcher }); - expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(eventDispatcher); - }); - - it('uses the default broser event dispatcher if none is provided', () => { - const processor = createBatchEventProcessor({ }); - expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].eventDispatcher).toBe(defaultEventDispatcher); - }); - - it('uses the provided closingEventDispatcher', () => { - const closingEventDispatcher = { - dispatchEvent: vi.fn(), - }; - - const processor = createBatchEventProcessor({ closingEventDispatcher }); - expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(closingEventDispatcher); - }); - - it('does not use any closingEventDispatcher if eventDispatcher is provided but closingEventDispatcher is not', () => { - const eventDispatcher = { - dispatchEvent: vi.fn(), - }; - - const processor = createBatchEventProcessor({ eventDispatcher }); - expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(undefined); - }); - - it('uses the default sendBeacon event dispatcher if neither eventDispatcher nor closingEventDispatcher is provided', () => { - const processor = createBatchEventProcessor({ }); - expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].closingEventDispatcher).toBe(sendBeaconEventDispatcher); - }); - - it('uses the provided flushInterval', () => { - const processor1 = createBatchEventProcessor({ flushInterval: 2000 }); - expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].flushInterval).toBe(2000); - - const processor2 = createBatchEventProcessor({ }); - expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[1][0].flushInterval).toBe(undefined); - }); - - it('uses the provided batchSize', () => { - const processor1 = createBatchEventProcessor({ batchSize: 20 }); - expect(Object.is(processor1, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].batchSize).toBe(20); - - const processor2 = createBatchEventProcessor({ }); - expect(Object.is(processor2, mockGetBatchEventProcessor.mock.results[1].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[1][0].batchSize).toBe(undefined); - }); - - it('uses maxRetries value of 5', () => { - const processor = createBatchEventProcessor({ }); - expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].retryOptions?.maxRetries).toBe(5); - }); - - it('uses the default failedEventRetryInterval', () => { - const processor = createBatchEventProcessor({ }); - 
expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetBatchEventProcessor.mock.calls[0][0].failedEventRetryInterval).toBe(FAILED_EVENT_RETRY_INTERVAL); - }); -}); diff --git a/lib/event_processor/event_processor_factory.browser.spec.ts b/lib/event_processor/event_processor_factory.browser.spec.ts index 07aeae0c6..5bd615ebe 100644 --- a/lib/event_processor/event_processor_factory.browser.spec.ts +++ b/lib/event_processor/event_processor_factory.browser.spec.ts @@ -26,11 +26,12 @@ vi.mock('./forwarding_event_processor', () => { return { getForwardingEventProcessor }; }); -vi.mock('./event_processor_factory', () => { +vi.mock('./event_processor_factory', async (importOriginal) => { const getBatchEventProcessor = vi.fn().mockImplementation(() => { return {}; }); - return { getBatchEventProcessor, EVENT_STORE_PREFIX: 'test_prefix', FAILED_EVENT_RETRY_INTERVAL: 1000 }; + const original: any = await importOriginal(); + return { ...original, getBatchEventProcessor }; }); vi.mock('../utils/cache/local_storage_cache.browser', () => { diff --git a/lib/event_processor/event_processor_factory.react_native.spec.ts b/lib/event_processor/event_processor_factory.react_native.spec.ts index 398701818..8570e4988 100644 --- a/lib/event_processor/event_processor_factory.react_native.spec.ts +++ b/lib/event_processor/event_processor_factory.react_native.spec.ts @@ -25,11 +25,12 @@ vi.mock('./forwarding_event_processor', () => { return { getForwardingEventProcessor }; }); -vi.mock('./event_processor_factory', () => { +vi.mock('./event_processor_factory', async (importOriginal) => { const getBatchEventProcessor = vi.fn().mockImplementation(() => { return {}; }); - return { getBatchEventProcessor, EVENT_STORE_PREFIX: 'test_prefix', FAILED_EVENT_RETRY_INTERVAL: 1000 }; + const original: any = await importOriginal(); + return { ...original, getBatchEventProcessor }; }); vi.mock('../utils/cache/async_storage_cache.react_native', () => { diff --git a/lib/event_processor/forwarding_event_processor.spec.ts b/lib/event_processor/forwarding_event_processor.spec.ts index b6d1b5303..99e6d77fe 100644 --- a/lib/event_processor/forwarding_event_processor.spec.ts +++ b/lib/event_processor/forwarding_event_processor.spec.ts @@ -16,10 +16,9 @@ import { expect, describe, it, vi } from 'vitest'; import { getForwardingEventProcessor } from './forwarding_event_processor'; -import { EventDispatcher, formatEvents, makeBatchedEventV1 } from '.'; - +import { EventDispatcher } from './eventDispatcher'; +import { formatEvents, makeBatchedEventV1 } from './v1/buildEventV1'; import { createImpressionEvent } from '../tests/mock/create_event'; -import exp from 'constants'; import { ServiceState } from '../service'; const getMockEventDispatcher = (): EventDispatcher => { diff --git a/lib/index.browser.ts b/lib/index.browser.ts index 48fbd9ef7..f7b7ba98c 100644 --- a/lib/index.browser.ts +++ b/lib/index.browser.ts @@ -31,7 +31,7 @@ import { getUserAgentParser } from './plugins/odp/user_agent_parser/index.browse import * as commonExports from './common_exports'; import { PollingConfigManagerConfig } from './project_config/config_manager_factory'; import { createPollingProjectConfigManager } from './project_config/config_manager_factory.browser'; -import { createForwardingEventProcessor } from './event_processor/event_processor_factory.browser'; +import { createBatchEventProcessor, createForwardingEventProcessor } from './event_processor/event_processor_factory.browser'; const logger = 
getLogger(); logHelper.setLogHandler(loggerPlugin.createLogger()); @@ -196,6 +196,7 @@ export { getUserAgentParser, createPollingProjectConfigManager, createForwardingEventProcessor, + createBatchEventProcessor, }; export * from './common_exports'; @@ -215,6 +216,7 @@ export default { getUserAgentParser, createPollingProjectConfigManager, createForwardingEventProcessor, + createBatchEventProcessor, }; export * from './export_types'; diff --git a/lib/index.node.ts b/lib/index.node.ts index bc68461cc..12acf9960 100644 --- a/lib/index.node.ts +++ b/lib/index.node.ts @@ -26,7 +26,8 @@ import { OptimizelyDecideOption, Client, Config } from './shared_types'; import { NodeOdpManager } from './plugins/odp_manager/index.node'; import * as commonExports from './common_exports'; import { createPollingProjectConfigManager } from './project_config/config_manager_factory.node'; -import { createForwardingEventProcessor } from './event_processor/event_processor_factory.node'; +import { createForwardingEventProcessor, createBatchEventProcessor } from './event_processor/event_processor_factory.node'; +import { create } from 'domain'; const logger = getLogger(); setLogLevel(LogLevel.ERROR); @@ -143,6 +144,7 @@ export { OptimizelyDecideOption, createPollingProjectConfigManager, createForwardingEventProcessor, + createBatchEventProcessor, }; export * from './common_exports'; @@ -159,6 +161,7 @@ export default { OptimizelyDecideOption, createPollingProjectConfigManager, createForwardingEventProcessor, + createBatchEventProcessor, }; export * from './export_types'; diff --git a/lib/index.react_native.ts b/lib/index.react_native.ts index 574cd4f38..41cf71369 100644 --- a/lib/index.react_native.ts +++ b/lib/index.react_native.ts @@ -26,7 +26,7 @@ import { OptimizelyDecideOption, Client, Config } from './shared_types'; import { BrowserOdpManager } from './plugins/odp_manager/index.browser'; import * as commonExports from './common_exports'; import { createPollingProjectConfigManager } from './project_config/config_manager_factory.react_native'; -import { createForwardingEventProcessor } from './event_processor/event_processor_factory.react_native'; +import { createBatchEventProcessor, createForwardingEventProcessor } from './event_processor/event_processor_factory.react_native'; import 'fast-text-encoding'; import 'react-native-get-random-values'; @@ -146,6 +146,7 @@ export { OptimizelyDecideOption, createPollingProjectConfigManager, createForwardingEventProcessor, + createBatchEventProcessor, }; export * from './common_exports'; @@ -162,6 +163,7 @@ export default { OptimizelyDecideOption, createPollingProjectConfigManager, createForwardingEventProcessor, + createBatchEventProcessor, }; export * from './export_types'; diff --git a/vitest.config.mts b/vitest.config.mts index dd2c0addc..673f7d1c6 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -20,7 +20,7 @@ export default defineConfig({ test: { onConsoleLog: () => true, environment: 'happy-dom', - include: ['**/event_processor_factory.node.spec.ts'], + include: ['**/*.spec.ts'], typecheck: { tsconfig: 'tsconfig.spec.json', }, From d40119a631cf1668db0acb06a435061a92c3b626 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Thu, 21 Nov 2024 22:11:39 +0600 Subject: [PATCH 42/45] review --- .../batch_event_processor.react_native.spec.ts | 16 ++++++++++++++++ .../batch_event_processor.react_native.ts | 16 ++++++++++++++++ lib/event_processor/batch_event_processor.ts | 16 ++++++++++++++++ lib/event_processor/eventProcessor.ts | 4 ---- 
.../event_processor_factory.spec.ts | 16 ++++++++++++++++ lib/event_processor/event_processor_factory.ts | 16 ++++++++++++++++ .../forwarding_event_processor.spec.ts | 3 --- lib/index.node.ts | 1 - lib/optimizely/index.ts | 2 -- lib/tests/mock/create_event.ts | 17 +++++++++++++++-- lib/tests/mock/mock_cache.ts | 16 ++++++++++++++++ .../async_storage_cache.react_native.spec.ts | 15 +++++++++++++++ .../cache/async_storage_cache.react_native.ts | 16 ++++++++++++++++ lib/utils/cache/cache.spec.ts | 18 +++++++++++++++++- lib/utils/cache/cache.ts | 16 ++++++++++++++++ .../cache/local_storage_cache.browser.spec.ts | 16 ++++++++++++++++ lib/utils/cache/local_storage_cache.browser.ts | 16 ++++++++++++++++ .../executor/backoff_retry_runner.spec.ts | 2 -- lib/utils/executor/backoff_retry_runner.ts | 3 +-- lib/utils/id_generator/index.ts | 16 ++++++++++++++++ 20 files changed, 224 insertions(+), 17 deletions(-) diff --git a/lib/event_processor/batch_event_processor.react_native.spec.ts b/lib/event_processor/batch_event_processor.react_native.spec.ts index d592e17f3..e18a3e745 100644 --- a/lib/event_processor/batch_event_processor.react_native.spec.ts +++ b/lib/event_processor/batch_event_processor.react_native.spec.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { vi, describe, it, expect, beforeEach } from 'vitest'; const mockNetInfo = vi.hoisted(() => { diff --git a/lib/event_processor/batch_event_processor.react_native.ts b/lib/event_processor/batch_event_processor.react_native.ts index 5cb10a1f0..4de16b59e 100644 --- a/lib/event_processor/batch_event_processor.react_native.ts +++ b/lib/event_processor/batch_event_processor.react_native.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { NetInfoState, addEventListener as addConnectionListener, diff --git a/lib/event_processor/batch_event_processor.ts b/lib/event_processor/batch_event_processor.ts index 189ea98b0..7cad445cd 100644 --- a/lib/event_processor/batch_event_processor.ts +++ b/lib/event_processor/batch_event_processor.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { EventProcessor, ProcessableEvent } from "./eventProcessor"; import { Cache } from "../utils/cache/cache"; import { EventDispatcher, EventDispatcherResponse, EventV1Request } from "./eventDispatcher"; diff --git a/lib/event_processor/eventProcessor.ts b/lib/event_processor/eventProcessor.ts index a9af38163..656beab90 100644 --- a/lib/event_processor/eventProcessor.ts +++ b/lib/event_processor/eventProcessor.ts @@ -16,16 +16,12 @@ import { ConversionEvent, ImpressionEvent } from './events' import { EventV1Request } from './eventDispatcher' import { getLogger } from '../modules/logging' -import { NOTIFICATION_TYPES } from '../utils/enums' -import { NotificationSender } from '../core/notification_center' import { Service } from '../service' import { Consumer, Fn } from '../utils/type'; export const DEFAULT_FLUSH_INTERVAL = 30000 // Unit is ms - default flush interval is 30s export const DEFAULT_BATCH_SIZE = 10 -const logger = getLogger('EventProcessor') - export type ProcessableEvent = ConversionEvent | ImpressionEvent export interface EventProcessor extends Service { diff --git a/lib/event_processor/event_processor_factory.spec.ts b/lib/event_processor/event_processor_factory.spec.ts index 618c04d79..2f3d45408 100644 --- a/lib/event_processor/event_processor_factory.spec.ts +++ b/lib/event_processor/event_processor_factory.spec.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { describe, it, expect, beforeEach, vi, MockInstance } from 'vitest'; import { DEFAULT_EVENT_BATCH_SIZE, DEFAULT_EVENT_FLUSH_INTERVAL, DEFAULT_MAX_BACKOFF, DEFAULT_MIN_BACKOFF, getBatchEventProcessor } from './event_processor_factory'; import { BatchEventProcessor, BatchEventProcessorConfig, EventWithId } from './batch_event_processor'; diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index 4e1e62f3c..3e2cc0d7c 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { LogLevel } from "../common_exports"; import { StartupLog } from "../service"; import { ExponentialBackoff, IntervalRepeater } from "../utils/repeater/repeater"; diff --git a/lib/event_processor/forwarding_event_processor.spec.ts b/lib/event_processor/forwarding_event_processor.spec.ts index 99e6d77fe..41393109a 100644 --- a/lib/event_processor/forwarding_event_processor.spec.ts +++ b/lib/event_processor/forwarding_event_processor.spec.ts @@ -30,7 +30,6 @@ const getMockEventDispatcher = (): EventDispatcher => { describe('ForwardingEventProcessor', () => { it('should resolve onRunning() when start is called', async () => { const dispatcher = getMockEventDispatcher(); - const mockDispatch = vi.mocked(dispatcher.dispatchEvent); const processor = getForwardingEventProcessor(dispatcher); @@ -56,7 +55,6 @@ describe('ForwardingEventProcessor', () => { it('should emit dispatch event when event is dispatched', async() => { const dispatcher = getMockEventDispatcher(); - const mockDispatch = vi.mocked(dispatcher.dispatchEvent); const processor = getForwardingEventProcessor(dispatcher); @@ -76,7 +74,6 @@ describe('ForwardingEventProcessor', () => { it('should remove dispatch listener when the function returned from onDispatch is called', async() => { const dispatcher = getMockEventDispatcher(); - const mockDispatch = vi.mocked(dispatcher.dispatchEvent); const processor = getForwardingEventProcessor(dispatcher); diff --git a/lib/index.node.ts b/lib/index.node.ts index 12acf9960..ba4290d53 100644 --- a/lib/index.node.ts +++ b/lib/index.node.ts @@ -27,7 +27,6 @@ import { NodeOdpManager } from './plugins/odp_manager/index.node'; import * as commonExports from './common_exports'; import { createPollingProjectConfigManager } from './project_config/config_manager_factory.node'; import { createForwardingEventProcessor, createBatchEventProcessor } from './event_processor/event_processor_factory.node'; -import { create } from 'domain'; const logger = getLogger(); setLogLevel(LogLevel.ERROR); diff --git a/lib/optimizely/index.ts b/lib/optimizely/index.ts index c59dfb72e..f9b29a6b4 100644 --- a/lib/optimizely/index.ts +++ b/lib/optimizely/index.ts @@ -20,7 +20,6 @@ import { NotificationCenter } from '../core/notification_center'; import { EventProcessor } from '../event_processor/eventProcessor'; import { IOdpManager } from '../core/odp/odp_manager'; -import { OdpConfig } from '../core/odp/odp_config'; import { OdpEvent } from '../core/odp/odp_event'; import { OptimizelySegmentOption } from '../core/odp/optimizely_segment_option'; @@ -28,7 +27,6 @@ import { UserAttributes, EventTags, OptimizelyConfig, - OnReadyResult, UserProfileService, Variation, FeatureFlag, diff --git a/lib/tests/mock/create_event.ts b/lib/tests/mock/create_event.ts index 5d84bf534..ec5dd9949 100644 --- a/lib/tests/mock/create_event.ts +++ b/lib/tests/mock/create_event.ts @@ -1,5 +1,18 @@ -import { EventV1 } from "../../event_processor"; -import { Event } from "../../shared_types"; +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ export function createImpressionEvent(id = 'uuid'): any { return { diff --git a/lib/tests/mock/mock_cache.ts b/lib/tests/mock/mock_cache.ts index a86bc8bcd..5a542deae 100644 --- a/lib/tests/mock/mock_cache.ts +++ b/lib/tests/mock/mock_cache.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { SyncCache, AsyncCache } from "../../utils/cache/cache"; import { Maybe } from "../../utils/type"; diff --git a/lib/utils/cache/async_storage_cache.react_native.spec.ts b/lib/utils/cache/async_storage_cache.react_native.spec.ts index dbb8d3ca5..d1a7954e4 100644 --- a/lib/utils/cache/async_storage_cache.react_native.spec.ts +++ b/lib/utils/cache/async_storage_cache.react_native.spec.ts @@ -1,4 +1,19 @@ +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ vi.mock('@react-native-async-storage/async-storage', () => { const MockAsyncStorage = { diff --git a/lib/utils/cache/async_storage_cache.react_native.ts b/lib/utils/cache/async_storage_cache.react_native.ts index 5fe64b0bb..529287a6c 100644 --- a/lib/utils/cache/async_storage_cache.react_native.ts +++ b/lib/utils/cache/async_storage_cache.react_native.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + import { Maybe } from "../type"; import { AsyncCache } from "./cache"; import AsyncStorage from '@react-native-async-storage/async-storage'; diff --git a/lib/utils/cache/cache.spec.ts b/lib/utils/cache/cache.spec.ts index 32e2cbaea..150fe4884 100644 --- a/lib/utils/cache/cache.spec.ts +++ b/lib/utils/cache/cache.spec.ts @@ -1,4 +1,20 @@ -import { describe, it, expect, beforeEach } from 'vitest'; +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { describe, it, expect } from 'vitest'; import { SyncPrefixCache, AsyncPrefixCache } from './cache'; import { getMockSyncCache, getMockAsyncCache } from '../../tests/mock/mock_cache'; diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index e0127f12d..46dcebbda 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { Transformer } from '../../utils/type'; import { Maybe } from '../../utils/type'; diff --git a/lib/utils/cache/local_storage_cache.browser.spec.ts b/lib/utils/cache/local_storage_cache.browser.spec.ts index e052246c5..37e0735ba 100644 --- a/lib/utils/cache/local_storage_cache.browser.spec.ts +++ b/lib/utils/cache/local_storage_cache.browser.spec.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + import { describe, it, expect, beforeEach } from 'vitest'; import { LocalStorageCache } from './local_storage_cache.browser'; diff --git a/lib/utils/cache/local_storage_cache.browser.ts b/lib/utils/cache/local_storage_cache.browser.ts index b5d7413bb..594b722d2 100644 --- a/lib/utils/cache/local_storage_cache.browser.ts +++ b/lib/utils/cache/local_storage_cache.browser.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + import { Maybe } from "../type"; import { SyncCache } from "./cache"; diff --git a/lib/utils/executor/backoff_retry_runner.spec.ts b/lib/utils/executor/backoff_retry_runner.spec.ts index db4383df0..6e2674b10 100644 --- a/lib/utils/executor/backoff_retry_runner.spec.ts +++ b/lib/utils/executor/backoff_retry_runner.spec.ts @@ -81,9 +81,7 @@ describe('runWithRetry', () => { }); it('should use the backoff controller to delay retries', async () => { - let count = 0; const task = vi.fn().mockImplementation(async () => { - count++; throw new Error('error'); }); diff --git a/lib/utils/executor/backoff_retry_runner.ts b/lib/utils/executor/backoff_retry_runner.ts index 88f6063ec..504412c24 100644 --- a/lib/utils/executor/backoff_retry_runner.ts +++ b/lib/utils/executor/backoff_retry_runner.ts @@ -1,7 +1,6 @@ import { resolvablePromise, ResolvablePromise } from "../promise/resolvablePromise"; import { BackoffController } from "../repeater/repeater"; -import { AsyncFn, AsyncProducer, Fn } from "../type"; -import { scheduleMicrotask } from "../microtask"; +import { AsyncProducer, Fn } from "../type"; export type RunResult = { result: Promise; diff --git a/lib/utils/id_generator/index.ts b/lib/utils/id_generator/index.ts index 80fa7ca2b..5f3c72387 100644 --- a/lib/utils/id_generator/index.ts +++ b/lib/utils/id_generator/index.ts @@ -1,3 +1,19 @@ +/** + * Copyright 2022-2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + const idSuffixBase = 10_000; export class IdGenerator { From 5daa4e0ade33afe5d642f63abdd938768ca52ca2 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 22 Nov 2024 01:20:04 +0600 Subject: [PATCH 43/45] up --- .../batch_event_processor.react_native.ts | 9 ++-- ...ent_processor_factory.react_native.spec.ts | 43 ++++++++++++++++++- .../event_processor_factory.react_native.ts | 6 ++- .../@react-native-community/netinfo.ts | 38 ++++++++++++++++ 4 files changed, 88 insertions(+), 8 deletions(-) create mode 100644 lib/utils/import.react_native/@react-native-community/netinfo.ts diff --git a/lib/event_processor/batch_event_processor.react_native.ts b/lib/event_processor/batch_event_processor.react_native.ts index 4de16b59e..ac5110de4 100644 --- a/lib/event_processor/batch_event_processor.react_native.ts +++ b/lib/event_processor/batch_event_processor.react_native.ts @@ -14,10 +14,7 @@ * limitations under the License. */ -import { - NetInfoState, - addEventListener as addConnectionListener, -} from '@react-native-community/netinfo'; +import { NetInfoState, addEventListener } from '../utils/import.react_native/@react-native-community/netinfo'; import { BatchEventProcessor, BatchEventProcessorConfig } from './batch_event_processor'; import { Fn } from '../utils/type'; @@ -44,7 +41,9 @@ export class ReactNativeNetInfoEventProcessor extends BatchEventProcessor { start(): void { super.start(); - this.unsubscribeNetInfo = addConnectionListener(this.connectionListener.bind(this)); + if (addEventListener) { + this.unsubscribeNetInfo = addEventListener(this.connectionListener.bind(this)); + } } stop(): void { diff --git a/lib/event_processor/event_processor_factory.react_native.spec.ts b/lib/event_processor/event_processor_factory.react_native.spec.ts index 8570e4988..0d0d968a9 100644 --- a/lib/event_processor/event_processor_factory.react_native.spec.ts +++ b/lib/event_processor/event_processor_factory.react_native.spec.ts @@ -41,6 +41,30 @@ vi.mock('../utils/cache/cache', () => { return { SyncPrefixCache: vi.fn(), AsyncPrefixCache: vi.fn() }; }); +vi.mock('@react-native-community/netinfo', () => { + return { NetInfoState: {}, addEventListener: vi.fn() }; +}); + +let isNetInfoAvailable = false; + +await vi.hoisted(async () => { + await mockRequireNetInfo(); +}); + +async function mockRequireNetInfo() { + const {Module} = await import('module'); + const M: any = Module; + + M._load_original = M._load; + M._load = (uri: string, parent: string) => { + if (uri === '@react-native-community/netinfo') { + if (isNetInfoAvailable) return {}; + throw new Error('Module not found: @react-native-community/netinfo'); + } + return M._load_original(uri, parent); + }; +} + import { createForwardingEventProcessor, createBatchEventProcessor } from './event_processor_factory.react_native'; import { getForwardingEventProcessor } from './forwarding_event_processor'; import defaultEventDispatcher from './default_dispatcher.browser'; @@ -48,14 +72,17 @@ import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_process import { getBatchEventProcessor } from './event_processor_factory'; import { AsyncCache, AsyncPrefixCache, SyncCache, SyncPrefixCache } from '../utils/cache/cache'; import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; +import { ReactNativeNetInfoEventProcessor } from './batch_event_processor.react_native'; +import { BatchEventProcessor } from './batch_event_processor'; describe('createForwardingEventProcessor', () => { const mockGetForwardingEventProcessor = 
vi.mocked(getForwardingEventProcessor); beforeEach(() => { mockGetForwardingEventProcessor.mockClear(); + isNetInfoAvailable = false; }); - + it('returns forwarding event processor by calling getForwardingEventProcessor with the provided dispatcher', () => { const eventDispatcher = { dispatchEvent: vi.fn(), @@ -82,12 +109,26 @@ describe('createBatchEventProcessor', () => { const MockAsyncPrefixCache = vi.mocked(AsyncPrefixCache); beforeEach(() => { + isNetInfoAvailable = false; mockGetBatchEventProcessor.mockClear(); MockAsyncStorageCache.mockClear(); MockSyncPrefixCache.mockClear(); MockAsyncPrefixCache.mockClear(); }); + it('returns an instance of ReacNativeNetInfoEventProcessor if netinfo can be required', async () => { + isNetInfoAvailable = true; + const processor = createBatchEventProcessor({}); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); + expect(mockGetBatchEventProcessor.mock.calls[0][1]).toBe(ReactNativeNetInfoEventProcessor); + }); + + it('returns an instance of BatchEventProcessor if netinfo cannot be required', async () => { + isNetInfoAvailable = false; + const processor = createBatchEventProcessor({});; + expect(mockGetBatchEventProcessor.mock.calls[0][1]).toBe(BatchEventProcessor); + }); + it('uses AsyncStorageCache and AsyncPrefixCache to create eventStore if no eventStore is provided', () => { const processor = createBatchEventProcessor({}); diff --git a/lib/event_processor/event_processor_factory.react_native.ts b/lib/event_processor/event_processor_factory.react_native.ts index e18a3033e..84c11e375 100644 --- a/lib/event_processor/event_processor_factory.react_native.ts +++ b/lib/event_processor/event_processor_factory.react_native.ts @@ -20,8 +20,10 @@ import defaultEventDispatcher from './default_dispatcher.browser'; import { BatchEventProcessorOptions, getBatchEventProcessor, getPrefixEventStore } from './event_processor_factory'; import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; import { AsyncPrefixCache } from '../utils/cache/cache'; -import { EventWithId } from './batch_event_processor'; +import { BatchEventProcessor, EventWithId } from './batch_event_processor'; import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; +import { ReactNativeNetInfoEventProcessor } from './batch_event_processor.react_native'; +import { isAvailable as isNetInfoAvailable } from '../utils/import.react_native/@react-native-community/netinfo'; export const createForwardingEventProcessor = ( eventDispatcher: EventDispatcher = defaultEventDispatcher, @@ -59,5 +61,5 @@ export const createBatchEventProcessor = ( }, failedEventRetryInterval: FAILED_EVENT_RETRY_INTERVAL, eventStore, - }); + }, isNetInfoAvailable() ? ReactNativeNetInfoEventProcessor : BatchEventProcessor); }; diff --git a/lib/utils/import.react_native/@react-native-community/netinfo.ts b/lib/utils/import.react_native/@react-native-community/netinfo.ts new file mode 100644 index 000000000..53ac6c30e --- /dev/null +++ b/lib/utils/import.react_native/@react-native-community/netinfo.ts @@ -0,0 +1,38 @@ +/** + * Copyright 2024, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { NetInfoSubscription, NetInfoChangeHandler } from '@react-native-community/netinfo'; +import { Maybe } from '../../type'; + +export { NetInfoState } from '@react-native-community/netinfo'; +export type NetInfoAddEventListerType = (listener: NetInfoChangeHandler) => NetInfoSubscription; + +let addEventListener: Maybe = undefined; + +const requireNetInfo = () => { + try { + return require('@react-native-community/netinfo'); + } catch (e) { + return undefined; + } +} + +export const isAvailable = () => requireNetInfo() !== undefined; + +const netinfo = requireNetInfo(); +addEventListener = netinfo?.addEventListener; + +export { addEventListener }; From 372a1bfb4eba3ac782685ac883bd63330620c9b1 Mon Sep 17 00:00:00 2001 From: Raju Ahmed Date: Fri, 22 Nov 2024 02:21:36 +0600 Subject: [PATCH 44/45] up --- lib/event_processor/batch_event_processor.react_native.spec.ts | 2 +- .../event_processor_factory.react_native.spec.ts | 3 ++- .../import.react_native/@react-native-community/netinfo.ts | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/event_processor/batch_event_processor.react_native.spec.ts b/lib/event_processor/batch_event_processor.react_native.spec.ts index e18a3e745..68ccd6016 100644 --- a/lib/event_processor/batch_event_processor.react_native.spec.ts +++ b/lib/event_processor/batch_event_processor.react_native.spec.ts @@ -39,7 +39,7 @@ const mockNetInfo = vi.hoisted(() => { return netInfo; }); -vi.mock('@react-native-community/netinfo', () => { +vi.mock('../utils/import.react_native/@react-native-community/netinfo', () => { return { addEventListener: mockNetInfo.addEventListener.bind(mockNetInfo), }; diff --git a/lib/event_processor/event_processor_factory.react_native.spec.ts b/lib/event_processor/event_processor_factory.react_native.spec.ts index 0d0d968a9..93e7a05ad 100644 --- a/lib/event_processor/event_processor_factory.react_native.spec.ts +++ b/lib/event_processor/event_processor_factory.react_native.spec.ts @@ -125,7 +125,8 @@ describe('createBatchEventProcessor', () => { it('returns an instance of BatchEventProcessor if netinfo cannot be required', async () => { isNetInfoAvailable = false; - const processor = createBatchEventProcessor({});; + const processor = createBatchEventProcessor({}); + expect(Object.is(processor, mockGetBatchEventProcessor.mock.results[0].value)).toBe(true); expect(mockGetBatchEventProcessor.mock.calls[0][1]).toBe(BatchEventProcessor); }); diff --git a/lib/utils/import.react_native/@react-native-community/netinfo.ts b/lib/utils/import.react_native/@react-native-community/netinfo.ts index 53ac6c30e..434a0a1b3 100644 --- a/lib/utils/import.react_native/@react-native-community/netinfo.ts +++ b/lib/utils/import.react_native/@react-native-community/netinfo.ts @@ -30,7 +30,7 @@ const requireNetInfo = () => { } } -export const isAvailable = () => requireNetInfo() !== undefined; +export const isAvailable = (): boolean => requireNetInfo() !== undefined; const netinfo = requireNetInfo(); addEventListener = netinfo?.addEventListener; From a51ff8dd295295bee3c1c3bcd71d74e8b62384d7 Mon Sep 17 00:00:00 2001 
From: Raju Ahmed Date: Fri, 22 Nov 2024 02:29:15 +0600 Subject: [PATCH 45/45] up --- lib/event_processor/batch_event_processor.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/event_processor/batch_event_processor.spec.ts b/lib/event_processor/batch_event_processor.spec.ts index beeeb2f63..715b4452b 100644 --- a/lib/event_processor/batch_event_processor.spec.ts +++ b/lib/event_processor/batch_event_processor.spec.ts @@ -51,7 +51,7 @@ describe('QueueingEventProcessor', async () => { }); describe('start', () => { - it.only('should log startupLogs on start', () => { + it('should log startupLogs on start', () => { const startupLogs: StartupLog[] = [ { level: LogLevel.WARNING,
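The spec changes in this series replace hand-built mock modules with vitest partial mocks, so real exports such as EVENT_STORE_PREFIX and FAILED_EVENT_RETRY_INTERVAL keep their actual values while only getBatchEventProcessor is stubbed. A minimal standalone sketch of that importOriginal pattern, separate from the patches above (the test body and assertions are illustrative only; the module path and export names are taken from the specs in this series):

import { vi, it, expect } from 'vitest';

// vi.mock is hoisted by vitest; the factory receives importOriginal so the
// real module can be spread and selectively overridden.
vi.mock('./event_processor_factory', async (importOriginal) => {
  const original: any = await importOriginal();
  const getBatchEventProcessor = vi.fn().mockImplementation(() => ({}));
  return { ...original, getBatchEventProcessor };
});

import { getBatchEventProcessor, EVENT_STORE_PREFIX } from './event_processor_factory';

it('keeps real constants while stubbing only the factory function', () => {
  // The real constant survives the partial mock ...
  expect(typeof EVENT_STORE_PREFIX).toBe('string');
  // ... while the factory function is the vi.fn() stub from the mock factory.
  expect(vi.isMockFunction(getBatchEventProcessor)).toBe(true);
});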
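The new lib/utils/import.react_native/@react-native-community/netinfo.ts wrapper in PATCH 43/45 treats netinfo as an optional peer dependency: it is resolved with a guarded require() at runtime, and createBatchEventProcessor falls back to the plain BatchEventProcessor when the package is absent. A distilled sketch of that pattern, assuming a CommonJS environment where require() exists at runtime; the loadOptional helper and NetInfoLike type are illustrative names, not SDK code:

// Resolve an optional package lazily; return undefined instead of throwing
// when it is not installed.
const loadOptional = <T>(name: string): T | undefined => {
  try {
    return require(name) as T;
  } catch {
    return undefined; // package not installed: callers must fall back
  }
};

// Rough shape of the part of netinfo the processor uses (illustrative).
type NetInfoLike = {
  addEventListener: (listener: (state: unknown) => void) => () => void;
};

// Probe once at module load, mirroring the wrapper module above.
const netinfo = loadOptional<NetInfoLike>('@react-native-community/netinfo');

export const isAvailable = (): boolean => netinfo !== undefined;
export const addEventListener = netinfo?.addEventListener;

Consumers can then branch on isAvailable() to pick a network-aware implementation or a plain fallback, which is how the react_native factory chooses between ReactNativeNetInfoEventProcessor and BatchEventProcessor in the patch above.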