diff --git a/lib/core/decision_service/cmab/cmab_service.ts b/lib/core/decision_service/cmab/cmab_service.ts index b4f958fbf..094e10bbb 100644 --- a/lib/core/decision_service/cmab/cmab_service.ts +++ b/lib/core/decision_service/cmab/cmab_service.ts @@ -18,7 +18,7 @@ import { LoggerFacade } from "../../../logging/logger"; import { IOptimizelyUserContext } from "../../../optimizely_user_context"; import { ProjectConfig } from "../../../project_config/project_config" import { OptimizelyDecideOption, UserAttributes } from "../../../shared_types" -import { Cache } from "../../../utils/cache/cache"; +import { Cache, CacheWithRemove } from "../../../utils/cache/cache"; import { CmabClient } from "./cmab_client"; import { v4 as uuidV4 } from 'uuid'; import murmurhash from "murmurhash"; @@ -53,12 +53,12 @@ export type CmabCacheValue = { export type CmabServiceOptions = { logger?: LoggerFacade; - cmabCache: Cache; + cmabCache: CacheWithRemove; cmabClient: CmabClient; } export class DefaultCmabService implements CmabService { - private cmabCache: Cache; + private cmabCache: CacheWithRemove; private cmabClient: CmabClient; private logger?: LoggerFacade; @@ -81,7 +81,7 @@ export class DefaultCmabService implements CmabService { } if (options[OptimizelyDecideOption.RESET_CMAB_CACHE]) { - this.cmabCache.clear(); + this.cmabCache.reset(); } const cacheKey = this.getCacheKey(userContext.getUserId(), ruleId); @@ -90,7 +90,7 @@ export class DefaultCmabService implements CmabService { this.cmabCache.remove(cacheKey); } - const cachedValue = await this.cmabCache.get(cacheKey); + const cachedValue = await this.cmabCache.lookup(cacheKey); const attributesJson = JSON.stringify(filteredAttributes, Object.keys(filteredAttributes).sort()); const attributesHash = String(murmurhash.v3(attributesJson)); @@ -104,7 +104,7 @@ export class DefaultCmabService implements CmabService { } const variation = await this.fetchDecision(ruleId, userContext.getUserId(), filteredAttributes); - this.cmabCache.set(cacheKey, { + this.cmabCache.save(cacheKey, { attributesHash, variationId: variation.variationId, cmabUuid: variation.cmabUuid, diff --git a/lib/event_processor/batch_event_processor.ts b/lib/event_processor/batch_event_processor.ts index baf7a2d86..5fa7c3f2f 100644 --- a/lib/event_processor/batch_event_processor.ts +++ b/lib/event_processor/batch_event_processor.ts @@ -1,5 +1,5 @@ /** - * Copyright 2024, Optimizely + * Copyright 2024-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,7 +15,7 @@ */ import { EventProcessor, ProcessableEvent } from "./event_processor"; -import { Cache } from "../utils/cache/cache"; +import { getBatchedAsync, getBatchedSync, Store } from "../utils/cache/store"; import { EventDispatcher, EventDispatcherResponse, LogEvent } from "./event_dispatcher/event_dispatcher"; import { buildLogEvent } from "./event_builder/log_event"; import { BackoffController, ExponentialBackoff, IntervalRepeater, Repeater } from "../utils/repeater/repeater"; @@ -49,7 +49,7 @@ export type BatchEventProcessorConfig = { dispatchRepeater: Repeater, failedEventRepeater?: Repeater, batchSize: number, - eventStore?: Cache, + eventStore?: Store, eventDispatcher: EventDispatcher, closingEventDispatcher?: EventDispatcher, logger?: LoggerFacade, @@ -69,7 +69,7 @@ export class BatchEventProcessor extends BaseService implements EventProcessor { private closingEventDispatcher?: EventDispatcher; private eventQueue: EventWithId[] = []; private batchSize: number; - private eventStore?: Cache; + private eventStore?: Store; private dispatchRepeater: Repeater; private failedEventRepeater?: Repeater; private idGenerator: IdGenerator = new IdGenerator(); @@ -114,7 +114,9 @@ export class BatchEventProcessor extends BaseService implements EventProcessor { (k) => !this.dispatchingEventIds.has(k) && !this.eventQueue.find((e) => e.id === k) ); - const events = await this.eventStore.getBatched(keys); + const events = await (this.eventStore.operation === 'sync' ? + getBatchedSync(this.eventStore, keys) : getBatchedAsync(this.eventStore, keys)); + const failedEvents: EventWithId[] = []; events.forEach((e) => { if(e) { diff --git a/lib/event_processor/event_processor_factory.browser.spec.ts b/lib/event_processor/event_processor_factory.browser.spec.ts index dcc7ce497..475b36353 100644 --- a/lib/event_processor/event_processor_factory.browser.spec.ts +++ b/lib/event_processor/event_processor_factory.browser.spec.ts @@ -1,5 +1,5 @@ /** - * Copyright 2024, Optimizely + * Copyright 2024-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,14 +41,14 @@ vi.mock('../utils/cache/local_storage_cache.browser', () => { return { LocalStorageCache: vi.fn() }; }); -vi.mock('../utils/cache/cache', () => { - return { SyncPrefixCache: vi.fn() }; +vi.mock('../utils/cache/store', () => { + return { SyncPrefixStore: vi.fn() }; }); import defaultEventDispatcher from './event_dispatcher/default_dispatcher.browser'; import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; -import { SyncPrefixCache } from '../utils/cache/cache'; +import { SyncPrefixStore } from '../utils/cache/store'; import { createForwardingEventProcessor, createBatchEventProcessor } from './event_processor_factory.browser'; import { EVENT_STORE_PREFIX, extractEventProcessor, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; import sendBeaconEventDispatcher from './event_dispatcher/send_beacon_dispatcher.browser'; @@ -85,21 +85,21 @@ describe('createForwardingEventProcessor', () => { describe('createBatchEventProcessor', () => { const mockGetOpaqueBatchEventProcessor = vi.mocked(getOpaqueBatchEventProcessor); const MockLocalStorageCache = vi.mocked(LocalStorageCache); - const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); + const MockSyncPrefixStore = vi.mocked(SyncPrefixStore); beforeEach(() => { mockGetOpaqueBatchEventProcessor.mockClear(); MockLocalStorageCache.mockClear(); - MockSyncPrefixCache.mockClear(); + MockSyncPrefixStore.mockClear(); }); - it('uses LocalStorageCache and SyncPrefixCache to create eventStore', () => { + it('uses LocalStorageCache and SyncPrefixStore to create eventStore', () => { const processor = createBatchEventProcessor({}); expect(Object.is(processor, mockGetOpaqueBatchEventProcessor.mock.results[0].value)).toBe(true); const eventStore = mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore; - expect(Object.is(eventStore, MockSyncPrefixCache.mock.results[0].value)).toBe(true); + expect(Object.is(eventStore, MockSyncPrefixStore.mock.results[0].value)).toBe(true); - const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; + const [cache, prefix, transformGet, transformSet] = MockSyncPrefixStore.mock.calls[0]; expect(Object.is(cache, MockLocalStorageCache.mock.results[0].value)).toBe(true); expect(prefix).toBe(EVENT_STORE_PREFIX); diff --git a/lib/event_processor/event_processor_factory.browser.ts b/lib/event_processor/event_processor_factory.browser.ts index 39d8e169d..ff53b0298 100644 --- a/lib/event_processor/event_processor_factory.browser.ts +++ b/lib/event_processor/event_processor_factory.browser.ts @@ -27,7 +27,7 @@ import { import defaultEventDispatcher from './event_dispatcher/default_dispatcher.browser'; import sendBeaconEventDispatcher from './event_dispatcher/send_beacon_dispatcher.browser'; import { LocalStorageCache } from '../utils/cache/local_storage_cache.browser'; -import { SyncPrefixCache } from '../utils/cache/cache'; +import { SyncPrefixStore } from '../utils/cache/store'; import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; export const DEFAULT_EVENT_BATCH_SIZE = 10; @@ -45,7 +45,7 @@ export const createBatchEventProcessor = ( options: BatchEventProcessorOptions = {} ): OpaqueEventProcessor => { const localStorageCache = new LocalStorageCache(); - const eventStore = new SyncPrefixCache( + const eventStore = new SyncPrefixStore( localStorageCache, EVENT_STORE_PREFIX, identity, identity, diff --git a/lib/event_processor/event_processor_factory.node.spec.ts 
b/lib/event_processor/event_processor_factory.node.spec.ts index 487230748..43d65ee44 100644 --- a/lib/event_processor/event_processor_factory.node.spec.ts +++ b/lib/event_processor/event_processor_factory.node.spec.ts @@ -1,5 +1,5 @@ /** - * Copyright 2024, Optimizely + * Copyright 2024-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -39,8 +39,8 @@ vi.mock('../utils/cache/async_storage_cache.react_native', () => { return { AsyncStorageCache: vi.fn() }; }); -vi.mock('../utils/cache/cache', () => { - return { SyncPrefixCache: vi.fn(), AsyncPrefixCache: vi.fn() }; +vi.mock('../utils/cache/store', () => { + return { SyncPrefixStore: vi.fn(), AsyncPrefixStore: vi.fn() }; }); import { createBatchEventProcessor, createForwardingEventProcessor } from './event_processor_factory.node'; @@ -48,7 +48,7 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import nodeDefaultEventDispatcher from './event_dispatcher/default_dispatcher.node'; import { EVENT_STORE_PREFIX, extractEventProcessor, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; import { getOpaqueBatchEventProcessor } from './event_processor_factory'; -import { AsyncCache, AsyncPrefixCache, SyncCache, SyncPrefixCache } from '../utils/cache/cache'; +import { AsyncStore, AsyncPrefixStore, SyncStore, SyncPrefixStore } from '../utils/cache/store'; import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; describe('createForwardingEventProcessor', () => { @@ -80,14 +80,14 @@ describe('createForwardingEventProcessor', () => { describe('createBatchEventProcessor', () => { const mockGetOpaqueBatchEventProcessor = vi.mocked(getOpaqueBatchEventProcessor); const MockAsyncStorageCache = vi.mocked(AsyncStorageCache); - const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); - const MockAsyncPrefixCache = vi.mocked(AsyncPrefixCache); + const MockSyncPrefixStore = vi.mocked(SyncPrefixStore); + const MockAsyncPrefixStore = vi.mocked(AsyncPrefixStore); beforeEach(() => { mockGetOpaqueBatchEventProcessor.mockClear(); MockAsyncStorageCache.mockClear(); - MockSyncPrefixCache.mockClear(); - MockAsyncPrefixCache.mockClear(); + MockSyncPrefixStore.mockClear(); + MockAsyncPrefixStore.mockClear(); }); it('uses no default event store if no eventStore is provided', () => { @@ -98,16 +98,16 @@ describe('createBatchEventProcessor', () => { expect(eventStore).toBe(undefined); }); - it('wraps the provided eventStore in a SyncPrefixCache if a SyncCache is provided as eventStore', () => { + it('wraps the provided eventStore in a SyncPrefixStore if a SyncCache is provided as eventStore', () => { const eventStore = { operation: 'sync', - } as SyncCache; + } as SyncStore; const processor = createBatchEventProcessor({ eventStore }); expect(Object.is(processor, mockGetOpaqueBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockSyncPrefixCache.mock.results[0].value); - const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; + expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockSyncPrefixStore.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockSyncPrefixStore.mock.calls[0]; expect(cache).toBe(eventStore); expect(prefix).toBe(EVENT_STORE_PREFIX); @@ -117,16 +117,16 @@ describe('createBatchEventProcessor', () => { expect(transformSet({ value: 1 
})).toBe('{"value":1}'); }); - it('wraps the provided eventStore in a AsyncPrefixCache if a AsyncCache is provided as eventStore', () => { + it('wraps the provided eventStore in a AsyncPrefixStore if a AsyncCache is provided as eventStore', () => { const eventStore = { operation: 'async', - } as AsyncCache; + } as AsyncStore; const processor = createBatchEventProcessor({ eventStore }); expect(Object.is(processor, mockGetOpaqueBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockAsyncPrefixCache.mock.results[0].value); - const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixCache.mock.calls[0]; + expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockAsyncPrefixStore.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixStore.mock.calls[0]; expect(cache).toBe(eventStore); expect(prefix).toBe(EVENT_STORE_PREFIX); diff --git a/lib/event_processor/event_processor_factory.react_native.spec.ts b/lib/event_processor/event_processor_factory.react_native.spec.ts index 131654a79..733b494d2 100644 --- a/lib/event_processor/event_processor_factory.react_native.spec.ts +++ b/lib/event_processor/event_processor_factory.react_native.spec.ts @@ -1,5 +1,5 @@ /** - * Copyright 2024, Optimizely + * Copyright 2024-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,8 +40,8 @@ vi.mock('../utils/cache/async_storage_cache.react_native', () => { return { AsyncStorageCache: vi.fn() }; }); -vi.mock('../utils/cache/cache', () => { - return { SyncPrefixCache: vi.fn(), AsyncPrefixCache: vi.fn() }; +vi.mock('../utils/cache/store', () => { + return { SyncPrefixStore: vi.fn(), AsyncPrefixStore: vi.fn() }; }); vi.mock('@react-native-community/netinfo', () => { @@ -79,7 +79,7 @@ import { getForwardingEventProcessor } from './forwarding_event_processor'; import defaultEventDispatcher from './event_dispatcher/default_dispatcher.browser'; import { EVENT_STORE_PREFIX, extractEventProcessor, FAILED_EVENT_RETRY_INTERVAL, getPrefixEventStore } from './event_processor_factory'; import { getOpaqueBatchEventProcessor } from './event_processor_factory'; -import { AsyncCache, AsyncPrefixCache, SyncCache, SyncPrefixCache } from '../utils/cache/cache'; +import { AsyncStore, AsyncPrefixStore, SyncStore, SyncPrefixStore } from '../utils/cache/store'; import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; import { ReactNativeNetInfoEventProcessor } from './batch_event_processor.react_native'; import { BatchEventProcessor } from './batch_event_processor'; @@ -115,15 +115,15 @@ describe('createForwardingEventProcessor', () => { describe('createBatchEventProcessor', () => { const mockGetOpaqueBatchEventProcessor = vi.mocked(getOpaqueBatchEventProcessor); const MockAsyncStorageCache = vi.mocked(AsyncStorageCache); - const MockSyncPrefixCache = vi.mocked(SyncPrefixCache); - const MockAsyncPrefixCache = vi.mocked(AsyncPrefixCache); + const MockSyncPrefixStore = vi.mocked(SyncPrefixStore); + const MockAsyncPrefixStore = vi.mocked(AsyncPrefixStore); beforeEach(() => { isNetInfoAvailable = false; mockGetOpaqueBatchEventProcessor.mockClear(); MockAsyncStorageCache.mockClear(); - MockSyncPrefixCache.mockClear(); - MockAsyncPrefixCache.mockClear(); + MockSyncPrefixStore.mockClear(); + MockAsyncPrefixStore.mockClear(); }); it('returns an instance of 
ReacNativeNetInfoEventProcessor if netinfo can be required', async () => { @@ -140,14 +140,14 @@ describe('createBatchEventProcessor', () => { expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][1]).toBe(BatchEventProcessor); }); - it('uses AsyncStorageCache and AsyncPrefixCache to create eventStore if no eventStore is provided', () => { + it('uses AsyncStorageCache and AsyncPrefixStore to create eventStore if no eventStore is provided', () => { const processor = createBatchEventProcessor({}); expect(Object.is(processor, mockGetOpaqueBatchEventProcessor.mock.results[0].value)).toBe(true); const eventStore = mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore; - expect(Object.is(eventStore, MockAsyncPrefixCache.mock.results[0].value)).toBe(true); + expect(Object.is(eventStore, MockAsyncPrefixStore.mock.results[0].value)).toBe(true); - const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixCache.mock.calls[0]; + const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixStore.mock.calls[0]; expect(Object.is(cache, MockAsyncStorageCache.mock.results[0].value)).toBe(true); expect(prefix).toBe(EVENT_STORE_PREFIX); @@ -177,7 +177,7 @@ describe('createBatchEventProcessor', () => { isAsyncStorageAvailable = false; const eventStore = { operation: 'sync', - } as SyncCache; + } as SyncStore; const { AsyncStorageCache } = await vi.importActual< typeof import('../utils/cache/async_storage_cache.react_native') @@ -192,16 +192,16 @@ describe('createBatchEventProcessor', () => { isAsyncStorageAvailable = true; }); - it('wraps the provided eventStore in a SyncPrefixCache if a SyncCache is provided as eventStore', () => { + it('wraps the provided eventStore in a SyncPrefixStore if a SyncCache is provided as eventStore', () => { const eventStore = { operation: 'sync', - } as SyncCache; + } as SyncStore; const processor = createBatchEventProcessor({ eventStore }); expect(Object.is(processor, mockGetOpaqueBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockSyncPrefixCache.mock.results[0].value); - const [cache, prefix, transformGet, transformSet] = MockSyncPrefixCache.mock.calls[0]; + expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockSyncPrefixStore.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockSyncPrefixStore.mock.calls[0]; expect(cache).toBe(eventStore); expect(prefix).toBe(EVENT_STORE_PREFIX); @@ -211,16 +211,16 @@ describe('createBatchEventProcessor', () => { expect(transformSet({ value: 1 })).toBe('{"value":1}'); }); - it('wraps the provided eventStore in a AsyncPrefixCache if a AsyncCache is provided as eventStore', () => { + it('wraps the provided eventStore in a AsyncPrefixStore if a AsyncCache is provided as eventStore', () => { const eventStore = { operation: 'async', - } as AsyncCache; + } as AsyncStore; const processor = createBatchEventProcessor({ eventStore }); expect(Object.is(processor, mockGetOpaqueBatchEventProcessor.mock.results[0].value)).toBe(true); - expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockAsyncPrefixCache.mock.results[0].value); - const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixCache.mock.calls[0]; + expect(mockGetOpaqueBatchEventProcessor.mock.calls[0][0].eventStore).toBe(MockAsyncPrefixStore.mock.results[0].value); + const [cache, prefix, transformGet, transformSet] = MockAsyncPrefixStore.mock.calls[0]; expect(cache).toBe(eventStore); 
expect(prefix).toBe(EVENT_STORE_PREFIX); diff --git a/lib/event_processor/event_processor_factory.react_native.ts b/lib/event_processor/event_processor_factory.react_native.ts index 66e4a302b..02c0e2cf7 100644 --- a/lib/event_processor/event_processor_factory.react_native.ts +++ b/lib/event_processor/event_processor_factory.react_native.ts @@ -25,7 +25,7 @@ import { wrapEventProcessor, } from './event_processor_factory'; import { EVENT_STORE_PREFIX, FAILED_EVENT_RETRY_INTERVAL } from './event_processor_factory'; -import { AsyncPrefixCache } from '../utils/cache/cache'; +import { AsyncPrefixStore } from '../utils/cache/store'; import { BatchEventProcessor, EventWithId } from './batch_event_processor'; import { AsyncStorageCache } from '../utils/cache/async_storage_cache.react_native'; import { ReactNativeNetInfoEventProcessor } from './batch_event_processor.react_native'; @@ -45,7 +45,7 @@ const identity = (v: T): T => v; const getDefaultEventStore = () => { const asyncStorageCache = new AsyncStorageCache(); - const eventStore = new AsyncPrefixCache( + const eventStore = new AsyncPrefixStore( asyncStorageCache, EVENT_STORE_PREFIX, identity, diff --git a/lib/event_processor/event_processor_factory.ts b/lib/event_processor/event_processor_factory.ts index 7be0a1be4..e931d5b1f 100644 --- a/lib/event_processor/event_processor_factory.ts +++ b/lib/event_processor/event_processor_factory.ts @@ -1,5 +1,5 @@ /** - * Copyright 2024, Optimizely + * Copyright 2024-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,23 +20,23 @@ import { ExponentialBackoff, IntervalRepeater } from "../utils/repeater/repeater import { EventDispatcher } from "./event_dispatcher/event_dispatcher"; import { EventProcessor } from "./event_processor"; import { BatchEventProcessor, DEFAULT_MAX_BACKOFF, DEFAULT_MIN_BACKOFF, EventWithId, RetryConfig } from "./batch_event_processor"; -import { AsyncPrefixCache, Cache, SyncPrefixCache } from "../utils/cache/cache"; +import { AsyncPrefixStore, Store, SyncPrefixStore } from "../utils/cache/store"; export const FAILED_EVENT_RETRY_INTERVAL = 20 * 1000; export const EVENT_STORE_PREFIX = 'optly_event:'; -export const getPrefixEventStore = (cache: Cache): Cache => { - if (cache.operation === 'async') { - return new AsyncPrefixCache( - cache, +export const getPrefixEventStore = (store: Store): Store => { + if (store.operation === 'async') { + return new AsyncPrefixStore( + store, EVENT_STORE_PREFIX, JSON.parse, JSON.stringify, ); } else { - return new SyncPrefixCache( - cache, + return new SyncPrefixStore( + store, EVENT_STORE_PREFIX, JSON.parse, JSON.stringify, @@ -55,7 +55,7 @@ export type BatchEventProcessorOptions = { closingEventDispatcher?: EventDispatcher; flushInterval?: number; batchSize?: number; - eventStore?: Cache; + eventStore?: Store; }; export type BatchEventProcessorFactoryOptions = Omit & { @@ -64,7 +64,7 @@ export type BatchEventProcessorFactoryOptions = Omit; + eventStore?: Store; retryOptions?: { maxRetries?: number; minBackoff?: number; diff --git a/lib/odp/segment_manager/odp_segment_manager.ts b/lib/odp/segment_manager/odp_segment_manager.ts index 8ba589dd4..4ff125672 100644 --- a/lib/odp/segment_manager/odp_segment_manager.ts +++ b/lib/odp/segment_manager/odp_segment_manager.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this 
file except in compliance with the License. @@ -94,11 +94,11 @@ export class DefaultOdpSegmentManager implements OdpSegmentManager { const resetCache = options?.includes(OptimizelySegmentOption.RESET_CACHE); if (resetCache) { - this.segmentsCache.clear(); + this.segmentsCache.reset(); } if (!ignoreCache) { - const cachedSegments = await this.segmentsCache.get(cacheKey); + const cachedSegments = await this.segmentsCache.lookup(cacheKey); if (cachedSegments) { return cachedSegments; } @@ -113,7 +113,7 @@ export class DefaultOdpSegmentManager implements OdpSegmentManager { ); if (segments && !ignoreCache) { - this.segmentsCache.set(cacheKey, segments); + this.segmentsCache.save(cacheKey, segments); } return segments; @@ -125,6 +125,6 @@ export class DefaultOdpSegmentManager implements OdpSegmentManager { updateConfig(config: OdpIntegrationConfig): void { this.odpIntegrationConfig = config; - this.segmentsCache.clear(); + this.segmentsCache.reset(); } } diff --git a/lib/project_config/config_manager_factory.ts b/lib/project_config/config_manager_factory.ts index 6f01c2589..763c235d0 100644 --- a/lib/project_config/config_manager_factory.ts +++ b/lib/project_config/config_manager_factory.ts @@ -1,5 +1,5 @@ /** - * Copyright 2024, Optimizely + * Copyright 2024-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,12 +19,12 @@ import { Transformer } from "../utils/type"; import { DatafileManagerConfig } from "./datafile_manager"; import { ProjectConfigManagerImpl, ProjectConfigManager } from "./project_config_manager"; import { PollingDatafileManager } from "./polling_datafile_manager"; -import { Cache } from "../utils/cache/cache"; import { DEFAULT_UPDATE_INTERVAL } from './constant'; import { ExponentialBackoff, IntervalRepeater } from "../utils/repeater/repeater"; import { StartupLog } from "../service"; import { MIN_UPDATE_INTERVAL, UPDATE_INTERVAL_BELOW_MINIMUM_MESSAGE } from './constant'; import { LogLevel } from '../logging/logger' +import { Store } from "../utils/cache/store"; const configManagerSymbol: unique symbol = Symbol(); @@ -53,7 +53,7 @@ export type PollingConfigManagerConfig = { updateInterval?: number; urlTemplate?: string; datafileAccessToken?: string; - cache?: Cache; + cache?: Store; }; export type PollingConfigManagerFactoryOptions = PollingConfigManagerConfig & { requestHandler: RequestHandler }; diff --git a/lib/project_config/datafile_manager.ts b/lib/project_config/datafile_manager.ts index c1b58704b..c5765a539 100644 --- a/lib/project_config/datafile_manager.ts +++ b/lib/project_config/datafile_manager.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. 
*/ import { Service, StartupLog } from '../service'; -import { Cache } from '../utils/cache/cache'; +import { Store } from '../utils/cache/store'; import { RequestHandler } from '../utils/http_request_handler/http'; import { Fn, Consumer } from '../utils/type'; import { Repeater } from '../utils/repeater/repeater'; @@ -31,7 +31,7 @@ export type DatafileManagerConfig = { autoUpdate?: boolean; sdkKey: string; urlTemplate?: string; - cache?: Cache; + cache?: Store; datafileAccessToken?: string; initRetry?: number; repeater: Repeater; diff --git a/lib/project_config/polling_datafile_manager.ts b/lib/project_config/polling_datafile_manager.ts index fbbbeb0e0..ba8e70139 100644 --- a/lib/project_config/polling_datafile_manager.ts +++ b/lib/project_config/polling_datafile_manager.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,7 +17,7 @@ import { sprintf } from '../utils/fns'; import { DatafileManager, DatafileManagerConfig } from './datafile_manager'; import { EventEmitter } from '../utils/event_emitter/event_emitter'; import { DEFAULT_AUTHENTICATED_URL_TEMPLATE, DEFAULT_URL_TEMPLATE } from './constant'; -import { Cache } from '../utils/cache/cache'; +import { Store } from '../utils/cache/store'; import { BaseService, ServiceState } from '../service'; import { RequestHandler, AbortableRequest, Headers, Response } from '../utils/http_request_handler/http'; import { Repeater } from '../utils/repeater/repeater'; @@ -53,7 +53,7 @@ export class PollingDatafileManager extends BaseService implements DatafileManag private datafileUrl: string; private currentRequest?: AbortableRequest; private cacheKey: string; - private cache?: Cache; + private cache?: Store; private sdkKey: string; private datafileAccessToken?: string; diff --git a/lib/tests/mock/mock_cache.ts b/lib/tests/mock/mock_cache.ts index 5a542deae..21a89e7a4 100644 --- a/lib/tests/mock/mock_cache.ts +++ b/lib/tests/mock/mock_cache.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,6 +15,7 @@ */ import { SyncCache, AsyncCache } from "../../utils/cache/cache"; +import { SyncStore, AsyncStore } from "../../utils/cache/store"; import { Maybe } from "../../utils/type"; type SyncCacheWithAddOn = SyncCache & { @@ -27,7 +28,17 @@ type AsyncCacheWithAddOn = AsyncCache & { getAll(): Promise>; }; -export const getMockSyncCache = (): SyncCacheWithAddOn => { +type SyncStoreWithAddOn = SyncStore & { + size(): number; + getAll(): Map; +}; + +type AsyncStoreWithAddOn = AsyncStore & { + size(): Promise; + getAll(): Promise>; +}; + +export const getMockSyncCache = (): SyncCacheWithAddOn & SyncStoreWithAddOn => { const cache = { operation: 'sync' as const, data: new Map(), @@ -37,6 +48,9 @@ export const getMockSyncCache = (): SyncCacheWithAddOn => { clear(): void { this.data.clear(); }, + reset(): void { + this.clear(); + }, getKeys(): string[] { return Array.from(this.data.keys()); }, @@ -52,8 +66,14 @@ export const getMockSyncCache = (): SyncCacheWithAddOn => { get(key: string): T | undefined { return this.data.get(key); }, + lookup(key: string): T | undefined { + return this.get(key); + }, set(key: string, value: T): void { this.data.set(key, value); + }, + save(key: string, value: T): void { + this.data.set(key, value); } } @@ -61,7 +81,7 @@ export const getMockSyncCache = (): SyncCacheWithAddOn => { }; -export const getMockAsyncCache = (): AsyncCacheWithAddOn => { +export const getMockAsyncCache = (): AsyncCacheWithAddOn & AsyncStoreWithAddOn => { const cache = { operation: 'async' as const, data: new Map(), @@ -71,6 +91,9 @@ export const getMockAsyncCache = (): AsyncCacheWithAddOn => { async clear(): Promise { this.data.clear(); }, + async reset(): Promise { + this.clear(); + }, async getKeys(): Promise { return Array.from(this.data.keys()); }, @@ -86,8 +109,14 @@ export const getMockAsyncCache = (): AsyncCacheWithAddOn => { async get(key: string): Promise> { return this.data.get(key); }, + async lookup(key: string): Promise> { + return this.get(key); + }, async set(key: string, value: T): Promise { this.data.set(key, value); + }, + async save(key: string, value: T): Promise { + return this.set(key, value); } } diff --git a/lib/utils/cache/async_storage_cache.react_native.ts b/lib/utils/cache/async_storage_cache.react_native.ts index 4656496d2..e5e76024e 100644 --- a/lib/utils/cache/async_storage_cache.react_native.ts +++ b/lib/utils/cache/async_storage_cache.react_native.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,10 +15,10 @@ */ import { Maybe } from "../type"; -import { AsyncCache } from "./cache"; +import { AsyncStore } from "./store"; import { getDefaultAsyncStorage } from "../import.react_native/@react-native-async-storage/async-storage"; -export class AsyncStorageCache implements AsyncCache { +export class AsyncStorageCache implements AsyncStore { public readonly operation = 'async'; private asyncStorage = getDefaultAsyncStorage(); diff --git a/lib/utils/cache/cache.ts b/lib/utils/cache/cache.ts index 46dcebbda..ada8a5ac6 100644 --- a/lib/utils/cache/cache.ts +++ b/lib/utils/cache/cache.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -13,142 +13,24 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { OpType, OpValue } from '../../utils/type'; -import { Transformer } from '../../utils/type'; -import { Maybe } from '../../utils/type'; - -export type CacheOp = 'sync' | 'async'; -export type OpValue = Op extends 'sync' ? V : Promise; - -export interface CacheWithOp { - operation: Op; - set(key: string, value: V): OpValue; - get(key: string): OpValue>; - remove(key: string): OpValue; - clear(): OpValue; - getKeys(): OpValue; - getBatched(keys: string[]): OpValue[]>; +export interface OpCache { + operation: OP; + save(key: string, value: V): OpValue; + lookup(key: string): OpValue; + reset(): OpValue; } -export type SyncCache = CacheWithOp<'sync', V>; -export type AsyncCache = CacheWithOp<'async', V>; -export type Cache = SyncCache | AsyncCache; - -export class SyncPrefixCache implements SyncCache { - private cache: SyncCache; - private prefix: string; - private transformGet: Transformer; - private transformSet: Transformer; - - public readonly operation = 'sync'; - - constructor( - cache: SyncCache, - prefix: string, - transformGet: Transformer, - transformSet: Transformer - ) { - this.cache = cache; - this.prefix = prefix; - this.transformGet = transformGet; - this.transformSet = transformSet; - } - - private addPrefix(key: string): string { - return `${this.prefix}${key}`; - } - - private removePrefix(key: string): string { - return key.substring(this.prefix.length); - } - - set(key: string, value: V): unknown { - return this.cache.set(this.addPrefix(key), this.transformSet(value)); - } - - get(key: string): V | undefined { - const value = this.cache.get(this.addPrefix(key)); - return value ? this.transformGet(value) : undefined; - } - - remove(key: string): unknown { - return this.cache.remove(this.addPrefix(key)); - } - - clear(): void { - this.getInternalKeys().forEach((key) => this.cache.remove(key)); - } - - private getInternalKeys(): string[] { - return this.cache.getKeys().filter((key) => key.startsWith(this.prefix)); - } +export type SyncCache = OpCache<'sync', V>; +export type AsyncCache = OpCache<'async', V>; - getKeys(): string[] { - return this.getInternalKeys().map((key) => this.removePrefix(key)); - } +export type Cache = SyncCache | AsyncCache; - getBatched(keys: string[]): Maybe[] { - return this.cache.getBatched(keys.map((key) => this.addPrefix(key))) - .map((value) => value ? this.transformGet(value) : undefined); - } +export interface OpCacheWithRemove extends OpCache { + remove(key: string): OpValue; } -export class AsyncPrefixCache implements AsyncCache { - private cache: AsyncCache; - private prefix: string; - private transformGet: Transformer; - private transformSet: Transformer; - - public readonly operation = 'async'; - - constructor( - cache: AsyncCache, - prefix: string, - transformGet: Transformer, - transformSet: Transformer - ) { - this.cache = cache; - this.prefix = prefix; - this.transformGet = transformGet; - this.transformSet = transformSet; - } - - private addPrefix(key: string): string { - return `${this.prefix}${key}`; - } - - private removePrefix(key: string): string { - return key.substring(this.prefix.length); - } - - set(key: string, value: V): Promise { - return this.cache.set(this.addPrefix(key), this.transformSet(value)); - } - - async get(key: string): Promise { - const value = await this.cache.get(this.addPrefix(key)); - return value ? 
this.transformGet(value) : undefined; - } - - remove(key: string): Promise { - return this.cache.remove(this.addPrefix(key)); - } - - async clear(): Promise { - const keys = await this.getInternalKeys(); - await Promise.all(keys.map((key) => this.cache.remove(key))); - } - - private async getInternalKeys(): Promise { - return this.cache.getKeys().then((keys) => keys.filter((key) => key.startsWith(this.prefix))); - } - - async getKeys(): Promise { - return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key))); - } - - async getBatched(keys: string[]): Promise[]> { - const values = await this.cache.getBatched(keys.map((key) => this.addPrefix(key))); - return values.map((value) => value ? this.transformGet(value) : undefined); - } -} +export type SyncCacheWithRemove = OpCacheWithRemove<'sync', V>; +export type AsyncCacheWithRemove = OpCacheWithRemove<'async', V>; +export type CacheWithRemove = SyncCacheWithRemove | AsyncCacheWithRemove; diff --git a/lib/utils/cache/in_memory_lru_cache.spec.ts b/lib/utils/cache/in_memory_lru_cache.spec.ts index c6ab08780..81c1e4a96 100644 --- a/lib/utils/cache/in_memory_lru_cache.spec.ts +++ b/lib/utils/cache/in_memory_lru_cache.spec.ts @@ -1,5 +1,5 @@ /** - * Copyright 2024, Optimizely + * Copyright 2024-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,105 +20,87 @@ import { wait } from '../../tests/testUtils'; describe('InMemoryLruCache', () => { it('should save and get values correctly', () => { const cache = new InMemoryLruCache(2); - cache.set('a', 1); - cache.set('b', 2); - expect(cache.get('a')).toBe(1); - expect(cache.get('b')).toBe(2); + cache.save('a', 1); + cache.save('b', 2); + expect(cache.lookup('a')).toBe(1); + expect(cache.lookup('b')).toBe(2); }); it('should return undefined for non-existent keys', () => { const cache = new InMemoryLruCache(2); - expect(cache.get('a')).toBe(undefined); + expect(cache.lookup('a')).toBe(undefined); }); it('should return all keys in cache when getKeys is called', () => { const cache = new InMemoryLruCache(20); - cache.set('a', 1); - cache.set('b', 2); - cache.set('c', 3); - cache.set('d', 4); + cache.save('a', 1); + cache.save('b', 2); + cache.save('c', 3); + cache.save('d', 4); expect(cache.getKeys()).toEqual(expect.arrayContaining(['d', 'c', 'b', 'a'])); }); - it('should evict least recently used keys when full', () => { const cache = new InMemoryLruCache(3); - cache.set('a', 1); - cache.set('b', 2); - cache.set('c', 3); + cache.save('a', 1); + cache.save('b', 2); + cache.save('c', 3); - expect(cache.get('b')).toBe(2); - expect(cache.get('c')).toBe(3); - expect(cache.get('a')).toBe(1); + expect(cache.lookup('b')).toBe(2); + expect(cache.lookup('c')).toBe(3); + expect(cache.lookup('a')).toBe(1); expect(cache.getKeys()).toEqual(expect.arrayContaining(['a', 'c', 'b'])); // key use order is now a c b. next insert should evict b - cache.set('d', 4); - expect(cache.get('b')).toBe(undefined); + cache.save('d', 4); + expect(cache.lookup('b')).toBe(undefined); expect(cache.getKeys()).toEqual(expect.arrayContaining(['d', 'a', 'c'])); // key use order is now d a c. setting c should put it at the front - cache.set('c', 5); + cache.save('c', 5); // key use order is now c d a. 
next insert should evict a - cache.set('e', 6); - expect(cache.get('a')).toBe(undefined); + cache.save('e', 6); + expect(cache.lookup('a')).toBe(undefined); expect(cache.getKeys()).toEqual(expect.arrayContaining(['e', 'c', 'd'])); // key use order is now e c d. reading d should put it at the front - expect(cache.get('d')).toBe(4); + expect(cache.lookup('d')).toBe(4); // key use order is now d e c. next insert should evict c - cache.set('f', 7); - expect(cache.get('c')).toBe(undefined); + cache.save('f', 7); + expect(cache.lookup('c')).toBe(undefined); expect(cache.getKeys()).toEqual(expect.arrayContaining(['f', 'd', 'e'])); }); it('should not return expired values when get is called', async () => { const cache = new InMemoryLruCache(2, 100); - cache.set('a', 1); - cache.set('b', 2); - expect(cache.get('a')).toBe(1); - expect(cache.get('b')).toBe(2); + cache.save('a', 1); + cache.save('b', 2); + expect(cache.lookup('a')).toBe(1); + expect(cache.lookup('b')).toBe(2); await wait(150); - expect(cache.get('a')).toBe(undefined); - expect(cache.get('b')).toBe(undefined); + expect(cache.lookup('a')).toBe(undefined); + expect(cache.lookup('b')).toBe(undefined); }); it('should remove values correctly', () => { const cache = new InMemoryLruCache(2); - cache.set('a', 1); - cache.set('b', 2); - cache.set('c', 3); + cache.save('a', 1); + cache.save('b', 2); + cache.save('c', 3); cache.remove('a'); - expect(cache.get('a')).toBe(undefined); - expect(cache.get('b')).toBe(2); - expect(cache.get('c')).toBe(3); + expect(cache.lookup('a')).toBe(undefined); + expect(cache.lookup('b')).toBe(2); + expect(cache.lookup('c')).toBe(3); }); it('should clear all values correctly', () => { const cache = new InMemoryLruCache(2); - cache.set('a', 1); - cache.set('b', 2); - cache.clear(); - expect(cache.get('a')).toBe(undefined); - expect(cache.get('b')).toBe(undefined); - }); - - it('should return correct values when getBatched is called', () => { - const cache = new InMemoryLruCache(2); - cache.set('a', 1); - cache.set('b', 2); - expect(cache.getBatched(['a', 'b', 'c'])).toEqual([1, 2, undefined]); - }); - - it('should not return expired values when getBatched is called', async () => { - const cache = new InMemoryLruCache(2, 100); - cache.set('a', 1); - cache.set('b', 2); - expect(cache.getBatched(['a', 'b'])).toEqual([1, 2]); - - await wait(150); - expect(cache.getBatched(['a', 'b'])).toEqual([undefined, undefined]); + cache.save('a', 1); + cache.save('b', 2); + cache.reset(); + expect(cache.lookup('a')).toBe(undefined); + expect(cache.lookup('b')).toBe(undefined); }); }); diff --git a/lib/utils/cache/in_memory_lru_cache.ts b/lib/utils/cache/in_memory_lru_cache.ts index 1b4d3a7bd..6ed92d1fd 100644 --- a/lib/utils/cache/in_memory_lru_cache.ts +++ b/lib/utils/cache/in_memory_lru_cache.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,14 +15,14 @@ */ import { Maybe } from "../type"; -import { SyncCache } from "./cache"; +import { SyncCacheWithRemove } from "./cache"; type CacheElement = { value: V; expiresAt?: number; }; -export class InMemoryLruCache implements SyncCache { +export class InMemoryLruCache implements SyncCacheWithRemove { public operation = 'sync' as const; private data: Map> = new Map(); private maxSize: number; @@ -33,7 +33,7 @@ export class InMemoryLruCache implements SyncCache { this.ttl = ttl; } - get(key: string): Maybe { + lookup(key: string): Maybe { const element = this.data.get(key); if (!element) return undefined; this.data.delete(key); @@ -46,7 +46,7 @@ export class InMemoryLruCache implements SyncCache { return element.value; } - set(key: string, value: V): void { + save(key: string, value: V): void { this.data.delete(key); if (this.data.size === this.maxSize) { @@ -64,15 +64,11 @@ export class InMemoryLruCache implements SyncCache { this.data.delete(key); } - clear(): void { + reset(): void { this.data.clear(); } getKeys(): string[] { return Array.from(this.data.keys()); } - - getBatched(keys: string[]): Maybe[] { - return keys.map((key) => this.get(key)); - } } diff --git a/lib/utils/cache/local_storage_cache.browser.ts b/lib/utils/cache/local_storage_cache.browser.ts index 594b722d2..b16d77571 100644 --- a/lib/utils/cache/local_storage_cache.browser.ts +++ b/lib/utils/cache/local_storage_cache.browser.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,9 +15,9 @@ */ import { Maybe } from "../type"; -import { SyncCache } from "./cache"; +import { SyncStore } from "./store"; -export class LocalStorageCache implements SyncCache { +export class LocalStorageCache implements SyncStore { public readonly operation = 'sync'; public set(key: string, value: V): void { diff --git a/lib/utils/cache/cache.spec.ts b/lib/utils/cache/store.spec.ts similarity index 71% rename from lib/utils/cache/cache.spec.ts rename to lib/utils/cache/store.spec.ts index 150fe4884..a99226844 100644 --- a/lib/utils/cache/cache.spec.ts +++ b/lib/utils/cache/store.spec.ts @@ -1,5 +1,5 @@ /** - * Copyright 2022-2024, Optimizely + * Copyright 2022-2025, Optimizely * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,28 +15,28 @@ */ import { describe, it, expect } from 'vitest'; -import { SyncPrefixCache, AsyncPrefixCache } from './cache'; +import { SyncPrefixStore, AsyncPrefixStore } from './store'; import { getMockSyncCache, getMockAsyncCache } from '../../tests/mock/mock_cache'; -describe('SyncPrefixCache', () => { +describe('SyncPrefixStore', () => { describe('set', () => { it('should add prefix to key when setting in the underlying cache', () => { const cache = getMockSyncCache(); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); prefixCache.set('key', 'value'); expect(cache.get('prefix:key')).toEqual('value'); }); it('should transform value when setting in the underlying cache', () => { const cache = getMockSyncCache(); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); prefixCache.set('key', 'value'); expect(cache.get('prefix:key')).toEqual('VALUE'); }); it('should work correctly with empty prefix', () => { const cache = getMockSyncCache(); - const prefixCache = new SyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new SyncPrefixStore(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); prefixCache.set('key', 'value'); expect(cache.get('key')).toEqual('VALUE'); }); @@ -46,13 +46,13 @@ describe('SyncPrefixCache', () => { it('should remove prefix from key when getting from the underlying cache', () => { const cache = getMockSyncCache(); cache.set('prefix:key', 'value'); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); expect(prefixCache.get('key')).toEqual('value'); }); it('should transform value after getting from the underlying cache', () => { const cache = getMockSyncCache(); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); cache.set('prefix:key', 'VALUE'); expect(prefixCache.get('key')).toEqual('value'); }); @@ -60,7 +60,7 @@ describe('SyncPrefixCache', () => { it('should work correctly with empty prefix', () => { const cache = getMockSyncCache(); - const prefixCache = new SyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new SyncPrefixStore(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); cache.set('key', 'VALUE'); expect(prefixCache.get('key')).toEqual('value'); }); @@ -71,7 +71,7 @@ describe('SyncPrefixCache', () => { const cache = getMockSyncCache(); cache.set('prefix:key', 'value'); cache.set('key', 'value'); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); prefixCache.remove('key'); expect(cache.get('prefix:key')).toBeUndefined(); expect(cache.get('key')).toEqual('value'); @@ -80,42 +80,12 @@ describe('SyncPrefixCache', () => { it('should work with empty prefix', () => { const cache = getMockSyncCache(); cache.set('key', 'value'); - const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, '', (v) => v, (v) => v); prefixCache.remove('key'); 
expect(cache.get('key')).toBeUndefined(); }); }); - describe('clear', () => { - it('should remove keys with correct prefix from the underlying cache', () => { - const cache = getMockSyncCache(); - cache.set('key1', 'value1'); - cache.set('key2', 'value2'); - cache.set('prefix:key1', 'value1'); - cache.set('prefix:key2', 'value2'); - - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); - prefixCache.clear(); - - expect(cache.get('key1')).toEqual('value1'); - expect(cache.get('key2')).toEqual('value2'); - expect(cache.get('prefix:key1')).toBeUndefined(); - expect(cache.get('prefix:key2')).toBeUndefined(); - }); - - it('should work with empty prefix', () => { - const cache = getMockSyncCache(); - cache.set('key1', 'value1'); - cache.set('key2', 'value2'); - - const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); - prefixCache.clear(); - - expect(cache.get('key1')).toBeUndefined(); - expect(cache.get('key2')).toBeUndefined(); - }); - }); - describe('getKeys', () => { it('should return keys with correct prefix', () => { const cache = getMockSyncCache(); @@ -124,7 +94,7 @@ describe('SyncPrefixCache', () => { cache.set('prefix:key3', 'value1'); cache.set('prefix:key4', 'value2'); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); const keys = prefixCache.getKeys(); expect(keys).toEqual(expect.arrayContaining(['key3', 'key4'])); @@ -135,7 +105,7 @@ describe('SyncPrefixCache', () => { cache.set('key1', 'value1'); cache.set('key2', 'value2'); - const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, '', (v) => v, (v) => v); const keys = prefixCache.getKeys(); expect(keys).toEqual(expect.arrayContaining(['key1', 'key2'])); @@ -151,7 +121,7 @@ describe('SyncPrefixCache', () => { cache.set('prefix:key1', 'prefix:value1'); cache.set('prefix:key2', 'prefix:value2'); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); const values = prefixCache.getBatched(['key1', 'key2', 'key3']); expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); @@ -165,7 +135,7 @@ describe('SyncPrefixCache', () => { cache.set('prefix:key1', 'PREFIX:VALUE1'); cache.set('prefix:key2', 'PREFIX:VALUE2'); - const prefixCache = new SyncPrefixCache(cache, 'prefix:', (v) => v.toLocaleLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new SyncPrefixStore(cache, 'prefix:', (v) => v.toLocaleLowerCase(), (v) => v.toUpperCase()); const values = prefixCache.getBatched(['key1', 'key2', 'key3']); expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); @@ -176,7 +146,7 @@ describe('SyncPrefixCache', () => { cache.set('key1', 'value1'); cache.set('key2', 'value2'); - const prefixCache = new SyncPrefixCache(cache, '', (v) => v, (v) => v); + const prefixCache = new SyncPrefixStore(cache, '', (v) => v, (v) => v); const values = prefixCache.getBatched(['key1', 'key2']); expect(values).toEqual(expect.arrayContaining(['value1', 'value2'])); @@ -184,25 +154,25 @@ describe('SyncPrefixCache', () => { }); }); -describe('AsyncPrefixCache', () => { +describe('AsyncPrefixStore', () => { describe('set', () => { it('should add prefix to key when setting in the underlying cache', async () => { const cache = getMockAsyncCache(); - const prefixCache = 
new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); await prefixCache.set('key', 'value'); expect(await cache.get('prefix:key')).toEqual('value'); }); it('should transform value when setting in the underlying cache', async () => { const cache = getMockAsyncCache(); - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); await prefixCache.set('key', 'value'); expect(await cache.get('prefix:key')).toEqual('VALUE'); }); it('should work correctly with empty prefix', async () => { const cache = getMockAsyncCache(); - const prefixCache = new AsyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new AsyncPrefixStore(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); await prefixCache.set('key', 'value'); expect(await cache.get('key')).toEqual('VALUE'); }); @@ -212,13 +182,13 @@ describe('AsyncPrefixCache', () => { it('should remove prefix from key when getting from the underlying cache', async () => { const cache = getMockAsyncCache(); await cache.set('prefix:key', 'value'); - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); expect(await prefixCache.get('key')).toEqual('value'); }); it('should transform value after getting from the underlying cache', async () => { const cache = getMockAsyncCache(); - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v.toLowerCase(), (v) => v.toUpperCase()); await cache.set('prefix:key', 'VALUE'); expect(await prefixCache.get('key')).toEqual('value'); }); @@ -226,7 +196,7 @@ describe('AsyncPrefixCache', () => { it('should work correctly with empty prefix', async () => { const cache = getMockAsyncCache(); - const prefixCache = new AsyncPrefixCache(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new AsyncPrefixStore(cache, '', (v) => v.toLowerCase(), (v) => v.toUpperCase()); await cache.set('key', 'VALUE'); expect(await prefixCache.get('key')).toEqual('value'); }); @@ -237,7 +207,7 @@ describe('AsyncPrefixCache', () => { const cache = getMockAsyncCache(); cache.set('prefix:key', 'value'); cache.set('key', 'value'); - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); await prefixCache.remove('key'); expect(await cache.get('prefix:key')).toBeUndefined(); expect(await cache.get('key')).toEqual('value'); @@ -246,42 +216,12 @@ describe('AsyncPrefixCache', () => { it('should work with empty prefix', async () => { const cache = getMockAsyncCache(); await cache.set('key', 'value'); - const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, '', (v) => v, (v) => v); await prefixCache.remove('key'); expect(await cache.get('key')).toBeUndefined(); }); }); - describe('clear', () => { - it('should remove keys with correct prefix from the underlying cache', async () => { - const cache = getMockAsyncCache(); - await cache.set('key1', 'value1'); - await cache.set('key2', 'value2'); - await cache.set('prefix:key1', 'value1'); - await 
cache.set('prefix:key2', 'value2'); - - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); - await prefixCache.clear(); - - expect(await cache.get('key1')).toEqual('value1'); - expect(await cache.get('key2')).toEqual('value2'); - expect(await cache.get('prefix:key1')).toBeUndefined(); - expect(await cache.get('prefix:key2')).toBeUndefined(); - }); - - it('should work with empty prefix', async () => { - const cache = getMockAsyncCache(); - await cache.set('key1', 'value1'); - await cache.set('key2', 'value2'); - - const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); - await prefixCache.clear(); - - expect(await cache.get('key1')).toBeUndefined(); - expect(await cache.get('key2')).toBeUndefined(); - }); - }); - describe('getKeys', () => { it('should return keys with correct prefix', async () => { const cache = getMockAsyncCache(); @@ -290,7 +230,7 @@ describe('AsyncPrefixCache', () => { await cache.set('prefix:key3', 'value1'); await cache.set('prefix:key4', 'value2'); - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); const keys = await prefixCache.getKeys(); expect(keys).toEqual(expect.arrayContaining(['key3', 'key4'])); @@ -301,7 +241,7 @@ describe('AsyncPrefixCache', () => { await cache.set('key1', 'value1'); await cache.set('key2', 'value2'); - const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, '', (v) => v, (v) => v); const keys = await prefixCache.getKeys(); expect(keys).toEqual(expect.arrayContaining(['key1', 'key2'])); @@ -317,7 +257,7 @@ describe('AsyncPrefixCache', () => { await cache.set('prefix:key1', 'prefix:value1'); await cache.set('prefix:key2', 'prefix:value2'); - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v, (v) => v); const values = await prefixCache.getBatched(['key1', 'key2', 'key3']); expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); @@ -331,7 +271,7 @@ describe('AsyncPrefixCache', () => { await cache.set('prefix:key1', 'PREFIX:VALUE1'); await cache.set('prefix:key2', 'PREFIX:VALUE2'); - const prefixCache = new AsyncPrefixCache(cache, 'prefix:', (v) => v.toLocaleLowerCase(), (v) => v.toUpperCase()); + const prefixCache = new AsyncPrefixStore(cache, 'prefix:', (v) => v.toLocaleLowerCase(), (v) => v.toUpperCase()); const values = await prefixCache.getBatched(['key1', 'key2', 'key3']); expect(values).toEqual(expect.arrayContaining(['prefix:value1', 'prefix:value2', undefined])); @@ -342,7 +282,7 @@ describe('AsyncPrefixCache', () => { await cache.set('key1', 'value1'); await cache.set('key2', 'value2'); - const prefixCache = new AsyncPrefixCache(cache, '', (v) => v, (v) => v); + const prefixCache = new AsyncPrefixStore(cache, '', (v) => v, (v) => v); const values = await prefixCache.getBatched(['key1', 'key2']); expect(values).toEqual(expect.arrayContaining(['value1', 'value2'])); diff --git a/lib/utils/cache/store.ts b/lib/utils/cache/store.ts new file mode 100644 index 000000000..c13852f65 --- /dev/null +++ b/lib/utils/cache/store.ts @@ -0,0 +1,174 @@ +/** + * Copyright 2025, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Transformer } from '../../utils/type';
+import { Maybe } from '../../utils/type';
+import { OpType, OpValue } from '../../utils/type';
+
+export interface OpStore<OP extends OpType, V> {
+  operation: OP;
+  set(key: string, value: V): OpValue<OP, unknown>;
+  get(key: string): OpValue<OP, Maybe<V>>;
+  remove(key: string): OpValue<OP, unknown>;
+  getKeys(): OpValue<OP, string[]>;
+}
+
+export type SyncStore<V> = OpStore<'sync', V>;
+export type AsyncStore<V> = OpStore<'async', V>;
+export type Store<V> = SyncStore<V> | AsyncStore<V>;
+
+export abstract class SyncStoreWithBatchedGet<V> implements SyncStore<V> {
+  operation = 'sync' as const;
+  abstract set(key: string, value: V): unknown;
+  abstract get(key: string): Maybe<V>;
+  abstract remove(key: string): unknown;
+  abstract getKeys(): string[];
+  abstract getBatched(keys: string[]): Maybe<V>[];
+}
+
+export abstract class AsyncStoreWithBatchedGet<V> implements AsyncStore<V> {
+  operation = 'async' as const;
+  abstract set(key: string, value: V): Promise<unknown>;
+  abstract get(key: string): Promise<Maybe<V>>;
+  abstract remove(key: string): Promise<unknown>;
+  abstract getKeys(): Promise<string[]>;
+  abstract getBatched(keys: string[]): Promise<Maybe<V>[]>;
+}
+
+export const getBatchedSync = <V>(store: SyncStore<V>, keys: string[]): Maybe<V>[] => {
+  if (store instanceof SyncStoreWithBatchedGet) {
+    return store.getBatched(keys);
+  }
+  return keys.map((key) => store.get(key));
+};
+
+export const getBatchedAsync = <V>(store: AsyncStore<V>, keys: string[]): Promise<Maybe<V>[]> => {
+  if (store instanceof AsyncStoreWithBatchedGet) {
+    return store.getBatched(keys);
+  }
+  return Promise.all(keys.map((key) => store.get(key)));
+};
+
+export class SyncPrefixStore<U, V> extends SyncStoreWithBatchedGet<V> implements SyncStore<V> {
+  private store: SyncStore<U>;
+  private prefix: string;
+  private transformGet: Transformer<U, V>;
+  private transformSet: Transformer<V, U>;
+
+  public readonly operation = 'sync';
+
+  constructor(
+    store: SyncStore<U>,
+    prefix: string,
+    transformGet: Transformer<U, V>,
+    transformSet: Transformer<V, U>
+  ) {
+    super();
+    this.store = store;
+    this.prefix = prefix;
+    this.transformGet = transformGet;
+    this.transformSet = transformSet;
+  }
+
+  private addPrefix(key: string): string {
+    return `${this.prefix}${key}`;
+  }
+
+  private removePrefix(key: string): string {
+    return key.substring(this.prefix.length);
+  }
+
+  set(key: string, value: V): unknown {
+    return this.store.set(this.addPrefix(key), this.transformSet(value));
+  }
+
+  get(key: string): V | undefined {
+    const value = this.store.get(this.addPrefix(key));
+    return value ? this.transformGet(value) : undefined;
+  }
+
+  remove(key: string): unknown {
+    return this.store.remove(this.addPrefix(key));
+  }
+
+  private getInternalKeys(): string[] {
+    return this.store.getKeys().filter((key) => key.startsWith(this.prefix));
+  }
+
+  getKeys(): string[] {
+    return this.getInternalKeys().map((key) => this.removePrefix(key));
+  }
+
+  getBatched(keys: string[]): Maybe<V>[] {
+    return getBatchedSync(this.store, keys.map((key) => this.addPrefix(key)))
+      .map((value) => value ? this.transformGet(value) : undefined);
+  }
+}
+
+export class AsyncPrefixStore<U, V> implements AsyncStore<V> {
+  private cache: AsyncStore<U>;
+  private prefix: string;
+  private transformGet: Transformer<U, V>;
+  private transformSet: Transformer<V, U>;
+
+  public readonly operation = 'async';
+
+  constructor(
+    cache: AsyncStore<U>,
+    prefix: string,
+    transformGet: Transformer<U, V>,
+    transformSet: Transformer<V, U>
+  ) {
+    this.cache = cache;
+    this.prefix = prefix;
+    this.transformGet = transformGet;
+    this.transformSet = transformSet;
+  }
+
+  private addPrefix(key: string): string {
+    return `${this.prefix}${key}`;
+  }
+
+  private removePrefix(key: string): string {
+    return key.substring(this.prefix.length);
+  }
+
+  set(key: string, value: V): Promise<unknown> {
+    return this.cache.set(this.addPrefix(key), this.transformSet(value));
+  }
+
+  async get(key: string): Promise<V | undefined> {
+    const value = await this.cache.get(this.addPrefix(key));
+    return value ? this.transformGet(value) : undefined;
+  }
+
+  remove(key: string): Promise<unknown> {
+    return this.cache.remove(this.addPrefix(key));
+  }
+
+  private async getInternalKeys(): Promise<string[]> {
+    return this.cache.getKeys().then((keys) => keys.filter((key) => key.startsWith(this.prefix)));
+  }
+
+  async getKeys(): Promise<string[]> {
+    return this.getInternalKeys().then((keys) => keys.map((key) => this.removePrefix(key)));
+  }
+
+  async getBatched(keys: string[]): Promise<Maybe<V>[]> {
+    const values = await getBatchedAsync(this.cache, keys.map((key) => this.addPrefix(key)));
+    return values.map((value) => value ? this.transformGet(value) : undefined);
+  }
+}
diff --git a/lib/vuid/vuid_manager.ts b/lib/vuid/vuid_manager.ts
index 32ca67103..dd0c0322a 100644
--- a/lib/vuid/vuid_manager.ts
+++ b/lib/vuid/vuid_manager.ts
@@ -1,5 +1,5 @@
 /**
- * Copyright 2022-2024, Optimizely
+ * Copyright 2022-2025, Optimizely
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
 * limitations under the License.
 */
 import { LoggerFacade } from '../logging/logger';
-import { Cache } from '../utils/cache/cache';
+import { Store } from '../utils/cache/store';
 import { AsyncProducer, Maybe } from '../utils/type';
 import { isVuid, makeVuid } from './vuid';
@@ -27,7 +27,7 @@ export interface VuidManager {
 export class VuidCacheManager {
   private logger?: LoggerFacade;
   private vuidCacheKey = 'optimizely-vuid';
-  private cache?: Cache<string>;
+  private cache?: Store<string>;
   // if this value is not undefined, this means the same value is in the cache.
   // if this is undefined, it could either mean that there is no value in the cache
   // or that there is a value in the cache but it has not been loaded yet or failed
@@ -35,12 +35,12 @@ export class VuidCacheManager {
   private vuid?: string;
   private waitPromise: Promise<unknown> = Promise.resolve();
 
-  constructor(cache?: Cache<string>, logger?: LoggerFacade) {
+  constructor(cache?: Store<string>, logger?: LoggerFacade) {
     this.cache = cache;
     this.logger = logger;
   }
 
-  setCache(cache: Cache<string>): void {
+  setCache(cache: Store<string>): void {
     this.cache = cache;
     this.vuid = undefined;
   }
@@ -92,14 +92,14 @@ export class VuidCacheManager {
 
 export type VuidManagerConfig = {
   enableVuid?: boolean;
-  vuidCache: Cache<string>;
+  vuidCache: Store<string>;
   vuidCacheManager: VuidCacheManager;
 }
 
 export class DefaultVuidManager implements VuidManager {
   private vuidCacheManager: VuidCacheManager;
   private vuid?: string;
-  private vuidCache: Cache<string>;
+  private vuidCache: Store<string>;
   private vuidEnabled = false;
 
   constructor(config: VuidManagerConfig) {
diff --git a/lib/vuid/vuid_manager_factory.ts b/lib/vuid/vuid_manager_factory.ts
index 61ac36966..ccc4ce2b2 100644
--- a/lib/vuid/vuid_manager_factory.ts
+++ b/lib/vuid/vuid_manager_factory.ts
@@ -1,5 +1,5 @@
 /**
- * Copyright 2024, Optimizely
+ * Copyright 2024-2025, Optimizely
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -14,11 +14,11 @@
 * limitations under the License.
 */
-import { Cache } from '../utils/cache/cache';
+import { Store } from '../utils/cache/store';
 import { VuidManager } from './vuid_manager';
 
 export type VuidManagerOptions = {
-  vuidCache?: Cache<string>;
+  vuidCache?: Store<string>;
   enableVuid?: boolean;
 }
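A minimal usage sketch of the Store API introduced in lib/utils/cache/store.ts above. It is illustrative only: the generics follow the reconstruction in this diff, InMemoryStringStore is a hypothetical helper (not part of the change set), and the import path is assumed.

// Usage sketch (assumptions noted above); not part of the diff.
import { SyncStore, SyncPrefixStore, getBatchedSync } from './lib/utils/cache/store';

// A plain synchronous string store backed by a Map.
class InMemoryStringStore implements SyncStore<string> {
  operation = 'sync' as const;
  private data = new Map<string, string>();
  set(key: string, value: string): void { this.data.set(key, value); }
  get(key: string): string | undefined { return this.data.get(key); }
  remove(key: string): void { this.data.delete(key); }
  getKeys(): string[] { return [...this.data.keys()]; }
}

const backing = new InMemoryStringStore();
// Namespace all keys under 'vuid:' in the backing store; values pass through unchanged.
const prefixed = new SyncPrefixStore(backing, 'vuid:', (v) => v, (v) => v);

prefixed.set('user-1', 'abc');                    // stored as 'vuid:user-1'
backing.get('vuid:user-1');                       // 'abc'
// getBatchedSync uses the store's own getBatched when it extends
// SyncStoreWithBatchedGet (SyncPrefixStore does); otherwise it falls back
// to one get() call per key.
getBatchedSync(prefixed, ['user-1', 'missing']);  // ['abc', undefined]

The sync/async split is carried by the `operation` discriminator, which is how BatchEventProcessor decides between getBatchedSync and getBatchedAsync without awaiting synchronous stores.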