diff --git a/.changeset/tasty-shirts-judge.md b/.changeset/tasty-shirts-judge.md new file mode 100644 index 0000000..e055f7b --- /dev/null +++ b/.changeset/tasty-shirts-judge.md @@ -0,0 +1,5 @@ +--- +'bentocache': minor +--- + +Rework the logs issued by bentocache to make them much cleaner, more consistent and make debug easier diff --git a/benchmarks/package.json b/benchmarks/package.json index cd5885d..518758f 100644 --- a/benchmarks/package.json +++ b/benchmarks/package.json @@ -9,7 +9,7 @@ "keywords": [], "main": "index.js", "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" + "bench:all": "pnpm tsx all.ts" }, "devDependencies": { "@keyv/redis": "^4.2.0", diff --git a/packages/bentocache/src/bento_cache_options.ts b/packages/bentocache/src/bento_cache_options.ts index e01c18f..d01cb13 100644 --- a/packages/bentocache/src/bento_cache_options.ts +++ b/packages/bentocache/src/bento_cache_options.ts @@ -3,16 +3,11 @@ import lodash from '@poppinss/utils/lodash' import string from '@poppinss/utils/string' import { noopLogger } from '@julr/utils/logger' +import { Logger } from './logger.js' import { resolveTtl } from './helpers.js' import type { FactoryError } from './errors.js' import { JsonSerializer } from './serializers/json.js' -import type { - CacheSerializer, - Duration, - Emitter, - Logger, - RawBentoCacheOptions, -} from './types/main.js' +import type { CacheSerializer, Duration, Emitter, RawBentoCacheOptions } from './types/main.js' const defaultSerializer = new JsonSerializer() @@ -57,7 +52,7 @@ export class BentoCacheOptions { /** * The logger used throughout the library */ - logger: Logger = noopLogger() + logger: Logger /** * The emitter used throughout the library @@ -102,7 +97,7 @@ export class BentoCacheOptions { this.serializer = this.#options.serializer ?? defaultSerializer this.l2CircuitBreakerDuration = resolveTtl(this.#options.l2CircuitBreakerDuration, null) - this.logger = this.#options.logger!.child({ pkg: 'bentocache' }) + this.logger = new Logger(this.#options.logger ?? noopLogger()) this.onFactoryError = this.#options.onFactoryError } diff --git a/packages/bentocache/src/bus/bus.ts b/packages/bentocache/src/bus/bus.ts index 9884b1b..e90ba0c 100644 --- a/packages/bentocache/src/bus/bus.ts +++ b/packages/bentocache/src/bus/bus.ts @@ -1,10 +1,11 @@ import { Bus as BoringBus } from '@boringnode/bus' import type { Transport } from '@boringnode/bus/types/main' +import type { Logger } from '../logger.js' import { busEvents } from '../events/bus_events.js' import { CacheBusMessageType } from '../types/bus.js' import type { LocalCache } from '../cache/facades/local_cache.js' -import type { BusOptions, CacheBusMessage, Emitter, Logger } from '../types/main.js' +import type { BusOptions, CacheBusMessage, Emitter } from '../types/main.js' /** * The bus is used to notify other processes about cache changes. 
diff --git a/packages/bentocache/src/cache/cache.ts b/packages/bentocache/src/cache/cache.ts index 409fda8..77015a2 100644 --- a/packages/bentocache/src/cache/cache.ts +++ b/packages/bentocache/src/cache/cache.ts @@ -5,6 +5,7 @@ import { CacheBusMessageType } from '../types/main.js' import { cacheEvents } from '../events/cache_events.js' import type { CacheProvider } from '../types/provider.js' import { GetSetHandler } from './get_set/get_set_handler.js' +import type { BentoCacheOptions } from '../bento_cache_options.js' import type { Factory, ClearOptions, @@ -25,11 +26,13 @@ export class Cache implements CacheProvider { #getSetHandler: GetSetHandler #stack: CacheStack + #options: BentoCacheOptions constructor(name: string, stack: CacheStack) { this.name = name this.#stack = stack + this.#options = stack.options this.#getSetHandler = new GetSetHandler(this.#stack) } @@ -45,44 +48,44 @@ export class Cache implements CacheProvider { } get(options: GetOptions): Promise - async get(keyOrOptions: GetOptions): Promise { - const key = keyOrOptions.key - const providedOptions = keyOrOptions - const defaultValueFn = this.#resolveDefaultValue(keyOrOptions.defaultValue) + async get(rawOptions: GetOptions): Promise { + const key = rawOptions.key + const defaultValueFn = this.#resolveDefaultValue(rawOptions.defaultValue) - const options = this.#stack.defaultOptions.cloneWith(providedOptions) - const localItem = this.#stack.l1?.get(key, options) + const options = this.#stack.defaultOptions.cloneWith(rawOptions) + this.#options.logger.logMethod({ method: 'get', key, options, cacheName: this.name }) - if (localItem !== undefined && !localItem.isLogicallyExpired()) { - this.#stack.emit(cacheEvents.hit(key, localItem.getValue(), this.name)) - return localItem.getValue() + const localItem = this.#stack.l1?.get(key, options) + if (localItem?.isGraced === false) { + this.#stack.emit(cacheEvents.hit(key, localItem.entry.getValue(), this.name)) + this.#options.logger.logL1Hit({ cacheName: this.name, key, options }) + return localItem.entry.getValue() } const remoteItem = await this.#stack.l2?.get(key, options) - if (remoteItem !== undefined && !remoteItem.isLogicallyExpired()) { - this.#stack.l1?.set(key, remoteItem.serialize(), options) - this.#stack.emit(cacheEvents.hit(key, remoteItem.getValue(), this.name)) - return remoteItem.getValue() - } - - if (!options.isGraceEnabled()) { - this.#stack.emit(cacheEvents.miss(key, this.name)) - return this.#resolveDefaultValue(defaultValueFn) + if (remoteItem?.isGraced === false) { + this.#stack.l1?.set(key, remoteItem.entry.serialize(), options) + this.#stack.emit(cacheEvents.hit(key, remoteItem.entry.getValue(), this.name)) + this.#options.logger.logL2Hit({ cacheName: this.name, key, options }) + return remoteItem.entry.getValue() } - if (remoteItem) { - this.#stack.l1?.set(key, remoteItem.serialize(), options) - this.#stack.emit(cacheEvents.hit(key, remoteItem.serialize(), this.name, true)) - return remoteItem.getValue() + if (remoteItem && options.isGraceEnabled()) { + this.#stack.l1?.set(key, remoteItem.entry.serialize(), options) + this.#stack.emit(cacheEvents.hit(key, remoteItem.entry.serialize(), this.name, true)) + this.#options.logger.logL2Hit({ cacheName: this.name, key, options, graced: true }) + return remoteItem.entry.getValue() } - if (localItem) { - this.#stack.emit(cacheEvents.hit(key, localItem.serialize(), this.name, true)) - return localItem.getValue() + if (localItem && options.isGraceEnabled()) { + this.#stack.emit(cacheEvents.hit(key, 
localItem.entry.serialize(), this.name, true)) + this.#options.logger.logL1Hit({ cacheName: this.name, key, options, graced: true }) + return localItem.entry.getValue() } this.#stack.emit(cacheEvents.miss(key, this.name)) + this.#options.logger.debug({ key, cacheName: this.name }, 'cache miss. using default value') return this.#resolveDefaultValue(defaultValueFn) } @@ -90,9 +93,16 @@ export class Cache implements CacheProvider { * Set a value in the cache * Returns true if the value was set, false otherwise */ - set(options: SetOptions) { - const cacheOptions = this.#stack.defaultOptions.cloneWith(options) - return this.#stack.set(options.key, options.value, cacheOptions) + set(rawOptions: SetOptions) { + const options = this.#stack.defaultOptions.cloneWith(rawOptions) + this.#options.logger.logMethod({ + method: 'set', + options, + key: rawOptions.key, + cacheName: this.name, + }) + + return this.#stack.set(rawOptions.key, rawOptions.value, options) } /** @@ -107,18 +117,25 @@ export class Cache implements CacheProvider { * Retrieve an item from the cache if it exists, otherwise store the value * provided by the factory and return it */ - getOrSet(options: GetOrSetOptions): Promise { - const cacheOptions = this.#stack.defaultOptions.cloneWith(options) - return this.#getSetHandler.handle(options.key, options.factory, cacheOptions) + getOrSet(rawOptions: GetOrSetOptions): Promise { + const options = this.#stack.defaultOptions.cloneWith(rawOptions) + this.#options.logger.logMethod({ + method: 'getOrSet', + key: rawOptions.key, + cacheName: this.name, + options, + }) + + return this.#getSetHandler.handle(rawOptions.key, rawOptions.factory, options) } /** * Retrieve an item from the cache if it exists, otherwise store the value * provided by the factory forever and return it */ - getOrSetForever(options: GetOrSetForeverOptions): Promise { - const cacheOptions = this.#stack.defaultOptions.cloneWith({ ttl: null, ...options }) - return this.#getSetHandler.handle(options.key, options.factory, cacheOptions) + getOrSetForever(rawOptions: GetOrSetForeverOptions): Promise { + const options = this.#stack.defaultOptions.cloneWith({ ttl: null, ...rawOptions }) + return this.#getSetHandler.handle(rawOptions.key, rawOptions.factory, options) } /** @@ -126,9 +143,15 @@ export class Cache implements CacheProvider { */ async has(options: HasOptions) { const key = options.key - const cacheOptions = this.#stack.defaultOptions.cloneWith(options) - - const inRemote = await this.#stack.l2?.has(key, cacheOptions) + const entryOptions = this.#stack.defaultOptions.cloneWith(options) + this.#options.logger.logMethod({ + method: 'has', + key, + cacheName: this.name, + options: entryOptions, + }) + + const inRemote = await this.#stack.l2?.has(key, entryOptions) const inLocal = this.#stack.l1?.has(key) return !!(inRemote || inLocal) @@ -155,12 +178,13 @@ export class Cache implements CacheProvider { * Delete a key from the cache, emit cache:deleted event and * publish invalidation through the bus */ - async delete(options: DeleteOptions): Promise { - const key = options.key - const cacheOptions = this.#stack.defaultOptions.cloneWith(options) + async delete(rawOptions: DeleteOptions): Promise { + const key = rawOptions.key + const options = this.#stack.defaultOptions.cloneWith(rawOptions) + this.#options.logger.logMethod({ method: 'delete', key, cacheName: this.name, options }) - this.#stack.l1?.delete(key, cacheOptions) - await this.#stack.l2?.delete(key, cacheOptions) + this.#stack.l1?.delete(key, options) + await 
this.#stack.l2?.delete(key, options) this.#stack.emit(cacheEvents.deleted(key, this.name)) await this.#stack.publish({ type: CacheBusMessageType.Delete, keys: [key] }) @@ -173,12 +197,18 @@ export class Cache implements CacheProvider { * Then emit cache:deleted events for each key * And finally publish invalidation through the bus */ - async deleteMany(options: DeleteManyOptions): Promise { - const keys = options.keys - const cacheOptions = this.#stack.defaultOptions.cloneWith(options) - - this.#stack.l1?.deleteMany(keys, cacheOptions) - await this.#stack.l2?.deleteMany(keys, cacheOptions) + async deleteMany(rawOptions: DeleteManyOptions): Promise { + const keys = rawOptions.keys + const options = this.#stack.defaultOptions.cloneWith(rawOptions) + this.#options.logger.logMethod({ + method: 'deleteMany', + key: keys, + cacheName: this.name, + options, + }) + + this.#stack.l1?.deleteMany(keys, options) + await this.#stack.l2?.deleteMany(keys, options) keys.forEach((key) => this.#stack.emit(cacheEvents.deleted(key, this.name))) await this.#stack.publish({ type: CacheBusMessageType.Delete, keys }) @@ -189,12 +219,13 @@ export class Cache implements CacheProvider { /** * Remove all items from the cache */ - async clear(options?: ClearOptions) { - const cacheOptions = this.#stack.defaultOptions.cloneWith(options) + async clear(rawOptions?: ClearOptions) { + const options = this.#stack.defaultOptions.cloneWith(rawOptions) + this.#options.logger.logMethod({ method: 'clear', cacheName: this.name, options }) await Promise.all([ this.#stack.l1?.clear(), - this.#stack.l2?.clear(cacheOptions), + this.#stack.l2?.clear(options), this.#stack.publish({ type: CacheBusMessageType.Clear, keys: [] }), ]) diff --git a/packages/bentocache/src/cache/cache_stack.ts b/packages/bentocache/src/cache/cache_stack.ts index be2237b..8437638 100644 --- a/packages/bentocache/src/cache/cache_stack.ts +++ b/packages/bentocache/src/cache/cache_stack.ts @@ -2,6 +2,7 @@ import { is } from '@julr/utils/is' import lodash from '@poppinss/utils/lodash' import { Bus } from '../bus/bus.js' +import type { Logger } from '../logger.js' import { UndefinedValueError } from '../errors.js' import { LocalCache } from './facades/local_cache.js' import { BaseDriver } from '../drivers/base_driver.js' @@ -15,7 +16,6 @@ import { type CacheEvent, type CacheStackDrivers, type CacheBusMessage, - type Logger, CacheBusMessageType, } from '../types/main.js' @@ -65,9 +65,8 @@ export class CacheStack extends BaseDriver { { retryQueue: { enabled: true, maxSize: undefined } }, busOptions, ) - const newBus = new Bus(this.name, this.#busDriver, this.logger, this.emitter, this.#busOptions) - return newBus + return new Bus(this.name, this.#busDriver, this.logger, this.emitter, this.#busOptions) } namespace(namespace: string): CacheStack { diff --git a/packages/bentocache/src/cache/facades/local_cache.ts b/packages/bentocache/src/cache/facades/local_cache.ts index 95fab36..d1470e6 100644 --- a/packages/bentocache/src/cache/facades/local_cache.ts +++ b/packages/bentocache/src/cache/facades/local_cache.ts @@ -1,6 +1,7 @@ +import type { Logger } from '../../logger.js' import { CacheEntry } from '../cache_entry/cache_entry.js' +import type { L1CacheDriver, CacheSerializer } from '../../types/main.js' import type { CacheEntryOptions } from '../cache_entry/cache_entry_options.js' -import type { Logger, L1CacheDriver, CacheSerializer } from '../../types/main.js' /** * LocalCache is a wrapper around a CacheDriver that provides a @@ -14,7 +15,7 @@ export class 
LocalCache { constructor(driver: L1CacheDriver, logger: Logger, serializer: CacheSerializer | undefined) { this.#driver = driver this.#serializer = serializer - this.#logger = logger.child({ context: 'bentocache.localCache' }) + this.#logger = logger.child({ layer: 'l1' }) } /** @@ -24,18 +25,26 @@ export class LocalCache { /** * Try to get the item from the local cache */ - this.#logger.trace({ key, opId: options.id }, 'try getting local cache item') + this.#logger.trace({ key, opId: options.id }, 'try getting from l1 cache') const value = this.#driver.get(key) /** * If the item is not found, return undefined */ if (value === undefined) { - this.#logger.trace({ key, opId: options.id }, 'local cache item not found') + this.#logger.debug({ key, opId: options.id }, 'cache miss') return } - return CacheEntry.fromDriver(key, value, this.#serializer) + const entry = CacheEntry.fromDriver(key, value, this.#serializer) + const isGraced = entry.isLogicallyExpired() + if (isGraced) { + this.#logger.debug({ key, opId: options.id }, 'cache hit (graced)') + } else { + this.#logger.debug({ key, opId: options.id }, 'cache hit') + } + + return { entry, isGraced } } /** @@ -53,7 +62,7 @@ export class LocalCache { /** * Save the item to the local cache */ - this.#logger.trace({ key, value, opId: options.id }, 'saving local cache item') + this.#logger.debug({ key, opId: options.id }, 'saving item') this.#driver.set(key, value, physicalTtl) } @@ -61,7 +70,7 @@ export class LocalCache { * Delete an item from the local cache */ delete(key: string, options?: CacheEntryOptions) { - this.#logger.trace({ key, opId: options?.id }, 'deleting local cache item') + this.#logger.debug({ key, opId: options?.id }, 'deleting item') return this.#driver.delete(key) } @@ -73,7 +82,7 @@ export class LocalCache { * is down and the grace period is enabled */ logicallyExpire(key: string) { - this.#logger.trace({ key }, 'logically expiring local cache item') + this.#logger.debug({ key }, 'logically expiring item') const value = this.#driver.get(key) if (value === undefined) return @@ -86,7 +95,7 @@ export class LocalCache { * Delete many item from the local cache */ deleteMany(keys: string[], options: CacheEntryOptions) { - this.#logger.trace({ keys, options, opId: options.id }, 'deleting local cache items') + this.#logger.debug({ keys, options, opId: options.id }, 'deleting items') this.#driver.deleteMany(keys) } diff --git a/packages/bentocache/src/cache/facades/remote_cache.ts b/packages/bentocache/src/cache/facades/remote_cache.ts index 14758cd..5c4b91c 100644 --- a/packages/bentocache/src/cache/facades/remote_cache.ts +++ b/packages/bentocache/src/cache/facades/remote_cache.ts @@ -1,8 +1,9 @@ import { is } from '@julr/utils/is' +import type { Logger } from '../../logger.js' +import type { L2CacheDriver } from '../../types/main.js' import { CacheEntry } from '../cache_entry/cache_entry.js' import { CircuitBreaker } from '../../circuit_breaker/index.js' -import type { L2CacheDriver, Logger } from '../../types/main.js' import type { BentoCacheOptions } from '../../bento_cache_options.js' import type { CacheEntryOptions } from '../cache_entry/cache_entry_options.js' @@ -30,22 +31,22 @@ export class RemoteCache { ? 
new CircuitBreaker({ breakDuration: options.l2CircuitBreakerDuration }) : undefined - this.#logger = logger.child({ context: 'bentocache.remoteCache' }) + this.#logger = logger.child({ layer: 'l2' }) } /** * Try to execute a cache operation and fallback to a default value * if the operation fails */ - async #tryCacheOperation( + async #tryCacheOperation( operation: string, options: CacheEntryOptions, fallbackValue: unknown, - fn: () => any, - ) { + fn: () => T, + ): Promise { if (this.#circuitBreaker?.isOpen()) { this.#logger.error({ opId: options.id }, `circuit breaker is open. ignoring operation`) - return fallbackValue + return fallbackValue as any } try { @@ -63,7 +64,7 @@ export class RemoteCache { (is.undefined(options.suppressL2Errors) && this.#hasL1Backup) || options.suppressL2Errors ) { - return fallbackValue + return fallbackValue as any } throw error @@ -78,7 +79,15 @@ export class RemoteCache { const value = await this.#driver.get(key) if (value === undefined) return - return CacheEntry.fromDriver(key, value, this.#options.serializer) + const entry = CacheEntry.fromDriver(key, value, this.#options.serializer) + const isGraced = entry.isLogicallyExpired() + if (isGraced) { + this.#logger.debug({ key, opId: options.id }, 'cache hit (graced)') + } else { + this.#logger.debug({ key, opId: options.id }, 'cache hit') + } + + return { entry, isGraced } }) } @@ -87,6 +96,7 @@ export class RemoteCache { */ async set(key: string, value: string, options: CacheEntryOptions) { return await this.#tryCacheOperation('set', options, false, async () => { + this.#logger.debug({ key, opId: options.id }, 'saving item') await this.#driver.set(key, value, options.getPhysicalTtl()) return true }) @@ -97,6 +107,7 @@ export class RemoteCache { */ async delete(key: string, options: CacheEntryOptions) { return await this.#tryCacheOperation('delete', options, false, async () => { + this.#logger.debug({ key, opId: options.id }, 'deleting item') return await this.#driver.delete(key) }) } @@ -106,6 +117,7 @@ export class RemoteCache { */ async deleteMany(keys: string[], options: CacheEntryOptions) { return await this.#tryCacheOperation('deleteMany', options, false, async () => { + this.#logger.debug({ keys, opId: options.id }, 'deleting items') return await this.#driver.deleteMany(keys) }) } diff --git a/packages/bentocache/src/cache/factory_runner.ts b/packages/bentocache/src/cache/factory_runner.ts index 2dcf3d1..ddd7d50 100644 --- a/packages/bentocache/src/cache/factory_runner.ts +++ b/packages/bentocache/src/cache/factory_runner.ts @@ -38,9 +38,14 @@ export class FactoryRunner { if (result === this.#skipSymbol) return + this.#stack.logger.info({ cache: this.#stack.name, opId: options.id, key }, 'factory success') await this.#stack.set(key, result, options) return result } catch (error) { + this.#stack.logger.warn( + { cache: this.#stack.name, opId: options.id, key, error }, + 'factory failed', + ) options.onFactoryError?.(new errors.E_FACTORY_ERROR(key, error, isBackground)) if (!isBackground) throw new errors.E_FACTORY_ERROR(key, error) @@ -57,6 +62,14 @@ export class FactoryRunner { lockReleaser: MutexInterface.Releaser, ) { const timeout = options.factoryTimeout(hasFallback) + if (timeout) { + this.#stack.logger.info( + { cache: this.#stack.name, opId: options.id, key }, + `running factory with ${timeout.type} timeout of ${timeout.duration}ms`, + ) + } else { + this.#stack.logger.info({ cache: this.#stack.name, opId: options.id, key }, 'running factory') + } /** * If the timeout is 0, we will not wait 
for the factory to resolve @@ -71,6 +84,10 @@ export class FactoryRunner { const result = await pTimeout(runFactory, { milliseconds: timeout?.duration ?? Number.POSITIVE_INFINITY, fallback: async () => { + this.#stack.logger.warn( + { cache: this.#stack.name, opId: options.id, key }, + `factory timed out after ${timeout?.duration}ms`, + ) throw new timeout!.exception(key) }, }) diff --git a/packages/bentocache/src/cache/get_set/single_tier_handler.ts b/packages/bentocache/src/cache/get_set/single_tier_handler.ts index 8ab690c..8cded8b 100644 --- a/packages/bentocache/src/cache/get_set/single_tier_handler.ts +++ b/packages/bentocache/src/cache/get_set/single_tier_handler.ts @@ -7,7 +7,7 @@ import { FactoryRunner } from '../factory_runner.js' import type { Factory } from '../../types/helpers.js' import type { CacheEvent } from '../../types/events.js' import { cacheEvents } from '../../events/cache_events.js' -import type { CacheEntry } from '../cache_entry/cache_entry.js' +import type { GetCacheValueReturn } from '../../types/internals/index.js' import type { CacheEntryOptions } from '../cache_entry/cache_entry_options.js' export class SingleTierHandler { @@ -35,11 +35,15 @@ export class SingleTierHandler { /** * Returns a value from the remote cache and emit a CacheHit event */ - async #returnRemoteCacheValue(key: string, item: CacheEntry, options: CacheEntryOptions) { - this.logger.trace({ key, cache: this.stack.name, opId: options.id }, 'remote cache hit') + async #returnRemoteCacheValue( + key: string, + item: GetCacheValueReturn, + options: CacheEntryOptions, + ) { + this.logger.logL2Hit({ cacheName: this.stack.name, key, options }) - this.#emit(cacheEvents.hit(key, item.getValue(), this.stack.name)) - return item.getValue() + this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name)) + return item.entry.getValue() } /** @@ -55,19 +59,13 @@ export class SingleTierHandler { #returnGracedValueOrThrow( key: string, - item: CacheEntry | undefined, + item: GetCacheValueReturn | undefined, options: CacheEntryOptions, err: Error, ) { if (options.isGraceEnabled() && item) { - const isLogicallyExpired = item.isLogicallyExpired() - this.#emit(cacheEvents.hit(key, item.getValue(), this.stack.name, isLogicallyExpired)) - this.logger.trace( - { key, cache: this.stack.name, opId: options.id }, - 'remote cache hit (graced)', - ) - - return item.getValue() + this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name, item.isGraced)) + return item.entry.getValue() } throw err @@ -75,7 +73,7 @@ export class SingleTierHandler { async #applyFallbackAndReturnGracedValue( key: string, - item: CacheEntry, + item: GetCacheValueReturn, options: CacheEntryOptions, ) { if (options.grace && options.graceBackoff) { @@ -84,29 +82,26 @@ export class SingleTierHandler { 'apply fallback duration', ) - this.stack.l2?.set(key, item.applyBackoff(options.graceBackoff).serialize() as any, options) + this.stack.l2?.set( + key, + item.entry.applyBackoff(options.graceBackoff).serialize() as any, + options, + ) } this.logger.trace({ key, cache: this.stack.name, opId: options.id }, 'returns stale value') - this.#emit(cacheEvents.hit(key, item.getValue(), this.stack.name, true)) - return item.getValue() - } - - /** - * Check if a cache item is not undefined and not logically expired - */ - #isItemValid(item: CacheEntry | undefined): item is CacheEntry { - return !!item && !item.isLogicallyExpired() + this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name, true)) + return 
item.entry.getValue() } async handle(key: string, factory: Factory, options: CacheEntryOptions) { - let remoteItem: CacheEntry | undefined + let remoteItem: GetCacheValueReturn | undefined /** * Check in the remote cache first if we have something */ remoteItem = await this.stack.l2?.get(key, options) - if (this.#isItemValid(remoteItem)) { + if (remoteItem?.isGraced === false) { return this.#returnRemoteCacheValue(key, remoteItem, options) } @@ -126,7 +121,7 @@ export class SingleTierHandler { * already set the value */ remoteItem = await this.stack.l2?.get(key, options) - if (this.#isItemValid(remoteItem)) { + if (remoteItem?.isGraced === false) { this.#locks.release(key, releaser) return this.#returnRemoteCacheValue(key, remoteItem, options) } diff --git a/packages/bentocache/src/cache/get_set/two_tier_handler.ts b/packages/bentocache/src/cache/get_set/two_tier_handler.ts index ae39ee9..8ea6708 100644 --- a/packages/bentocache/src/cache/get_set/two_tier_handler.ts +++ b/packages/bentocache/src/cache/get_set/two_tier_handler.ts @@ -7,7 +7,7 @@ import { FactoryRunner } from '../factory_runner.js' import type { Factory } from '../../types/helpers.js' import type { CacheEvent } from '../../types/events.js' import { cacheEvents } from '../../events/cache_events.js' -import type { CacheEntry } from '../cache_entry/cache_entry.js' +import type { GetCacheValueReturn } from '../../types/internals/index.js' import type { CacheEntryOptions } from '../cache_entry/cache_entry_options.js' export class TwoTierHandler { @@ -35,31 +35,23 @@ export class TwoTierHandler { /** * Returns a value from the local cache and emit a CacheHit event */ - #returnLocalCacheValue( - key: string, - item: CacheEntry, - options: CacheEntryOptions, - logMsg?: string, - ) { - const isLogicallyExpired = item.isLogicallyExpired() - logMsg = logMsg ?? 
'local cache hit' - - this.#emit(cacheEvents.hit(key, item.getValue(), this.stack.name, isLogicallyExpired)) - this.logger.trace({ key, cache: this.stack.name, opId: options.id }, logMsg) - - return item.getValue() + #returnL1Value(key: string, item: GetCacheValueReturn) { + this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name, item.isGraced)) + return item.entry.getValue() } /** * Returns a value from the remote cache and emit a CacheHit event */ - async #returnRemoteCacheValue(key: string, item: CacheEntry, options: CacheEntryOptions) { - this.logger.trace({ key, cache: this.stack.name, opId: options.id }, 'remote cache hit') - - this.stack.l1?.set(key, item.serialize(), options) + async #returnRemoteCacheValue( + key: string, + item: GetCacheValueReturn, + options: CacheEntryOptions, + ) { + this.stack.l1?.set(key, item.entry.serialize(), options) - this.#emit(cacheEvents.hit(key, item.getValue(), this.stack.name)) - return item.getValue() + this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name)) + return item.entry.getValue() } /** @@ -75,20 +67,17 @@ export class TwoTierHandler { #returnGracedValueOrThrow( key: string, - item: CacheEntry | undefined, + item: GetCacheValueReturn | undefined, options: CacheEntryOptions, err: Error, ) { - if (options.isGraceEnabled() && item) { - return this.#returnLocalCacheValue(key, item, options, 'local cache hit (graced)') - } - + if (options.isGraceEnabled() && item) return this.#returnL1Value(key, item) throw err } async #applyFallbackAndReturnGracedValue( key: string, - item: CacheEntry, + item: GetCacheValueReturn, options: CacheEntryOptions, ) { if (options.grace && options.graceBackoff) { @@ -97,26 +86,19 @@ export class TwoTierHandler { 'apply fallback duration', ) - this.stack.l1?.set(key, item.applyBackoff(options.graceBackoff).serialize(), options) + this.stack.l1?.set(key, item.entry.applyBackoff(options.graceBackoff).serialize(), options) } this.logger.trace({ key, cache: this.stack.name, opId: options.id }, 'returns stale value') - this.#emit(cacheEvents.hit(key, item.getValue(), this.stack.name, true)) - return item.getValue() - } - - /** - * Check if a cache item is not undefined and not logically expired - */ - #isItemValid(item: CacheEntry | undefined): item is CacheEntry { - return !!item && !item.isLogicallyExpired() + this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name, true)) + return item.entry.getValue() } async #lockAndHandle( key: string, factory: Factory, options: CacheEntryOptions, - localItem?: CacheEntry, + localItem?: GetCacheValueReturn, ) { /** * Since we didn't find a valid item in the local cache, we need to @@ -126,8 +108,10 @@ export class TwoTierHandler { */ let releaser: MutexInterface.Releaser try { + this.logger.trace({ key, cache: this.stack.name, opId: options.id }, 'acquiring lock...') releaser = await this.#acquireLock(key, !!localItem, options) } catch (err) { + this.logger.trace({ key, cache: this.stack.name, opId: options.id }, 'lock failed') return this.#returnGracedValueOrThrow(key, localItem, options, err) } @@ -138,9 +122,9 @@ export class TwoTierHandler { * could have written a value while we were waiting for the lock. 
*/ localItem = this.stack.l1?.get(key, options) - if (this.#isItemValid(localItem)) { + if (localItem?.isGraced === false) { this.#locks.release(key, releaser) - return this.#returnLocalCacheValue(key, localItem, options, 'local cache hit after lock') + return this.#returnL1Value(key, localItem) } /** @@ -149,7 +133,7 @@ export class TwoTierHandler { * and returns it. */ const remoteItem = await this.stack.l2?.get(key, options) - if (this.#isItemValid(remoteItem)) { + if (remoteItem?.isGraced === false) { this.#locks.release(key, releaser) return this.#returnRemoteCacheValue(key, remoteItem, options) } @@ -158,6 +142,7 @@ export class TwoTierHandler { const hasFallback = !!localItem || !!remoteItem const result = await this.#factoryRunner.run(key, factory, hasFallback, options, releaser) this.#emit(cacheEvents.miss(key, this.stack.name)) + return result } catch (err) { /** @@ -192,16 +177,14 @@ export class TwoTierHandler { * First we check the local cache. If we have a valid item, just * returns it without acquiring a lock. */ - const localItem = this.stack.l1?.get(key, options) - if (this.#isItemValid(localItem)) { - return this.#returnLocalCacheValue(key, localItem, options) - } + const item = this.stack.l1?.get(key, options) + if (item?.isGraced === false) return this.#returnL1Value(key, item) /** * Next, delegate to the lock-and-handle async method so we can keep * this method synchronous and avoid an overhead of async/await * in case we have a valid item in the local cache. */ - return this.#lockAndHandle(key, factory, options, localItem) + return this.#lockAndHandle(key, factory, options, item) } } diff --git a/packages/bentocache/src/logger.ts b/packages/bentocache/src/logger.ts new file mode 100644 index 0000000..e2af989 --- /dev/null +++ b/packages/bentocache/src/logger.ts @@ -0,0 +1,85 @@ +import type { Logger as InternalLogger, LogObject } from '@julr/utils/logger' + +import type { CacheEntryOptions } from './cache/cache_entry/cache_entry_options.js' + +export class Logger { + internalLogger: InternalLogger + + constructor(internalLogger: InternalLogger) { + this.internalLogger = internalLogger + } + + child(obj: LogObject) { + return new Logger(this.internalLogger.child(obj)) + } + + trace(msg: any, obj?: any) { + this.internalLogger.trace(msg, obj) + } + + debug(msg: any, obj?: any) { + this.internalLogger.debug(msg, obj) + } + + warn(msg: any, obj?: any) { + this.internalLogger.warn(msg, obj) + } + + error(msg: any, obj?: any) { + this.internalLogger.error(msg, obj) + } + + fatal(msg: any, obj?: any) { + this.internalLogger.fatal(msg, obj) + } + + info(msg: any, obj?: any) { + this.internalLogger.info(msg, obj) + } + + logMethod(options: { + cacheName: string + options: CacheEntryOptions + key?: string | string[] + method: string + }) { + this.internalLogger.debug( + { cacheName: options.cacheName, opId: options.options.id, key: options.key }, + `'${options.method}' method called`, + ) + } + + logL1Hit(options: { + cacheName: string + key: string + options: CacheEntryOptions + graced?: boolean + }) { + this.internalLogger.debug( + { + cacheName: options.cacheName, + opId: options.options.id, + key: options.key, + graced: options.graced, + }, + 'memory hit', + ) + } + + logL2Hit(options: { + cacheName: string + key: string + options: CacheEntryOptions + graced?: boolean + }) { + this.internalLogger.debug( + { + cacheName: options.cacheName, + opId: options.options.id, + key: options.key, + graced: options.graced, + }, + 'remote hit', + ) + } +} diff --git 
a/packages/bentocache/src/types/internals/index.ts b/packages/bentocache/src/types/internals/index.ts new file mode 100644 index 0000000..42e0523 --- /dev/null +++ b/packages/bentocache/src/types/internals/index.ts @@ -0,0 +1,6 @@ +import type { CacheEntry } from '../../cache/cache_entry/cache_entry.js' + +export type GetCacheValueReturn = { + entry: CacheEntry + isGraced: boolean +} diff --git a/packages/bentocache/tests/cache/local_cache.spec.ts b/packages/bentocache/tests/cache/local_cache.spec.ts index 07868ea..e787a00 100644 --- a/packages/bentocache/tests/cache/local_cache.spec.ts +++ b/packages/bentocache/tests/cache/local_cache.spec.ts @@ -1,6 +1,7 @@ import { test } from '@japa/runner' import { testLogger } from '@julr/utils/logger' +import { Logger } from '../../src/logger.js' import { MemoryDriver } from '../../src/drivers/memory.js' import { JsonSerializer } from '../../src/serializers/json.js' import { LocalCache } from '../../src/cache/facades/local_cache.js' @@ -8,7 +9,11 @@ import { createCacheEntryOptions } from '../../src/cache/cache_entry/cache_entry test.group('Local Cache', () => { test('logically expire should works', ({ assert }) => { - const localCache = new LocalCache(new MemoryDriver(), testLogger(), new JsonSerializer()) + const localCache = new LocalCache( + new MemoryDriver(), + new Logger(testLogger()), + new JsonSerializer(), + ) const options = createCacheEntryOptions({ ttl: '30m' }) const logicalExpiration = Date.now() + 1000 * 60 * 30 @@ -21,13 +26,13 @@ test.group('Local Cache', () => { const entryUpdated = localCache.get('foo', options) - assert.isFalse(entry.isLogicallyExpired()) - assert.isTrue(entryUpdated?.isLogicallyExpired()) + assert.isFalse(entry.entry.isLogicallyExpired()) + assert.isTrue(entryUpdated?.entry.isLogicallyExpired()) }) test('logically expire should keep the same physical ttl', ({ assert }) => { const driver = new MemoryDriver() - const localCache = new LocalCache(driver, testLogger(), new JsonSerializer()) + const localCache = new LocalCache(driver, new Logger(testLogger()), new JsonSerializer()) const options = createCacheEntryOptions({ ttl: '30m' }) const logicalExpiration = Date.now() + 1000 * 60 * 30 diff --git a/packages/bentocache/tests/cache/one_tier_local.spec.ts b/packages/bentocache/tests/cache/one_tier_local.spec.ts index 7614f4b..597dd78 100644 --- a/packages/bentocache/tests/cache/one_tier_local.spec.ts +++ b/packages/bentocache/tests/cache/one_tier_local.spec.ts @@ -448,7 +448,7 @@ test.group('One tier tests', () => { }) const res = local.get('key1', stack.defaultOptions)! 
- const logicalExpiration = res.getLogicalExpiration() + const logicalExpiration = res.entry.getLogicalExpiration() const inTwoDays = dayjs().add(2, 'day') assert.isTrue(dayjs(logicalExpiration).isSame(inTwoDays, 'day')) diff --git a/packages/bentocache/tests/cache/remote_cache.spec.ts b/packages/bentocache/tests/cache/remote_cache.spec.ts index 6cddcb1..3a9bd69 100644 --- a/packages/bentocache/tests/cache/remote_cache.spec.ts +++ b/packages/bentocache/tests/cache/remote_cache.spec.ts @@ -1,6 +1,7 @@ import { test } from '@japa/runner' import { testLogger } from '@julr/utils/logger' +import { Logger } from '../../src/logger.js' import { REDIS_CREDENTIALS } from '../helpers/index.js' import { RedisDriver } from '../../src/drivers/redis.js' import { ChaosCache } from '../helpers/chaos/chaos_cache.js' @@ -12,7 +13,12 @@ test.group('Remote Cache', () => { test('should rethrows errors if suppressL2Errors is disabled', async ({ assert, cleanup }) => { const logger = testLogger() const chaosCacheDriver = new ChaosCache(new RedisDriver({ connection: REDIS_CREDENTIALS })) - const cache = new RemoteCache(chaosCacheDriver, logger, true, new BentoCacheOptions({})) + const cache = new RemoteCache( + chaosCacheDriver, + new Logger(logger), + true, + new BentoCacheOptions({}), + ) cleanup(() => chaosCacheDriver.disconnect()) @@ -26,13 +32,19 @@ test.group('Remote Cache', () => { await assert.rejects(() => cache.deleteMany(['foo'], options)) await assert.rejects(() => cache.has('foo', options)) - assert.deepEqual(logger.logs.length, 5) + const errors = logger.logs.filter((log) => log.level === 'error') + assert.deepEqual(errors.length, 5) }) test('should ignore errors if suppressL2Errors is enabled', async ({ assert, cleanup }) => { const logger = testLogger() const chaosCacheDriver = new ChaosCache(new RedisDriver({ connection: REDIS_CREDENTIALS })) - const cache = new RemoteCache(chaosCacheDriver, logger, true, new BentoCacheOptions({})) + const cache = new RemoteCache( + chaosCacheDriver, + new Logger(logger), + true, + new BentoCacheOptions({}), + ) cleanup(() => chaosCacheDriver.disconnect()) @@ -46,7 +58,8 @@ test.group('Remote Cache', () => { await assert.doesNotReject(() => cache.deleteMany(['foo'], options)) await assert.doesNotReject(() => cache.has('foo', options)) - assert.deepEqual(logger.logs.length, 5) + const errors = logger.logs.filter((log) => log.level === 'error') + assert.deepEqual(errors.length, 5) }) test('rethrow errors if suppressL2Errors is not explicity set and we have not l1', async ({ @@ -55,7 +68,12 @@ test.group('Remote Cache', () => { }) => { const logger = testLogger() const chaosCacheDriver = new ChaosCache(new RedisDriver({ connection: REDIS_CREDENTIALS })) - const cache = new RemoteCache(chaosCacheDriver, logger, false, new BentoCacheOptions({})) + const cache = new RemoteCache( + chaosCacheDriver, + new Logger(logger), + false, + new BentoCacheOptions({}), + ) cleanup(() => chaosCacheDriver.disconnect()) @@ -69,7 +87,8 @@ test.group('Remote Cache', () => { await assert.rejects(() => cache.deleteMany(['foo'], options)) await assert.rejects(() => cache.has('foo', options)) - assert.deepEqual(logger.logs.length, 5) + const errors = logger.logs.filter((log) => log.level === 'error') + assert.deepEqual(errors.length, 5) }) test('suppress errors if suppressL2Errors is explicitly set to true and we have not l1', async ({ @@ -78,7 +97,12 @@ test.group('Remote Cache', () => { }) => { const logger = testLogger() const chaosCacheDriver = new ChaosCache(new RedisDriver({ 
connection: REDIS_CREDENTIALS })) - const cache = new RemoteCache(chaosCacheDriver, logger, false, new BentoCacheOptions({})) + const cache = new RemoteCache( + chaosCacheDriver, + new Logger(logger), + false, + new BentoCacheOptions({}), + ) cleanup(() => chaosCacheDriver.disconnect()) @@ -92,6 +116,7 @@ test.group('Remote Cache', () => { await assert.doesNotReject(() => cache.deleteMany(['foo'], options)) await assert.doesNotReject(() => cache.has('foo', options)) - assert.deepEqual(logger.logs.length, 5) + const errors = logger.logs.filter((log) => log.level === 'error') + assert.deepEqual(errors.length, 5) }) }) diff --git a/packages/bentocache/tests/cache/timeouts.spec.ts b/packages/bentocache/tests/cache/timeouts.spec.ts index d134c5d..51b5175 100644 --- a/packages/bentocache/tests/cache/timeouts.spec.ts +++ b/packages/bentocache/tests/cache/timeouts.spec.ts @@ -147,8 +147,8 @@ test.group('Soft Timeout', () => { const r3 = await remote.get('key', stack.defaultOptions) assert.deepEqual(r1, 'graced value') - assert.deepEqual(r2?.getValue(), 'new factory value') - assert.deepEqual(r3?.getValue(), 'new factory value') + assert.deepEqual(r2?.entry.getValue(), 'new factory value') + assert.deepEqual(r3?.entry.getValue(), 'new factory value') }) test('background factory should not generate an unhandled promise rejection', async ({ @@ -272,7 +272,7 @@ test.group('Hard timeout', () => { const r3 = await remote.get('key', stack.defaultOptions) assert.deepEqual(r1, undefined) - assert.deepEqual(r2?.getValue(), 'new factory value') - assert.deepEqual(r3?.getValue(), 'new factory value') + assert.deepEqual(r2?.entry.getValue(), 'new factory value') + assert.deepEqual(r3?.entry.getValue(), 'new factory value') }) }) diff --git a/packages/bentocache/tests/cache/two_tier.spec.ts b/packages/bentocache/tests/cache/two_tier.spec.ts index 9f43141..d4c0506 100644 --- a/packages/bentocache/tests/cache/two_tier.spec.ts +++ b/packages/bentocache/tests/cache/two_tier.spec.ts @@ -145,7 +145,7 @@ test.group('Cache', () => { await cache.get({ key: 'foo' }) const value = local.get('foo', stack.defaultOptions) - assert.deepEqual(value?.getValue(), 'bar') + assert.deepEqual(value?.entry.getValue(), 'bar') }) test('return default value if item not found in local and remote', async ({ assert }) => { @@ -192,7 +192,7 @@ test.group('Cache', () => { const localeValue = local.get('key1', stack.defaultOptions) assert.deepEqual(value, 'bar') - assert.deepEqual(localeValue?.getValue(), 'bar') + assert.deepEqual(localeValue?.entry.getValue(), 'bar') }) test('store values in both when key does not exists in local and remote', async ({ assert }) => { @@ -204,8 +204,8 @@ test.group('Cache', () => { const remoteValue = await remote.get('key1', stack.defaultOptions) assert.deepEqual(value, 'bar') - assert.deepEqual(localeValue!.getValue(), 'bar') - assert.deepEqual(remoteValue!.getValue(), 'bar') + assert.deepEqual(localeValue!.entry.getValue(), 'bar') + assert.deepEqual(remoteValue!.entry.getValue(), 'bar') }) test('with specific ttl', async ({ assert }) => { @@ -311,7 +311,7 @@ test.group('Cache', () => { await sleep(100) const entry = local.get('key1', stack.defaultOptions) - assert.deepEqual(entry?.isLogicallyExpired(), true) + assert.deepEqual(entry?.isGraced, true) await sleep(2000) @@ -354,8 +354,8 @@ test.group('Cache', () => { const r1 = local.get('foo', stack.defaultOptions) const r2 = await remote.get('foo', stack.defaultOptions) - assert.deepEqual(r1!.getValue(), 'bar') - assert.deepEqual(r2!.getValue(), 'bar') + 
assert.deepEqual(r1!.entry.getValue(), 'bar') + assert.deepEqual(r2!.entry.getValue(), 'bar') }) test('set should expires others local cache', async ({ assert }) => { @@ -379,7 +379,7 @@ test.group('Cache', () => { await sleep(100) assert.isDefined(r1) - assert.isBelow(r1!.getLogicalExpiration(), Date.now()) + assert.isBelow(r1!.entry.getLogicalExpiration(), Date.now()) assert.equal(r2, 'baz') }) @@ -654,8 +654,8 @@ test.group('Cache', () => { assert.deepEqual(r1, 'bar') assert.deepEqual(r2, 'bar') assert.isUndefined(r3) - assert.deepEqual(r4?.getValue(), 'bar') - assert.deepEqual(r5?.getValue(), 'bar') + assert.deepEqual(r4?.entry.getValue(), 'bar') + assert.deepEqual(r5?.entry.getValue(), 'bar') }) test('Bus shouldnt receive messages emitted by itself', async ({ assert }) => { @@ -667,7 +667,7 @@ test.group('Cache', () => { const r2 = local.get('foo', stack.defaultOptions) assert.deepEqual(r1, { foo: 'bar' }) - assert.deepEqual(r2?.getValue(), { foo: 'bar' }) + assert.deepEqual(r2?.entry.getValue(), { foo: 'bar' }) }) test('when local and remote hitted items are logically it should prioritize remote', async ({ diff --git a/playground/src/index.tsx b/playground/src/index.tsx index 84c42a6..f666728 100644 --- a/playground/src/index.tsx +++ b/playground/src/index.tsx @@ -9,7 +9,10 @@ const app = new Hono().use(logger()) const slowFetcher = async (url: string, timeout: number = 1000) => { await setTimeout(timeout) - return fetch(url).then((response) => response.json()) + return fetch(url).then(async (response) => ({ + ...(await response.json()), + fetchedAt: new Date().toISOString(), + })) } app.get('/cache-user/:id', async (c) => { @@ -44,10 +47,31 @@ app.get('/cached-user/:id', async (c) => { ) }) -app.get('/', (c) => { - return c.text('Hello Hono!') +app.get('/get-set-post/:id', async (c) => { + const id = c.req.param('id') + const user = await bento.getOrSet({ + key: `posts-${id}`, + factory: async () => { + const result = await slowFetcher(`https://jsonplaceholder.typicode.com/posts/${id}`) + setTimeout(5000).then(() => console.log('5s passed. expired')) + + return result + }, + ttl: '5s', + grace: '10m', + }) + + return c.html( +
<div> + <h2>User {id}</h2> + <p>From cache or set</p> + <pre>{JSON.stringify(user, null, 2)}</pre> + </div>, + ) }) +app.get('/', (c) => c.text('Hello Hono!')) + const port = 3042 -console.log(`Server is running on http://localhost:${port}`) serve({ fetch: app.fetch, port }) +console.log(`Server is running on http://localhost:${port}`)
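
A minimal usage sketch (not part of the diff above, assuming bentocache's public `BentoCache`/`bentostore`/`memoryDriver` API and a pino logger; store names, driver options, and keys are illustrative): the `logger` option is the one wrapped by `new Logger(...)` in bento_cache_options.ts, so passing a debug-level logger surfaces the reworked `logMethod`, `logL1Hit`/`logL2Hit` ("memory hit" / "remote hit") and L1/L2 facade messages.

import { pino } from 'pino'
import { BentoCache, bentostore } from 'bentocache'
import { memoryDriver } from 'bentocache/drivers/memory'

// Illustrative wiring: a debug-level logger makes the new debug output
// (method calls, memory/remote hits, graced hits, factory runs) visible.
const bento = new BentoCache({
  default: 'cache',
  logger: pino({ level: 'debug' }),
  stores: {
    cache: bentostore().useL1Layer(memoryDriver({})),
  },
})

// Each call now logs "'getOrSet' method called" with an opId, followed by
// "memory hit" or "remote hit" lines depending on where the entry was found.
await bento.getOrSet({
  key: 'users:1',
  ttl: '5m',
  grace: '10m',
  factory: async () => ({ id: 1, name: 'foo' }),
})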