Cache composing in MoleculerJS
/**
POC of cache composing.

The idea behind this madness was to compose multiple layers of LRU-type caches,
with the first layer being local (MemoryLRUCacher) at a 1:1 ratio of the configured TTL, and each subsequent tier n holding entries for n times the configured TTL.
In effect, when a cached item gets shifted out of the first layer for being the least recently used,
it still exists in the second layer, giving it a grace period of sorts before the entry disappears from all layers.

BUT WHY, you may ask?
Why not? When integrating with older systems where speed has never been a focus, and serving up a shitload of static data on each and every request,
I would rather talk to a Redis instance on the second or third layer. That is what inspired this in the first place. You can of course do all kinds of crazy things
with this as well; whatever floats your boat, etc. etc.
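
As a rough worked example (two layers, TTL configured at 60 seconds): set() writes the entry
to the first (memory) layer with a 60s TTL and to the second (Redis) layer with a 120s TTL.
A later get() that misses the memory layer but still hits Redis returns the data and re-populates
the memory layer with a proportionally shorter TTL (the remaining TTL divided by the layer index).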
How to use?
Simply import the CacheComposer into your moleculer.config.js
and wrap whatever cacher configuration you are using, like so (a fuller sketch follows the snippet):
....
cacher: new CacheComposer({
    cachers: [
        new MemoryLRUCacher({
            // Maximum items
            max: 100,
        }),
        new RedisCacher({
            // Prefix for keys
            prefix: "MOL",
            // Turns Redis client monitoring on.
            monitor: false,
            // Redis settings
            redis: {
                host: "redis-server",
                port: 6379,
                password: "1234",
                db: 0
            }
        }),
    ]
})
....
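
A more complete moleculer.config sketch (TypeScript flavour), assuming this file is saved as
./cache-composer.ts (hypothetical path) and that MemoryLRUCacher / RedisCacher above are
moleculer's built-in Cachers.MemoryLRU and Cachers.Redis:

import { BrokerOptions, Cachers } from "moleculer";
import CacheComposer from "./cache-composer";

const brokerConfig: BrokerOptions = {
    cacher: new CacheComposer({
        cachers: [
            new Cachers.MemoryLRU({ max: 100, ttl: 60 }),
            new Cachers.Redis({ prefix: "MOL", redis: { host: "redis-server", port: 6379 } }),
        ],
    }),
};

export default brokerConfig;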
*/
import {CacherOptions, Cachers, GenericObject, ServiceBroker, LoggerInstance} from "moleculer";
import BaseCacher from "moleculer/src/cachers/base";
type ComposedCacherAction = keyof Partial<Pick<Cachers.Base, 'set' | 'get' | 'del' | 'clean' | 'close' | 'getCacheKey' | 'getWithTTL' | 'defaultKeygen'>>;
type ComposedCacherActionArgs = ((i: number) => any[]) | any[];
export default class CacheComposer extends BaseCacher implements Cachers.Base {
    private cachers: Cachers.Base[];
    public opts: CacherOptions & { cachers: Cachers.Base[] };
    public logger: LoggerInstance;

    constructor(opts: CacherOptions & { cachers: Cachers.Base[] }) {
        super(opts);
        this.cachers = opts.cachers || [];
    }
    public init(broker: ServiceBroker): void {
        super.init(broker);
        this.cachers.forEach(c => c.init(broker));
        this.cachers = this.cachers.map(c => {
            // Give every layer its own child logger, named after the concrete cacher class.
            const name = c.constructor.name;
            // @ts-ignore -- override the logger the cacher's own init() assigned
            c.logger = broker.getLogger(`cache-composer.${name}`);
            return c;
        });
    }
    public async clean(match?: string | Array<string>): Promise<any> {
        return Promise.all(this.cachers.map(c => c.clean(match)));
    }

    public async close(): Promise<any> {
        return Promise.all(this.cachers.map(c => c.close()));
    }

    public defaultKeygen(...args): string {
        return this.searchCachersSync<string>(this.cachers, args, 'defaultKeygen');
    }

    public async del(...args): Promise<void> {
        return this.sequentialCachersAsync<void>(this.cachers, args, 'del');
    }

    public async get(...args): Promise<GenericObject | null> {
        return this.sequentialCachersAsync<GenericObject | null>(this.cachers, args, 'get');
    }

    public getCacheKey(...args): string {
        return this.searchCachersSync<string>(this.cachers, args, 'getCacheKey');
    }

    public async getWithTTL(...args): Promise<GenericObject | null> {
        return this.sequentialCachersAsync<any>(this.cachers, args, 'getWithTTL');
    }

    public async set(key: string, data: any, ttl?: number): Promise<any> {
        // Each layer stores the entry with ttl * layerIndex, so deeper layers keep it longer.
        const argsFn = (i: number) => [key, data, ttl ? ttl * i : ttl];
        return this.parallelCachersAsync<any>(this.cachers, argsFn, 'set');
    }
    protected execCacher<R>(cacher: Cachers.Base, args, method: ComposedCacherAction): R {
        return cacher[method].apply(cacher, args);
    }
    protected async sequentialCachersAsync<R>(cachers: Cachers.Base[], args: ComposedCacherActionArgs, method: ComposedCacherAction, i: number = 1): Promise<R | null> {
        if (cachers.length === 0) {
            return null;
        }
        const [cacher, ...otherCachers] = cachers;
        const props = typeof args === "function" ? args(i) : args;
        let result;
        switch (method) {
            case "get": {
                // Ask the current layer for the entry together with its remaining TTL.
                const [key] = props;
                const hit = await this.execCacher<Promise<GenericObject | null>>(cacher, [key], 'getWithTTL');
                result = hit ? hit.data : null;
                if (result && i > 1) {
                    // Hit on a deeper layer: warm the first layer again with a proportionally shorter TTL.
                    const [firstCacher] = this.cachers;
                    this.execCacher(firstCacher, [key, result, hit.ttl ? hit.ttl / i : hit.ttl], 'set');
                }
                break;
            }
            default:
                result = await this.execCacher<Promise<R>>(cacher, props, method);
                break;
        }
        if (result) {
            return result;
        }
        // Miss on this layer: fall through to the next one.
        return this.sequentialCachersAsync<R>(otherCachers, args, method, i + 1);
    }
    protected async parallelCachersAsync<R>(cachers: Cachers.Base[], args: ComposedCacherActionArgs, method: ComposedCacherAction, i: number = 1): Promise<R | null> {
        if (cachers.length === 0) {
            return null;
        }
        // Fan the call out to every layer; function-style args receive the 1-based layer index
        // so e.g. set() can scale the TTL per layer. Resolve with the first layer's result.
        const [result] = await Promise.all(
            cachers.map((c, idx) => this.execCacher<Promise<R>>(c, typeof args === "function" ? args(i + idx) : args, method))
        );
        return result;
    }
    protected searchCachersSync<R>(cachers: Cachers.Base[], args, method: ComposedCacherAction): R {
        if (cachers.length === 0) {
            return null;
        }
        const [cacher, ...otherCachers] = cachers;
        const result = this.execCacher<R>(cacher, args, method);
        if (result) {
            return result;
        }
        return this.searchCachersSync(otherCachers, args, method);
    }
}
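
For completeness, a minimal (untested) runtime sketch. The "static" service, its get action and the dummy payload are hypothetical; any action marked cache: true is cached through whatever cacher the broker was configured with, so the composer is transparent to services:

import { ServiceBroker } from "moleculer";
import brokerConfig from "./moleculer.config";   // the config sketched in the header comment (hypothetical path)

const broker = new ServiceBroker(brokerConfig);

broker.createService({
    name: "static",
    actions: {
        get: {
            cache: true,   // responses are cached through the composed layers
            async handler(ctx) {
                // Stand-in for an expensive lookup against a slow legacy system.
                return { id: ctx.params.id, payload: "..." };
            },
        },
    },
});

(async () => {
    await broker.start();
    await broker.call("static.get", { id: 1 });   // miss: the handler runs, every layer gets set()
    await broker.call("static.get", { id: 1 });   // hit: served straight from the first (memory) layer
    await broker.stop();
})();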