I wanted to try an experiment around improving caching performance in Node.js while using Redis. One of the ideas I came up with was to build a leveled cache that works primarily with an in-memory store (like eidetic), falling back to a secondary cache like Redis. I also wanted to support some form of compression for the secondary cache to shrink the payloads being sent over the network. The toy showed promising results in my early experiments: the payloads were often megabytes in size, so a ~60-90% reduction in size meant less I/O time.
DISCLAIMER: This isn't production ready. You have been warned!
var zlib = require('zlib');
var Buffer = require('buffer').Buffer;
function defaultCalculationForL2CacheSecondTtl(ttl) {
return ttl * 1.15;
}
function LCache(l1Cache, l2Cache, calculateL2CacheSeconds) {
if (!l1Cache || !l2Cache) {
throw new Error('LCache initialized without cache');
}
this.L1Cache = l1Cache;
this.L2Cache = l2Cache;
this.calculateL2CacheSeconds = calculateL2CacheSeconds || defaultCalculationForL2CacheSecondTtl;
}
LCache.prototype.updateL1Cache = function lCacheUpdateL1Cache(key, value, callback) {
var self = this;
self.L1Cache.set(key, value, function updateL1Cache(updateL1CacheError) {
if (updateL1CacheError) {
callback(updateL1CacheError, null);
return;
}
self.L2Cache.ttl(key, function l2CacheTtl(l2CacheTtlError, l2Ttl) {
self.L1Cache.expire(key, l2Ttl || 30, function l1CacheExpire(l1CacheExpireError) {
callback(l1CacheExpireError, value);
});
});
});
};
LCache.prototype.fallbackGet = function lCacheFallbackGet(key, callback) {
var self = this;
self.L2Cache.get(key, function l2CacheGet(l2CacheGetError, l2Value){
if (!l2Value) {
callback(l2CacheGetError, null);
return;
}
zlib.gunzip(new Buffer(l2Value, 'binary'), function (gunzipError, payload) {
if (gunzipError || !payload) {
callback(gunzipError || new Error('no payload'), null);
return;
}
var l2ValueUnzipped = payload.toString();
self.updateL1Cache(key, l2ValueUnzipped, callback);
});
});
};
LCache.prototype.get = function lCacheGet(key, callback) {
var self = this;
this.L1Cache.get(key, function l1CacheGet(cacheGetError, l1Value) {
if (l1Value) {
callback(cacheGetError, l1Value);
return;
}
self.fallbackGet(key, callback);
});
};
LCache.prototype.set = function lCacheSet(key, value, callback) {
var self = this;
if (!value) {
setImmediate(callback(new Error('Cannot cache empty value')));
return;
}
this.L1Cache.set(key, value, function l2CacheSet(l1CacheSetError) {
zlib.gzip(value, function (gzipError, gzippedOutput) {
self.L2Cache.set(key, gzippedOutput.toString('binary'), function l2CacheSet(l2CacheSetError) {
//var improvement = ((value.length - gzippedOutput.length)/value.length)*100;
//console.log(' pre-zip length: ' + value.length);
//console.log('post-zip length: ' + gzippedOutput.length);
//console.log(' improvement: ' + improvement.toFixed(2) + '%');
callback(l2CacheSetError || l1CacheSetError);
});
});
});
};
LCache.prototype.expire = function lCacheExpire(key, ttl, callback) {
var self = this;
this.L1Cache.expire(key, ttl, function l1CacheExpire(l1CacheExpireError) {
var l2Cachettl = self.calculateL2CacheSeconds(ttl);
self.L2Cache.expire(key, l2Cachettl, function l2CacheExpire(l2CacheExpireError) {
callback(l2CacheExpireError || l1CacheExpireError);
});
});
};
// Export the constructor: consumers call `new LCache(l1Cache, l2Cache[, calculateL2CacheSeconds])`.
module.exports = LCache;