// ChunkyCache wraps a CacheService cache and splits large values across
// several keys so each stored chunk stays under the per-key size limit.
function ChunkyCache(cache, chunkSize){
  return {
    put: function (key, value, timeout) {
      var json = JSON.stringify(value);
      var cSize = Math.floor(chunkSize / 2);
      var chunks = [];
      var index = 0;
      while (index < json.length){
        var cKey = key + "_" + index;
        chunks.push(cKey);
        // Chunks get a slightly longer timeout than the super block so they
        // cannot expire before it does.
        cache.put(cKey, json.substr(index, cSize), timeout + 5);
        index += cSize;
      }
      // The super block stored under the original key lists all chunk keys.
      var superBlk = {
        chunkSize: chunkSize,
        chunks: chunks,
        length: json.length
      };
      cache.put(key, JSON.stringify(superBlk), timeout);
    },
    get: function (key) {
      var superBlkCache = cache.get(key);
      if (superBlkCache != null) {
        var superBlk = JSON.parse(superBlkCache);
        var chunks = superBlk.chunks.map(function (cKey){
          return cache.get(cKey);
        });
        // Only reassemble the value if every chunk is still in the cache.
        if (chunks.every(function (c) { return c != null; })){
          return JSON.parse(chunks.join(''));
        }
      }
    }
  };
}

function testGetCacheFrom(){
  var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('Data');
  var data = sheet.getDataRange().getValues();
  var chunky = ChunkyCache(CacheService.getDocumentCache(), 1024*90);
  chunky.put('Data', data, 120);
  var check = chunky.get('Data');
  var sheetPut = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('Out');
  for (var c in check) {
    sheetPut.appendRow(check[c]);
  }
}
What is the maximum time we can keep data in the cache, and why is there +5 with the timeout?
Thanks for the code.
It helps me a lot in my project.
LIFE SAVER -- THANK YOU SO MUCH!!!
This is excellent, thank you. This should be included in the Google Apps Script cache service.
Hi, this is still great code, and it's been 5 years now.
The limit is still the same; see the reference:
The maximum amount of data that can be stored per key is 100KB
I wondered if it works fine with a larger size in bytes, since your code uses the character count, and 1 char is not always 1 byte.
Here's the code to check:
var chunky = ChunkyCache(CacheService.getDocumentCache(), 1024*200);
var s = '🧞♂️'.repeat(1024 * 400); // 🧞♂️
chunky.put('Data', s, 120);
var check = chunky.get('Data');
// console.log(check);
// console.log(Utilities.newBlob(s).getBytes().length / 1024);
The final line of the sample code above checks the real size in kilobytes. It turns out the Google quota checks NOT the size in bytes, but the plain text length. In this case your code is perfect.
Your code also divides the chunk size by 2:
var cSize = Math.floor(chunkSize / 2);
It is really interesting because it gives extra room for the total size; the real stored size would be half the requested chunk size in this case. I thought of the other limitation:
The cap for cached items is 1,000
It would be better to take into account that the number of parts (cached items) is limited. So there's another upper limit, and we'd better make the chunks bigger if we would otherwise have to store more than 1,000 parts (chunks) of cached data.
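A minimal sketch of that idea, assuming the documented limits quoted above (100KB per key, 1,000 cached items); the helper name pickChunkSize is just for illustration and is not part of the gist:

function pickChunkSize(totalChars) {
  var MAX_ITEMS = 1000;               // documented cap on cached items
  var MAX_CHARS_PER_KEY = 1024 * 100; // 100KB per key, counted here as characters
  // ChunkyCache stores chunkSize / 2 characters per key, so aim for the
  // smallest per-chunk size that keeps the number of chunks under the cap.
  var perChunk = Math.ceil(totalChars / MAX_ITEMS);
  if (perChunk > MAX_CHARS_PER_KEY) {
    throw new Error('Payload too large to cache even with 1,000 chunks');
  }
  return perChunk * 2; // doubled because ChunkyCache halves it internally
}

For example, a 20,000,000-character JSON string would need chunks of at least 20,000 characters each, i.e. a chunkSize of 40,000.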
I wondered if it works fine with a larger size in bytes, since your code uses the character count, and 1 char is not always 1 byte.
This may depend on the specifics of the language; see https://262.ecma-international.org/5.1/#sec-4.3.16:
4.3.16 String value
primitive value that is a finite ordered sequence of zero or more 16-bit unsigned integer
NOTE A String value is a member of the String type. Each integer value in the sequence usually represents a single 16-bit unit of UTF-16 text. However, ECMAScript does not place any restrictions or requirements on the values except that they must be 16-bit unsigned integers.
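A small illustration (not from the thread) of what those 16-bit units mean for .length in Apps Script, assuming the V8 runtime where console.log is available:

var ascii = 'a';
var genie = '🧞';                // one code point outside the BMP
console.log(ascii.length);       // 1 -> one 16-bit unit
console.log(genie.length);       // 2 -> a surrogate pair, two 16-bit units
console.log(Utilities.newBlob(genie).getBytes().length); // 4 -> bytes in UTF-8

So .length (and therefore the quota check discussed above, if it counts characters) can differ from both the number of visible characters and the number of bytes.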
It is really interesting because it gives extra room for the total size; the real stored size would be half the requested chunk size in this case.
Check @ChrisBaker97's fork: https://gist.github.com/ChrisBaker97/c19ff5ee1a43a1d68178c74f39962516
Hi, thank you very much for this piece of code; I too wish GAS allowed a bigger caching space.
I had to add a remove method for my project and linted the code, so I am sharing it here in case someone needs it someday:
function ChunkyCache(cache, chunkSize) {
  return {
    put: function put(key, value, timeout) {
      const json = JSON.stringify(value);
      const cSize = Math.floor(chunkSize / 2);
      const chunks = [];
      let index = 0;
      while (index < json.length) {
        const cKey = `${key}_${index}`;
        chunks.push(cKey);
        cache.put(cKey, json.substr(index, cSize), timeout + 5);
        index += cSize;
      }
      const superBlk = {
        chunkSize,
        chunks,
        length: json.length,
      };
      cache.put(key, JSON.stringify(superBlk), timeout);
    },
    get: function get(key) {
      const superBlkCache = cache.get(key);
      if (superBlkCache != null) {
        const superBlk = JSON.parse(superBlkCache);
        // The arrow function must return the chunk, otherwise every entry maps to undefined.
        const chunks = superBlk.chunks.map(cKey => cache.get(cKey));
        if (chunks.every(c => c != null)) {
          return JSON.parse(chunks.join(''));
        }
      }
      return null;
    },
    remove: function remove(key) {
      const superBlkCache = cache.get(key);
      if (superBlkCache != null) {
        const superBlk = JSON.parse(superBlkCache);
        // Remove every chunk key listed in the super block.
        return cache.removeAll([...superBlk.chunks]);
      }
      return null;
    }
  };
}
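A minimal usage sketch for this version; the function name testRemoveFromCache and the reuse of the Data sheet simply mirror the test function at the top and are not part of the fork itself:

function testRemoveFromCache() {
  const chunky = ChunkyCache(CacheService.getDocumentCache(), 1024 * 90);
  const data = SpreadsheetApp.getActiveSpreadsheet()
    .getSheetByName('Data')
    .getDataRange()
    .getValues();

  chunky.put('Data', data, 120);            // store across chunks for 120 seconds
  console.log(chunky.get('Data') !== null); // true: the value is reassembled
  chunky.remove('Data');                    // drop all chunk keys for 'Data'
  console.log(chunky.get('Data'));          // null: the chunks are gone
}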
@RealSlimMahdi , I have the same code =)
https://gist.github.com/contributorpw/afbd50ad22767c7b758ceda94c53e393
Haha funny @contributorpw, your doc is better =]
@RealSlimMahdi , look at the remove method. They are identical =)
Yes, I have seen it 🤝
The ChunkyCache allows storage of objects larger than 100Kb by splitting them across a configurable number of bytes. It uses JSON.stringify to export the object, so some types will be coalesced. The test function copies all data from the sheet Data to the sheet Out, going through the cache.
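As a hypothetical illustration of the layout this produces: suppose JSON.stringify(data) is 100,000 characters long and the cache was created with chunkSize 1024*90 (so 46,080 characters per chunk); then chunky.put('Data', data, 120) writes roughly the following keys:

'Data'        -> {"chunkSize":92160,"chunks":["Data_0","Data_46080","Data_92160"],"length":100000}
'Data_0'      -> characters 0..46079 of the JSON string  (expires after 125 s)
'Data_46080'  -> characters 46080..92159                 (expires after 125 s)
'Data_92160'  -> characters 92160..99999                 (expires after 125 s)

get('Data') then reads the super block, fetches each chunk key, joins the pieces, and parses the JSON back into the original value.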