mirror of https://github.com/gorhill/uBlock.git
Add support for cloud storage compression
Cloud storage is a limited resource, and thus it makes sense to support data compression before sending the data to cloud storage.

A new hidden setting allows toggling on cloud storage compression:

    name: cloudStorageCompression
    default: false

By default, this hidden setting is `false`, and a user must set it to `true` to enable compression of cloud storage items.

This hidden setting will eventually be toggled to `true` by default, once there is good confidence that a majority of users are running a version of uBO which can properly handle compressed cloud storage items.

A cursory assessment shows that compressed items are roughly 40-50% smaller in size.
commit d8b6b31eca
parent de6a9e311f
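For context, the reworked vAPI.cloud API in the diff below takes a details object rather than positional arguments, with optional encode/decode hooks. A minimal sketch of how a caller might wire in compression, under the assumption that `syncSettings`/`restoreSettings` are hypothetical wrappers and `compress`/`decompress` stand in for the LZ4-plus-base64 helpers added in this commit:

    async function syncSettings(datakey, data) {
        const details = { datakey, data };
        // Compress only when the hidden setting opts in.
        if ( µBlock.hiddenSettings.cloudStorageCompression ) {
            details.encode = compress;      // string in, compressed string out
        }
        return vAPI.cloud.push(details);
    }

    async function restoreSettings(datakey) {
        // Decoding is always wired in: uncompressed items pass through as-is.
        return vAPI.cloud.pull({ datakey, decode: decompress });
    }
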
@@ -1540,10 +1540,10 @@ vAPI.cloud = (( ) => {
    // good thing given chrome.storage.sync.MAX_WRITE_OPERATIONS_PER_MINUTE
    // and chrome.storage.sync.MAX_WRITE_OPERATIONS_PER_HOUR.

-    const getCoarseChunkCount = async function(dataKey) {
+    const getCoarseChunkCount = async function(datakey) {
        const keys = {};
        for ( let i = 0; i < maxChunkCountPerItem; i += 16 ) {
-            keys[dataKey + i.toString()] = '';
+            keys[datakey + i.toString()] = '';
        }
        let bin;
        try {
@@ -1553,13 +1553,13 @@ vAPI.cloud = (( ) => {
        }
        let chunkCount = 0;
        for ( let i = 0; i < maxChunkCountPerItem; i += 16 ) {
-            if ( bin[dataKey + i.toString()] === '' ) { break; }
+            if ( bin[datakey + i.toString()] === '' ) { break; }
            chunkCount = i + 16;
        }
        return chunkCount;
    };

-    const deleteChunks = function(dataKey, start) {
+    const deleteChunks = function(datakey, start) {
        const keys = [];

        // No point in deleting more than:
@@ -1570,34 +1570,37 @@ vAPI.cloud = (( ) => {
            Math.ceil(maxStorageSize / maxChunkSize)
        );
        for ( let i = start; i < n; i++ ) {
-            keys.push(dataKey + i.toString());
+            keys.push(datakey + i.toString());
        }
        if ( keys.length !== 0 ) {
            webext.storage.sync.remove(keys);
        }
    };

-    const push = async function(dataKey, data) {
-        let bin = {
-            'source': options.deviceName || options.defaultDeviceName,
-            'tstamp': Date.now(),
-            'data': data,
-            'size': 0
+    const push = async function(details) {
+        const { datakey, data, encode } = details;
+        const item = {
+            source: options.deviceName || options.defaultDeviceName,
+            tstamp: Date.now(),
+            data,
        };
-        bin.size = JSON.stringify(bin).length;
-        const item = JSON.stringify(bin);
+        const json = JSON.stringify(item);
+        const encoded = encode instanceof Function
+            ? await encode(json)
+            : json;

        // Chunkify taking into account QUOTA_BYTES_PER_ITEM:
        // https://developer.chrome.com/extensions/storage#property-sync
        // "The maximum size (in bytes) of each individual item in sync
        // "storage, as measured by the JSON stringification of its value
        // "plus its key length."
-        bin = {};
-        let chunkCount = Math.ceil(item.length / maxChunkSize);
+        const bin = {};
+        const chunkCount = Math.ceil(encoded.length / maxChunkSize);
        for ( let i = 0; i < chunkCount; i++ ) {
-            bin[dataKey + i.toString()] = item.substr(i * maxChunkSize, maxChunkSize);
+            bin[datakey + i.toString()]
+                = encoded.substr(i * maxChunkSize, maxChunkSize);
        }
-        bin[dataKey + chunkCount.toString()] = ''; // Sentinel
+        bin[datakey + chunkCount.toString()] = ''; // Sentinel

        try {
            await webext.storage.sync.set(bin);
@@ -1606,18 +1609,19 @@ vAPI.cloud = (( ) => {
        }

        // Remove potentially unused trailing chunks
-        deleteChunks(dataKey, chunkCount);
+        deleteChunks(datakey, chunkCount);
    };

-    const pull = async function(dataKey) {
+    const pull = async function(details) {
+        const { datakey, decode } = details;

-        const result = await getCoarseChunkCount(dataKey);
+        const result = await getCoarseChunkCount(datakey);
        if ( typeof result !== 'number' ) {
            return result;
        }
        const chunkKeys = {};
        for ( let i = 0; i < result; i++ ) {
-            chunkKeys[dataKey + i.toString()] = '';
+            chunkKeys[datakey + i.toString()] = '';
        }

        let bin;
@@ -1633,31 +1637,35 @@ vAPI.cloud = (( ) => {
        // happen when the number of chunks is a multiple of
        // chunkCountPerFetch. Hence why we must also test against
        // undefined.
-        let json = [], jsonSlice;
+        let encoded = [];
        let i = 0;
        for (;;) {
-            jsonSlice = bin[dataKey + i.toString()];
-            if ( jsonSlice === '' || jsonSlice === undefined ) { break; }
-            json.push(jsonSlice);
+            const slice = bin[datakey + i.toString()];
+            if ( slice === '' || slice === undefined ) { break; }
+            encoded.push(slice);
            i += 1;
        }
+        encoded = encoded.join('');
+        const json = decode instanceof Function
+            ? await decode(encoded)
+            : encoded;
        let entry = null;
        try {
-            entry = JSON.parse(json.join(''));
+            entry = JSON.parse(json);
        } catch(ex) {
        }
        return entry;
    };

-    const used = async function(dataKey) {
+    const used = async function(datakey) {
        if ( webext.storage.sync.getBytesInUse instanceof Function === false ) {
            return;
        }
-        const coarseCount = await getCoarseChunkCount(dataKey);
+        const coarseCount = await getCoarseChunkCount(datakey);
        if ( typeof coarseCount !== 'number' ) { return; }
        const keys = [];
        for ( let i = 0; i < coarseCount; i++ ) {
-            keys.push(`${dataKey}${i}`);
+            keys.push(`${datakey}${i}`);
        }
        let results;
        try {

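The chunking done by push() above can be illustrated with made-up numbers; the real maxChunkSize is derived from chrome.storage.sync.QUOTA_BYTES_PER_ITEM, and 'mydata' is a placeholder key:

    const maxChunkSize = 8;
    const encoded = 'abcdefghijklmnopqrst';                       // 20 characters
    const chunkCount = Math.ceil(encoded.length / maxChunkSize);  // 3
    const bin = {};
    for ( let i = 0; i < chunkCount; i++ ) {
        bin['mydata' + i] = encoded.substr(i * maxChunkSize, maxChunkSize);
    }
    bin['mydata' + chunkCount] = ''; // sentinel marking the end of the data
    // bin => { mydata0: 'abcdefgh', mydata1: 'ijklmnop', mydata2: 'qrst', mydata3: '' }

pull() then walks the keys in order until it meets the empty sentinel, joins the slices, and hands the result to the decode hook.
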
@@ -47,6 +47,7 @@ const µBlock = (( ) => { // jshint ignore:line
    cacheStorageAPI: 'unset',
    cacheStorageCompression: true,
    cacheControlForFirefox1376932: 'no-cache, no-store, must-revalidate',
+    cloudStorageCompression: false,
    cnameIgnoreList: 'unset',
    cnameIgnore1stParty: true,
    cnameIgnoreExceptions: true,

@@ -195,22 +195,48 @@
    return dbPromise;
};

+const fromBlob = function(data) {
+    if ( data instanceof Blob === false ) {
+        return Promise.resolve(data);
+    }
+    return new Promise(resolve => {
+        const blobReader = new FileReader();
+        blobReader.onloadend = ev => {
+            resolve(new Uint8Array(ev.target.result));
+        };
+        blobReader.readAsArrayBuffer(data);
+    });
+};
+
+const toBlob = function(data) {
+    const value = data instanceof Uint8Array
+        ? new Blob([ data ])
+        : data;
+    return Promise.resolve(value);
+};
+
+const compress = function(store, key, data) {
+    return µBlock.lz4Codec.encode(data, toBlob).then(value => {
+        store.push({ key, value });
+    });
+};
+
+const decompress = function(store, key, data) {
+    return µBlock.lz4Codec.decode(data, fromBlob).then(data => {
+        store[key] = data;
+    });
+};
+
const getFromDb = async function(keys, keyvalStore, callback) {
    if ( typeof callback !== 'function' ) { return; }
    if ( keys.length === 0 ) { return callback(keyvalStore); }
    const promises = [];
    const gotOne = function() {
        if ( typeof this.result !== 'object' ) { return; }
-        keyvalStore[this.result.key] = this.result.value;
-        if ( this.result.value instanceof Blob === false ) { return; }
-        promises.push(
-            µBlock.lz4Codec.decode(
-                this.result.key,
-                this.result.value
-            ).then(result => {
-                keyvalStore[result.key] = result.data;
-            })
-        );
+        const { key, value } = this.result;
+        keyvalStore[key] = value;
+        if ( value instanceof Blob === false ) { return; }
+        promises.push(decompress(keyvalStore, key, value));
    };
    try {
        const db = await getDb();
@@ -265,16 +291,10 @@
            });
            return;
        }
-        keyvalStore[entry.key] = entry.value;
+        const { key, value } = entry;
+        keyvalStore[key] = value;
        if ( entry.value instanceof Blob === false ) { return; }
-        promises.push(
-            µBlock.lz4Codec.decode(
-                entry.key,
-                entry.value
-            ).then(result => {
-                keyvalStore[result.key] = result.value;
-            })
-        );
+        promises.push(decompress(keyvalStore, key, value));
    }).catch(reason => {
        console.info(`cacheStorage.getAllFromDb() failed: ${reason}`);
        callback();
@@ -297,19 +317,14 @@
        const entries = [];
        const dontCompress =
            µBlock.hiddenSettings.cacheStorageCompression !== true;
-        const handleEncodingResult = result => {
-            entries.push({ key: result.key, value: result.data });
-        };
        for ( const key of keys ) {
-            const data = keyvalStore[key];
-            const isString = typeof data === 'string';
+            const value = keyvalStore[key];
+            const isString = typeof value === 'string';
            if ( isString === false || dontCompress ) {
-                entries.push({ key, value: data });
+                entries.push({ key, value });
                continue;
            }
-            promises.push(
-                µBlock.lz4Codec.encode(key, data).then(handleEncodingResult)
-            );
+            promises.push(compress(entries, key, value));
        }
        const finish = ( ) => {
            if ( callback === undefined ) { return; }

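A rough sketch of how the compress()/decompress() helpers above are meant to be used; the cache key and string value are made up. compress() appends a { key, value } entry to an array bound for IndexedDB, and decompress() writes the decoded string back into a key/value map:

    const largeStringValue = 'x'.repeat(10000);
    const entries = [];
    await compress(entries, 'some/cache/key', largeStringValue);
    // entries[0].value is a Blob when compression kicked in, otherwise the string

    const keyvalStore = {};
    await decompress(keyvalStore, 'some/cache/key', entries[0].value);
    // keyvalStore['some/cache/key'] is the original string again
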
@@ -48,7 +48,7 @@ if ( self.cloud.datakey === '' ) { return; }
/******************************************************************************/

const fetchStorageUsed = async function() {
-    const elem = widget.querySelector('#cloudCapacity');
+    let elem = widget.querySelector('#cloudCapacity');
    if ( elem.classList.contains('hide') ) { return; }
    const result = await vAPI.messaging.send('cloudWidget', {
        what: 'cloudUsed',
@@ -58,10 +58,16 @@ const fetchStorageUsed = async function() {
        elem.classList.add('hide');
        return;
    }
    const units = ' ' + vAPI.i18n('genericBytes');
    elem.title = result.max.toLocaleString() + units;
    const total = (result.total / result.max * 100).toFixed(1);
-    elem.firstElementChild.style.width = `${total}%`;
+    elem = elem.firstElementChild;
+    elem.style.width = `${total}%`;
+    elem.title = result.total.toLocaleString() + units;
    const used = (result.used / result.total * 100).toFixed(1);
-    elem.firstElementChild.firstElementChild.style.width = `${used}%`;
+    elem = elem.firstElementChild;
+    elem.style.width = `${used}%`;
+    elem.title = result.used.toLocaleString() + units;
};

/******************************************************************************/

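With made-up figures, the nested meter built above works out as follows: the outer bar shows result.total as a share of result.max, the inner one result.used as a share of result.total.

    const result = { max: 102400, total: 30720, used: 12288 };   // assumed values
    const total = (result.total / result.max * 100).toFixed(1);  // '30.0' -> outer bar width
    const used = (result.used / result.total * 100).toFixed(1);  // '40.0' -> inner bar width
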
@@ -32,7 +32,8 @@

**/

-µBlock.lz4Codec = (function() { // >>>> Start of private namespace
+{
+// >>>> Start of private namespace

/******************************************************************************/
@@ -96,28 +97,15 @@ const ttlManage = function(count) {
    ttlTimer = vAPI.setTimeout(destroy, ttlDelay);
};

-const uint8ArrayFromBlob = function(key, data) {
-    if ( data instanceof Blob === false ) {
-        return Promise.resolve({ key, data });
-    }
-    return new Promise(resolve => {
-        let blobReader = new FileReader();
-        blobReader.onloadend = ev => {
-            resolve({ key, data: new Uint8Array(ev.target.result) });
-        };
-        blobReader.readAsArrayBuffer(data);
-    });
-};
-
-const encodeValue = function(key, value) {
+const encodeValue = function(dataIn) {
    if ( !lz4CodecInstance ) { return; }
    //let t0 = window.performance.now();
    if ( textEncoder === undefined ) {
        textEncoder = new TextEncoder();
    }
-    let inputArray = textEncoder.encode(value);
-    let inputSize = inputArray.byteLength;
-    let outputArray = lz4CodecInstance.encodeBlock(inputArray, 8);
+    const inputArray = textEncoder.encode(dataIn);
+    const inputSize = inputArray.byteLength;
+    const outputArray = lz4CodecInstance.encodeBlock(inputArray, 8);
    if ( outputArray instanceof Uint8Array === false ) { return; }
    outputArray[0] = 0x18;
    outputArray[1] = 0x4D;
@@ -129,7 +117,6 @@ const encodeValue = function(key, value) {
    outputArray[7] = (inputSize >>> 24) & 0xFF;
    //console.info(
    // 'uBO: [%s] compressed %d KB => %d KB (%s%%) in %s ms',
-    // key,
    // inputArray.byteLength >> 10,
    // outputArray.byteLength >> 10,
    // (outputArray.byteLength / inputArray.byteLength * 100).toFixed(0),
@@ -138,66 +125,71 @@ const encodeValue = function(key, value) {
    return outputArray;
};

-const decodeValue = function(key, inputArray) {
+const decodeValue = function(inputArray) {
    if ( !lz4CodecInstance ) { return; }
    //let t0 = window.performance.now();
    if (
        inputArray[0] !== 0x18 || inputArray[1] !== 0x4D ||
        inputArray[2] !== 0x22 || inputArray[3] !== 0x04
    ) {
        console.error('decodeValue: invalid input array');
        return;
    }
-    let outputSize =
+    const outputSize =
        (inputArray[4] << 0) | (inputArray[5] << 8) |
        (inputArray[6] << 16) | (inputArray[7] << 24);
-    let outputArray = lz4CodecInstance.decodeBlock(inputArray, 8, outputSize);
+    const outputArray = lz4CodecInstance.decodeBlock(inputArray, 8, outputSize);
    if ( outputArray instanceof Uint8Array === false ) { return; }
    if ( textDecoder === undefined ) {
        textDecoder = new TextDecoder();
    }
-    let value = textDecoder.decode(outputArray);
+    const s = textDecoder.decode(outputArray);
    //console.info(
    // 'uBO: [%s] decompressed %d KB => %d KB (%s%%) in %s ms',
-    // key,
    // inputArray.byteLength >>> 10,
    // outputSize >>> 10,
    // (inputArray.byteLength / outputSize * 100).toFixed(0),
    // (window.performance.now() - t0).toFixed(1)
    //);
-    return value;
+    return s;
};

-return {
-    encode: function(key, dataIn) {
+µBlock.lz4Codec = {
+    // Arguments:
+    // dataIn: must be a string
+    // Returns:
+    // A Uint8Array, or the input string as is if compression is not
+    // possible.
+    encode: async function(dataIn, serialize = undefined) {
        if ( typeof dataIn !== 'string' || dataIn.length < 4096 ) {
-            return Promise.resolve({ key, data: dataIn });
+            return dataIn;
        }
        ttlManage(1);
-        return init().then(( ) => {
-            ttlManage(-1);
-            let dataOut = encodeValue(key, dataIn) || dataIn;
-            if ( dataOut instanceof Uint8Array ) {
-                dataOut = new Blob([ dataOut ]);
-            }
-            return { key, data: dataOut || dataIn };
-        });
+        await init();
+        let dataOut = encodeValue(dataIn);
+        ttlManage(-1);
+        if ( serialize instanceof Function ) {
+            dataOut = await serialize(dataOut);
+        }
+        return dataOut || dataIn;
    },
-    decode: function(key, dataIn) {
-        if ( dataIn instanceof Blob === false ) {
-            return Promise.resolve({ key, data: dataIn });
+    // Arguments:
+    // dataIn: must be a Uint8Array
+    // Returns:
+    // A string, or the input argument as is if decompression is not
+    // possible.
+    decode: async function(dataIn, deserialize = undefined) {
+        if ( deserialize instanceof Function ) {
+            dataIn = await deserialize(dataIn);
+        }
+        if ( dataIn instanceof Uint8Array === false ) {
+            return dataIn;
        }
        ttlManage(1);
-        return Promise.all([
-            init(),
-            uint8ArrayFromBlob(key, dataIn)
-        ]).then(results => {
-            ttlManage(-1);
-            let result = results[1];
-            return {
-                key: result.key,
-                data: decodeValue(result.key, result.data) || result.data
-            };
-        });
+        await init();
+        const dataOut = decodeValue(dataIn);
+        ttlManage(-1);
+        return dataOut || dataIn;
    },
    relinquish: function() {
        ttlDelay = 1;
@@ -207,4 +199,5 @@ return {

/******************************************************************************/

-})(); // <<<< End of private namespace
+// <<<< End of private namespace
+}

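A minimal round-trip through the reworked codec API shown above, with no serialize/deserialize hooks, so encode() yields a Uint8Array and decode() accepts one; strings shorter than 4096 characters are passed through untouched:

    const text = 'x'.repeat(10000);                          // anything >= 4096 characters
    const packed = await µBlock.lz4Codec.encode(text);       // Uint8Array, or `text` as-is if compression was not possible
    const unpacked = await µBlock.lz4Codec.decode(packed);   // original string again
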
@@ -798,6 +798,33 @@ vAPI.messaging.listen({
{
// >>>>> start of local scope

+const fromBase64 = function(encoded) {
+    if ( typeof encoded !== 'string' ) {
+        return Promise.resolve(encoded);
+    }
+    let u8array;
+    try {
+        u8array = µBlock.denseBase64.decode(encoded);
+    } catch(ex) {
+    }
+    return Promise.resolve(u8array !== undefined ? u8array : encoded);
+};
+
+const toBase64 = function(data) {
+    const value = data instanceof Uint8Array
+        ? µBlock.denseBase64.encode(data)
+        : data;
+    return Promise.resolve(value);
+};
+
+const compress = function(json) {
+    return µBlock.lz4Codec.encode(json, toBase64);
+};
+
+const decompress = function(encoded) {
+    return µBlock.lz4Codec.decode(encoded, fromBase64);
+};
+
const onMessage = function(request, sender, callback) {
    // Cloud storage support is optional.
    if ( µBlock.cloudStorageSupported !== true ) {
@@ -819,12 +846,16 @@ const onMessage = function(request, sender, callback) {
        return;

    case 'cloudPull':
-        return vAPI.cloud.pull(request.datakey).then(result => {
+        request.decode = decompress;
+        return vAPI.cloud.pull(request).then(result => {
            callback(result);
        });

    case 'cloudPush':
-        return vAPI.cloud.push(request.datakey, request.data).then(result => {
+        if ( µBlock.hiddenSettings.cloudStorageCompression ) {
+            request.encode = compress;
+        }
+        return vAPI.cloud.push(request).then(result => {
            callback(result);
        });

src/js/utils.js
@@ -530,118 +530,225 @@

/******************************************************************************/

-// Custom base64 encoder/decoder
-//
-// TODO:
-// Could expand the LZ4 codec API to be able to return UTF8-safe string
-// representation of a compressed buffer, and thus the code below could be
-// moved LZ4 codec-side.
-//
-// https://github.com/uBlockOrigin/uBlock-issues/issues/461
-// Provide a fallback encoding for Chromium 59 and less by issuing a plain
-// JSON string. The fallback can be removed once min supported version is
-// above 59.
-
-µBlock.base64 = new (class {
-    constructor() {
-        this.valToDigit = new Uint8Array(64);
-        this.digitToVal = new Uint8Array(128);
-        const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz@%";
-        for ( let i = 0, n = chars.length; i < n; i++ ) {
-            const c = chars.charCodeAt(i);
-            this.valToDigit[i] = c;
-            this.digitToVal[c] = i;
-        }
-        this.magic = 'Base64_1';
-    }
-
-    encode(arrbuf, arrlen) {
-        const inputLength = (arrlen + 3) >>> 2;
-        const inbuf = new Uint32Array(arrbuf, 0, inputLength);
-        const outputLength = this.magic.length + 7 + inputLength * 7;
-        const outbuf = new Uint8Array(outputLength);
-        // magic bytes
-        let j = 0;
-        for ( let i = 0; i < this.magic.length; i++ ) {
-            outbuf[j++] = this.magic.charCodeAt(i);
-        }
-        // array size
-        let v = inputLength;
-        do {
-            outbuf[j++] = this.valToDigit[v & 0b111111];
-            v >>>= 6;
-        } while ( v !== 0 );
-        outbuf[j++] = 0x20 /* ' ' */;
-        // array content
-        for ( let i = 0; i < inputLength; i++ ) {
-            v = inbuf[i];
-            do {
-                outbuf[j++] = this.valToDigit[v & 0b111111];
-                v >>>= 6;
-            } while ( v !== 0 );
-            outbuf[j++] = 0x20 /* ' ' */;
-        }
-        if ( typeof TextDecoder === 'undefined' ) {
-            return JSON.stringify(
-                Array.from(new Uint32Array(outbuf.buffer, 0, j >>> 2))
-            );
-        }
-        const textDecoder = new TextDecoder();
-        return textDecoder.decode(new Uint8Array(outbuf.buffer, 0, j));
-    }
-
-    decode(instr, arrbuf) {
-        if ( instr.charCodeAt(0) === 0x5B /* '[' */ ) {
-            const inbuf = JSON.parse(instr);
-            if ( arrbuf instanceof ArrayBuffer === false ) {
-                return new Uint32Array(inbuf);
-            }
-            const outbuf = new Uint32Array(arrbuf);
-            outbuf.set(inbuf);
-            return outbuf;
-        }
-        if ( instr.startsWith(this.magic) === false ) {
-            throw new Error('Invalid µBlock.base64 encoding');
-        }
-        const inputLength = instr.length;
-        const outputLength = this.decodeSize(instr) >> 2;
-        const outbuf = arrbuf instanceof ArrayBuffer === false
-            ? new Uint32Array(outputLength)
-            : new Uint32Array(arrbuf);
-        let i = instr.indexOf(' ', this.magic.length) + 1;
-        if ( i === -1 ) {
-            throw new Error('Invalid µBlock.base64 encoding');
-        }
-        // array content
-        let j = 0;
-        for (;;) {
-            if ( j === outputLength || i >= inputLength ) { break; }
-            let v = 0, l = 0;
-            for (;;) {
-                const c = instr.charCodeAt(i++);
-                if ( c === 0x20 /* ' ' */ ) { break; }
-                v += this.digitToVal[c] << l;
-                l += 6;
-            }
-            outbuf[j++] = v;
-        }
-        if ( i < inputLength || j < outputLength ) {
-            throw new Error('Invalid µBlock.base64 encoding');
-        }
-        return outbuf;
-    }
-
-    decodeSize(instr) {
-        if ( instr.startsWith(this.magic) === false ) { return 0; }
-        let v = 0, l = 0, i = this.magic.length;
-        for (;;) {
-            const c = instr.charCodeAt(i++);
-            if ( c === 0x20 /* ' ' */ ) { break; }
-            v += this.digitToVal[c] << l;
-            l += 6;
-        }
-        return v << 2;
-    }
-})();
+// Custom base64 codecs. These codecs are meant to encode/decode typed arrays
+// to/from strings.
+
+// https://github.com/uBlockOrigin/uBlock-issues/issues/461
+// Provide a fallback encoding for Chromium 59 and less by issuing a plain
+// JSON string. The fallback can be removed once min supported version is
+// above 59.
+
+// TODO: rename µBlock.base64 to µBlock.SparseBase64, now that
+// µBlock.DenseBase64 has been introduced.
+// TODO: Should no longer need to test presence of TextEncoder/TextDecoder.
+
+{
+    const valToDigit = new Uint8Array(64);
+    const digitToVal = new Uint8Array(128);
+    {
+        const chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz@%';
+        for ( let i = 0, n = chars.length; i < n; i++ ) {
+            const c = chars.charCodeAt(i);
+            valToDigit[i] = c;
+            digitToVal[c] = i;
+        }
+    }
+
+    // The sparse base64 codec is best for buffers which contains a lot of
+    // small u32 integer values. Those small u32 integer values are better
+    // represented with stringified integers, because small values can be
+    // represented with fewer bits than the usual base64 codec. For example,
+    // 0 become '0 ', i.e. 16 bits instead of 48 bits with official base64
+    // codec.
+
+    µBlock.base64 = {
+        magic: 'Base64_1',
+
+        encode: function(arrbuf, arrlen) {
+            const inputLength = (arrlen + 3) >>> 2;
+            const inbuf = new Uint32Array(arrbuf, 0, inputLength);
+            const outputLength = this.magic.length + 7 + inputLength * 7;
+            const outbuf = new Uint8Array(outputLength);
+            // magic bytes
+            let j = 0;
+            for ( let i = 0; i < this.magic.length; i++ ) {
+                outbuf[j++] = this.magic.charCodeAt(i);
+            }
+            // array size
+            let v = inputLength;
+            do {
+                outbuf[j++] = valToDigit[v & 0b111111];
+                v >>>= 6;
+            } while ( v !== 0 );
+            outbuf[j++] = 0x20 /* ' ' */;
+            // array content
+            for ( let i = 0; i < inputLength; i++ ) {
+                v = inbuf[i];
+                do {
+                    outbuf[j++] = valToDigit[v & 0b111111];
+                    v >>>= 6;
+                } while ( v !== 0 );
+                outbuf[j++] = 0x20 /* ' ' */;
+            }
+            if ( typeof TextDecoder === 'undefined' ) {
+                return JSON.stringify(
+                    Array.from(new Uint32Array(outbuf.buffer, 0, j >>> 2))
+                );
+            }
+            const textDecoder = new TextDecoder();
+            return textDecoder.decode(new Uint8Array(outbuf.buffer, 0, j));
+        },
+
+        decode: function(instr, arrbuf) {
+            if ( instr.charCodeAt(0) === 0x5B /* '[' */ ) {
+                const inbuf = JSON.parse(instr);
+                if ( arrbuf instanceof ArrayBuffer === false ) {
+                    return new Uint32Array(inbuf);
+                }
+                const outbuf = new Uint32Array(arrbuf);
+                outbuf.set(inbuf);
+                return outbuf;
+            }
+            if ( instr.startsWith(this.magic) === false ) {
+                throw new Error('Invalid µBlock.base64 encoding');
+            }
+            const inputLength = instr.length;
+            const outputLength = this.decodeSize(instr) >> 2;
+            const outbuf = arrbuf instanceof ArrayBuffer === false
+                ? new Uint32Array(outputLength)
+                : new Uint32Array(arrbuf);
+            let i = instr.indexOf(' ', this.magic.length) + 1;
+            if ( i === -1 ) {
+                throw new Error('Invalid µBlock.base64 encoding');
+            }
+            // array content
+            let j = 0;
+            for (;;) {
+                if ( j === outputLength || i >= inputLength ) { break; }
+                let v = 0, l = 0;
+                for (;;) {
+                    const c = instr.charCodeAt(i++);
+                    if ( c === 0x20 /* ' ' */ ) { break; }
+                    v += digitToVal[c] << l;
+                    l += 6;
+                }
+                outbuf[j++] = v;
+            }
+            if ( i < inputLength || j < outputLength ) {
+                throw new Error('Invalid µBlock.base64 encoding');
+            }
+            return outbuf;
+        },
+
+        decodeSize: function(instr) {
+            if ( instr.startsWith(this.magic) === false ) { return 0; }
+            let v = 0, l = 0, i = this.magic.length;
+            for (;;) {
+                const c = instr.charCodeAt(i++);
+                if ( c === 0x20 /* ' ' */ ) { break; }
+                v += digitToVal[c] << l;
+                l += 6;
+            }
+            return v << 2;
+        },
+    };
+
+    // The dense base64 codec is best for typed buffers which values are
+    // more random. For example, buffer contents as a result of compression
+    // contain less repetitive values and thus the content is more
+    // random-looking.
+
+    // TODO: Investigate that in Firefox, creating a new Uint8Array from the
+    // ArrayBuffer fails, the content of the resulting Uint8Array is
+    // non-sensical. WASM-related?
+
+    µBlock.denseBase64 = {
+        magic: 'DenseBase64_1',
+
+        encode: function(input) {
+            const m = input.length % 3;
+            const n = input.length - m;
+            let outputLength = n / 3 * 4;
+            if ( m !== 0 ) {
+                outputLength += m + 1;
+            }
+            const output = new Uint8Array(outputLength);
+            let j = 0;
+            for ( let i = 0; i < n; i += 3) {
+                const i1 = input[i+0];
+                const i2 = input[i+1];
+                const i3 = input[i+2];
+                output[j+0] = valToDigit[ i1 >>> 2];
+                output[j+1] = valToDigit[i1 << 4 & 0b110000 | i2 >>> 4];
+                output[j+2] = valToDigit[i2 << 2 & 0b111100 | i3 >>> 6];
+                output[j+3] = valToDigit[i3 & 0b111111 ];
+                j += 4;
+            }
+            if ( m !== 0 ) {
+                const i1 = input[n];
+                output[j+0] = valToDigit[i1 >>> 2];
+                if ( m === 1 ) { // 1 value
+                    output[j+1] = valToDigit[i1 << 4 & 0b110000];
+                } else { // 2 values
+                    const i2 = input[n+1];
+                    output[j+1] = valToDigit[i1 << 4 & 0b110000 | i2 >>> 4];
+                    output[j+2] = valToDigit[i2 << 2 & 0b111100 ];
+                }
+            }
+            const textDecoder = new TextDecoder();
+            const b64str = textDecoder.decode(output);
+            return this.magic + b64str;
+        },
+
+        decode: function(instr, arrbuf) {
+            if ( instr.startsWith(this.magic) === false ) {
+                throw new Error('Invalid µBlock.denseBase64 encoding');
+            }
+            const outputLength = this.decodeSize(instr);
+            const outbuf = arrbuf instanceof ArrayBuffer === false
+                ? new Uint8Array(outputLength)
+                : new Uint8Array(arrbuf);
+            const inputLength = instr.length - this.magic.length;
+            let i = this.magic.length;
+            let j = 0;
+            const m = inputLength & 3;
+            const n = i + inputLength - m;
+            while ( i < n ) {
+                const i1 = digitToVal[instr.charCodeAt(i+0)];
+                const i2 = digitToVal[instr.charCodeAt(i+1)];
+                const i3 = digitToVal[instr.charCodeAt(i+2)];
+                const i4 = digitToVal[instr.charCodeAt(i+3)];
+                i += 4;
+                outbuf[j+0] = i1 << 2 | i2 >>> 4;
+                outbuf[j+1] = i2 << 4 & 0b11110000 | i3 >>> 2;
+                outbuf[j+2] = i3 << 6 & 0b11000000 | i4;
+                j += 3;
+            }
+            if ( m !== 0 ) {
+                const i1 = digitToVal[instr.charCodeAt(i+0)];
+                const i2 = digitToVal[instr.charCodeAt(i+1)];
+                outbuf[j+0] = i1 << 2 | i2 >>> 4;
+                if ( m === 3 ) {
+                    const i3 = digitToVal[instr.charCodeAt(i+2)];
+                    outbuf[j+1] = i2 << 4 & 0b11110000 | i3 >>> 2;
+                }
+            }
+            return outbuf;
+        },
+
+        decodeSize: function(instr) {
+            if ( instr.startsWith(this.magic) === false ) { return 0; }
+            const inputLength = instr.length - this.magic.length;
+            const m = inputLength & 3;
+            const n = inputLength - m;
+            let outputLength = (n >>> 2) * 3;
+            if ( m !== 0 ) {
+                outputLength += m - 1;
+            }
+            return outputLength;
+        },
+    };
+}

/******************************************************************************/
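A quick round-trip sketch for the dense codec introduced above, with an arbitrary byte sequence:

    const bytes = new Uint8Array([ 1, 2, 3, 250, 251, 252, 253 ]);
    const str = µBlock.denseBase64.encode(bytes);   // 'DenseBase64_1' followed by 10 base64 digits
    const back = µBlock.denseBase64.decode(str);    // Uint8Array(7) with the same content as `bytes`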