#if LZ4
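// Read-only filesystem backend for LZ4-compressed file packages. File data is
// stored compressed in fixed-size chunks and decompressed lazily on read,
// through a small cache of recently used chunks, so only a bounded amount of
// decompressed data is resident at any time. This is typically used together
// with the -sLZ4 link flag and file_packager's --lz4 option, which emit the
// package data this library consumes.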
addToLibrary({
$LZ4__deps: ['$FS', '$preloadPlugins', '$getUniqueRunDependency', '$addRunDependency', '$removeRunDependency'],
$LZ4: {
DIR_MODE: {{{ cDefs.S_IFDIR | 0o777 }}},
FILE_MODE: {{{ cDefs.S_IFREG | 0o777 }}},
CHUNK_SIZE: -1,
codec: null,
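// Lazily instantiate the bundled mini-lz4 codec. The read() macro inlines
// third_party/mini-lz4.js at build time; evaluating it defines MiniLZ4,
// which provides the chunk size and the (de)compression routines used below.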
init() {
if (LZ4.codec) return;
LZ4.codec = (() => {
{{{ read('../third_party/mini-lz4.js') }}};
return MiniLZ4;
})();
LZ4.CHUNK_SIZE = LZ4.codec.CHUNK_SIZE;
},
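// Mount the contents of a file package: carve out the decompressed-chunk
// cache, create an FS node per file, and optionally run browser preload
// plugins over each file. A loader generated by file_packager would invoke
// this along the lines of (a hedged sketch, not the exact generated code):
//
//   LZ4.loadPackage({ 'metadata': metadata, 'compressedData': compressedData },
//                   /*preloadPlugin=*/true);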
loadPackage(pack, preloadPlugin) {
LZ4.init();
var compressedData = pack['compressedData'] || LZ4.codec.compressPackage(pack['data']);
assert(compressedData['cachedIndexes'].length === compressedData['cachedChunks'].length);
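// Set up the decompressed-chunk cache: each slot is a CHUNK_SIZE view into
// scratch space that the packager reserves after the compressed data, at
// cachedOffset. An index of -1 marks a slot as empty.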
for (var i = 0; i < compressedData['cachedIndexes'].length; i++) {
compressedData['cachedIndexes'][i] = -1;
compressedData['cachedChunks'][i] = compressedData['data'].subarray(compressedData['cachedOffset'] + i*LZ4.CHUNK_SIZE,
compressedData['cachedOffset'] + (i+1)*LZ4.CHUNK_SIZE);
assert(compressedData['cachedChunks'][i].length === LZ4.CHUNK_SIZE);
}
for (var file of pack['metadata'].files) {
var dir = PATH.dirname(file.filename);
var name = PATH.basename(file.filename);
FS.createPath('', dir, true, true);
var parent = FS.analyzePath(dir).object;
LZ4.createNode(parent, name, LZ4.FILE_MODE, 0, {
compressedData,
start: file.start,
end: file.end,
});
}
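// Run any registered preload plugins (e.g. image or audio decoding) over
// the files. Each file holds a run dependency until its handler settles,
// so the runtime will not start before preloading finishes.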
if (preloadPlugin) {
Browser.init();
for (var file of pack['metadata'].files) {
var fullname = file.filename;
for (var plugin of preloadPlugins) {
if (plugin['canHandle'](fullname)) {
var dep = getUniqueRunDependency('fp ' + fullname);
addRunDependency(dep);
var finish = () => removeRunDependency(dep);
var byteArray = FS.readFile(fullname);
#if ASSERTIONS
assert(plugin['handle'].constructor.name === 'AsyncFunction', 'Filesystem plugin handlers must be async functions (See #24914)');
#endif
plugin['handle'](byteArray, fullname).then(finish).catch(finish);
break;
}
}
}
}
},
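// Create an FS node backed by a [start, end) range of the compressed
// package. Directories get plain object contents; files record the range
// plus a reference to the shared compressedData.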
createNode(parent, name, mode, dev, contents, mtime) {
var node = FS.createNode(parent, name, mode);
node.mode = mode;
node.node_ops = LZ4.node_ops;
node.stream_ops = LZ4.stream_ops;
node.atime = node.mtime = node.ctime = (mtime || new Date).getTime();
assert(LZ4.FILE_MODE !== LZ4.DIR_MODE);
if (mode === LZ4.FILE_MODE) {
node.size = contents.end - contents.start;
node.contents = contents;
} else {
node.size = 4096;
node.contents = {};
}
if (parent) {
parent.contents[name] = node;
}
return node;
},
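// The filesystem is read-only: lookup of unknown names reports ENOENT, and
// every mutating operation reports EPERM.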
node_ops: {
getattr(node) {
return {
dev: 1,
ino: node.id,
mode: node.mode,
nlink: 1,
uid: 0,
gid: 0,
rdev: 0,
size: node.size,
atime: new Date(node.atime),
mtime: new Date(node.mtime),
ctime: new Date(node.ctime),
blksize: 4096,
blocks: Math.ceil(node.size / 4096),
};
},
setattr(node, attr) {
for (const key of ['mode', 'atime', 'mtime', 'ctime']) {
if (attr[key]) {
node[key] = attr[key];
}
}
},
lookup(parent, name) {
throw new FS.ErrnoError({{{ cDefs.ENOENT }}});
},
mknod(parent, name, mode, dev) {
throw new FS.ErrnoError({{{ cDefs.EPERM }}});
},
rename(oldNode, newDir, newName) {
throw new FS.ErrnoError({{{ cDefs.EPERM }}});
},
unlink(parent, name) {
throw new FS.ErrnoError({{{ cDefs.EPERM }}});
},
rmdir(parent, name) {
throw new FS.ErrnoError({{{ cDefs.EPERM }}});
},
readdir(node) {
throw new FS.ErrnoError({{{ cDefs.EPERM }}});
},
symlink(parent, newName, oldPath) {
throw new FS.ErrnoError({{{ cDefs.EPERM }}});
},
},
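// The core read path: map the requested byte range onto compressed chunks,
// decompressing (or reusing cached copies of) each chunk it touches.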
stream_ops: {
read(stream, buffer, offset, length, position) {
length = Math.min(length, stream.node.size - position);
if (length <= 0) return 0;
var contents = stream.node.contents;
var compressedData = contents.compressedData;
var written = 0;
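// A read may span several chunks; copy out of one chunk per iteration.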
while (written < length) {
var start = contents.start + position + written;
var desired = length - written;
var chunkIndex = Math.floor(start / LZ4.CHUNK_SIZE);
var compressedStart = compressedData['offsets'][chunkIndex];
var compressedSize = compressedData['sizes'][chunkIndex];
var currChunk;
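// successes[chunkIndex] records whether LZ4 actually shrank this chunk at
// packaging time; incompressible chunks are stored raw and can be used
// directly from the package data (see the else branch below).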
if (compressedData['successes'][chunkIndex]) {
var found = compressedData['cachedIndexes'].indexOf(chunkIndex);
if (found >= 0) {
currChunk = compressedData['cachedChunks'][found];
} else {
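// Cache miss: evict the oldest slot (the tail of both arrays), move it
// to the front, and decompress into it. Note that hits above do not
// re-order the arrays, so this is only approximately LRU.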
compressedData['cachedIndexes'].pop();
compressedData['cachedIndexes'].unshift(chunkIndex);
currChunk = compressedData['cachedChunks'].pop();
compressedData['cachedChunks'].unshift(currChunk);
if (compressedData['debug']) {
out('decompressing chunk ' + chunkIndex);
Module['decompressedChunks'] = (Module['decompressedChunks'] || 0) + 1;
}
var compressed = compressedData['data'].subarray(compressedStart, compressedStart + compressedSize);
var originalSize = LZ4.codec.uncompress(compressed, currChunk);
if (chunkIndex < compressedData['successes'].length-1) assert(originalSize === LZ4.CHUNK_SIZE);
}
} else {
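// Chunk was stored uncompressed; read it straight out of the package.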
currChunk = compressedData['data'].subarray(compressedStart, compressedStart + LZ4.CHUNK_SIZE);
}
var startInChunk = start % LZ4.CHUNK_SIZE;
var endInChunk = Math.min(startInChunk + desired, LZ4.CHUNK_SIZE);
buffer.set(currChunk.subarray(startInChunk, endInChunk), offset + written);
var currWritten = endInChunk - startInChunk;
written += currWritten;
}
return written;
},
write(stream, buffer, offset, length, position) {
throw new FS.ErrnoError({{{ cDefs.EIO }}});
},
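// Standard seek handling for SEEK_SET/SEEK_CUR/SEEK_END; the node size is
// known up front, so SEEK_END needs no decompression.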
llseek(stream, offset, whence) {
var position = offset;
if (whence === {{{ cDefs.SEEK_CUR }}}) {
position += stream.position;
} else if (whence === {{{ cDefs.SEEK_END }}}) {
if (FS.isFile(stream.node.mode)) {
position += stream.node.size;
}
}
if (position < 0) {
throw new FS.ErrnoError({{{ cDefs.EINVAL }}});
}
return position;
},
},
},
});
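// Hook $LZ4 into $FS's dependencies so it is linked in whenever the
// filesystem is; if FS is not used at all, there is nothing for LZ4 to do.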
if (LibraryManager.library['$FS__deps']) {
LibraryManager.library['$FS__deps'].push('$LZ4');
} else {
warn('FS does not seem to be in use (no preloaded files etc.), LZ4 will not do anything');
}
#endif