/**
 * @license
 * Copyright 2015 The Emscripten Authors
 * SPDX-License-Identifier: MIT
 */

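// LZ4 filesystem backend: keeps preloaded package data LZ4-compressed in
// memory and decompresses individual chunks on demand as files are read.
// Built with -sLZ4 (LZ4=1), preloaded files are served through this backend
// (see the auto-include logic at the bottom of this file).
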
#if LZ4
addToLibrary({
  $LZ4__deps: ['$FS', '$preloadPlugins', '$getUniqueRunDependency', '$addRunDependency', '$removeRunDependency'],
  $LZ4: {
    DIR_MODE: {{{ cDefs.S_IFDIR | 0o777 }}},
    FILE_MODE: {{{ cDefs.S_IFREG | 0o777 }}},
    CHUNK_SIZE: -1,
    codec: null,
    init() {
      if (LZ4.codec) return;
      LZ4.codec = (() => {
        {{{ read('../third_party/mini-lz4.js') }}};
        return MiniLZ4;
      })();
      LZ4.CHUNK_SIZE = LZ4.codec.CHUNK_SIZE;
    },
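    // Mount a package into the filesystem. A sketch of the expected `pack`
    // shape, inferred from the accesses below (not a formal schema):
    //   { data: Uint8Array,             // raw package, compressed on the fly
    //     compressedData: {...},        // optional pre-compressed form
    //     metadata: { files: [{ filename, start, end }, ...] } }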
    loadPackage(pack, preloadPlugin) {
      LZ4.init();
      var compressedData = pack['compressedData'] || LZ4.codec.compressPackage(pack['data']);
      assert(compressedData['cachedIndexes'].length === compressedData['cachedChunks'].length);
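      // Prime the chunk cache: slot i's scratch buffer is carved out of the
      // package data at cachedOffset + i*CHUNK_SIZE, and cachedIndexes[i]
      // records which chunk currently occupies slot i (-1 = empty).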
      for (var i = 0; i < compressedData['cachedIndexes'].length; i++) {
        compressedData['cachedIndexes'][i] = -1;
        compressedData['cachedChunks'][i] = compressedData['data'].subarray(compressedData['cachedOffset'] + i*LZ4.CHUNK_SIZE,
                                                                            compressedData['cachedOffset'] + (i+1)*LZ4.CHUNK_SIZE);
        assert(compressedData['cachedChunks'][i].length === LZ4.CHUNK_SIZE);
      }
      for (var file of pack['metadata'].files) {
        var dir = PATH.dirname(file.filename);
        var name = PATH.basename(file.filename);
        FS.createPath('', dir, true, true);
        var parent = FS.analyzePath(dir).object;
        LZ4.createNode(parent, name, LZ4.FILE_MODE, 0, {
          compressedData,
          start: file.start,
          end: file.end,
        });
      }
      // Preload files if necessary. This code is largely similar to
      // createPreloadedFile in library_fs.js. However, the main difference
      // here is that we only decompress a file if it can be preloaded.
      // Abstracting out the common parts seems to be more effort than it is
      // worth.
      if (preloadPlugin) {
        Browser.init();
        for (var file of pack['metadata'].files) {
          var fullname = file.filename;
          for (var plugin of preloadPlugins) {
            if (plugin['canHandle'](fullname)) {
              var dep = getUniqueRunDependency('fp ' + fullname);
              addRunDependency(dep);
              var finish = () => removeRunDependency(dep);
              var byteArray = FS.readFile(fullname);
#if ASSERTIONS
              assert(plugin['handle'].constructor.name === 'AsyncFunction', 'Filesystem plugin handlers must be async functions (See #24914)');
#endif
              plugin['handle'](byteArray, fullname).then(finish).catch(finish);
              break;
            }
          }
        }
      }
    },
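    // For regular files, `contents` is {compressedData, start, end}: a window
    // [start, end) into the uncompressed package data. Directories get an
    // empty contents map instead.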
    createNode(parent, name, mode, dev, contents, mtime) {
      var node = FS.createNode(parent, name, mode);
      node.mode = mode;
      node.node_ops = LZ4.node_ops;
      node.stream_ops = LZ4.stream_ops;
      node.atime = node.mtime = node.ctime = (mtime || new Date).getTime();
      assert(LZ4.FILE_MODE !== LZ4.DIR_MODE);
      if (mode === LZ4.FILE_MODE) {
        node.size = contents.end - contents.start;
        node.contents = contents;
      } else {
        node.size = 4096;
        node.contents = {};
      }
      if (parent) {
        parent.contents[name] = node;
      }
      return node;
    },
    node_ops: {
      getattr(node) {
        return {
          dev: 1,
          ino: node.id,
          mode: node.mode,
          nlink: 1,
          uid: 0,
          gid: 0,
          rdev: 0,
          size: node.size,
          atime: new Date(node.atime),
          mtime: new Date(node.mtime),
          ctime: new Date(node.ctime),
          blksize: 4096,
          blocks: Math.ceil(node.size / 4096),
        };
      },
      setattr(node, attr) {
        for (const key of ['mode', 'atime', 'mtime', 'ctime']) {
          if (attr[key]) {
            node[key] = attr[key];
          }
        }
      },
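      // The mounted package is read-only. Nodes are created eagerly in
      // loadPackage, so a lookup that reaches this backend means the name
      // does not exist; all mutating operations are rejected.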
      lookup(parent, name) {
        throw new FS.ErrnoError({{{ cDefs.ENOENT }}});
      },
      mknod(parent, name, mode, dev) {
        throw new FS.ErrnoError({{{ cDefs.EPERM }}});
      },
      rename(oldNode, newDir, newName) {
        throw new FS.ErrnoError({{{ cDefs.EPERM }}});
      },
      unlink(parent, name) {
        throw new FS.ErrnoError({{{ cDefs.EPERM }}});
      },
      rmdir(parent, name) {
        throw new FS.ErrnoError({{{ cDefs.EPERM }}});
      },
      readdir(node) {
        throw new FS.ErrnoError({{{ cDefs.EPERM }}});
      },
      symlink(parent, newName, oldPath) {
        throw new FS.ErrnoError({{{ cDefs.EPERM }}});
      },
    },
    stream_ops: {
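      // Copy up to `length` bytes at `position` in the file into `buffer` at
      // `offset`, walking the underlying chunks one at a time. A worked
      // example with a hypothetical CHUNK_SIZE of 2048 (the real value comes
      // from the codec at runtime): a read starting at uncompressed offset
      // 5000 begins in chunk floor(5000/2048) = 2, at in-chunk offset
      // 5000 % 2048 = 904.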
      read(stream, buffer, offset, length, position) {
        //out('LZ4 read ' + [offset, length, position]);
        length = Math.min(length, stream.node.size - position);
        if (length <= 0) return 0;
        var contents = stream.node.contents;
        var compressedData = contents.compressedData;
        var written = 0;
        while (written < length) {
          var start = contents.start + position + written; // start index in uncompressed data
          var desired = length - written;
          //out('current read: ' + ['start', start, 'desired', desired]);
          var chunkIndex = Math.floor(start / LZ4.CHUNK_SIZE);
          var compressedStart = compressedData['offsets'][chunkIndex];
          var compressedSize = compressedData['sizes'][chunkIndex];
          var currChunk;
          if (compressedData['successes'][chunkIndex]) {
            var found = compressedData['cachedIndexes'].indexOf(chunkIndex);
            if (found >= 0) {
              currChunk = compressedData['cachedChunks'][found];
            } else {
              // decompress the chunk
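              // Recycle the least-recently-loaded cache slot: drop its index
              // and scratch buffer from the tail and move them to the front
              // for this chunk.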
              compressedData['cachedIndexes'].pop();
              compressedData['cachedIndexes'].unshift(chunkIndex);
              currChunk = compressedData['cachedChunks'].pop();
              compressedData['cachedChunks'].unshift(currChunk);
              if (compressedData['debug']) {
                out('decompressing chunk ' + chunkIndex);
                Module['decompressedChunks'] = (Module['decompressedChunks'] || 0) + 1;
              }
              var compressed = compressedData['data'].subarray(compressedStart, compressedStart + compressedSize);
              //var t = Date.now();
              var originalSize = LZ4.codec.uncompress(compressed, currChunk);
              //out('decompress time: ' + (Date.now() - t));
              if (chunkIndex < compressedData['successes'].length-1) assert(originalSize === LZ4.CHUNK_SIZE); // all but the last chunk must be full-size
            }
          } else {
            // This chunk is stored uncompressed (compressing it was not a
            // win), so serve it directly from the package data.
            currChunk = compressedData['data'].subarray(compressedStart, compressedStart + LZ4.CHUNK_SIZE);
          }
          var startInChunk = start % LZ4.CHUNK_SIZE;
          var endInChunk = Math.min(startInChunk + desired, LZ4.CHUNK_SIZE);
          buffer.set(currChunk.subarray(startInChunk, endInChunk), offset + written);
          var currWritten = endInChunk - startInChunk;
          written += currWritten;
        }
        return written;
      },
      write(stream, buffer, offset, length, position) {
        throw new FS.ErrnoError({{{ cDefs.EIO }}});
      },
      llseek(stream, offset, whence) {
        var position = offset;
        if (whence === {{{ cDefs.SEEK_CUR }}}) {
          position += stream.position;
        } else if (whence === {{{ cDefs.SEEK_END }}}) {
          if (FS.isFile(stream.node.mode)) {
            position += stream.node.size;
          }
        }
        if (position < 0) {
          throw new FS.ErrnoError({{{ cDefs.EINVAL }}});
        }
        return position;
      },
    },
  },
});
if (LibraryManager.library['$FS__deps']) {
  LibraryManager.library['$FS__deps'].push('$LZ4'); // LZ4=1, so auto-include us
} else {
  warn('FS does not seem to be in use (no preloaded files etc.), LZ4 will not do anything');
}
#endif