perf(runtime/fs): optimize readFile by using a single large buffer (#12057)

* perf(runtime/fs): optimize readFile by using a single large buffer
* handle extended/truncated files during read

Allocate an extra byte in the read buffer to detect "overflow", then fall back to an unsized readAll for the remainder of the extended file; this is a slow path that should rarely happen in practice.
Aaron O'Mullan 2021-09-16 20:28:15 +02:00 committed by GitHub
parent 868f38d452
commit 00948a6d68
2 changed files with 73 additions and 43 deletions
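
Before the diff: the strategy the commit message describes, reduced to a standalone sketch. The names sizedReadAllSync, unsizedReadAllSync and concat below are illustrative only (they are not Deno's internal names), and `reader` is assumed to expose readSync(buf) returning the number of bytes read, or null at EOF, like Deno's Reader interface.

const CHUNK = 16 * 1024;

// Join a list of byte chunks into one Uint8Array.
function concat(chunks) {
  let total = 0;
  for (const c of chunks) total += c.byteLength;
  const out = new Uint8Array(total);
  let offset = 0;
  for (const c of chunks) {
    out.set(c, offset);
    offset += c.byteLength;
  }
  return out;
}

// Fallback: grow-as-you-go chunked read, used when the size hint was wrong.
function unsizedReadAllSync(reader) {
  const chunks = [];
  while (true) {
    const buf = new Uint8Array(CHUNK);
    const n = reader.readSync(buf);
    if (n === null) break;
    chunks.push(buf.subarray(0, n));
  }
  return concat(chunks);
}

// Fast path: one allocation of `size + 1` bytes; the spare byte detects a
// file that grew between the stat() and the read ("overflow").
function sizedReadAllSync(reader, size) {
  const buf = new Uint8Array(size + 1);
  let cursor = 0;
  while (cursor < size) {
    const n = reader.readSync(buf.subarray(cursor));
    if (n === null) break;        // early EOF: file was truncated after stat
    cursor += n;
  }
  if (cursor > size) {
    // Slow path: more data than stat reported; read the rest in chunks.
    return concat([buf.subarray(0, cursor), unsizedReadAllSync(reader)]);
  }
  return buf.subarray(0, cursor); // exactly `size`, or fewer bytes if truncated
}

The spare byte is what makes the overflow detectable without a second stat: if the cursor ever lands past `size`, the file must have grown after it was measured, and the chunked slow path collects whatever remains.
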

@@ -11,6 +11,7 @@
const {
Uint8Array,
ArrayPrototypePush,
+ MathMin,
TypedArrayPrototypeSubarray,
TypedArrayPrototypeSet,
} = window.__bootstrap.primordials;
@@ -96,10 +97,7 @@
return nread === 0 ? null : nread;
}
- async function read(
- rid,
- buffer,
- ) {
+ async function read(rid, buffer) {
if (buffer.length === 0) {
return 0;
}
@@ -117,10 +115,10 @@
return await core.opAsync("op_write_async", rid, data);
}
- const READ_PER_ITER = 32 * 1024;
+ const READ_PER_ITER = 16 * 1024; // 16kb, see https://github.com/denoland/deno/issues/10157
- async function readAll(r) {
- return await readAllInner(r);
+ function readAll(r) {
+ return readAllInner(r);
}
async function readAllInner(r, options) {
const buffers = [];
@@ -138,6 +136,26 @@
throw new DOMException("The read operation was aborted.", "AbortError");
}
return concatBuffers(buffers);
}
+ function readAllSync(r) {
+ const buffers = [];
+ while (true) {
+ const buf = new Uint8Array(READ_PER_ITER);
+ const read = r.readSync(buf);
+ if (typeof read == "number") {
+ ArrayPrototypePush(buffers, buf.subarray(0, read));
+ } else {
+ break;
+ }
+ }
+ return concatBuffers(buffers);
+ }
function concatBuffers(buffers) {
let totalLen = 0;
for (const buf of buffers) {
totalLen += buf.byteLength;
@@ -154,33 +172,55 @@
return contents;
}
- function readAllSync(r) {
- const buffers = [];
+ function readAllSyncSized(r, size) {
+ const buf = new Uint8Array(size + 1); // 1B to detect extended files
+ let cursor = 0;
- while (true) {
- const buf = new Uint8Array(READ_PER_ITER);
- const read = r.readSync(buf);
+ while (cursor < size) {
+ const sliceEnd = MathMin(size + 1, cursor + READ_PER_ITER);
+ const slice = buf.subarray(cursor, sliceEnd);
+ const read = r.readSync(slice);
if (typeof read == "number") {
- ArrayPrototypePush(buffers, new Uint8Array(buf.buffer, 0, read));
+ cursor += read;
} else {
break;
}
}
- let totalLen = 0;
- for (const buf of buffers) {
- totalLen += buf.byteLength;
+ // Handle truncated or extended files during read
+ if (cursor > size) {
+ // Read remaining and concat
+ return concatBuffers([buf, readAllSync(r)]);
+ } else { // cursor == size
+ return buf.subarray(0, cursor);
+ }
}
+ async function readAllInnerSized(r, size, options) {
+ const buf = new Uint8Array(size + 1); // 1B to detect extended files
+ let cursor = 0;
+ const signal = options?.signal ?? null;
+ while (!signal?.aborted && cursor < size) {
+ const sliceEnd = MathMin(size + 1, cursor + READ_PER_ITER);
+ const slice = buf.subarray(cursor, sliceEnd);
+ const read = await r.read(slice);
+ if (typeof read == "number") {
+ cursor += read;
+ } else {
+ break;
+ }
+ }
+ if (signal?.aborted) {
+ throw new DOMException("The read operation was aborted.", "AbortError");
+ }
- const contents = new Uint8Array(totalLen);
- let n = 0;
- for (const buf of buffers) {
- TypedArrayPrototypeSet(contents, buf, n);
- n += buf.byteLength;
+ // Handle truncated or extended files during read
+ if (cursor > size) {
+ // Read remaining and concat
+ return concatBuffers([buf, await readAllInner(r, options)]);
+ } else {
+ return buf.subarray(0, cursor);
+ }
- return contents;
}
window.__bootstrap.io = {
@@ -195,5 +235,7 @@
readAll,
readAllInner,
readAllSync,
+ readAllSyncSized,
+ readAllInnerSized,
};
})(this);
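
To make the truncated/extended handling concrete, here is a small in-memory harness for the sizedReadAllSync sketch from the top of this page (again illustrative only, not part of Deno or its test suite):

// Tiny in-memory reader exposing readSync(buf) -> number | null,
// used only to exercise the sketch above.
function bytesReader(data) {
  let pos = 0;
  return {
    readSync(buf) {
      if (pos >= data.length) return null;
      const n = Math.min(buf.length, data.length - pos);
      buf.set(data.subarray(pos, pos + n));
      pos += n;
      return n;
    },
  };
}

const data = new Uint8Array(100).fill(7);

// Size hint is correct: one allocation, fast path only.
console.log(sizedReadAllSync(bytesReader(data), 100).length); // 100

// Hint too small (file grew after stat): the spare byte overflows,
// the chunked slow path reads the rest, nothing is lost.
console.log(sizedReadAllSync(bytesReader(data), 60).length); // 100

// Hint too large (file shrank after stat): EOF ends the loop early
// and only the bytes actually read are returned.
console.log(sizedReadAllSync(bytesReader(data), 150).length); // 100
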

@@ -4,13 +4,13 @@
((window) => {
const core = window.Deno.core;
const { open, openSync } = window.__bootstrap.files;
- const { readAllInner, readAllSync } = window.__bootstrap.io;
+ const { readAllSyncSized, readAllInnerSized } = window.__bootstrap.io;
function readFileSync(path) {
const file = openSync(path);
try {
- const contents = readAllSync(file);
- return contents;
+ const { size } = file.statSync();
+ return readAllSyncSized(file, size);
} finally {
file.close();
}
@@ -19,31 +19,19 @@
async function readFile(path, options) {
const file = await open(path);
try {
- const contents = await readAllInner(file, options);
- return contents;
+ const { size } = await file.stat();
+ return await readAllInnerSized(file, size, options);
} finally {
file.close();
}
}
function readTextFileSync(path) {
- const file = openSync(path);
- try {
- const contents = readAllSync(file);
- return core.decode(contents);
- } finally {
- file.close();
- }
+ return core.decode(readFileSync(path));
}
async function readTextFile(path, options) {
- const file = await open(path);
- try {
- const contents = await readAllInner(file, options);
- return core.decode(contents);
- } finally {
- file.close();
- }
+ return core.decode(await readFile(path, options));
}
window.__bootstrap.readFile = {
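
Nothing about the public API of this second file changes: readFileSync/readFile still return the full contents, they just stat first and hand the size to the new sized helpers, and the text variants now delegate to the byte variants instead of repeating the open/read/close sequence. From a caller's point of view (an illustrative snippet; the file names are placeholders):

// Same calls as before this commit; internally each is now one stat()
// plus one size-hinted read instead of a loop of fixed-size chunk reads.
// Run with: deno run --allow-read read_example.js
const bytes = await Deno.readFile("some-file.bin");
const text = await Deno.readTextFile("some-file.txt");
console.log(bytes.byteLength, text.length);
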