-
-
Notifications
You must be signed in to change notification settings - Fork 34.3k
fs: enable chunked reading for large files in readFileHandle #56022
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 20 commits
b1e4ac0
4e6fa90
fab698d
9ea02ac
83405a7
0675e3b
45bf4de
553af67
14c4288
925c923
c9f73e6
2de146f
77f5e86
7fbb376
ff0137f
2160d30
c5be59a
9dfd3b3
c4337a2
8dce6dc
2036e4a
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -46,7 +46,6 @@ const { | |
| }, | ||
| } = require('internal/errors'); | ||
| const { isArrayBufferView } = require('internal/util/types'); | ||
|
|
||
| const { | ||
| constants: { | ||
| kIoMaxLength, | ||
|
|
@@ -528,17 +527,25 @@ async function readFileHandle(filehandle, options) { | |
|
|
||
| let size = 0; | ||
| let length = 0; | ||
| if ((statFields[1/* mode */] & S_IFMT) === S_IFREG) { | ||
| size = statFields[8/* size */]; | ||
|
|
||
| if ((statFields[1 /* mode */] & S_IFMT) === S_IFREG) { | ||
| size = statFields[8 /* size */]; | ||
mertcanaltin marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
| if (size > kIoMaxLength) { | ||
| const limit = kIoMaxLength; | ||
| process.emitWarning( | ||
| `Detected \`fs.readFile()\` to read a file larger than the recommended limit (${size} > ${limit} bytes). Please consider using \`fs.createReadStream()\` instead to minimize memory overhead and increase performance.`, | ||
| 'LargeFileWarning', | ||
| ERR_FS_FILE_TOO_LARGE, | ||
| ); | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Thanks, I'll try to use the |
||
| } | ||
|
|
||
| length = encoding ? MathMin(size, kReadFileBufferLength) : size; | ||
| } | ||
| if (length === 0) { | ||
| length = kReadFileUnknownBufferLength; | ||
| } | ||
|
|
||
| if (size > kIoMaxLength) | ||
| throw new ERR_FS_FILE_TOO_LARGE(size); | ||
|
|
||
| let totalRead = 0; | ||
| const noSize = size === 0; | ||
| let buffer = Buffer.allocUnsafeSlow(length); | ||
|
|
@@ -562,8 +569,8 @@ async function readFileHandle(filehandle, options) { | |
| totalRead += bytesRead; | ||
|
|
||
| if (bytesRead === 0 || | ||
| totalRead === size || | ||
| (bytesRead !== buffer.length && !chunkedRead && !noSize)) { | ||
| totalRead === size || | ||
| (bytesRead !== buffer.length && !chunkedRead && !noSize)) { | ||
| const singleRead = bytesRead === totalRead; | ||
|
|
||
| const bytesToCheck = chunkedRead ? totalRead : bytesRead; | ||
|
|
@@ -586,9 +593,10 @@ async function readFileHandle(filehandle, options) { | |
| result += decoder.end(buffer); | ||
| return result; | ||
| } | ||
| const readBuffer = bytesRead !== buffer.length ? | ||
| buffer.subarray(0, bytesRead) : | ||
| buffer; | ||
| const readBuffer = | ||
| bytesRead !== buffer.length ? | ||
| buffer.subarray(0, bytesRead) : | ||
| buffer; | ||
| if (encoding) { | ||
| result += decoder.write(readBuffer); | ||
| } else if (size !== 0) { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -5,19 +5,24 @@ const common = require('../common'); | |
| // The following tests validate base functionality for the fs.promises | ||
| // FileHandle.readFile method. | ||
|
|
||
| const fs = require('fs'); | ||
| const fs = require('node:fs'); | ||
| const { | ||
| open, | ||
| readFile, | ||
| writeFile, | ||
| truncate, | ||
| writeFile, | ||
| } = fs.promises; | ||
| const path = require('path'); | ||
| const path = require('node:path'); | ||
| const os = require('node:os'); | ||
| const tmpdir = require('../common/tmpdir'); | ||
| const tick = require('../common/tick'); | ||
| const assert = require('assert'); | ||
| const assert = require('node:assert'); | ||
| const tmpDir = tmpdir.path; | ||
|
|
||
| const skipMessage = 'intensive toString tests due to memory confinements'; | ||
| if (!common.enoughTestMem) | ||
| common.skip(skipMessage); | ||
|
|
||
| tmpdir.refresh(); | ||
|
|
||
| async function validateReadFile() { | ||
|
|
@@ -33,6 +38,34 @@ async function validateReadFile() { | |
| assert.deepStrictEqual(buffer, readFileData); | ||
| } | ||
|
|
||
| async function validateLargeFileSupport() { | ||
| const LARGE_FILE_SIZE = 2147483647 + 10 * 1024 * 1024; // INT32_MAX + 10 MB | ||
| const FILE_PATH = path.join(os.tmpdir(), 'large-virtual-file.bin'); | ||
|
|
||
| if (!tmpdir.hasEnoughSpace(LARGE_FILE_SIZE)) { | ||
| common.printSkipMessage(`Not enough space in ${os.tmpdir()}`); | ||
| return; | ||
| } | ||
|
|
||
| function createVirtualLargeFile() { | ||
| return Buffer.alloc(LARGE_FILE_SIZE, 'A'); | ||
| } | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is quite a large allocation that may fail on some of our more resource-challenged builders. I believe there's a utility in
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Thanks, I found this method. |
||
|
|
||
| const virtualFile = createVirtualLargeFile(); | ||
|
|
||
| await writeFile(FILE_PATH, virtualFile); | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. It's also possible that the FS on test runners could completely run out of space with a file this large.
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I added a check for this place too, thanks for the suggestion. |
||
|
|
||
| const buffer = await readFile(FILE_PATH); | ||
|
|
||
| assert.strictEqual( | ||
| buffer.length, | ||
| LARGE_FILE_SIZE, | ||
| `Expected file size to be ${LARGE_FILE_SIZE}, but got ${buffer.length}` | ||
| ); | ||
|
|
||
| await fs.promises.unlink(FILE_PATH); | ||
| } | ||
|
|
||
| async function validateReadFileProc() { | ||
| // Test to make sure reading a file under the /proc directory works. Adapted | ||
| // from test-fs-read-file-sync-hostname.js. | ||
|
|
@@ -92,30 +125,43 @@ async function doReadAndCancel() { | |
| }, 'tick-1'); | ||
| } | ||
|
|
||
| // Validate file size is within range for reading | ||
| // For validates the ability of the filesystem module to handle large files | ||
| { | ||
| // Variable taken from https://github.com/nodejs/node/blob/1377163f3351/lib/internal/fs/promises.js#L5 | ||
| const kIoMaxLength = 2 ** 31 - 1; | ||
| const largeFileSize = 5 * 1024 * 1024 * 1024; // 5 GiB | ||
| const newFile = path.resolve(tmpDir, 'dogs-running3.txt'); | ||
|
|
||
| if (!tmpdir.hasEnoughSpace(kIoMaxLength)) { | ||
mertcanaltin marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| // truncate() will fail with ENOSPC if there is not enough space. | ||
| if (!tmpdir.hasEnoughSpace(largeFileSize)) { | ||
| common.printSkipMessage(`Not enough space in ${tmpDir}`); | ||
| } else { | ||
| const newFile = path.resolve(tmpDir, 'dogs-running3.txt'); | ||
| await writeFile(newFile, Buffer.from('0')); | ||
| await truncate(newFile, kIoMaxLength + 1); | ||
| await truncate(newFile, largeFileSize); | ||
|
|
||
| await using fileHandle = await open(newFile, 'r'); | ||
|
|
||
| await assert.rejects(fileHandle.readFile(), { | ||
| name: 'RangeError', | ||
| code: 'ERR_FS_FILE_TOO_LARGE' | ||
| let warningEmitted = false; | ||
| let warningMessage = ''; | ||
| process.once('warning', (warning) => { | ||
| if (warning.name === 'LargeFileWarning') { | ||
| warningEmitted = true; | ||
| warningMessage = warning.message; | ||
| } | ||
| }); | ||
|
|
||
| const data = await fileHandle.readFile(); | ||
|
|
||
| const expectedWarningMsg = 'Expected LargeFileWarning to be emitted for 5GB file'; | ||
| assert.strictEqual(warningEmitted, true, expectedWarningMsg); | ||
|
|
||
| assert.match(warningMessage, | ||
| /larger than the recommended limit \(\d+ > \d+ bytes\)/); | ||
|
|
||
| console.log(`File read successfully, size: ${data.length} bytes`); | ||
| } | ||
| } | ||
| } | ||
|
|
||
| validateReadFile() | ||
| .then(validateLargeFileSupport) | ||
| .then(validateReadFileProc) | ||
| .then(doReadAndCancel) | ||
| .then(common.mustCall()); | ||

Uh oh!
There was an error while loading. Please reload this page.