
Commit 719d293

buffer: add endings option, remove Node.js specific encoding option
Signed-off-by: James M Snell <[email protected]>
1 parent 0ae84b1 commit 719d293

3 files changed: +56 -24 lines changed

doc/api/buffer.md

+6-3
@@ -465,8 +465,9 @@ added: v15.7.0
   of string, {ArrayBuffer}, {TypedArray}, {DataView}, or {Blob} objects, or
   any mix of such objects, that will be stored within the `Blob`.
 * `options` {Object}
-  * `encoding` {string} The character encoding to use for string sources.
-    **Default:** `'utf8'`.
+  * `endings` {string} One of either `'transparent'` or `'native'`. When set
+    to `'native'`, line endings in string source parts will be converted to
+    the platform native line-ending as specified by `require('os').EOL`.
   * `type` {string} The Blob content-type. The intent is for `type` to convey
     the MIME media type of the data, however no validation of the type format
     is performed.
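
For context, a minimal sketch of how the documented `endings` option is intended to be used (assuming the experimental `Blob` export from `require('buffer')`, as in the rest of this commit):

```js
// With endings: 'native', '\n' and '\r\n' in string parts become os.EOL
// before the parts are encoded and copied into the Blob.
const { Blob } = require('buffer');
const { EOL } = require('os');

const blob = new Blob(['line one\n', 'line two\r\n'], { endings: 'native' });

blob.text().then((text) => {
  console.log(text === `line one${EOL}line two${EOL}`); // true
});
```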
@@ -476,7 +477,9 @@ Creates a new `Blob` object containing a concatenation of the given sources.
 {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into
 the 'Blob' and can therefore be safely modified after the 'Blob' is created.

-String sources are also copied into the `Blob`.
+String sources are encoded as UTF-8 byte sequences and copied into the blob.
+Unmatched surrogate pairs within each string part will be replaced by Unicode
+0xFFFD replacement characters.

 ### `blob.arrayBuffer()`
 <!-- YAML
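
A short illustration of the encoding behavior described in this hunk (a sketch, assuming the experimental `buffer.Blob`): an unmatched surrogate in a string part cannot be represented in UTF-8 and comes back as U+FFFD.

```js
const { Blob } = require('buffer');

// '\uD800' is a lone (unmatched) surrogate; it is encoded as the
// replacement character U+FFFD, which takes 3 bytes in UTF-8.
const blob = new Blob(['\uD800']);
console.log(blob.size); // 3

blob.text().then((text) => {
  console.log(text === '\uFFFD'); // true
});
```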

lib/internal/blob.js

+39-12
@@ -24,7 +24,10 @@ const {
   getDataObject,
 } = internalBinding('blob');

-const { TextDecoder } = require('internal/encoding');
+const {
+  TextDecoder,
+  TextEncoder,
+} = require('internal/encoding');

 const {
   makeTransferable,
@@ -48,6 +51,7 @@ const {
   AbortError,
   codes: {
     ERR_INVALID_ARG_TYPE,
+    ERR_INVALID_ARG_VALUE,
     ERR_INVALID_THIS,
     ERR_BUFFER_TOO_LARGE,
   }
@@ -65,10 +69,11 @@ const kArrayBufferPromise = Symbol('kArrayBufferPromise');

 const disallowedTypeCharacters = /[^\u{0020}-\u{007E}]/u;

-let Buffer;
 let ReadableStream;
 let URL;
+let EOL;

+const enc = new TextEncoder();

 // Yes, lazy loading is annoying but because of circular
 // references between the url, internal/blob, and buffer
@@ -79,29 +84,35 @@ function lazyURL(id) {
   return new URL(id);
 }

-function lazyBuffer() {
-  Buffer ??= require('buffer').Buffer;
-  return Buffer;
-}
-
 function lazyReadableStream(options) {
   ReadableStream ??=
     require('internal/webstreams/readablestream').ReadableStream;
   return new ReadableStream(options);
 }

+// TODO(@jasnell): This is annoying but this has to be lazy because
+// requiring the 'os' module too early causes building Node.js to
+// fail with an unknown reference failure.
+function lazyEOL() {
+  EOL ??= require('os').EOL;
+  return EOL;
+}
+
 function isBlob(object) {
   return object?.[kHandle] !== undefined;
 }

-function getSource(source, encoding) {
+function getSource(source, endings) {
   if (isBlob(source))
     return [source.size, source[kHandle]];

   if (isAnyArrayBuffer(source)) {
     source = new Uint8Array(source);
   } else if (!isArrayBufferView(source)) {
-    source = lazyBuffer().from(`${source}`, encoding);
+    source = `${source}`;
+    if (endings === 'native')
+      source = source.replaceAll(/(\n|\r\n)/g, lazyEOL());
+    source = enc.encode(source);
   }

   // We copy into a new Uint8Array because the underlying
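
Outside of the internal module, the new string branch of `getSource()` boils down to roughly the following (a sketch only; `encodeStringSource` is a hypothetical helper, and the global `TextEncoder` stands in for the one imported from `internal/encoding`):

```js
const { EOL } = require('os');

function encodeStringSource(source, endings = 'transparent') {
  let str = `${source}`;                      // coerce non-strings to string
  if (endings === 'native')
    str = str.replaceAll(/(\n|\r\n)/g, EOL);  // same regex as getSource()
  return new TextEncoder().encode(str);       // string parts are always UTF-8
}

// 'a\nb' has one line ending; with 'native' it becomes os.EOL.
console.log(encodeStringSource('a\nb', 'native').byteLength === 2 + EOL.length); // true
```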
@@ -113,6 +124,16 @@ function getSource(source, encoding) {
 }

 class Blob {
+  /**
+   * @typedef {string|ArrayBuffer|ArrayBufferView|Blob} SourcePart
+   *
+   * @param {SourcePart[]} [sources]
+   * @param {{
+   *   endings? : string,
+   *   type? : string,
+   * }} options
+   * @returns
+   */
   constructor(sources = [], options = {}) {
     emitExperimentalWarning('buffer.Blob');
     if (sources === null ||
@@ -121,12 +142,18 @@ class Blob {
       throw new ERR_INVALID_ARG_TYPE('sources', 'Iterable', sources);
     }
     validateObject(options, 'options');
-    const { encoding = 'utf8' } = options;
-    let { type = '' } = options;
+    let {
+      type = '',
+      endings = 'transparent',
+    } = options;
+
+    endings = `${endings}`;
+    if (endings !== 'transparent' && endings !== 'native')
+      throw new ERR_INVALID_ARG_VALUE('options.endings', endings);

     let length = 0;
     const sources_ = ArrayFrom(sources, (source) => {
-      const { 0: len, 1: src } = getSource(source, encoding);
+      const { 0: len, 1: src } = getSource(source, endings);
       length += len;
       return src;
     });
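
The user-visible effect of the validation added here, as a quick hedged check (again assuming the experimental `buffer.Blob`): any `endings` value other than `'transparent'` or `'native'` is rejected.

```js
const { Blob } = require('buffer');

try {
  // 'windows' is not a recognized value, so the constructor throws.
  new Blob([], { endings: 'windows' });
} catch (err) {
  console.log(err.code); // 'ERR_INVALID_ARG_VALUE'
}
```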

test/parallel/test-blob.js

+11-9
@@ -45,15 +45,6 @@ assert.throws(() => new Blob({}), {
   assert.strictEqual(new Blob([], { type: {} }).type, '[object object]');
 }

-{
-  const b = new Blob(['616263'], { encoding: 'hex', type: 'foo' });
-  assert.strictEqual(b.size, 3);
-  assert.strictEqual(b.type, 'foo');
-  b.text().then(common.mustCall((text) => {
-    assert.strictEqual(text, 'abc');
-  }));
-}
-
 {
   const b = new Blob([Buffer.from('abc')]);
   assert.strictEqual(b.size, 3);
@@ -216,3 +207,14 @@ assert.throws(() => new Blob({}), {
   res = await reader.read();
   assert(res.done);
 })().then(common.mustCall());
+
+{
+  const b = new Blob(['hello\n'], { endings: 'native' });
+  assert.strictEqual(b.size, 5 + require('os').EOL.length);
+
+  [1, {}, 'foo'].forEach((endings) => {
+    assert.throws(() => new Blob([], { endings }), {
+      code: 'ERR_INVALID_ARG_VALUE',
+    });
+  });
+}
