From d22346de40f27157ea81f94479ba42bfc0122f5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Thu, 23 Mar 2017 12:53:08 +0100 Subject: [PATCH 01/67] deps: fix async await desugaring in V8 This is a backport of https://codereview.chromium.org/2672313003/. The patch did not land in V8 because it was superseded by another one but it is much easier to backport to V8 5.5, was reviewed and passed tests. Original commit message: [async await] Fix async function desugaring Previously we rewrote the return statement in an async function from `return expr;` to `return %ResolvePromise(.promise, expr), .promise`. This can lead to incorrect behavior in the presence of try-finally. This patch stores the `expr` of the return statement in a temporary variable, resolves and returns the promise at the end of the finally block. BUG=v8:5896 PR-URL: https://github.com/nodejs/node/pull/12004 Fixes: https://github.com/nodejs/node/issues/11960 Reviewed-By: James M Snell Reviewed-By: Ben Noordhuis --- deps/v8/include/v8-version.h | 2 +- deps/v8/src/parsing/parser-base.h | 15 +- deps/v8/src/parsing/parser.cc | 194 +++++++++++++++---- deps/v8/src/parsing/parser.h | 9 +- deps/v8/test/mjsunit/regress/regress-5896.js | 14 ++ 5 files changed, 187 insertions(+), 47 deletions(-) create mode 100644 deps/v8/test/mjsunit/regress/regress-5896.js diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index 2751bb168981ad..759767d7eb55cb 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -11,7 +11,7 @@ #define V8_MAJOR_VERSION 5 #define V8_MINOR_VERSION 5 #define V8_BUILD_NUMBER 372 -#define V8_PATCH_LEVEL 42 +#define V8_PATCH_LEVEL 43 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/src/parsing/parser-base.h b/deps/v8/src/parsing/parser-base.h index 1ebbee4959e8f2..86da9e0c50a36c 100644 --- a/deps/v8/src/parsing/parser-base.h +++ b/deps/v8/src/parsing/parser-base.h @@ -411,7 +411,7 @@ class ParserBase { } void set_promise_variable(typename Types::Variable* variable) { - DCHECK(variable != NULL); + DCHECK_NOT_NULL(variable); DCHECK(IsAsyncFunction(kind())); promise_variable_ = variable; } @@ -419,6 +419,15 @@ class ParserBase { return promise_variable_; } + void set_async_return_variable(typename Types::Variable* variable) { + DCHECK_NOT_NULL(variable); + DCHECK(IsAsyncFunction(kind())); + async_return_variable_ = variable; + } + typename Types::Variable* async_return_variable() const { + return async_return_variable_; + } + const ZoneList& destructuring_assignments_to_rewrite() const { return destructuring_assignments_to_rewrite_; @@ -488,6 +497,8 @@ class ParserBase { // For async functions, this variable holds a temporary for the Promise // being created as output of the async function. 
Variable* promise_variable_; + Variable* async_return_variable_; + Variable* is_rejection_variable_; FunctionState** function_state_stack_; FunctionState* outer_function_state_; @@ -1468,6 +1479,8 @@ ParserBase::FunctionState::FunctionState( expected_property_count_(0), generator_object_variable_(nullptr), promise_variable_(nullptr), + async_return_variable_(nullptr), + is_rejection_variable_(nullptr), function_state_stack_(function_state_stack), outer_function_state_(*function_state_stack), destructuring_assignments_to_rewrite_(16, scope->zone()), diff --git a/deps/v8/src/parsing/parser.cc b/deps/v8/src/parsing/parser.cc index 2933f7d5470b4c..a0648473f95cfa 100644 --- a/deps/v8/src/parsing/parser.cc +++ b/deps/v8/src/parsing/parser.cc @@ -1701,10 +1701,17 @@ Expression* Parser::RewriteReturn(Expression* return_value, int pos) { return_value = factory()->NewConditional(is_undefined, ThisExpression(pos), is_object_conditional, pos); } + if (is_generator()) { return_value = BuildIteratorResult(return_value, true); } else if (is_async_function()) { - return_value = BuildResolvePromise(return_value, return_value->position()); + // In an async function, + // return expr; + // is rewritten as + // return .async_return_value = expr; + return_value = factory()->NewAssignment( + Token::ASSIGN, factory()->NewVariableProxy(AsyncReturnVariable()), + return_value, kNoSourcePosition); } return return_value; } @@ -2991,60 +2998,152 @@ Block* Parser::BuildParameterInitializationBlock( return init_block; } +Block* Parser::BuildRejectPromiseOnExceptionForParameters(Block* inner_block) { + // .promise = %AsyncFunctionPromiseCreate(); + // try { + // + // } catch (.catch) { + // return %RejectPromise(.promise, .catch), .promise; + // } + Block* result = factory()->NewBlock(nullptr, 2, true, kNoSourcePosition); + { + // .promise = %AsyncFunctionPromiseCreate(); + Expression* create_promise = factory()->NewCallRuntime( + Context::ASYNC_FUNCTION_PROMISE_CREATE_INDEX, + new (zone()) ZoneList(0, zone()), kNoSourcePosition); + Assignment* assign_promise = factory()->NewAssignment( + Token::INIT, factory()->NewVariableProxy(PromiseVariable()), + create_promise, kNoSourcePosition); + Statement* set_promise = + factory()->NewExpressionStatement(assign_promise, kNoSourcePosition); + result->statements()->Add(set_promise, zone()); + } + // catch (.catch) { + // return %RejectPromise(.promise, .catch), .promise; + // } + Scope* catch_scope = NewScope(CATCH_SCOPE); + catch_scope->set_is_hidden(); + Variable* catch_variable = + catch_scope->DeclareLocal(ast_value_factory()->dot_catch_string(), VAR, + kCreatedInitialized, NORMAL_VARIABLE); + Block* catch_block = factory()->NewBlock(nullptr, 1, true, kNoSourcePosition); + { + // return %RejectPromise(.promise, .catch), .promise; + Expression* reject_return_promise = factory()->NewBinaryOperation( + Token::COMMA, BuildRejectPromise(catch_variable), + factory()->NewVariableProxy(PromiseVariable(), kNoSourcePosition), + kNoSourcePosition); + catch_block->statements()->Add( + factory()->NewReturnStatement(reject_return_promise, kNoSourcePosition), + zone()); + } + TryStatement* try_catch_statement = + factory()->NewTryCatchStatementForAsyncAwait(inner_block, catch_scope, + catch_variable, catch_block, + kNoSourcePosition); + result->statements()->Add(try_catch_statement, zone()); + return result; +} + Block* Parser::BuildRejectPromiseOnException(Block* inner_block, bool* ok) { + // .is_rejection = false; // .promise = %AsyncFunctionPromiseCreate(); // try { // // } catch 
(.catch) { - // %RejectPromise(.promise, .catch); - // return .promise; + // .is_rejection = true; + // .async_return_value = .catch; // } finally { + // .is_rejection + // ? %RejectPromise(.promise, .async_return_value) + // : %ResolvePromise(.promise, .async_return_value); // %AsyncFunctionPromiseRelease(.promise); + // return .promise; // } - Block* result = factory()->NewBlock(nullptr, 2, true, kNoSourcePosition); + Block* result = factory()->NewBlock(nullptr, 3, true, kNoSourcePosition); - // .promise = %AsyncFunctionPromiseCreate(); - Statement* set_promise; + Variable* is_rejection_var = + scope()->NewTemporary(ast_value_factory()->empty_string()); { + // .is_rejection = false; + Assignment* set_is_rejection = factory()->NewAssignment( + Token::INIT, factory()->NewVariableProxy(is_rejection_var), + factory()->NewBooleanLiteral(false, kNoSourcePosition), + kNoSourcePosition); + result->statements()->Add( + factory()->NewExpressionStatement(set_is_rejection, kNoSourcePosition), + zone()); + // .promise = %AsyncFunctionPromiseCreate(); Expression* create_promise = factory()->NewCallRuntime( Context::ASYNC_FUNCTION_PROMISE_CREATE_INDEX, new (zone()) ZoneList(0, zone()), kNoSourcePosition); Assignment* assign_promise = factory()->NewAssignment( Token::INIT, factory()->NewVariableProxy(PromiseVariable()), create_promise, kNoSourcePosition); - set_promise = + Statement* set_promise = factory()->NewExpressionStatement(assign_promise, kNoSourcePosition); + result->statements()->Add(set_promise, zone()); } - result->statements()->Add(set_promise, zone()); - // catch (.catch) { return %RejectPromise(.promise, .catch), .promise } + // catch (.catch) { + // .is_rejection = true; + // .async_return_value = .catch; + // } Scope* catch_scope = NewScope(CATCH_SCOPE); catch_scope->set_is_hidden(); Variable* catch_variable = catch_scope->DeclareLocal(ast_value_factory()->dot_catch_string(), VAR, kCreatedInitialized, NORMAL_VARIABLE); Block* catch_block = factory()->NewBlock(nullptr, 1, true, kNoSourcePosition); - - Expression* promise_reject = BuildRejectPromise( - factory()->NewVariableProxy(catch_variable), kNoSourcePosition); - ReturnStatement* return_promise_reject = - factory()->NewReturnStatement(promise_reject, kNoSourcePosition); - catch_block->statements()->Add(return_promise_reject, zone()); + { + // .is_rejection = true; + DCHECK_NOT_NULL(is_rejection_var); + Assignment* set_is_rejection = factory()->NewAssignment( + Token::ASSIGN, factory()->NewVariableProxy(is_rejection_var), + factory()->NewBooleanLiteral(true, kNoSourcePosition), + kNoSourcePosition); + catch_block->statements()->Add( + factory()->NewExpressionStatement(set_is_rejection, kNoSourcePosition), + zone()); + // .async_return_value = .catch; + Assignment* set_async_return_var = factory()->NewAssignment( + Token::ASSIGN, factory()->NewVariableProxy(AsyncReturnVariable()), + factory()->NewVariableProxy(catch_variable), kNoSourcePosition); + catch_block->statements()->Add(factory()->NewExpressionStatement( + set_async_return_var, kNoSourcePosition), + zone()); + } TryStatement* try_catch_statement = factory()->NewTryCatchStatementForAsyncAwait(inner_block, catch_scope, catch_variable, catch_block, kNoSourcePosition); - - // There is no TryCatchFinally node, so wrap it in an outer try/finally Block* outer_try_block = factory()->NewBlock(nullptr, 1, true, kNoSourcePosition); outer_try_block->statements()->Add(try_catch_statement, zone()); - // finally { %AsyncFunctionPromiseRelease(.promise) } + // finally { + // .is_rejection + 
// ? %RejectPromise(.promise, .async_return_value) + // : %ResolvePromise(.promise, .async_return_value); + // %AsyncFunctionPromiseRelease(.promise); + // return .promise; + // } Block* finally_block = factory()->NewBlock(nullptr, 1, true, kNoSourcePosition); { + // .is_rejection + // ? %RejectPromise(.promise, .async_return_value) + // : %ResolvePromise(.promise, .async_return_value); + Expression* resolve_or_reject_promise = + factory()->NewConditional(factory()->NewVariableProxy(is_rejection_var), + BuildRejectPromise(AsyncReturnVariable()), + BuildResolvePromise(), kNoSourcePosition); + finally_block->statements()->Add( + factory()->NewExpressionStatement(resolve_or_reject_promise, + kNoSourcePosition), + zone()); + // %AsyncFunctionPromiseRelease(.promise); ZoneList* args = new (zone()) ZoneList(1, zone()); args->Add(factory()->NewVariableProxy(PromiseVariable()), zone()); Expression* call_promise_release = factory()->NewCallRuntime( @@ -3052,11 +3151,15 @@ Block* Parser::BuildRejectPromiseOnException(Block* inner_block, bool* ok) { Statement* promise_release = factory()->NewExpressionStatement( call_promise_release, kNoSourcePosition); finally_block->statements()->Add(promise_release, zone()); + + // return .promise; + Statement* return_promise = factory()->NewReturnStatement( + factory()->NewVariableProxy(PromiseVariable()), kNoSourcePosition); + finally_block->statements()->Add(return_promise, zone()); } Statement* try_finally_statement = factory()->NewTryFinallyStatement( outer_try_block, finally_block, kNoSourcePosition); - result->statements()->Add(try_finally_statement, zone()); return result; } @@ -3072,31 +3175,25 @@ Expression* Parser::BuildCreateJSGeneratorObject(int pos, FunctionKind kind) { pos); } -Expression* Parser::BuildResolvePromise(Expression* value, int pos) { - // %ResolvePromise(.promise, value), .promise +Expression* Parser::BuildResolvePromise() { + // %ResolvePromise(.promise, .async_return_variable), .promise ZoneList* args = new (zone()) ZoneList(2, zone()); args->Add(factory()->NewVariableProxy(PromiseVariable()), zone()); - args->Add(value, zone()); - Expression* call_runtime = - factory()->NewCallRuntime(Context::PROMISE_RESOLVE_INDEX, args, pos); - return factory()->NewBinaryOperation( - Token::COMMA, call_runtime, - factory()->NewVariableProxy(PromiseVariable()), pos); + args->Add(factory()->NewVariableProxy(AsyncReturnVariable()), zone()); + return factory()->NewCallRuntime(Context::PROMISE_RESOLVE_INDEX, args, + kNoSourcePosition); } -Expression* Parser::BuildRejectPromise(Expression* value, int pos) { - // %RejectPromiseNoDebugEvent(.promise, value, true), .promise +Expression* Parser::BuildRejectPromise(Variable* value) { + // %RejectPromiseNoDebugEvent(.promise, .value, true) // The NoDebugEvent variant disables the additional debug event for the // rejection since a debug event already happened for the exception that got // us here. 
ZoneList* args = new (zone()) ZoneList(2, zone()); args->Add(factory()->NewVariableProxy(PromiseVariable()), zone()); - args->Add(value, zone()); - Expression* call_runtime = factory()->NewCallRuntime( - Context::REJECT_PROMISE_NO_DEBUG_EVENT_INDEX, args, pos); - return factory()->NewBinaryOperation( - Token::COMMA, call_runtime, - factory()->NewVariableProxy(PromiseVariable()), pos); + args->Add(factory()->NewVariableProxy(value), zone()); + return factory()->NewCallRuntime( + Context::REJECT_PROMISE_NO_DEBUG_EVENT_INDEX, args, kNoSourcePosition); } Variable* Parser::PromiseVariable() { @@ -3111,6 +3208,19 @@ Variable* Parser::PromiseVariable() { return promise; } +Variable* Parser::AsyncReturnVariable() { + // Based on the various compilation paths, there are many different + // code paths which may be the first to access the return value + // temporary. Whichever comes first should create it and stash it in + // the FunctionState. + Variable* async_return = function_state_->async_return_variable(); + if (async_return == nullptr) { + async_return = scope()->NewTemporary(ast_value_factory()->empty_string()); + function_state_->set_async_return_variable(async_return); + } + return async_return; +} + Expression* Parser::BuildInitialYield(int pos, FunctionKind kind) { Expression* allocation = BuildCreateJSGeneratorObject(pos, kind); VariableProxy* init_proxy = @@ -3235,7 +3345,7 @@ ZoneList* Parser::ParseEagerFunctionBody( // TODO(littledan): Merge the two rejection blocks into one if (IsAsyncFunction(kind)) { - init_block = BuildRejectPromiseOnException(init_block, CHECK_OK); + init_block = BuildRejectPromiseOnExceptionForParameters(init_block); } DCHECK_NOT_NULL(init_block); @@ -4176,14 +4286,16 @@ void Parser::RewriteAsyncFunctionBody(ZoneList* body, Block* block, // .generator_object = %CreateGeneratorObject(); // BuildRejectPromiseOnException({ // ... block ... - // return %ResolvePromise(.promise, expr), .promise; + // .async_return_var = expr; // }) // } - return_value = BuildResolvePromise(return_value, return_value->position()); - block->statements()->Add( - factory()->NewReturnStatement(return_value, return_value->position()), - zone()); + Assignment* set_async_return_var = factory()->NewAssignment( + Token::ASSIGN, factory()->NewVariableProxy(AsyncReturnVariable()), + return_value, kNoSourcePosition); + block->statements()->Add(factory()->NewExpressionStatement( + set_async_return_var, kNoSourcePosition), + zone()); block = BuildRejectPromiseOnException(block, CHECK_OK_VOID); body->Add(block, zone()); } diff --git a/deps/v8/src/parsing/parser.h b/deps/v8/src/parsing/parser.h index 418bedf81b4156..6ff4336f9b44a3 100644 --- a/deps/v8/src/parsing/parser.h +++ b/deps/v8/src/parsing/parser.h @@ -491,6 +491,7 @@ class Parser : public ParserBase { Block* BuildParameterInitializationBlock( const ParserFormalParameters& parameters, bool* ok); Block* BuildRejectPromiseOnException(Block* block, bool* ok); + Block* BuildRejectPromiseOnExceptionForParameters(Block* block); // Consumes the ending }. 
ZoneList* ParseEagerFunctionBody( @@ -576,9 +577,10 @@ class Parser : public ParserBase { Expression* BuildInitialYield(int pos, FunctionKind kind); Expression* BuildCreateJSGeneratorObject(int pos, FunctionKind kind); - Expression* BuildResolvePromise(Expression* value, int pos); - Expression* BuildRejectPromise(Expression* value, int pos); + Expression* BuildResolvePromise(); + Expression* BuildRejectPromise(Variable* value); Variable* PromiseVariable(); + Variable* AsyncReturnVariable(); // Generic AST generator for throwing errors from compiled code. Expression* NewThrowError(Runtime::FunctionId function_id, @@ -983,8 +985,7 @@ class Parser : public ParserBase { auto* init_block = BuildParameterInitializationBlock(parameters, ok); if (!*ok) return; if (is_async) { - init_block = BuildRejectPromiseOnException(init_block, ok); - if (!*ok) return; + init_block = BuildRejectPromiseOnExceptionForParameters(init_block); } if (init_block != nullptr) body->Add(init_block, zone()); } diff --git a/deps/v8/test/mjsunit/regress/regress-5896.js b/deps/v8/test/mjsunit/regress/regress-5896.js new file mode 100644 index 00000000000000..acd9a32dfa7d49 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-5896.js @@ -0,0 +1,14 @@ +// Copyright 2017 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// Flags: --allow-natives-syntax +async function foo() { + try { + return 1; + } finally { + return Promise.resolve().then(() => { return 2; }); + } +} +foo() + .then(value => { found = value; assertEquals(2, value) }) + .catch((e) => { %AbortJS(''+e); }); From 17d16e8f3d192d62779b2fcb4a372cddad1eb92b Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 17 Mar 2017 13:46:46 -0700 Subject: [PATCH 02/67] buffer: remove unneeded eslint-disable comment lib/buffer.js uses a function declaration for `Buffer`. So it never uses an instance of `Buffer` in the global scope. Therefore the disabling of the `require-buffer` custom rule is not needed. Remove the comment. PR-URL: https://github.com/nodejs/node/pull/11906 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Roman Reiss --- lib/buffer.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/buffer.js b/lib/buffer.js index 27e2b5393e7de0..ca73549eba4d00 100644 --- a/lib/buffer.js +++ b/lib/buffer.js @@ -1,4 +1,3 @@ -/* eslint-disable require-buffer */ 'use strict'; const binding = process.binding('buffer'); From 8ab26cf508b9a213ca708ed0ccad925806160ec5 Mon Sep 17 00:00:00 2001 From: Danny Nemer Date: Mon, 21 Sep 2015 10:08:36 -0400 Subject: [PATCH 03/67] readline: add option to stop duplicates in history Adds `options.deDupeHistory` for `readline.createInterface(options)`. If `options.deDupeHistory` is `true`, when a new input line being added to the history list duplicates an older one, removes the older line from the list. Defaults to `false`. Many users would appreciate this option, as it is a common setting in shells. This option certainly should not be default behavior, as it would be problematic in applications such as the `repl`, which inherits from the readline `Interface`. Extends documentation to reflect this API addition. Adds tests for when `options.deDupeHistory` is truthy, and when `options.deDupeHistory` is falsey. 
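As an illustration (a sketch, not taken from the change itself), the new
option would be used roughly like this; the option name and behaviour follow
the documentation and tests added below:

```js
const readline = require('readline');

// With `deDupeHistory: true`, a new input line that duplicates an older
// history entry causes the older entry to be removed, so cycling through
// history with the "up" arrow never shows the same line twice.
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
  terminal: true,
  deDupeHistory: true
});

rl.on('line', (line) => {
  // After entering 'foo', 'bar', 'foo', the history (newest first) is
  // ['foo', 'bar'] rather than ['foo', 'bar', 'foo'].
  console.log(`Read: ${line}`);
});
```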
PR-URL: https://github.com/nodejs/node/pull/2982 Reviewed-By: Jeremiah Senkpiel --- doc/api/readline.md | 3 ++ lib/readline.js | 9 ++++ test/parallel/test-readline-interface.js | 61 ++++++++++++++++++++++++ 3 files changed, 73 insertions(+) diff --git a/doc/api/readline.md b/doc/api/readline.md index 878721bb4d3f37..e471daecae3f5a 100644 --- a/doc/api/readline.md +++ b/doc/api/readline.md @@ -370,6 +370,9 @@ changes: `crlfDelay` milliseconds, both `\r` and `\n` will be treated as separate end-of-line input. Default to `100` milliseconds. `crlfDelay` will be coerced to `[100, 2000]` range. + * `deDupeHistory` {boolean} If `true`, when a new input line added to the + history list duplicates an older one, this removes the older line from the + list. Defaults to `false`. The `readline.createInterface()` method creates a new `readline.Interface` instance. diff --git a/lib/readline.js b/lib/readline.js index 52bdd976155439..04cc0f718a5f07 100644 --- a/lib/readline.js +++ b/lib/readline.js @@ -39,6 +39,7 @@ function Interface(input, output, completer, terminal) { EventEmitter.call(this); var historySize; + var deDupeHistory = false; let crlfDelay; let prompt = '> '; @@ -48,6 +49,7 @@ function Interface(input, output, completer, terminal) { completer = input.completer; terminal = input.terminal; historySize = input.historySize; + deDupeHistory = input.deDupeHistory; if (input.prompt !== undefined) { prompt = input.prompt; } @@ -80,6 +82,7 @@ function Interface(input, output, completer, terminal) { this.output = output; this.input = input; this.historySize = historySize; + this.deDupeHistory = !!deDupeHistory; this.crlfDelay = Math.max(kMincrlfDelay, Math.min(kMaxcrlfDelay, crlfDelay >>> 0)); @@ -257,6 +260,12 @@ Interface.prototype._addHistory = function() { if (this.line.trim().length === 0) return this.line; if (this.history.length === 0 || this.history[0] !== this.line) { + if (this.deDupeHistory) { + // Remove older history line if identical to new one + const dupIndex = this.history.indexOf(this.line); + if (dupIndex !== -1) this.history.splice(dupIndex, 1); + } + this.history.unshift(this.line); // Only store so many diff --git a/test/parallel/test-readline-interface.js b/test/parallel/test-readline-interface.js index 53c132dc30ec25..09ace7824ec525 100644 --- a/test/parallel/test-readline-interface.js +++ b/test/parallel/test-readline-interface.js @@ -305,6 +305,67 @@ function isWarned(emitter) { return false; }); + // duplicate lines are removed from history when `options.deDupeHistory` + // is `true` + fi = new FakeInput(); + rli = new readline.Interface({ + input: fi, + output: fi, + terminal: true, + deDupeHistory: true + }); + expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; + callCount = 0; + rli.on('line', function(line) { + assert.strictEqual(line, expectedLines[callCount]); + callCount++; + }); + fi.emit('data', expectedLines.join('\n') + '\n'); + assert.strictEqual(callCount, expectedLines.length); + fi.emit('keypress', '.', { name: 'up' }); // 'bat' + assert.strictEqual(rli.line, expectedLines[--callCount]); + fi.emit('keypress', '.', { name: 'up' }); // 'bar' + assert.notStrictEqual(rli.line, expectedLines[--callCount]); + assert.strictEqual(rli.line, expectedLines[--callCount]); + fi.emit('keypress', '.', { name: 'up' }); // 'baz' + assert.strictEqual(rli.line, expectedLines[--callCount]); + fi.emit('keypress', '.', { name: 'up' }); // 'foo' + assert.notStrictEqual(rli.line, expectedLines[--callCount]); + assert.strictEqual(rli.line, 
expectedLines[--callCount]); + assert.strictEqual(callCount, 0); + rli.close(); + + // duplicate lines are not removed from history when `options.deDupeHistory` + // is `false` + fi = new FakeInput(); + rli = new readline.Interface({ + input: fi, + output: fi, + terminal: true, + deDupeHistory: false + }); + expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; + callCount = 0; + rli.on('line', function(line) { + assert.strictEqual(line, expectedLines[callCount]); + callCount++; + }); + fi.emit('data', expectedLines.join('\n') + '\n'); + assert.strictEqual(callCount, expectedLines.length); + fi.emit('keypress', '.', { name: 'up' }); // 'bat' + assert.strictEqual(rli.line, expectedLines[--callCount]); + fi.emit('keypress', '.', { name: 'up' }); // 'bar' + assert.notStrictEqual(rli.line, expectedLines[--callCount]); + assert.strictEqual(rli.line, expectedLines[--callCount]); + fi.emit('keypress', '.', { name: 'up' }); // 'baz' + assert.strictEqual(rli.line, expectedLines[--callCount]); + fi.emit('keypress', '.', { name: 'up' }); // 'bar' + assert.strictEqual(rli.line, expectedLines[--callCount]); + fi.emit('keypress', '.', { name: 'up' }); // 'foo' + assert.strictEqual(rli.line, expectedLines[--callCount]); + assert.strictEqual(callCount, 0); + rli.close(); + // sending a multi-byte utf8 char over multiple writes const buf = Buffer.from('☮', 'utf8'); fi = new FakeInput(); From c51d925c84918dbfbfa1bfe98d1948079265ae48 Mon Sep 17 00:00:00 2001 From: Daijiro Wachi Date: Mon, 20 Mar 2017 11:40:54 -0700 Subject: [PATCH 04/67] url: restrict setting protocol to "file" Since file URLs can not have `username/password/port`, the specification was updated to restrict setting protocol to "file". Refs: https://github.com/whatwg/url/pull/269 Fixes: https://github.com/nodejs/node/issues/11785 PR-URL: https://github.com/nodejs/node/pull/11887 Reviewed-By: James M Snell Reviewed-By: Timothy Gu Reviewed-By: Joyee Cheung --- lib/internal/url.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/internal/url.js b/lib/internal/url.js index 3dc317a8bbb7c3..ad3f0d6f0bbab0 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -122,6 +122,13 @@ function onParseProtocolComplete(flags, protocol, username, password, if ((s && !newIsSpecial) || (!s && newIsSpecial)) { return; } + if (protocol === 'file:' && + (ctx.username || ctx.password || ctx.port !== undefined)) { + return; + } + if (ctx.scheme === 'file:' && !ctx.host) { + return; + } if (newIsSpecial) { ctx.flags |= binding.URL_FLAGS_SPECIAL; } else { From 169f187f16dd6ed4a9543453d3c8b06dc02fc425 Mon Sep 17 00:00:00 2001 From: Daijiro Wachi Date: Mon, 20 Mar 2017 11:43:30 -0700 Subject: [PATCH 05/67] test: synchronize WPT url setters tests data Synchronize url-setter-test to upstream. 
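For reference, the setter behaviour exercised by the new test entries (and by
the preceding `url` change) can be sketched as follows; the expected hrefs
come straight from the fixture data added below:

```js
const URL = require('url').URL;  // WHATWG URL implementation

// Switching a URL's protocol to or away from 'file:' is ignored, because
// file URLs cannot carry a username, password or port.
const fileUrl = new URL('file:///test');
fileUrl.protocol = 'gopher';
console.log(fileUrl.href);  // 'file:///test' (unchanged)

const wssUrl = new URL('wss://x:x@example.net:1234');
wssUrl.protocol = 'file';
console.log(wssUrl.href);   // 'wss://x:x@example.net:1234/' (unchanged)
```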
Refs: https://github.com/w3c/web-platform-tests/pull/5112 PR-URL: https://github.com/nodejs/node/pull/11887 Reviewed-By: James M Snell Reviewed-By: Timothy Gu Reviewed-By: Joyee Cheung --- test/fixtures/url-setter-tests.json | 51 ++++++++++++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/test/fixtures/url-setter-tests.json b/test/fixtures/url-setter-tests.json index 4876b9940c1d98..d0138204b3fe6c 100644 --- a/test/fixtures/url-setter-tests.json +++ b/test/fixtures/url-setter-tests.json @@ -111,6 +111,55 @@ "protocol": "http:" } }, + { + "href": "gopher://example.net:1234", + "new_value": "file", + "expected": { + "href": "gopher://example.net:1234/", + "protocol": "gopher:" + } + }, + { + "href": "wss://x:x@example.net:1234", + "new_value": "file", + "expected": { + "href": "wss://x:x@example.net:1234/", + "protocol": "wss:" + } + }, + { + "comment": "Can’t switch from file URL with no host", + "href": "file://localhost/", + "new_value": "http", + "expected": { + "href": "file:///", + "protocol": "file:" + } + }, + { + "href": "file:///test", + "new_value": "gopher", + "expected": { + "href": "file:///test", + "protocol": "file:" + } + }, + { + "href": "file:", + "new_value": "wss", + "expected": { + "href": "file:///", + "protocol": "file:" + } + }, + { + "href": "file://hi/path", + "new_value": "s", + "expected": { + "href": "file://hi/path", + "protocol": "file:" + } + }, { "href": "https://example.net", "new_value": "s", @@ -1177,4 +1226,4 @@ } } ] -} \ No newline at end of file +} From 68b23be51fb5836f7f7befaa108998a97f59be7e Mon Sep 17 00:00:00 2001 From: Jyotman Singh Date: Sat, 18 Mar 2017 20:42:00 +0530 Subject: [PATCH 06/67] doc: add missing word in stream.md PR-URL: https://github.com/nodejs/node/pull/11914 Fixes: https://github.com/nodejs/node/issues/11913 Reviewed-By: Luigi Pinca Reviewed-By: Jeremiah Senkpiel Reviewed-By: Yuta Hiroto Reviewed-By: Timothy Gu Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- doc/api/stream.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/stream.md b/doc/api/stream.md index 289250c638ff15..23b8059ed99329 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -556,7 +556,7 @@ that the stream will *remain* paused once those destinations drain and ask for more data. *Note*: If a [Readable][] is switched into flowing mode and there are no -consumers available handle the data, that data will be lost. This can occur, +consumers available to handle the data, that data will be lost. This can occur, for instance, when the `readable.resume()` method is called without a listener attached to the `'data'` event, or when a `'data'` event handler is removed from the stream. From d6da1705cd28084a992a025770e32f70017250dd Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Tue, 14 Mar 2017 18:22:53 +0100 Subject: [PATCH 07/67] src: ensure that fd 0-2 are valid on windows Check that stdin, stdout and stderr are valid file descriptors on Windows. If not, reopen them with 'nul' file. 
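For context, the regression test below drives this from a Python wrapper that
closes file descriptors 0-2 before spawning node; the JavaScript child only
has to touch the lazily-created stdio streams. A minimal sketch of that child
(hypothetical, mirroring the test fixture):

```js
// Accessing process.stdin/stdout/stderr forces their initialization.
// Before this change that could abort the process on Windows when fds 0-2
// were closed by the parent; with the fix they are backed by 'nul'.
process.stdin;
process.stdout;
process.stderr;
process.exit(0);
```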
Refs: https://github.com/nodejs/node/pull/875 Fixes: https://github.com/nodejs/node/issues/11656 PR-URL: https://github.com/nodejs/node/pull/11863 Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell Reviewed-By: Jeremiah Senkpiel Reviewed-By: Anna Henningsen --- src/node.cc | 13 +++++++++++++ test/fixtures/spawn_closed_stdio.py | 8 ++++++++ test/parallel/test-stdio-closed.js | 14 +++++++++++++- 3 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 test/fixtures/spawn_closed_stdio.py diff --git a/src/node.cc b/src/node.cc index 35d29bda93eea8..002ebd50500d13 100644 --- a/src/node.cc +++ b/src/node.cc @@ -4161,6 +4161,19 @@ inline void PlatformInit() { } while (min + 1 < max); } #endif // __POSIX__ +#ifdef _WIN32 + for (int fd = 0; fd <= 2; ++fd) { + auto handle = reinterpret_cast(_get_osfhandle(fd)); + if (handle == INVALID_HANDLE_VALUE || + GetFileType(handle) == FILE_TYPE_UNKNOWN) { + // Ignore _close result. If it fails or not depends on used Windows + // version. We will just check _open result. + _close(fd); + if (fd != _open("nul", _O_RDWR)) + ABORT(); + } + } +#endif // _WIN32 } diff --git a/test/fixtures/spawn_closed_stdio.py b/test/fixtures/spawn_closed_stdio.py new file mode 100644 index 00000000000000..b5de2552c2b13e --- /dev/null +++ b/test/fixtures/spawn_closed_stdio.py @@ -0,0 +1,8 @@ +import os +import sys +import subprocess +os.close(0) +os.close(1) +os.close(2) +exit_code = subprocess.call(sys.argv[1:], shell=False) +sys.exit(exit_code) diff --git a/test/parallel/test-stdio-closed.js b/test/parallel/test-stdio-closed.js index 98e4f980d50dd6..2313140a26aea7 100644 --- a/test/parallel/test-stdio-closed.js +++ b/test/parallel/test-stdio-closed.js @@ -3,9 +3,21 @@ const common = require('../common'); const assert = require('assert'); const spawn = require('child_process').spawn; const fs = require('fs'); +const path = require('path'); if (common.isWindows) { - common.skip('platform not supported.'); + if (process.argv[2] === 'child') { + process.stdin; + process.stdout; + process.stderr; + return; + } + const python = process.env.PYTHON || 'python'; + const script = path.join(common.fixturesDir, 'spawn_closed_stdio.py'); + const proc = spawn(python, [script, process.execPath, __filename, 'child']); + proc.on('exit', common.mustCall(function(exitCode) { + assert.strictEqual(exitCode, 0); + })); return; } From 04fa28e6dc42010299c644a9f7dc401c66ffcad4 Mon Sep 17 00:00:00 2001 From: Roman Reiss Date: Mon, 20 Mar 2017 21:24:05 +0100 Subject: [PATCH 08/67] doc: fix gitter badge in README GitHub now renders Markdown in CommonMark where spaces in a link destination are invalid and need to be escaped. 
PR-URL: https://github.com/nodejs/node/pull/11944 Reviewed-By: Anna Henningsen Reviewed-By: Ben Noordhuis --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5957e5e526c4cd..e2d66b90be14b4 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Node.js -[![Gitter](https://badges.gitter.im/Join Chat.svg)](https://gitter.im/nodejs/node?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/29/badge)](https://bestpractices.coreinfrastructure.org/projects/29) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/nodejs/node?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/29/badge)](https://bestpractices.coreinfrastructure.org/projects/29) Node.js is a JavaScript runtime built on Chrome's V8 JavaScript engine. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and From 114f9d619d1da11f5120393da9c192320b685b9a Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 8 Mar 2017 13:39:57 +0100 Subject: [PATCH 09/67] test: add hasCrypto check to tls-socket-close Currently test-tls-socket-close will fail if node was built using --without-ssl. This commit adds a check to verify is crypto support exists and if not skip this test. PR-URL: https://github.com/nodejs/node/pull/11911 Reviewed-By: Ben Noordhuis Reviewed-By: Luigi Pinca Reviewed-By: Yuta Hiroto Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- test/parallel/test-tls-socket-close.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/parallel/test-tls-socket-close.js b/test/parallel/test-tls-socket-close.js index 440c0c4ff7cde7..4e7382f3407a8a 100644 --- a/test/parallel/test-tls-socket-close.js +++ b/test/parallel/test-tls-socket-close.js @@ -1,5 +1,9 @@ 'use strict'; const common = require('../common'); +if (!common.hasCrypto) { + common.skip('missing crypto'); + return; +} const assert = require('assert'); const tls = require('tls'); From 85eb1bc0a92c93152ddfc301227e133dc707121a Mon Sep 17 00:00:00 2001 From: Lucas Lago Date: Fri, 17 Mar 2017 17:58:53 -0300 Subject: [PATCH 10/67] benchmark: remove v8ForceOptimization calls MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This removes common.v8ForceOptimization calls from url and vm benchmark files. PR-URL: https://github.com/nodejs/node/pull/11908 Fixes: https://github.com/nodejs/node/issues/11895 Reviewed-By: Timothy Gu Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Michaël Zasso --- benchmark/url/whatwg-url-idna.js | 2 -- benchmark/vm/run-in-context.js | 2 -- benchmark/vm/run-in-this-context.js | 1 - 3 files changed, 5 deletions(-) diff --git a/benchmark/url/whatwg-url-idna.js b/benchmark/url/whatwg-url-idna.js index 41b4c639de97b6..3d0ea3dc8fe516 100644 --- a/benchmark/url/whatwg-url-idna.js +++ b/benchmark/url/whatwg-url-idna.js @@ -37,8 +37,6 @@ function main(conf) { const input = inputs[conf.input][to]; const method = to === 'ascii' ? 
domainToASCII : domainToUnicode; - common.v8ForceOptimization(method, input); - bench.start(); for (var i = 0; i < n; i++) { method(input); diff --git a/benchmark/vm/run-in-context.js b/benchmark/vm/run-in-context.js index 62ebe29146e705..d1d4b5a7abbf20 100644 --- a/benchmark/vm/run-in-context.js +++ b/benchmark/vm/run-in-context.js @@ -23,8 +23,6 @@ function main(conf) { const contextifiedSandbox = vm.createContext(); - common.v8ForceOptimization(vm.runInContext, - '0', contextifiedSandbox, options); bench.start(); for (; i < n; i++) vm.runInContext('0', contextifiedSandbox, options); diff --git a/benchmark/vm/run-in-this-context.js b/benchmark/vm/run-in-this-context.js index f66fd31a1a949e..f3a7e969287d50 100644 --- a/benchmark/vm/run-in-this-context.js +++ b/benchmark/vm/run-in-this-context.js @@ -21,7 +21,6 @@ function main(conf) { var i = 0; - common.v8ForceOptimization(vm.runInThisContext, '0', options); bench.start(); for (; i < n; i++) vm.runInThisContext('0', options); From d62ddbe14565a5263f9979c5d776d479fd47262c Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Fri, 17 Mar 2017 20:03:06 +0200 Subject: [PATCH 11/67] benchmark: fix fs\bench-realpathSync.js Make it call-site-cwd-independent. PR-URL: https://github.com/nodejs/node/pull/11904 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen --- benchmark/fs/bench-realpathSync.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/benchmark/fs/bench-realpathSync.js b/benchmark/fs/bench-realpathSync.js index ae1c78d30d1b35..bf1a38a746e150 100644 --- a/benchmark/fs/bench-realpathSync.js +++ b/benchmark/fs/bench-realpathSync.js @@ -3,6 +3,8 @@ const common = require('../common'); const fs = require('fs'); const path = require('path'); + +process.chdir(__dirname); const resolved_path = path.resolve(__dirname, '../../lib/'); const relative_path = path.relative(__dirname, '../../lib/'); From 019a20adb5914211814c2146fd457bb2ad607203 Mon Sep 17 00:00:00 2001 From: Timothy Gu Date: Thu, 16 Mar 2017 16:46:12 -0700 Subject: [PATCH 12/67] src: make PercentDecode return void It only returns 0, nor is it likely to have any error conditions in the future. PR-URL: https://github.com/nodejs/node/pull/11922 Reviewed-By: Ben Noordhuis Reviewed-By: Colin Ihrig Reviewed-By: Joyee Cheung Reviewed-By: James M Snell --- src/node_url.cc | 3 +-- src/node_url.h | 11 +++++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/src/node_url.cc b/src/node_url.cc index 5027399e89dd71..762f8723106198 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -375,8 +375,7 @@ namespace url { } // First, we have to percent decode - if (PercentDecode(input, length, &decoded) < 0) - goto end; + PercentDecode(input, length, &decoded); // If there are any invalid UTF8 byte sequences, we have to fail. 
// Unfortunately this means iterating through the string and checking diff --git a/src/node_url.h b/src/node_url.h index 198c29938b7d22..ba05cd6fed65d2 100644 --- a/src/node_url.h +++ b/src/node_url.h @@ -376,11 +376,11 @@ static inline unsigned hex2bin(const char ch) { return static_cast(-1); } -static inline int PercentDecode(const char* input, - size_t len, - std::string* dest) { +static inline void PercentDecode(const char* input, + size_t len, + std::string* dest) { if (len == 0) - return 0; + return; dest->reserve(len); const char* pointer = input; const char* end = input + len; @@ -399,11 +399,10 @@ static inline int PercentDecode(const char* input, unsigned a = hex2bin(pointer[1]); unsigned b = hex2bin(pointer[2]); char c = static_cast(a * 16 + b); - *dest += static_cast(c); + *dest += c; pointer += 3; } } - return 0; } #define SPECIALS(XX) \ From 9ba551f7e3cc89b7ecac1db235d10bc24e5d60da Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 18 Mar 2017 20:02:00 -0700 Subject: [PATCH 13/67] test: fix flaky test-tls-socket-close Replace timer/timeout race with event-based ordering, eliminating test flakiness. PR-URL: https://github.com/nodejs/node/pull/11921 Fixes: https://github.com/nodejs/node/issues/11912 Reviewed-By: Colin Ihrig Reviewed-By: Santiago Gimeno Reviewed-By: James M Snell --- test/parallel/test-tls-socket-close.js | 45 ++++++++++++++++---------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/test/parallel/test-tls-socket-close.js b/test/parallel/test-tls-socket-close.js index 4e7382f3407a8a..617062a4f2d5bf 100644 --- a/test/parallel/test-tls-socket-close.js +++ b/test/parallel/test-tls-socket-close.js @@ -13,35 +13,46 @@ const net = require('net'); const key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); const cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); -const T = 100; - +let tlsSocket; // tls server const tlsServer = tls.createServer({ cert, key }, (socket) => { - setTimeout(() => { - socket.on('error', (error) => { - assert.strictEqual(error.code, 'EINVAL'); - tlsServer.close(); - netServer.close(); - }); - socket.write('bar'); - }, T * 2); + tlsSocket = socket; + socket.on('error', common.mustCall((error) => { + assert.strictEqual(error.code, 'EINVAL'); + tlsServer.close(); + netServer.close(); + })); }); +let netSocket; // plain tcp server const netServer = net.createServer((socket) => { - // if client wants to use tls + // if client wants to use tls tlsServer.emit('connection', socket); - socket.setTimeout(T, () => { - // this breaks if TLSSocket is already managing the socket: - socket.destroy(); - }); + netSocket = socket; }).listen(0, common.mustCall(function() { - // connect client tls.connect({ host: 'localhost', port: this.address().port, rejectUnauthorized: false - }).write('foo'); + }).write('foo', 'utf8', common.mustCall(() => { + assert(netSocket); + netSocket.setTimeout(1, common.mustCall(() => { + assert(tlsSocket); + // this breaks if TLSSocket is already managing the socket: + netSocket.destroy(); + const interval = setInterval(() => { + // Checking this way allows us to do the write at a time that causes a + // segmentation fault (not always, but often) in Node.js 7.7.3 and + // earlier. If we instead, for example, wait on the `close` event, then + // it will not segmentation fault, which is what this test is all about. 
+ if (tlsSocket._handle._parent.bytesRead === 0) { + tlsSocket.write('bar'); + clearInterval(interval); + } + }, 1); + })); + })); })); From 55a112689ade1441a22cb3db630774f098f10f56 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 19 Mar 2017 14:36:09 -0700 Subject: [PATCH 14/67] test: add test for child_process.execFile() While `child_process.execFile()` gets called in places in the test suite, there are no explicit test for it and there are parts of the implementation that are not covered by tests. This adds a minimal test that increases (but does not complete) coverage for the implementation. PR-URL: https://github.com/nodejs/node/pull/11929 Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Yuta Hiroto --- test/parallel/test-child-process-execfile.js | 21 ++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 test/parallel/test-child-process-execfile.js diff --git a/test/parallel/test-child-process-execfile.js b/test/parallel/test-child-process-execfile.js new file mode 100644 index 00000000000000..ab36aa7b156f99 --- /dev/null +++ b/test/parallel/test-child-process-execfile.js @@ -0,0 +1,21 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const execFile = require('child_process').execFile; +const path = require('path'); + +const fixture = path.join(common.fixturesDir, 'exit.js'); + +{ + execFile( + process.execPath, + [fixture, 42], + common.mustCall((e) => { + // Check that arguments are included in message + assert.strictEqual(e.message.trim(), + `Command failed: ${process.execPath} ${fixture} 42`); + assert.strictEqual(e.code, 42); + }) + ); +} From 59f71f5661ec5557368d490d2b18ec335298f05d Mon Sep 17 00:00:00 2001 From: Timothy Gu Date: Sun, 19 Mar 2017 13:35:47 -0700 Subject: [PATCH 15/67] src, buffer: do not segfault on out-of-range index Also add test cases for partial writes and invalid indices. PR-URL: https://github.com/nodejs/node/pull/11927 Fixes: https://github.com/nodejs/node/issues/8724 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen --- src/node_buffer.cc | 28 ++++++--- test/parallel/test-buffer-write-noassert.js | 63 ++++++++++++++++++--- 2 files changed, 75 insertions(+), 16 deletions(-) diff --git a/src/node_buffer.cc b/src/node_buffer.cc index 0e96fd6deed8d3..d17fe07d339d23 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -148,7 +148,8 @@ void CallbackInfo::WeakCallback(Isolate* isolate) { // Parse index for external array data. inline MUST_USE_RESULT bool ParseArrayIndex(Local arg, size_t def, - size_t* ret) { + size_t* ret, + size_t needed = 0) { if (arg->IsUndefined()) { *ret = def; return true; @@ -162,7 +163,7 @@ inline MUST_USE_RESULT bool ParseArrayIndex(Local arg, // Check that the result fits in a size_t. 
const uint64_t kSizeMax = static_cast(static_cast(-1)); // coverity[pointless_expression] - if (static_cast(tmp_i) > kSizeMax) + if (static_cast(tmp_i) > kSizeMax - needed) return false; *ret = static_cast(tmp_i); @@ -793,17 +794,28 @@ void WriteFloatGeneric(const FunctionCallbackInfo& args) { CHECK_NE(ts_obj_data, nullptr); T val = args[1]->NumberValue(env->context()).FromMaybe(0); - size_t offset = args[2]->IntegerValue(env->context()).FromMaybe(0); size_t memcpy_num = sizeof(T); + size_t offset; - if (should_assert) { - THROW_AND_RETURN_IF_OOB(offset + memcpy_num >= memcpy_num); - THROW_AND_RETURN_IF_OOB(offset + memcpy_num <= ts_obj_length); + // If the offset is negative or larger than the size of the ArrayBuffer, + // throw an error (if needed) and return directly. + if (!ParseArrayIndex(args[2], 0, &offset, memcpy_num) || + offset >= ts_obj_length) { + if (should_assert) + THROW_AND_RETURN_IF_OOB(false); + return; } - if (offset + memcpy_num > ts_obj_length) - memcpy_num = ts_obj_length - offset; + // If the offset is too large for the entire value, but small enough to fit + // part of the value, throw an error and return only if should_assert is + // true. Otherwise, write the part of the value that fits. + if (offset + memcpy_num > ts_obj_length) { + if (should_assert) + THROW_AND_RETURN_IF_OOB(false); + else + memcpy_num = ts_obj_length - offset; + } union NoAlias { T val; diff --git a/test/parallel/test-buffer-write-noassert.js b/test/parallel/test-buffer-write-noassert.js index 7b2f8588c697eb..561cf8f3783ea6 100644 --- a/test/parallel/test-buffer-write-noassert.js +++ b/test/parallel/test-buffer-write-noassert.js @@ -4,6 +4,8 @@ const assert = require('assert'); // testing buffer write functions +const outOfRange = /^RangeError: (?:Index )?out of range(?: index)?$/; + function write(funx, args, result, res) { { const buf = Buffer.alloc(9); @@ -11,14 +13,8 @@ function write(funx, args, result, res) { assert.deepStrictEqual(buf, res); } - { - const invalidArgs = Array.from(args); - invalidArgs[1] = -1; - assert.throws( - () => Buffer.alloc(9)[funx](...invalidArgs), - /^RangeError: (?:Index )?out of range(?: index)?$/ - ); - } + writeInvalidOffset(-1); + writeInvalidOffset(9); { const error = /Int/.test(funx) ? @@ -37,6 +33,15 @@ function write(funx, args, result, res) { assert.deepStrictEqual(buf2, res); } + function writeInvalidOffset(offset) { + const newArgs = Array.from(args); + newArgs[1] = offset; + assert.throws(() => Buffer.alloc(9)[funx](...newArgs), outOfRange); + + const buf = Buffer.alloc(9); + buf[funx](...newArgs, true); + assert.deepStrictEqual(buf, Buffer.alloc(9)); + } } write('writeInt8', [1, 0], 1, Buffer.from([1, 0, 0, 0, 0, 0, 0, 0, 0])); @@ -57,3 +62,45 @@ write('writeDoubleBE', [1, 1], 9, Buffer.from([0, 63, 240, 0, 0, 0, 0, 0, 0])); write('writeDoubleLE', [1, 1], 9, Buffer.from([0, 0, 0, 0, 0, 0, 0, 240, 63])); write('writeFloatBE', [1, 1], 5, Buffer.from([0, 63, 128, 0, 0, 0, 0, 0, 0])); write('writeFloatLE', [1, 1], 5, Buffer.from([0, 0, 0, 128, 63, 0, 0, 0, 0])); + +function writePartial(funx, args, result, res) { + assert.throws(() => Buffer.alloc(9)[funx](...args), outOfRange); + const buf = Buffer.alloc(9); + assert.strictEqual(buf[funx](...args, true), result); + assert.deepStrictEqual(buf, res); +} + +// Test partial writes (cases where the buffer isn't large enough to hold the +// entire value, but is large enough to hold parts of it). 
+writePartial('writeIntBE', [0x0eadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0x0e, 0xad, 0xbe])); +writePartial('writeIntLE', [0x0eadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeInt16BE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x12])); +writePartial('writeInt16LE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x34])); +writePartial('writeInt32BE', [0x0eadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0x0e, 0xad, 0xbe])); +writePartial('writeInt32LE', [0x0eadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeUIntBE', [0xdeadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xde, 0xad, 0xbe])); +writePartial('writeUIntLE', [0xdeadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeUInt16BE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x12])); +writePartial('writeUInt16LE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x34])); +writePartial('writeUInt32BE', [0xdeadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xde, 0xad, 0xbe])); +writePartial('writeUInt32LE', [0xdeadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeDoubleBE', [1, 2], 10, + Buffer.from([0, 0, 63, 240, 0, 0, 0, 0, 0])); +writePartial('writeDoubleLE', [1, 2], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 240])); +writePartial('writeFloatBE', [1, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 63, 128, 0])); +writePartial('writeFloatLE', [1, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 128])); From 2f4ad6fea2fb6a57c78413fb44120cf7fd24ad7c Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Sun, 19 Mar 2017 15:02:17 +0200 Subject: [PATCH 16/67] benchmark: harmonize progress bar + stderr output Add a space for minimal readability. PR-URL: https://github.com/nodejs/node/pull/11925 Reviewed-By: Joyee Cheung Reviewed-By: James M Snell --- benchmark/_benchmark_progress.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmark/_benchmark_progress.js b/benchmark/_benchmark_progress.js index 4b42248f246858..4afae2f77d4d2f 100644 --- a/benchmark/_benchmark_progress.js +++ b/benchmark/_benchmark_progress.js @@ -104,7 +104,7 @@ class BenchmarkProgress { `| ${fraction(completedFiles, scheduledFiles)} files ` + `| ${fraction(completedRunsForFile, runsPerFile)} runs ` + `| ${fraction(completedConfig, scheduledConfig)} configs]` + - `: ${caption}`; + `: ${caption} `; } updateProgress(finished) { From 3e4ecca0beef152217ff8f532921552b0cff9ace Mon Sep 17 00:00:00 2001 From: TheBeastOfCaerbannog Date: Tue, 21 Mar 2017 12:02:26 +0300 Subject: [PATCH 17/67] build: don't create directory for NDK toolchain Let make-standalone-toolchain.sh create directory. 
PR-URL: https://github.com/nodejs/node/pull/11916 Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell --- android-configure | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/android-configure b/android-configure index 1dc238ebd0c522..59f0a40c1ac214 100755 --- a/android-configure +++ b/android-configure @@ -38,13 +38,26 @@ case $ARCH in ;; esac +NDK_PATH=$1 +function make_toolchain { + $NDK_PATH/build/tools/make-standalone-toolchain.sh \ + --toolchain=$TOOLCHAIN_NAME-$CC_VER \ + --arch=$ARCH \ + --install-dir=$TOOLCHAIN \ + --platform=android-21 +} + export TOOLCHAIN=$PWD/android-toolchain -mkdir -p $TOOLCHAIN -$1/build/tools/make-standalone-toolchain.sh \ - --toolchain=$TOOLCHAIN_NAME-$CC_VER \ - --arch=$ARCH \ - --install-dir=$TOOLCHAIN \ - --platform=android-21 +if [ -d "$TOOLCHAIN" ]; then + read -r -p "NDK toolchain already exists. Replace it? [y/N]" response + case "$response" in + [Yy]) + rm -rf "$TOOLCHAIN" + make_toolchain + esac +else + make_toolchain +fi export PATH=$TOOLCHAIN/bin:$PATH export AR=$TOOLCHAIN/bin/$SUFFIX-ar export CC=$TOOLCHAIN/bin/$SUFFIX-gcc From 486bd1bd9b6badb028d879b2d5e521995d1c7c5d Mon Sep 17 00:00:00 2001 From: Bradley Farias Date: Fri, 17 Mar 2017 08:16:44 -0500 Subject: [PATCH 18/67] doc: require uses fs root for '/' prefix PR-URL: https://github.com/nodejs/node/pull/11897 Fixes: https://github.com/nodejs/node/issues/7151 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- doc/api/modules.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/api/modules.md b/doc/api/modules.md index a0c669f20956e8..397964c2511aed 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -149,11 +149,13 @@ require(X) from module at path Y 1. If X is a core module, a. return the core module b. STOP -2. If X begins with './' or '/' or '../' +2. If X begins with '/' + a. set Y to be the filesystem root +3. If X begins with './' or '/' or '../' a. LOAD_AS_FILE(Y + X) b. LOAD_AS_DIRECTORY(Y + X) -3. LOAD_NODE_MODULES(X, dirname(Y)) -4. THROW "not found" +4. LOAD_NODE_MODULES(X, dirname(Y)) +5. THROW "not found" LOAD_AS_FILE(X) 1. If X is a file, load X as JavaScript text. STOP From 0f2642ee36031adac63eb159822b7e2f7bfea033 Mon Sep 17 00:00:00 2001 From: DavidCai Date: Fri, 17 Mar 2017 14:13:58 +0800 Subject: [PATCH 19/67] errors: remove needless lazyAssert PR-URL: https://github.com/nodejs/node/pull/11891 Reviewed-By: Timothy Gu Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Daijiro Wachi --- lib/internal/errors.js | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 3cab1409422d6f..01e4e482cb0e69 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -6,16 +6,11 @@ // value statically and permanently identifies the error. While the error // message may change, the code should not. 
+const assert = require('assert'); const kCode = Symbol('code'); const messages = new Map(); -var assert, util; -function lazyAssert() { - if (!assert) - assert = require('assert'); - return assert; -} - +var util; function lazyUtil() { if (!util) util = require('util'); @@ -41,7 +36,6 @@ function makeNodeError(Base) { } function message(key, args) { - const assert = lazyAssert(); assert.strictEqual(typeof key, 'string'); const util = lazyUtil(); const msg = messages.get(key); @@ -60,7 +54,6 @@ function message(key, args) { // Utility function for registering the error codes. Only used here. Exported // *only* to allow for testing. function E(sym, val) { - const assert = lazyAssert(); assert(messages.has(sym) === false, `Error symbol: ${sym} was already used.`); messages.set(sym, typeof val === 'function' ? val : String(val)); } From 96ad336d9e78e379a602be38e2534dae0e2621a7 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 19 Mar 2017 20:49:47 -0700 Subject: [PATCH 20/67] doc: remove superfluous sample assert code Remove superfluous sample code. Since `assert()` is documented as an alias of `assert.ok()` and nothing more, the sample code for `assert.ok()` is sufficient. PR-URL: https://github.com/nodejs/node/pull/11933 Reviewed-By: Luigi Pinca Reviewed-By: Vse Mozhet Byt Reviewed-By: Gibson Fahnestock Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- doc/api/assert.md | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/doc/api/assert.md b/doc/api/assert.md index 4dc58000390b15..30140f0000b791 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -12,22 +12,7 @@ added: v0.5.9 * `value` {any} * `message` {any} -An alias of [`assert.ok()`][] . - -```js -const assert = require('assert'); - -assert(true); -// OK -assert(1); -// OK -assert(false); -// throws "AssertionError: false == true" -assert(0); -// throws "AssertionError: 0 == true" -assert(false, 'it\'s false'); -// throws "AssertionError: it's false" -``` +An alias of [`assert.ok()`][]. ## assert.deepEqual(actual, expected[, message]) - -Streams can be either [Readable][], [Writable][], or both ([Duplex][]). - -All streams are EventEmitters, but they also have other custom methods -and properties depending on whether they are Readable, Writable, or -Duplex. - -If a stream is both Readable and Writable, then it implements all of -the methods and events. So, a [Duplex][] or [Transform][] stream is -fully described by this API, though their implementation may be -somewhat different. - -It is not necessary to implement Stream interfaces in order to consume -streams in your programs. If you **are** implementing streaming -interfaces in your own program, please also refer to -[API for Stream Implementors][]. - -Almost all Node.js programs, no matter how simple, use Streams in some -way. Here is an example of using Streams in an Node.js program: - -```js -const http = require('http'); - -var server = http.createServer( (req, res) => { - // req is an http.IncomingMessage, which is a Readable Stream - // res is an http.ServerResponse, which is a Writable Stream - - var body = ''; - // we want to get the data as utf8 strings - // If you don't set an encoding, then you'll get Buffer objects - req.setEncoding('utf8'); - - // Readable streams emit 'data' events once a listener is added - req.on('data', (chunk) => { - body += chunk; - }); - - // the end event tells you that you have entire body - req.on('end', () => { - try { - var data = JSON.parse(body); - } catch (er) { - // uh oh! 
bad json! - res.statusCode = 400; - return res.end(`error: ${er.message}`); - } - - // write back something interesting to the user: - res.write(typeof data); - res.end(); - }); -}); - -server.listen(1337); - -// $ curl localhost:1337 -d '{}' -// object -// $ curl localhost:1337 -d '"foo"' -// string -// $ curl localhost:1337 -d 'not json' -// error: Unexpected token o -``` - -### Class: stream.Duplex - -Duplex streams are streams that implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Duplex streams include: - -* [TCP sockets][] -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Readable - - - -The Readable stream interface is the abstraction for a *source* of -data that you are reading from. In other words, data comes *out* of a -Readable stream. - -A Readable stream will not start emitting data until you indicate that -you are ready to receive it. - -Readable streams have two "modes": a **flowing mode** and a **paused -mode**. When in flowing mode, data is read from the underlying system -and provided to your program as fast as possible. In paused mode, you -must explicitly call [`stream.read()`][stream-read] to get chunks of data out. -Streams start out in paused mode. - -**Note**: If no data event handlers are attached, and there are no -[`stream.pipe()`][] destinations, and the stream is switched into flowing -mode, then data will be lost. - -You can switch to flowing mode by doing any of the following: - -* Adding a [`'data'`][] event handler to listen for data. -* Calling the [`stream.resume()`][stream-resume] method to explicitly open the - flow. -* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. - -You can switch back to paused mode by doing either of the following: - -* If there are no pipe destinations, by calling the - [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the - [`stream.unpipe()`][] method. - -Note that, for backwards compatibility reasons, removing [`'data'`][] -event handlers will **not** automatically pause the stream. Also, if -there are piped destinations, then calling [`stream.pause()`][stream-pause] will -not guarantee that the stream will *remain* paused once those -destinations drain and ask for more data. - -Examples of readable streams include: - -* [HTTP responses, on the client][http-incoming-message] -* [HTTP requests, on the server][http-incoming-message] -* [fs read streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdout and stderr][] -* [`process.stdin`][] - -#### Event: 'close' - -Emitted when the stream and any of its underlying resources (a file -descriptor, for example) have been closed. The event indicates that -no more events will be emitted, and no further computation will occur. - -Not all streams will emit the `'close'` event. - -#### Event: 'data' - -* `chunk` {Buffer|String} The chunk of data. - -Attaching a `'data'` event listener to a stream that has not been -explicitly paused will switch the stream into flowing mode. Data will -then be passed as soon as it is available. - -If you just want to get all the data out of the stream as fast as -possible, this is the best way to do so. 
- -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -``` - -#### Event: 'end' - -This event fires when there will be no more data to read. - -Note that the `'end'` event **will not fire** unless the data is -completely consumed. This can be done by switching into flowing mode, -or by calling [`stream.read()`][stream-read] repeatedly until you get to the -end. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -readable.on('end', () => { - console.log('there will be no more data.'); -}); -``` - -#### Event: 'error' - -* {Error Object} - -Emitted if there was an error receiving data. - -#### Event: 'readable' - -When a chunk of data can be read from the stream, it will emit a -`'readable'` event. - -In some cases, listening for a `'readable'` event will cause some data -to be read into the internal buffer from the underlying system, if it -hadn't already. - -```javascript -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - // there is some data to read now -}); -``` - -Once the internal buffer is drained, a `'readable'` event will fire -again when more data is available. - -The `'readable'` event is not emitted in the "flowing" mode with the -sole exception of the last one, on end-of-stream. - -The `'readable'` event indicates that the stream has new information: -either new data is available or the end of the stream has been reached. -In the former case, [`stream.read()`][stream-read] will return that data. In the -latter case, [`stream.read()`][stream-read] will return null. For instance, in -the following example, `foo.txt` is an empty file: - -```js -const fs = require('fs'); -var rr = fs.createReadStream('foo.txt'); -rr.on('readable', () => { - console.log('readable:', rr.read()); -}); -rr.on('end', () => { - console.log('end'); -}); -``` - -The output of running this script is: - -``` -$ node test.js -readable: null -end -``` - -#### readable.isPaused() - -* Return: {Boolean} - -This method returns whether or not the `readable` has been **explicitly** -paused by client code (using [`stream.pause()`][stream-pause] without a -corresponding [`stream.resume()`][stream-resume]). - -```js -var readable = new stream.Readable - -readable.isPaused() // === false -readable.pause() -readable.isPaused() // === true -readable.resume() -readable.isPaused() // === false -``` - -#### readable.pause() - -* Return: `this` - -This method will cause a stream in flowing mode to stop emitting -[`'data'`][] events, switching out of flowing mode. Any data that becomes -available will remain in the internal buffer. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); - readable.pause(); - console.log('there will be no more data for 1 second'); - setTimeout(() => { - console.log('now data will start flowing again'); - readable.resume(); - }, 1000); -}); -``` - -#### readable.pipe(destination[, options]) - -* `destination` {stream.Writable} The destination for writing data -* `options` {Object} Pipe options - * `end` {Boolean} End the writer when the reader ends. Default = `true` - -This method pulls all the data out of a readable stream, and writes it -to the supplied destination, automatically managing the flow so that -the destination is not overwhelmed by a fast readable stream. 
- -Multiple destinations can be piped to safely. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt' -readable.pipe(writable); -``` - -This function returns the destination stream, so you can set up pipe -chains like so: - -```js -var r = fs.createReadStream('file.txt'); -var z = zlib.createGzip(); -var w = fs.createWriteStream('file.txt.gz'); -r.pipe(z).pipe(w); -``` - -For example, emulating the Unix `cat` command: - -```js -process.stdin.pipe(process.stdout); -``` - -By default [`stream.end()`][stream-end] is called on the destination when the -source stream emits [`'end'`][], so that `destination` is no longer writable. -Pass `{ end: false }` as `options` to keep the destination stream open. - -This keeps `writer` open so that "Goodbye" can be written at the -end. - -```js -reader.pipe(writer, { end: false }); -reader.on('end', () => { - writer.end('Goodbye\n'); -}); -``` - -Note that [`process.stderr`][] and [`process.stdout`][] are never closed until -the process exits, regardless of the specified options. - -#### readable.read([size]) - -* `size` {Number} Optional argument to specify how much data to read. -* Return {String|Buffer|Null} - -The `read()` method pulls some data out of the internal buffer and -returns it. If there is no data available, then it will return -`null`. - -If you pass in a `size` argument, then it will return that many -bytes. If `size` bytes are not available, then it will return `null`, -unless we've ended, in which case it will return the data remaining -in the buffer. - -If you do not specify a `size` argument, then it will return all the -data in the internal buffer. - -This method should only be called in paused mode. In flowing mode, -this method is called automatically until the internal buffer is -drained. - -```js -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - var chunk; - while (null !== (chunk = readable.read())) { - console.log('got %d bytes of data', chunk.length); - } -}); -``` - -If this method returns a data chunk, then it will also trigger the -emission of a [`'data'`][] event. - -Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][] -event has been triggered will return `null`. No runtime error will be raised. - -#### readable.resume() - -* Return: `this` - -This method will cause the readable stream to resume emitting [`'data'`][] -events. - -This method will switch the stream into flowing mode. If you do *not* -want to consume the data from a stream, but you *do* want to get to -its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open -the flow of data. - -```js -var readable = getReadableStreamSomehow(); -readable.resume(); -readable.on('end', () => { - console.log('got to the end, but did not read anything'); -}); -``` - -#### readable.setEncoding(encoding) - -* `encoding` {String} The encoding to use. -* Return: `this` - -Call this function to cause the stream to return strings of the specified -encoding instead of Buffer objects. For example, if you do -`readable.setEncoding('utf8')`, then the output data will be interpreted as -UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`, -then the data will be encoded in hexadecimal string format. - -This properly handles multi-byte characters that would otherwise be -potentially mangled if you simply pulled the Buffers directly and -called [`buf.toString(encoding)`][] on them. 
If you want to read the data -as strings, always use this method. - -Also you can disable any encoding at all with `readable.setEncoding(null)`. -This approach is very useful if you deal with binary data or with large -multi-byte strings spread out over multiple chunks. - -```js -var readable = getReadableStreamSomehow(); -readable.setEncoding('utf8'); -readable.on('data', (chunk) => { - assert.equal(typeof chunk, 'string'); - console.log('got %d characters of string data', chunk.length); -}); -``` - -#### readable.unpipe([destination]) - -* `destination` {stream.Writable} Optional specific stream to unpipe - -This method will remove the hooks set up for a previous [`stream.pipe()`][] -call. - -If the destination is not specified, then all pipes are removed. - -If the destination is specified, but no pipe is set up for it, then -this is a no-op. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt', -// but only for the first second -readable.pipe(writable); -setTimeout(() => { - console.log('stop writing to file.txt'); - readable.unpipe(writable); - console.log('manually close the file stream'); - writable.end(); -}, 1000); -``` - -#### readable.unshift(chunk) - -* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue - -This is useful in certain cases where a stream is being consumed by a -parser, which needs to "un-consume" some data that it has -optimistically pulled out of the source, so that the stream can be -passed on to some other party. - -Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event -has been triggered; a runtime error will be raised. - -If you find that you must often call `stream.unshift(chunk)` in your -programs, consider implementing a [Transform][] stream instead. (See [API -for Stream Implementors][].) - -```js -// Pull off a header delimited by \n\n -// use unshift() if we get too much -// Call the callback with (error, header, stream) -const StringDecoder = require('string_decoder').StringDecoder; -function parseHeader(stream, callback) { - stream.on('error', callback); - stream.on('readable', onReadable); - var decoder = new StringDecoder('utf8'); - var header = ''; - function onReadable() { - var chunk; - while (null !== (chunk = stream.read())) { - var str = decoder.write(chunk); - if (str.match(/\n\n/)) { - // found the header boundary - var split = str.split(/\n\n/); - header += split.shift(); - var remaining = split.join('\n\n'); - var buf = new Buffer(remaining, 'utf8'); - if (buf.length) - stream.unshift(buf); - stream.removeListener('error', callback); - stream.removeListener('readable', onReadable); - // now the body of the message can be read from the stream. - callback(null, header, stream); - } else { - // still reading the header. - header += str; - } - } - } -} -``` - -Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` -will not end the reading process by resetting the internal reading state of the -stream. This can cause unexpected results if `unshift()` is called during a -read (i.e. from within a [`stream._read()`][stream-_read] implementation on a -custom stream). Following the call to `unshift()` with an immediate -[`stream.push('')`][stream-push] will reset the reading state appropriately, -however it is best to simply avoid calling `unshift()` while in the process of -performing a read. 
- -#### readable.wrap(stream) - -* `stream` {Stream} An "old style" readable stream - -Versions of Node.js prior to v0.10 had streams that did not implement the -entire Streams API as it is today. (See [Compatibility][] for -more information.) - -If you are using an older Node.js library that emits [`'data'`][] events and -has a [`stream.pause()`][stream-pause] method that is advisory only, then you -can use the `wrap()` method to create a [Readable][] stream that uses the old -stream as its data source. - -You will very rarely ever need to call this function, but it exists -as a convenience for interacting with old Node.js programs and libraries. - -For example: - -```js -const OldReader = require('./old-api-module.js').OldReader; -const Readable = require('stream').Readable; -const oreader = new OldReader; -const myReader = new Readable().wrap(oreader); - -myReader.on('readable', () => { - myReader.read(); // etc. -}); -``` - -### Class: stream.Transform - -Transform streams are [Duplex][] streams where the output is in some way -computed from the input. They implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Transform streams include: - -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Writable - - - -The Writable stream interface is an abstraction for a *destination* -that you are writing data *to*. - -Examples of writable streams include: - -* [HTTP requests, on the client][] -* [HTTP responses, on the server][] -* [fs write streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdin][] -* [`process.stdout`][], [`process.stderr`][] - -#### Event: 'drain' - -If a [`stream.write(chunk)`][stream-write] call returns `false`, then the -`'drain'` event will indicate when it is appropriate to begin writing more data -to the stream. - -```js -// Write the data to the supplied writable stream one million times. -// Be attentive to back-pressure. -function writeOneMillionTimes(writer, data, encoding, callback) { - var i = 1000000; - write(); - function write() { - var ok = true; - do { - i -= 1; - if (i === 0) { - // last time! - writer.write(data, encoding, callback); - } else { - // see if we should continue, or wait - // don't pass the callback, because we're not done yet. - ok = writer.write(data, encoding); - } - } while (i > 0 && ok); - if (i > 0) { - // had to stop early! - // write some more once it drains - writer.once('drain', write); - } - } -} -``` - -#### Event: 'error' - -* {Error} - -Emitted if there was an error when writing or piping data. - -#### Event: 'finish' - -When the [`stream.end()`][stream-end] method has been called, and all data has -been flushed to the underlying system, this event is emitted. - -```javascript -var writer = getWritableStreamSomehow(); -for (var i = 0; i < 100; i ++) { - writer.write('hello, #${i}!\n'); -} -writer.end('this is the end\n'); -writer.on('finish', () => { - console.error('all writes are now complete.'); -}); -``` - -#### Event: 'pipe' - -* `src` {stream.Readable} source stream that is piping to this writable - -This is emitted whenever the [`stream.pipe()`][] method is called on a readable -stream, adding this writable to its set of destinations. 
- -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('pipe', (src) => { - console.error('something is piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -``` - -#### Event: 'unpipe' - -* `src` {[Readable][] Stream} The source stream that - [unpiped][`stream.unpipe()`] this writable - -This is emitted whenever the [`stream.unpipe()`][] method is called on a -readable stream, removing this writable from its set of destinations. - -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('unpipe', (src) => { - console.error('something has stopped piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -reader.unpipe(writer); -``` - -#### writable.cork() - -Forces buffering of all writes. - -Buffered data will be flushed either at [`stream.uncork()`][] or at -[`stream.end()`][stream-end] call. - -#### writable.end([chunk][, encoding][, callback]) - -* `chunk` {String|Buffer} Optional data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Optional callback for when the stream is finished - -Call this method when no more data will be written to the stream. If supplied, -the callback is attached as a listener on the [`'finish'`][] event. - -Calling [`stream.write()`][stream-write] after calling -[`stream.end()`][stream-end] will raise an error. - -```js -// write 'hello, ' and then end with 'world!' -var file = fs.createWriteStream('example.txt'); -file.write('hello, '); -file.end('world!'); -// writing more now is not allowed! -``` - -#### writable.setDefaultEncoding(encoding) - -* `encoding` {String} The new default encoding - -Sets the default encoding for a writable stream. - -#### writable.uncork() - -Flush all data, buffered since [`stream.cork()`][] call. - -#### writable.write(chunk[, encoding][, callback]) - -* `chunk` {String|Buffer} The data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Callback for when this chunk of data is flushed -* Returns: {Boolean} `true` if the data was handled completely. - -This method writes some data to the underlying system, and calls the -supplied callback once the data has been fully handled. - -The return value indicates if you should continue writing right now. -If the data had to be buffered internally, then it will return -`false`. Otherwise, it will return `true`. - -This return value is strictly advisory. You MAY continue to write, -even if it returns `false`. However, writes will be buffered in -memory, so it is best not to do this excessively. Instead, wait for -the [`'drain'`][] event before writing more data. - - -## API for Stream Implementors - - - -To implement any sort of stream, the pattern is the same: - -1. Extend the appropriate parent class in your own subclass. (The - [`util.inherits()`][] method is particularly helpful for this.) -2. Call the appropriate parent class constructor in your constructor, - to be sure that the internal mechanisms are set up properly. -3. Implement one or more specific methods, as detailed below. - -The class to extend and the method(s) to implement depend on the sort -of stream class you are writing: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Use-case | Class | Method(s) to implement |
-| --- | --- | --- |
-| Reading only | [Readable](#stream_class_stream_readable_1) | [_read][stream-_read] |
-| Writing only | [Writable](#stream_class_stream_writable_1) | [_write][stream-_write], [_writev][stream-_writev] |
-| Reading and writing | [Duplex](#stream_class_stream_duplex_1) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
-| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush] |
- -In your implementation code, it is very important to never call the methods -described in [API for Stream Consumers][]. Otherwise, you can potentially cause -adverse side effects in programs that consume your streaming interfaces. - -### Class: stream.Duplex - - - -A "duplex" stream is one that is both Readable and Writable, such as a TCP -socket connection. - -Note that `stream.Duplex` is an abstract class designed to be extended -with an underlying implementation of the [`stream._read(size)`][stream-_read] -and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you -would with a Readable or Writable stream class. - -Since JavaScript doesn't have multiple prototypal inheritance, this class -prototypally inherits from Readable, and then parasitically from Writable. It is -thus up to the user to implement both the low-level -[`stream._read(n)`][stream-_read] method as well as the low-level -[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension -duplex classes. - -#### new stream.Duplex(options) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then - the stream will automatically end the readable side when the - writable side ends and vice versa. - * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for readable side of the stream. Has no effect if `objectMode` - is `true`. - * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for writable side of the stream. Has no effect if `objectMode` - is `true`. - -In classes that extend the Duplex class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### Class: stream.PassThrough - -This is a trivial implementation of a [Transform][] stream that simply -passes the input bytes across to the output. Its purpose is mainly -for examples and testing, but there are occasionally use cases where -it can come in handy as a building block for novel sorts of streams. - -### Class: stream.Readable - - - -`stream.Readable` is an abstract class designed to be extended with an -underlying implementation of the [`stream._read(size)`][stream-_read] method. - -Please see [API for Stream Consumers][] for how to consume -streams in your programs. What follows is an explanation of how to -implement Readable streams in your programs. - -#### new stream.Readable([options]) - -* `options` {Object} - * `highWaterMark` {Number} The maximum number of bytes to store in - the internal buffer before ceasing to read from the underlying - resource. Default = `16384` (16kb), or `16` for `objectMode` streams - * `encoding` {String} If specified, then buffers will be decoded to - strings using the specified encoding. Default = `null` - * `objectMode` {Boolean} Whether this stream should behave - as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns - a single value instead of a Buffer of size n. Default = `false` - * `read` {Function} Implementation for the [`stream._read()`][stream-_read] - method. - -In classes that extend the Readable class, make sure to call the -Readable constructor so that the buffering settings can be properly -initialized. 
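For illustration only, here is a minimal sketch of that constructor contract, using a made-up `MySource` class that is not part of the documented API: the options object is forwarded to the Readable constructor so that `highWaterMark`, `encoding` and `objectMode` take effect, and a trivial `_read()` implementation (described in the next section) supplies the data.

```js
const Readable = require('stream').Readable;
const util = require('util');

util.inherits(MySource, Readable);

function MySource(options) {
  // Forward the options so the buffering settings are initialized properly.
  Readable.call(this, options);
  this._sent = false;
}

MySource.prototype._read = function(size) {
  if (this._sent)
    return this.push(null);   // signal EOF
  this._sent = true;
  this.push('hello world');   // a single chunk, then end
};

// e.g. var src = new MySource({ encoding: 'utf8', highWaterMark: 1024 });
```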
- -#### readable.\_read(size) - -* `size` {Number} Number of bytes to read asynchronously - -Note: **Implement this method, but do NOT call it directly.** - -This method is prefixed with an underscore because it is internal to the -class that defines it and should only be called by the internal Readable -class methods. All Readable stream implementations must provide a \_read -method to fetch data from the underlying resource. - -When `_read()` is called, if data is available from the resource, the `_read()` -implementation should start pushing that data into the read queue by calling -[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from -the resource and pushing data until push returns `false`, at which point it -should stop reading from the resource. Only when `_read()` is called again after -it has stopped should it start reading more data from the resource and pushing -that data onto the queue. - -Note: once the `_read()` method is called, it will not be called again until -the [`stream.push()`][stream-push] method is called. - -The `size` argument is advisory. Implementations where a "read" is a -single call that returns data can use this to know how much data to -fetch. Implementations where that is not relevant, such as TCP or -TLS, may ignore this argument, and simply provide data whenever it -becomes available. There is no need, for example to "wait" until -`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. - -#### readable.push(chunk[, encoding]) - - -* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue -* `encoding` {String} Encoding of String chunks. Must be a valid - Buffer encoding, such as `'utf8'` or `'ascii'` -* return {Boolean} Whether or not more pushes should be performed - -Note: **This method should be called by Readable implementors, NOT -by consumers of Readable streams.** - -If a value other than null is passed, The `push()` method adds a chunk of data -into the queue for subsequent stream processors to consume. If `null` is -passed, it signals the end of the stream (EOF), after which no more data -can be written. - -The data added with `push()` can be pulled out by calling the -[`stream.read()`][stream-read] method when the [`'readable'`][] event fires. - -This API is designed to be as flexible as possible. For example, -you may be wrapping a lower-level source which has some sort of -pause/resume mechanism, and a data callback. In those cases, you -could wrap the low-level source object by doing something like this: - -```js -// source is an object with readStop() and readStart() methods, -// and an `ondata` member that gets called when it has data, and -// an `onend` member that gets called when the data is over. - -util.inherits(SourceWrapper, Readable); - -function SourceWrapper(options) { - Readable.call(this, options); - - this._source = getLowlevelSourceObject(); - - // Every time there's data, we push it into the internal buffer. - this._source.ondata = (chunk) => { - // if push() returns false, then we need to stop reading from source - if (!this.push(chunk)) - this._source.readStop(); - }; - - // When the source ends, we push the EOF-signaling `null` chunk - this._source.onend = () => { - this.push(null); - }; -} - -// _read will be called when the stream wants to pull more data in -// the advisory size argument is ignored in this case. 
-SourceWrapper.prototype._read = function(size) { - this._source.readStart(); -}; -``` - -#### Example: A Counting Stream - - - -This is a basic example of a Readable stream. It emits the numerals -from 1 to 1,000,000 in ascending order, and then ends. - -```js -const Readable = require('stream').Readable; -const util = require('util'); -util.inherits(Counter, Readable); - -function Counter(opt) { - Readable.call(this, opt); - this._max = 1000000; - this._index = 1; -} - -Counter.prototype._read = function() { - var i = this._index++; - if (i > this._max) - this.push(null); - else { - var str = '' + i; - var buf = new Buffer(str, 'ascii'); - this.push(buf); - } -}; -``` - -#### Example: SimpleProtocol v1 (Sub-optimal) - -This is similar to the `parseHeader` function described -[here](#stream_readable_unshift_chunk), but implemented as a custom stream. -Also, note that this implementation does not convert the incoming data to a -string. - -However, this would be better implemented as a [Transform][] stream. See -[SimpleProtocol v2][] for a better implementation. - -```js -// A parser for a simple data protocol. -// The "header" is a JSON object, followed by 2 \n characters, and -// then a message body. -// -// NOTE: This can be done more simply as a Transform stream! -// Using Readable directly for this is sub-optimal. See the -// alternative example below under the Transform section. - -const Readable = require('stream').Readable; -const util = require('util'); - -util.inherits(SimpleProtocol, Readable); - -function SimpleProtocol(source, options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(source, options); - - Readable.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - - // source is a readable stream, such as a socket or file - this._source = source; - - var self = this; - source.on('end', () => { - self.push(null); - }); - - // give it a kick whenever the source is readable - // read(0) will not consume any bytes - source.on('readable', () => { - self.read(0); - }); - - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._read = function(n) { - if (!this._inBody) { - var chunk = this._source.read(); - - // if the source doesn't have data, we don't have data yet. - if (chunk === null) - return this.push(''); - - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. - this._rawHeader.push(chunk); - this.push(''); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // now, because we got some extra data, unshift the rest - // back into the read queue so that our consumer will see it. - var b = chunk.slice(split); - this.unshift(b); - // calling unshift by itself does not reset the reading state - // of the stream; since we're inside _read, doing an additional - // push('') will reset the state appropriately. - this.push(''); - - // and let them know that we are done parsing the header. 
- this.emit('header', this.header); - } - } else { - // from there on, just provide the data to our consumer. - // careful not to push(null), since that would indicate EOF. - var chunk = this._source.read(); - if (chunk) this.push(chunk); - } -}; - -// Usage: -// var parser = new SimpleProtocol(source); -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Transform - -A "transform" stream is a duplex stream where the output is causally -connected in some way to the input, such as a [zlib][] stream or a -[crypto][] stream. - -There is no requirement that the output be the same size as the input, -the same number of chunks, or arrive at the same time. For example, a -Hash stream will only ever have a single chunk of output which is -provided when the input is ended. A zlib stream will produce output -that is either much smaller or much larger than its input. - -Rather than implement the [`stream._read()`][stream-_read] and -[`stream._write()`][stream-_write] methods, Transform classes must implement the -[`stream._transform()`][stream-_transform] method, and may optionally -also implement the [`stream._flush()`][stream-_flush] method. (See below.) - -#### new stream.Transform([options]) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `transform` {Function} Implementation for the - [`stream._transform()`][stream-_transform] method. - * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] - method. - -In classes that extend the Transform class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### Events: 'finish' and 'end' - -The [`'finish'`][] and [`'end'`][] events are from the parent Writable -and Readable classes respectively. The `'finish'` event is fired after -[`stream.end()`][stream-end] is called and all chunks have been processed by -[`stream._transform()`][stream-_transform], `'end'` is fired after all data has -been output which is after the callback in [`stream._flush()`][stream-_flush] -has been called. - -#### transform.\_flush(callback) - -* `callback` {Function} Call this function (optionally with an error - argument) when you are done flushing any remaining data. - -Note: **This function MUST NOT be called directly.** It MAY be implemented -by child classes, and if so, will be called by the internal Transform -class methods only. - -In some cases, your transform operation may need to emit a bit more -data at the end of the stream. For example, a `Zlib` compression -stream will store up some internal state so that it can optimally -compress the output. At the end, however, it needs to do the best it -can with what is left, so that the data will be complete. - -In those cases, you can implement a `_flush()` method, which will be -called at the very end, after all the written data is consumed, but -before emitting [`'end'`][] to signal the end of the readable side. Just -like with [`stream._transform()`][stream-_transform], call -`transform.push(chunk)` zero or more times, as appropriate, and call `callback` -when the flush operation is complete. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. 
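As a purely illustrative sketch (the `LineChunker` name is invented for this example and is not part of any documented API), the following transform holds back a trailing partial line in its `_transform()` method, described next, and emits the remainder from `_flush()` before `'end'` is fired:

```js
const Transform = require('stream').Transform;
const util = require('util');

util.inherits(LineChunker, Transform);

// Hypothetical transform: re-emits its input as newline-terminated pieces,
// buffering an unterminated remainder until more data (or the end) arrives.
function LineChunker(options) {
  Transform.call(this, options);
  this._partial = '';
}

LineChunker.prototype._transform = function(chunk, encoding, callback) {
  var lines = (this._partial + chunk.toString()).split('\n');
  this._partial = lines.pop();          // keep the trailing partial line
  for (var i = 0; i < lines.length; i++)
    this.push(lines[i] + '\n');
  callback();
};

LineChunker.prototype._flush = function(callback) {
  // All written data has been consumed; push whatever is left before 'end'.
  if (this._partial)
    this.push(this._partial);
  callback();
};
```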
- -#### transform.\_transform(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be transformed. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument and data) when you are done processing the supplied chunk. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Transform -class methods only. - -All Transform stream implementations must provide a `_transform()` -method to accept input and produce output. - -`_transform()` should do whatever has to be done in this specific -Transform class, to handle the bytes being written, and pass them off -to the readable portion of the interface. Do asynchronous I/O, -process things, and so on. - -Call `transform.push(outputChunk)` 0 or more times to generate output -from this input chunk, depending on how much data you want to output -as a result of this chunk. - -Call the callback function only when the current chunk is completely -consumed. Note that there may or may not be output as a result of any -particular input chunk. If you supply a second argument to the callback -it will be passed to the push method. In other words the following are -equivalent: - -```js -transform.prototype._transform = function (data, encoding, callback) { - this.push(data); - callback(); -}; - -transform.prototype._transform = function (data, encoding, callback) { - callback(null, data); -}; -``` - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### Example: `SimpleProtocol` parser v2 - -The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple -protocol parser can be implemented simply by using the higher level -[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol -v1` examples. - -In this example, rather than providing the input as an argument, it -would be piped into the parser, which is a more idiomatic Node.js stream -approach. - -```javascript -const util = require('util'); -const Transform = require('stream').Transform; -util.inherits(SimpleProtocol, Transform); - -function SimpleProtocol(options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(options); - - Transform.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._transform = function(chunk, encoding, done) { - if (!this._inBody) { - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. 
- this._rawHeader.push(chunk); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // and let them know that we are done parsing the header. - this.emit('header', this.header); - - // now, because we got some extra data, emit this first. - this.push(chunk.slice(split)); - } - } else { - // from there on, just provide the data to our consumer as-is. - this.push(chunk); - } - done(); -}; - -// Usage: -// var parser = new SimpleProtocol(); -// source.pipe(parser) -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Writable - - - -`stream.Writable` is an abstract class designed to be extended with an -underlying implementation of the -[`stream._write(chunk, encoding, callback)`][stream-_write] method. - -Please see [API for Stream Consumers][] for how to consume -writable streams in your programs. What follows is an explanation of -how to implement Writable streams in your programs. - -#### new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when - [`stream.write()`][stream-write] starts returning `false`. Default = `16384` - (16kb), or `16` for `objectMode` streams. - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to [`stream._write()`][stream-_write]. - Default = `true` - * `objectMode` {Boolean} Whether or not the - [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can - write arbitrary data instead of only `Buffer` / `String` data. - Default = `false` - * `write` {Function} Implementation for the - [`stream._write()`][stream-_write] method. - * `writev` {Function} Implementation for the - [`stream._writev()`][stream-_writev] method. - -In classes that extend the Writable class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be written. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunk. - -All Writable stream implementations must provide a -[`stream._write()`][stream-_write] method to send data to the underlying -resource. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Writable -class methods only. - -Call the callback using the standard `callback(error)` pattern to -signal that the write completed successfully or with an error. - -If the `decodeStrings` flag is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the sort of string that it is. This is to support -implementations that have an optimized handling for certain string -data encodings. If you do not explicitly set the `decodeStrings` -option to `false`, then you can safely ignore the `encoding` argument, -and assume that `chunk` will always be a Buffer. 
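For illustration, a minimal sketch of the `callback(error)` pattern, using a made-up `StderrLogger` class that simply forwards each chunk to [`process.stderr`][]:

```js
const Writable = require('stream').Writable;
const util = require('util');

util.inherits(StderrLogger, Writable);

// Hypothetical Writable: forwards every chunk to process.stderr.
function StderrLogger(options) {
  Writable.call(this, options);
}

StderrLogger.prototype._write = function(chunk, encoding, callback) {
  // With the default decodeStrings === true, chunk is always a Buffer here.
  process.stderr.write(chunk, (err) => {
    callback(err);  // signal success or failure for this chunk
  });
};

// e.g. process.stdin.pipe(new StderrLogger());
```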
- -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### writable.\_writev(chunks, callback) - -* `chunks` {Array} The chunks to be written. Each chunk has following - format: `{ chunk: ..., encoding: ... }`. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunks. - -Note: **This function MUST NOT be called directly.** It may be -implemented by child classes, and called by the internal Writable -class methods only. - -This function is completely optional to implement. In most cases it is -unnecessary. If implemented, it will be called with all the chunks -that are buffered in the write queue. - - -## Simplified Constructor API - - - -In simple cases there is now the added benefit of being able to construct a -stream without inheritance. - -This can be done by passing the appropriate methods as constructor options: - -Examples: - -### Duplex - -```js -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -### Readable - -```js -var readable = new stream.Readable({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - } -}); -``` - -### Transform - -```js -var transform = new stream.Transform({ - transform: function(chunk, encoding, next) { - // sets this._transform under the hood - - // generate output as many times as needed - // this.push(chunk); - - // call when the current chunk is consumed - next(); - }, - flush: function(done) { - // sets this._flush under the hood - - // generate output as many times as needed - // this.push(chunk); - - done(); - } -}); -``` - -### Writable - -```js -var writable = new stream.Writable({ - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var writable = new stream.Writable({ - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -## Streams: Under the Hood - - - -### Buffering - - - -Both Writable and Readable streams will buffer data on an internal -object which can be retrieved from `_writableState.getBuffer()` or -`_readableState.buffer`, respectively. - -The amount of data that will potentially be buffered depends on the -`highWaterMark` option which is passed into the constructor. - -Buffering in Readable streams happens when the implementation calls -[`stream.push(chunk)`][stream-push]. 
If the consumer of the Stream does not -call [`stream.read()`][stream-read], then the data will sit in the internal -queue until it is consumed. - -Buffering in Writable streams happens when the user calls -[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`. - -The purpose of streams, especially with the [`stream.pipe()`][] method, is to -limit the buffering of data to acceptable levels, so that sources and -destinations of varying speed will not overwhelm the available memory. - -### Compatibility with Older Node.js Versions - - - -In versions of Node.js prior to v0.10, the Readable stream interface was -simpler, but also less powerful and less useful. - -* Rather than waiting for you to call the [`stream.read()`][stream-read] method, - [`'data'`][] events would start emitting immediately. If you needed to do - some I/O to decide how to handle data, then you had to store the chunks - in some kind of buffer so that they would not be lost. -* The [`stream.pause()`][stream-pause] method was advisory, rather than - guaranteed. This meant that you still had to be prepared to receive - [`'data'`][] events even when the stream was in a paused state. - -In Node.js v0.10, the [Readable][] class was added. -For backwards compatibility with older Node.js programs, Readable streams -switch into "flowing mode" when a [`'data'`][] event handler is added, or -when the [`stream.resume()`][stream-resume] method is called. The effect is -that, even if you are not using the new [`stream.read()`][stream-read] method -and [`'readable'`][] event, you no longer have to worry about losing -[`'data'`][] chunks. - -Most programs will continue to function normally. However, this -introduces an edge case in the following conditions: - -* No [`'data'`][] event handler is added. -* The [`stream.resume()`][stream-resume] method is never called. -* The stream is not piped to any writable destination. - -For example, consider the following code: - -```js -// WARNING! BROKEN! -net.createServer((socket) => { - - // we add an 'end' method, but never consume the data - socket.on('end', () => { - // It will never get here. - socket.end('I got your message (but didnt read it)\n'); - }); - -}).listen(1337); -``` - -In versions of Node.js prior to v0.10, the incoming message data would be -simply discarded. However, in Node.js v0.10 and beyond, -the socket will remain paused forever. - -The workaround in this situation is to call the -[`stream.resume()`][stream-resume] method to start the flow of data: - -```js -// Workaround -net.createServer((socket) => { - - socket.on('end', () => { - socket.end('I got your message (but didnt read it)\n'); - }); - - // start the flow of data, discarding it. - socket.resume(); - -}).listen(1337); -``` - -In addition to new Readable streams switching into flowing mode, -pre-v0.10 style streams can be wrapped in a Readable class using the -[`stream.wrap()`][] method. - - -### Object Mode - - - -Normally, Streams operate on Strings and Buffers exclusively. - -Streams that are in **object mode** can emit generic JavaScript values -other than Buffers and Strings. - -A Readable stream in object mode will always return a single item from -a call to [`stream.read(size)`][stream-read], regardless of what the size -argument is. - -A Writable stream in object mode will always ignore the `encoding` -argument to [`stream.write(data, encoding)`][stream-write]. - -The special value `null` still retains its special value for object -mode streams. 
That is, for object mode readable streams, `null` as a -return value from [`stream.read()`][stream-read] indicates that there is no more -data, and [`stream.push(null)`][stream-push] will signal the end of stream data -(`EOF`). - -No streams in Node.js core are object mode streams. This pattern is only -used by userland streaming libraries. - -You should set `objectMode` in your stream child class constructor on -the options object. Setting `objectMode` mid-stream is not safe. - -For Duplex streams `objectMode` can be set exclusively for readable or -writable side with `readableObjectMode` and `writableObjectMode` -respectively. These options can be used to implement parsers and -serializers with Transform streams. - -```js -const util = require('util'); -const StringDecoder = require('string_decoder').StringDecoder; -const Transform = require('stream').Transform; -util.inherits(JSONParseStream, Transform); - -// Gets \n-delimited JSON string data, and emits the parsed objects -function JSONParseStream() { - if (!(this instanceof JSONParseStream)) - return new JSONParseStream(); - - Transform.call(this, { readableObjectMode : true }); - - this._buffer = ''; - this._decoder = new StringDecoder('utf8'); -} - -JSONParseStream.prototype._transform = function(chunk, encoding, cb) { - this._buffer += this._decoder.write(chunk); - // split on newlines - var lines = this._buffer.split(/\r?\n/); - // keep the last partial line buffered - this._buffer = lines.pop(); - for (var l = 0; l < lines.length; l++) { - var line = lines[l]; - try { - var obj = JSON.parse(line); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; - -JSONParseStream.prototype._flush = function(cb) { - // Just handle any leftover - var rem = this._buffer.trim(); - if (rem) { - try { - var obj = JSON.parse(rem); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; -``` - -### `stream.read(0)` - -There are some cases where you want to trigger a refresh of the -underlying readable stream mechanisms, without actually consuming any -data. In that case, you can call `stream.read(0)`, which will always -return null. - -If the internal read buffer is below the `highWaterMark`, and the -stream is not currently reading, then calling `stream.read(0)` will trigger -a low-level [`stream._read()`][stream-_read] call. - -There is almost never a need to do this. However, you will see some -cases in Node.js's internals where this is done, particularly in the -Readable stream class internals. - -### `stream.push('')` - -Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an -interesting side effect. Because it *is* a call to -[`stream.push()`][stream-push], it will end the `reading` process. However, it -does *not* add any data to the readable buffer, so there's nothing for -a user to consume. - -Very rarely, there are cases where you have no data to provide now, -but the consumer of your stream (or, perhaps, another bit of your own -code) will know when to check again, by calling [`stream.read(0)`][stream-read]. -In those cases, you *may* call `stream.push('')`. - -So far, the only use case for this functionality is in the -[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. 
If you -find that you have to use `stream.push('')`, please consider another -approach, because it almost certainly indicates that something is -horribly wrong. - -[`'data'`]: #stream_event_data -[`'drain'`]: #stream_event_drain -[`'end'`]: #stream_event_end -[`'finish'`]: #stream_event_finish -[`'readable'`]: #stream_event_readable -[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end -[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter -[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr -[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin -[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout -[`stream.cork()`]: #stream_writable_cork -[`stream.pipe()`]: #stream_readable_pipe_destination_options -[`stream.uncork()`]: #stream_writable_uncork -[`stream.unpipe()`]: #stream_readable_unpipe_destination -[`stream.wrap()`]: #stream_readable_wrap_stream -[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream -[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor -[API for Stream Consumers]: #stream_api_for_stream_consumers -[API for Stream Implementors]: #stream_api_for_stream_implementors -[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin -[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout -[Compatibility]: #stream_compatibility_with_older_node_js_versions -[crypto]: crypto.html -[Duplex]: #stream_class_stream_duplex -[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream -[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream -[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest -[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse -[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage -[Object mode]: #stream_object_mode -[Readable]: #stream_class_stream_readable -[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 -[stream-_flush]: #stream_transform_flush_callback -[stream-_read]: #stream_readable_read_size_1 -[stream-_transform]: #stream_transform_transform_chunk_encoding_callback -[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 -[stream-_writev]: #stream_writable_writev_chunks_callback -[stream-end]: #stream_writable_end_chunk_encoding_callback -[stream-pause]: #stream_readable_pause -[stream-push]: #stream_readable_push_chunk_encoding -[stream-read]: #stream_readable_read_size -[stream-resume]: #stream_readable_resume -[stream-write]: #stream_writable_write_chunk_encoding_callback -[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket -[Transform]: #stream_class_stream_transform -[Writable]: #stream_class_stream_writable -[zlib]: zlib.html diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md deleted file mode 100644 index c141a99c26c638..00000000000000 --- 
a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md +++ /dev/null @@ -1,58 +0,0 @@ -# streams WG Meeting 2015-01-30 - -## Links - -* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg -* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 -* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ - -## Agenda - -Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. - -* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) -* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) -* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) -* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) - -## Minutes - -### adopt a charter - -* group: +1's all around - -### What versioning scheme should be adopted? -* group: +1’s 3.0.0 -* domenic+group: pulling in patches from other sources where appropriate -* mikeal: version independently, suggesting versions for io.js -* mikeal+domenic: work with TC to notify in advance of changes -simpler stream creation - -### streamline creation of streams -* sam: streamline creation of streams -* domenic: nice simple solution posted - but, we lose the opportunity to change the model - may not be backwards incompatible (double check keys) - - **action item:** domenic will check - -### remove implicit flowing of streams on(‘data’) -* add isFlowing / isPaused -* mikeal: worrying that we’re documenting polyfill methods – confuses users -* domenic: more reflective API is probably good, with warning labels for users -* new section for mad scientists (reflective stream access) -* calvin: name the “third state” -* mikeal: maybe borrow the name from whatwg? -* domenic: we’re missing the “third state” -* consensus: kind of difficult to name the third state -* mikeal: figure out differences in states / compat -* mathias: always flow on data – eliminates third state - * explore what it breaks - -**action items:** -* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) -* ask rod/build for infrastructure -* **chris**: explore the “flow on data” approach -* add isPaused/isFlowing -* add new docs section -* move isPaused to that section diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/duplex.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/duplex.js deleted file mode 100644 index ca807af87620dd..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/duplex.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_duplex.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js deleted file mode 100644 index 736693b8400fed..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js +++ /dev/null @@ -1,75 +0,0 @@ -// a duplex stream is just a stream that is both readable and writable. 
-// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. - -'use strict'; - -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ - -module.exports = Duplex; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -var Readable = require('./_stream_readable'); -var Writable = require('./_stream_writable'); - -util.inherits(Duplex, Readable); - -var keys = objectKeys(Writable.prototype); -for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - - if (options && options.writable === false) this.writable = false; - - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - - this.once('end', onend); -} - -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. - processNextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index d06f71f1868d77..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,26 +0,0 @@ -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
- -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 54a9d5c553d69e..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,880 +0,0 @@ -'use strict'; - -module.exports = Readable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var isArray = require('isarray'); -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Readable.ReadableState = ReadableState; - -var EE = require('events'); - -/**/ -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var debugUtil = require('util'); -var debug = undefined; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ - -var StringDecoder; - -util.inherits(Readable, Stream); - -var Duplex; -function ReadableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.buffer = []; - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. 
- // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // when piping, we only care about 'readable' events that happen - // after read()ing all the bytes and not getting any pushback. - this.ranOut = false; - - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -var Duplex; -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); - - // legacy - this.readable = true; - - if (options && typeof options.read === 'function') this._read = options.read; - - Stream.call(this); -} - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - - if (!state.objectMode && typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = new Buffer(chunk, encoding); - encoding = ''; - } - } - - return readableAddChunk(this, state, chunk, encoding, false); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - var state = this._readableState; - return readableAddChunk(this, state, chunk, '', true); -}; - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; - -function readableAddChunk(stream, state, chunk, encoding, addToFront) { - var er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (state.ended && !addToFront) { - var e = new Error('stream.push() after EOF'); - stream.emit('error', e); - } else if (state.endEmitted && addToFront) { - var e = new Error('stream.unshift() after end event'); - stream.emit('error', e); - } else { - var skipAdd; - if (state.decoder && !addToFront && !encoding) { - chunk = state.decoder.write(chunk); - skipAdd = !state.objectMode && chunk.length === 0; - } - - if (!addToFront) state.reading = false; - - // Don't add to the buffer if we've decoded to an empty string chunk and - // we're not in object mode - if (!skipAdd) { - // if we want the data now, just emit it. - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); - } - } - - maybeReadMore(stream, state); - } - } else if (!addToFront) { - state.reading = false; - } - - return needMoreData(state); -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. 
Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} - -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; - -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} - -function howMuchToRead(n, state) { - if (state.length === 0 && state.ended) return 0; - - if (state.objectMode) return n === 0 ? 0 : 1; - - if (n === null || isNaN(n)) { - // only flow one buffer at a time - if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length; - } - - if (n <= 0) return 0; - - // If we're asking for more than the target buffer level, - // then raise the water mark. Bump up to the next highest - // power of 2, to prevent increasing it excessively in tiny - // amounts. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - - // don't have that much. return null, unless we've ended. - if (n > state.length) { - if (!state.ended) { - state.needReadable = true; - return 0; - } else { - return state.length; - } - } - - return n; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - var state = this._readableState; - var nOrig = n; - - if (typeof n !== 'number' || n > 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. 
- // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); - - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } - - if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - } - - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (doRead && !state.reading) n = howMuchToRead(nOrig, state); - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } - - state.length -= n; - - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (state.length === 0 && !state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended && state.length === 0) endReadable(this); - - if (ret !== null) this.emit('data', ret); - - return ret; -}; - -function chunkInvalid(state, chunk) { - var er = null; - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); - } -} - -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} - -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. 
-function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - processNextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} - -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('not implemented')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - - var endFn = doEnd ? onend : cleanup; - if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); - - dest.on('unpipe', onunpipe); - function onunpipe(readable) { - debug('onunpipe'); - if (readable === src) { - cleanup(); - } - } - - function onend() { - debug('onend'); - dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', cleanup); - src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - if (false === ret) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - } - src.pause(); - } - } - - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. 
- function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - // This is a brutally ugly hack to make sure that our error handler - // is attached before any userland ones. NEVER DO THIS. - if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error]; - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } - - // tell the dest that it's being piped to - dest.emit('pipe', src); - - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - - if (!dest) dest = state.pipes; - - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this); - return this; - } - - // slow case. multiple pipe destinations. - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var _i = 0; _i < len; _i++) { - dests[_i].emit('unpipe', this); - }return this; - } - - // try to find the right one. - var i = indexOf(state.pipes, dest); - if (i === -1) return this; - - state.pipes.splice(i, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - - dest.emit('unpipe', this); - - return this; -}; - -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - - // If listening to data, and it has not explicitly been paused, - // then call resume to start the flow of data on the next tick. 
- if (ev === 'data' && false !== this._readableState.flowing) { - this.resume(); - } - - if (ev === 'readable' && !this._readableState.endEmitted) { - var state = this._readableState; - if (!state.readableListening) { - state.readableListening = true; - state.emittedReadable = false; - state.needReadable = true; - if (!state.reading) { - processNextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this, state); - } - } - } - - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - processNextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - if (state.flowing) { - do { - var chunk = stream.read(); - } while (null !== chunk && state.flowing); - } -} - -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - - var self = this; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) self.push(chunk); - } - - self.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = self.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - - // proxy certain important events. - var events = ['error', 'close', 'destroy', 'pause', 'resume']; - forEach(events, function (ev) { - stream.on(ev, self.emit.bind(self, ev)); - }); - - // when we try to consume some more bytes, simply unpause the - // underlying stream. - self._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - - return self; -}; - -// exposed for testing purposes only. -Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. 
-// Length is the combined lengths of all the buffers in the list. -function fromList(n, state) { - var list = state.buffer; - var length = state.length; - var stringMode = !!state.decoder; - var objectMode = !!state.objectMode; - var ret; - - // nothing in the list, definitely empty. - if (list.length === 0) return null; - - if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) { - // read it all, truncate the array. - if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length); - list.length = 0; - } else { - // read just some of it. - if (n < list[0].length) { - // just take a part of the first list item. - // slice is the same for buffers and strings. - var buf = list[0]; - ret = buf.slice(0, n); - list[0] = buf.slice(n); - } else if (n === list[0].length) { - // first list is a perfect match - ret = list.shift(); - } else { - // complex case. - // we have enough to cover it, but it spans past the first buffer. - if (stringMode) ret = '';else ret = new Buffer(n); - - var c = 0; - for (var i = 0, l = list.length; i < l && c < n; i++) { - var buf = list[0]; - var cpy = Math.min(n - c, buf.length); - - if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy); - - if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift(); - - c += cpy; - } - } - } - - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('endReadable called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - processNextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index 625cdc17698059..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,180 +0,0 @@ -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. 
When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - -'use strict'; - -module.exports = Transform; - -var Duplex = require('./_stream_duplex'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(Transform, Duplex); - -function TransformState(stream) { - this.afterTransform = function (er, data) { - return afterTransform(stream, er, data); - }; - - this.needTransform = false; - this.transforming = false; - this.writecb = null; - this.writechunk = null; - this.writeencoding = null; -} - -function afterTransform(stream, er, data) { - var ts = stream._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); - - ts.writechunk = null; - ts.writecb = null; - - if (data !== null && data !== undefined) stream.push(data); - - cb(er); - - var rs = stream._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - stream._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.call(this, options); - - this._transformState = new TransformState(this); - - // when the writable side finishes, then flush out anything remaining. - var stream = this; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; - - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - - if (typeof options.flush === 'function') this._flush = options.flush; - } - - this.once('prefinish', function () { - if (typeof this._flush === 'function') this._flush(function (er) { - done(stream, er); - });else done(stream); - }); -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; - -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -function done(stream, er) { - if (er) return stream.emit('error', er); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - var ws = stream._writableState; - var ts = stream._transformState; - - if (ws.length) throw new Error('calling transform done when ws.length != 0'); - - if (ts.transforming) throw new Error('calling transform done when still transforming'); - - return stream.push(null); -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index 95916c992a9507..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,516 +0,0 @@ -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - -'use strict'; - -module.exports = Writable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Writable.WritableState = WritableState; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -util.inherits(Writable, Stream); - -function nop() {} - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -var Duplex; -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. 
- this.writelen = 0; - - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // count buffered requests - this.bufferedRequestCount = 0; - - // create the two objects needed to store the corked requests - // they are not a linked list, as no new elements are inserted in there - this.corkedRequestsFree = new CorkedRequest(this); - this.corkedRequestsFree.next = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function writableStateGetBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') - }); - } catch (_) {} -})(); - -var Duplex; -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, though they're not - // instanceof Writable, they're instanceof Readable. - if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); - - this._writableState = new WritableState(options, this); - - // legacy. - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - - if (typeof options.writev === 'function') this._writev = options.writev; - } - - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe. Not readable.')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - processNextTick(cb, er); -} - -// If we get something that is not a buffer, string, null, or undefined, -// and we're not in objectMode, then that's an error. -// Otherwise stream chunks are all considered to be of length=1, and the -// watermarks determine how many objects to keep in the buffer, rather than -// how many bytes or characters. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - var er = new TypeError('Invalid non-string/buffer chunk'); - stream.emit('error', er); - processNextTick(cb, er); - valid = false; - } - return valid; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, chunk, encoding, cb); - } - - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; -}; - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = new Buffer(chunk, encoding); - } - return chunk; -} - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, cb) { - chunk = decodeChunk(state, chunk, encoding); - - if (Buffer.isBuffer(chunk)) encoding = 'buffer'; - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) processNextTick(cb, er);else cb(er); - - stream._writableState.errorEmitted = true; - stream.emit('error', er); -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - - onwriteStateUpdate(state); - - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - while (entry) { - buffer[count] = entry; - entry = entry.next; - count += 1; - } - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. 
- if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequestCount = 0; - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('not implemented')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } - - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function prefinish(stream, state) { - if (!state.prefinished) { - state.prefinished = true; - stream.emit('prefinish'); - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - if (state.pendingcb === 0) { - prefinish(stream, state); - state.finished = true; - stream.emit('finish'); - } else { - prefinish(stream, state); - } - } - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) processNextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function (err) { - var entry = _this.entry; - _this.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = _this; - } else { - state.corkedRequestsFree = _this; - } - }; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/LICENSE b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/LICENSE deleted file mode 100644 index d8d7f9437dbf5a..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md deleted file mode 100644 index 5a76b4149c5eb5..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# core-util-is - -The `util.is*` functions introduced in Node v0.12. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch deleted file mode 100644 index a06d5c05f75fd5..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch +++ /dev/null @@ -1,604 +0,0 @@ -diff --git a/lib/util.js b/lib/util.js -index a03e874..9074e8e 100644 ---- a/lib/util.js -+++ b/lib/util.js -@@ -19,430 +19,6 @@ - // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE - // USE OR OTHER DEALINGS IN THE SOFTWARE. - --var formatRegExp = /%[sdj%]/g; --exports.format = function(f) { -- if (!isString(f)) { -- var objects = []; -- for (var i = 0; i < arguments.length; i++) { -- objects.push(inspect(arguments[i])); -- } -- return objects.join(' '); -- } -- -- var i = 1; -- var args = arguments; -- var len = args.length; -- var str = String(f).replace(formatRegExp, function(x) { -- if (x === '%%') return '%'; -- if (i >= len) return x; -- switch (x) { -- case '%s': return String(args[i++]); -- case '%d': return Number(args[i++]); -- case '%j': -- try { -- return JSON.stringify(args[i++]); -- } catch (_) { -- return '[Circular]'; -- } -- default: -- return x; -- } -- }); -- for (var x = args[i]; i < len; x = args[++i]) { -- if (isNull(x) || !isObject(x)) { -- str += ' ' + x; -- } else { -- str += ' ' + inspect(x); -- } -- } -- return str; --}; -- -- --// Mark that a method should not be used. --// Returns a modified function which warns once by default. --// If --no-deprecation is set, then it is a no-op. --exports.deprecate = function(fn, msg) { -- // Allow for deprecating things in the process of starting up. 
-- if (isUndefined(global.process)) { -- return function() { -- return exports.deprecate(fn, msg).apply(this, arguments); -- }; -- } -- -- if (process.noDeprecation === true) { -- return fn; -- } -- -- var warned = false; -- function deprecated() { -- if (!warned) { -- if (process.throwDeprecation) { -- throw new Error(msg); -- } else if (process.traceDeprecation) { -- console.trace(msg); -- } else { -- console.error(msg); -- } -- warned = true; -- } -- return fn.apply(this, arguments); -- } -- -- return deprecated; --}; -- -- --var debugs = {}; --var debugEnviron; --exports.debuglog = function(set) { -- if (isUndefined(debugEnviron)) -- debugEnviron = process.env.NODE_DEBUG || ''; -- set = set.toUpperCase(); -- if (!debugs[set]) { -- if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { -- var pid = process.pid; -- debugs[set] = function() { -- var msg = exports.format.apply(exports, arguments); -- console.error('%s %d: %s', set, pid, msg); -- }; -- } else { -- debugs[set] = function() {}; -- } -- } -- return debugs[set]; --}; -- -- --/** -- * Echos the value of a value. Trys to print the value out -- * in the best way possible given the different types. -- * -- * @param {Object} obj The object to print out. -- * @param {Object} opts Optional options object that alters the output. -- */ --/* legacy: obj, showHidden, depth, colors*/ --function inspect(obj, opts) { -- // default options -- var ctx = { -- seen: [], -- stylize: stylizeNoColor -- }; -- // legacy... -- if (arguments.length >= 3) ctx.depth = arguments[2]; -- if (arguments.length >= 4) ctx.colors = arguments[3]; -- if (isBoolean(opts)) { -- // legacy... -- ctx.showHidden = opts; -- } else if (opts) { -- // got an "options" object -- exports._extend(ctx, opts); -- } -- // set default options -- if (isUndefined(ctx.showHidden)) ctx.showHidden = false; -- if (isUndefined(ctx.depth)) ctx.depth = 2; -- if (isUndefined(ctx.colors)) ctx.colors = false; -- if (isUndefined(ctx.customInspect)) ctx.customInspect = true; -- if (ctx.colors) ctx.stylize = stylizeWithColor; -- return formatValue(ctx, obj, ctx.depth); --} --exports.inspect = inspect; -- -- --// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics --inspect.colors = { -- 'bold' : [1, 22], -- 'italic' : [3, 23], -- 'underline' : [4, 24], -- 'inverse' : [7, 27], -- 'white' : [37, 39], -- 'grey' : [90, 39], -- 'black' : [30, 39], -- 'blue' : [34, 39], -- 'cyan' : [36, 39], -- 'green' : [32, 39], -- 'magenta' : [35, 39], -- 'red' : [31, 39], -- 'yellow' : [33, 39] --}; -- --// Don't use 'blue' not visible on cmd.exe --inspect.styles = { -- 'special': 'cyan', -- 'number': 'yellow', -- 'boolean': 'yellow', -- 'undefined': 'grey', -- 'null': 'bold', -- 'string': 'green', -- 'date': 'magenta', -- // "name": intentionally not styling -- 'regexp': 'red' --}; -- -- --function stylizeWithColor(str, styleType) { -- var style = inspect.styles[styleType]; -- -- if (style) { -- return '\u001b[' + inspect.colors[style][0] + 'm' + str + -- '\u001b[' + inspect.colors[style][1] + 'm'; -- } else { -- return str; -- } --} -- -- --function stylizeNoColor(str, styleType) { -- return str; --} -- -- --function arrayToHash(array) { -- var hash = {}; -- -- array.forEach(function(val, idx) { -- hash[val] = true; -- }); -- -- return hash; --} -- -- --function formatValue(ctx, value, recurseTimes) { -- // Provide a hook for user-specified inspect functions. 
-- // Check that value is an object with an inspect function on it -- if (ctx.customInspect && -- value && -- isFunction(value.inspect) && -- // Filter out the util module, it's inspect function is special -- value.inspect !== exports.inspect && -- // Also filter out any prototype objects using the circular check. -- !(value.constructor && value.constructor.prototype === value)) { -- var ret = value.inspect(recurseTimes, ctx); -- if (!isString(ret)) { -- ret = formatValue(ctx, ret, recurseTimes); -- } -- return ret; -- } -- -- // Primitive types cannot have properties -- var primitive = formatPrimitive(ctx, value); -- if (primitive) { -- return primitive; -- } -- -- // Look up the keys of the object. -- var keys = Object.keys(value); -- var visibleKeys = arrayToHash(keys); -- -- if (ctx.showHidden) { -- keys = Object.getOwnPropertyNames(value); -- } -- -- // Some type of object without properties can be shortcutted. -- if (keys.length === 0) { -- if (isFunction(value)) { -- var name = value.name ? ': ' + value.name : ''; -- return ctx.stylize('[Function' + name + ']', 'special'); -- } -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } -- if (isDate(value)) { -- return ctx.stylize(Date.prototype.toString.call(value), 'date'); -- } -- if (isError(value)) { -- return formatError(value); -- } -- } -- -- var base = '', array = false, braces = ['{', '}']; -- -- // Make Array say that they are Array -- if (isArray(value)) { -- array = true; -- braces = ['[', ']']; -- } -- -- // Make functions say that they are functions -- if (isFunction(value)) { -- var n = value.name ? ': ' + value.name : ''; -- base = ' [Function' + n + ']'; -- } -- -- // Make RegExps say that they are RegExps -- if (isRegExp(value)) { -- base = ' ' + RegExp.prototype.toString.call(value); -- } -- -- // Make dates with properties first say the date -- if (isDate(value)) { -- base = ' ' + Date.prototype.toUTCString.call(value); -- } -- -- // Make error with message first say the error -- if (isError(value)) { -- base = ' ' + formatError(value); -- } -- -- if (keys.length === 0 && (!array || value.length == 0)) { -- return braces[0] + base + braces[1]; -- } -- -- if (recurseTimes < 0) { -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } else { -- return ctx.stylize('[Object]', 'special'); -- } -- } -- -- ctx.seen.push(value); -- -- var output; -- if (array) { -- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); -- } else { -- output = keys.map(function(key) { -- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); -- }); -- } -- -- ctx.seen.pop(); -- -- return reduceToSingleString(output, base, braces); --} -- -- --function formatPrimitive(ctx, value) { -- if (isUndefined(value)) -- return ctx.stylize('undefined', 'undefined'); -- if (isString(value)) { -- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') -- .replace(/'/g, "\\'") -- .replace(/\\"/g, '"') + '\''; -- return ctx.stylize(simple, 'string'); -- } -- if (isNumber(value)) { -- // Format -0 as '-0'. Strict equality won't distinguish 0 from -0, -- // so instead we use the fact that 1 / -0 < 0 whereas 1 / 0 > 0 . -- if (value === 0 && 1 / value < 0) -- return ctx.stylize('-0', 'number'); -- return ctx.stylize('' + value, 'number'); -- } -- if (isBoolean(value)) -- return ctx.stylize('' + value, 'boolean'); -- // For some reason typeof null is "object", so special case here. 
-- if (isNull(value)) -- return ctx.stylize('null', 'null'); --} -- -- --function formatError(value) { -- return '[' + Error.prototype.toString.call(value) + ']'; --} -- -- --function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { -- var output = []; -- for (var i = 0, l = value.length; i < l; ++i) { -- if (hasOwnProperty(value, String(i))) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- String(i), true)); -- } else { -- output.push(''); -- } -- } -- keys.forEach(function(key) { -- if (!key.match(/^\d+$/)) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- key, true)); -- } -- }); -- return output; --} -- -- --function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { -- var name, str, desc; -- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; -- if (desc.get) { -- if (desc.set) { -- str = ctx.stylize('[Getter/Setter]', 'special'); -- } else { -- str = ctx.stylize('[Getter]', 'special'); -- } -- } else { -- if (desc.set) { -- str = ctx.stylize('[Setter]', 'special'); -- } -- } -- if (!hasOwnProperty(visibleKeys, key)) { -- name = '[' + key + ']'; -- } -- if (!str) { -- if (ctx.seen.indexOf(desc.value) < 0) { -- if (isNull(recurseTimes)) { -- str = formatValue(ctx, desc.value, null); -- } else { -- str = formatValue(ctx, desc.value, recurseTimes - 1); -- } -- if (str.indexOf('\n') > -1) { -- if (array) { -- str = str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n').substr(2); -- } else { -- str = '\n' + str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n'); -- } -- } -- } else { -- str = ctx.stylize('[Circular]', 'special'); -- } -- } -- if (isUndefined(name)) { -- if (array && key.match(/^\d+$/)) { -- return str; -- } -- name = JSON.stringify('' + key); -- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { -- name = name.substr(1, name.length - 2); -- name = ctx.stylize(name, 'name'); -- } else { -- name = name.replace(/'/g, "\\'") -- .replace(/\\"/g, '"') -- .replace(/(^"|"$)/g, "'"); -- name = ctx.stylize(name, 'string'); -- } -- } -- -- return name + ': ' + str; --} -- -- --function reduceToSingleString(output, base, braces) { -- var numLinesEst = 0; -- var length = output.reduce(function(prev, cur) { -- numLinesEst++; -- if (cur.indexOf('\n') >= 0) numLinesEst++; -- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; -- }, 0); -- -- if (length > 60) { -- return braces[0] + -- (base === '' ? '' : base + '\n ') + -- ' ' + -- output.join(',\n ') + -- ' ' + -- braces[1]; -- } -- -- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; --} -- -- - // NOTE: These type checking functions intentionally don't use `instanceof` - // because it is fragile and can be easily faked with `Object.create()`. - function isArray(ar) { -@@ -522,166 +98,10 @@ function isPrimitive(arg) { - exports.isPrimitive = isPrimitive; - - function isBuffer(arg) { -- return arg instanceof Buffer; -+ return Buffer.isBuffer(arg); - } - exports.isBuffer = isBuffer; - - function objectToString(o) { - return Object.prototype.toString.call(o); --} -- -- --function pad(n) { -- return n < 10 ? 
'0' + n.toString(10) : n.toString(10); --} -- -- --var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', -- 'Oct', 'Nov', 'Dec']; -- --// 26 Feb 16:19:34 --function timestamp() { -- var d = new Date(); -- var time = [pad(d.getHours()), -- pad(d.getMinutes()), -- pad(d.getSeconds())].join(':'); -- return [d.getDate(), months[d.getMonth()], time].join(' '); --} -- -- --// log is just a thin wrapper to console.log that prepends a timestamp --exports.log = function() { -- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); --}; -- -- --/** -- * Inherit the prototype methods from one constructor into another. -- * -- * The Function.prototype.inherits from lang.js rewritten as a standalone -- * function (not on Function.prototype). NOTE: If this file is to be loaded -- * during bootstrapping this function needs to be rewritten using some native -- * functions as prototype setup using normal JavaScript does not work as -- * expected during bootstrapping (see mirror.js in r114903). -- * -- * @param {function} ctor Constructor function which needs to inherit the -- * prototype. -- * @param {function} superCtor Constructor function to inherit prototype from. -- */ --exports.inherits = function(ctor, superCtor) { -- ctor.super_ = superCtor; -- ctor.prototype = Object.create(superCtor.prototype, { -- constructor: { -- value: ctor, -- enumerable: false, -- writable: true, -- configurable: true -- } -- }); --}; -- --exports._extend = function(origin, add) { -- // Don't do anything if add isn't an object -- if (!add || !isObject(add)) return origin; -- -- var keys = Object.keys(add); -- var i = keys.length; -- while (i--) { -- origin[keys[i]] = add[keys[i]]; -- } -- return origin; --}; -- --function hasOwnProperty(obj, prop) { -- return Object.prototype.hasOwnProperty.call(obj, prop); --} -- -- --// Deprecated old stuff. 
-- --exports.p = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- console.error(exports.inspect(arguments[i])); -- } --}, 'util.p: Use console.error() instead'); -- -- --exports.exec = exports.deprecate(function() { -- return require('child_process').exec.apply(this, arguments); --}, 'util.exec is now called `child_process.exec`.'); -- -- --exports.print = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(String(arguments[i])); -- } --}, 'util.print: Use console.log instead'); -- -- --exports.puts = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(arguments[i] + '\n'); -- } --}, 'util.puts: Use console.log instead'); -- -- --exports.debug = exports.deprecate(function(x) { -- process.stderr.write('DEBUG: ' + x + '\n'); --}, 'util.debug: Use console.error instead'); -- -- --exports.error = exports.deprecate(function(x) { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stderr.write(arguments[i] + '\n'); -- } --}, 'util.error: Use console.error instead'); -- -- --exports.pump = exports.deprecate(function(readStream, writeStream, callback) { -- var callbackCalled = false; -- -- function call(a, b, c) { -- if (callback && !callbackCalled) { -- callback(a, b, c); -- callbackCalled = true; -- } -- } -- -- readStream.addListener('data', function(chunk) { -- if (writeStream.write(chunk) === false) readStream.pause(); -- }); -- -- writeStream.addListener('drain', function() { -- readStream.resume(); -- }); -- -- readStream.addListener('end', function() { -- writeStream.end(); -- }); -- -- readStream.addListener('close', function() { -- call(); -- }); -- -- readStream.addListener('error', function(err) { -- writeStream.end(); -- call(err); -- }); -- -- writeStream.addListener('error', function(err) { -- readStream.destroy(); -- call(err); -- }); --}, 'util.pump(): Use readableStream.pipe() instead'); -- -- --var uv; --exports._errnoException = function(err, syscall) { -- if (isUndefined(uv)) uv = process.binding('uv'); -- var errname = uv.errname(err); -- var e = new Error(syscall + ' ' + errname); -- e.code = errname; -- e.errno = errname; -- e.syscall = syscall; -- return e; --}; -+} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js deleted file mode 100644 index ff4c851c075a2f..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// NOTE: These type checking functions intentionally don't use `instanceof` -// because it is fragile and can be easily faked with `Object.create()`. - -function isArray(arg) { - if (Array.isArray) { - return Array.isArray(arg); - } - return objectToString(arg) === '[object Array]'; -} -exports.isArray = isArray; - -function isBoolean(arg) { - return typeof arg === 'boolean'; -} -exports.isBoolean = isBoolean; - -function isNull(arg) { - return arg === null; -} -exports.isNull = isNull; - -function isNullOrUndefined(arg) { - return arg == null; -} -exports.isNullOrUndefined = isNullOrUndefined; - -function isNumber(arg) { - return typeof arg === 'number'; -} -exports.isNumber = isNumber; - -function isString(arg) { - return typeof arg === 'string'; -} -exports.isString = isString; - -function isSymbol(arg) { - return typeof arg === 'symbol'; -} -exports.isSymbol = isSymbol; - -function isUndefined(arg) { - return arg === void 0; -} -exports.isUndefined = isUndefined; - -function isRegExp(re) { - return objectToString(re) === '[object RegExp]'; -} -exports.isRegExp = isRegExp; - -function isObject(arg) { - return typeof arg === 'object' && arg !== null; -} -exports.isObject = isObject; - -function isDate(d) { - return objectToString(d) === '[object Date]'; -} -exports.isDate = isDate; - -function isError(e) { - return (objectToString(e) === '[object Error]' || e instanceof Error); -} -exports.isError = isError; - -function isFunction(arg) { - return typeof arg === 'function'; -} -exports.isFunction = isFunction; - -function isPrimitive(arg) { - return arg === null || - typeof arg === 'boolean' || - typeof arg === 'number' || - typeof arg === 'string' || - typeof arg === 'symbol' || // ES6 symbol - typeof arg === 'undefined'; -} -exports.isPrimitive = isPrimitive; - -exports.isBuffer = Buffer.isBuffer; - -function objectToString(o) { - return Object.prototype.toString.call(o); -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json deleted file mode 100644 index f2794488b08868..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "core-util-is@>=1.0.0 <1.1.0", - "_id": "core-util-is@1.0.2", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/core-util-is", - "_nodeVersion": "4.0.0", - "_npmUser": { - "name": "isaacs", - "email": "i@izs.me" - }, - "_npmVersion": "3.3.2", - "_phantomChildren": {}, - 
"_requested": { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "_shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "_shrinkwrap": null, - "_spec": "core-util-is@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/core-util-is/issues" - }, - "dependencies": {}, - "description": "The `util.is*` functions introduced in Node v0.12.", - "devDependencies": { - "tap": "^2.3.0" - }, - "directories": {}, - "dist": { - "shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "tarball": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" - }, - "gitHead": "a177da234df5638b363ddc15fa324619a38577c8", - "homepage": "https://github.com/isaacs/core-util-is#readme", - "keywords": [ - "util", - "isBuffer", - "isArray", - "isNumber", - "isString", - "isRegExp", - "isThis", - "isThat", - "polyfill" - ], - "license": "MIT", - "main": "lib/util.js", - "maintainers": [ - { - "name": "isaacs", - "email": "i@izs.me" - } - ], - "name": "core-util-is", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/core-util-is.git" - }, - "scripts": { - "test": "tap test.js" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/test.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/test.js deleted file mode 100644 index 1a490c65ac8b5d..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/test.js +++ /dev/null @@ -1,68 +0,0 @@ -var assert = require('tap'); - -var t = require('./lib/util'); - -assert.equal(t.isArray([]), true); -assert.equal(t.isArray({}), false); - -assert.equal(t.isBoolean(null), false); -assert.equal(t.isBoolean(true), true); -assert.equal(t.isBoolean(false), true); - -assert.equal(t.isNull(null), true); -assert.equal(t.isNull(undefined), false); -assert.equal(t.isNull(false), false); -assert.equal(t.isNull(), false); - -assert.equal(t.isNullOrUndefined(null), true); -assert.equal(t.isNullOrUndefined(undefined), true); -assert.equal(t.isNullOrUndefined(false), false); -assert.equal(t.isNullOrUndefined(), true); - -assert.equal(t.isNumber(null), false); -assert.equal(t.isNumber('1'), false); -assert.equal(t.isNumber(1), true); - -assert.equal(t.isString(null), false); -assert.equal(t.isString('1'), true); -assert.equal(t.isString(1), false); - -assert.equal(t.isSymbol(null), false); -assert.equal(t.isSymbol('1'), false); -assert.equal(t.isSymbol(1), false); -assert.equal(t.isSymbol(Symbol()), true); - -assert.equal(t.isUndefined(null), false); -assert.equal(t.isUndefined(undefined), true); -assert.equal(t.isUndefined(false), false); -assert.equal(t.isUndefined(), true); - -assert.equal(t.isRegExp(null), false); -assert.equal(t.isRegExp('1'), false); -assert.equal(t.isRegExp(new 
RegExp()), true); - -assert.equal(t.isObject({}), true); -assert.equal(t.isObject([]), true); -assert.equal(t.isObject(new RegExp()), true); -assert.equal(t.isObject(new Date()), true); - -assert.equal(t.isDate(null), false); -assert.equal(t.isDate('1'), false); -assert.equal(t.isDate(new Date()), true); - -assert.equal(t.isError(null), false); -assert.equal(t.isError({ err: true }), false); -assert.equal(t.isError(new Error()), true); - -assert.equal(t.isFunction(null), false); -assert.equal(t.isFunction({ }), false); -assert.equal(t.isFunction(function() {}), true); - -assert.equal(t.isPrimitive(null), true); -assert.equal(t.isPrimitive(''), true); -assert.equal(t.isPrimitive(0), true); -assert.equal(t.isPrimitive(new Date()), false); - -assert.equal(t.isBuffer(null), false); -assert.equal(t.isBuffer({}), false); -assert.equal(t.isBuffer(new Buffer(0)), true); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/Makefile b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/Makefile deleted file mode 100644 index 0ecc29c402c243..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/Makefile +++ /dev/null @@ -1,5 +0,0 @@ - -test: - @node_modules/.bin/tape test.js - -.PHONY: test diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md deleted file mode 100644 index 16d2c59c6195f9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md +++ /dev/null @@ -1,60 +0,0 @@ - -# isarray - -`Array#isArray` for older browsers. - -[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) -[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) - -[![browser support](https://ci.testling.com/juliangruber/isarray.png) -](https://ci.testling.com/juliangruber/isarray) - -## Usage - -```js -var isArray = require('isarray'); - -console.log(isArray([])); // => true -console.log(isArray({})); // => false -``` - -## Installation - -With [npm](http://npmjs.org) do - -```bash -$ npm install isarray -``` - -Then bundle for the browser with -[browserify](https://github.com/substack/browserify). - -With [component](http://component.io) do - -```bash -$ component install juliangruber/isarray -``` - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json deleted file mode 100644 index 9e31b683889015..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name" : "isarray", - "description" : "Array#isArray for older browsers", - "version" : "0.0.1", - "repository" : "juliangruber/isarray", - "homepage": "https://github.com/juliangruber/isarray", - "main" : "index.js", - "scripts" : [ - "index.js" - ], - "dependencies" : {}, - "keywords": ["browser","isarray","array"], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js deleted file mode 100644 index a57f63495943a0..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js +++ /dev/null @@ -1,5 +0,0 @@ -var toString = {}.toString; - -module.exports = Array.isArray || function (arr) { - return toString.call(arr) == '[object Array]'; -}; diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json deleted file mode 100644 index 529beb69b774d5..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "isarray@>=1.0.0 <1.1.0", - "_id": "isarray@1.0.0", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/isarray", - "_nodeVersion": "5.1.0", - "_npmUser": { - "name": "juliangruber", - "email": "julian@juliangruber.com" - }, - "_npmVersion": "3.3.12", - "_phantomChildren": {}, - "_requested": { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "_shasum": 
"bb935d48582cba168c06834957a54a3e07124f11", - "_shrinkwrap": null, - "_spec": "isarray@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "bugs": { - "url": "https://github.com/juliangruber/isarray/issues" - }, - "dependencies": {}, - "description": "Array#isArray for older browsers", - "devDependencies": { - "tape": "~2.13.4" - }, - "directories": {}, - "dist": { - "shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "tarball": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - }, - "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33", - "homepage": "https://github.com/juliangruber/isarray", - "keywords": [ - "browser", - "isarray", - "array" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "juliangruber", - "email": "julian@juliangruber.com" - } - ], - "name": "isarray", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/isarray.git" - }, - "scripts": { - "test": "tape test.js" - }, - "testling": { - "files": "test.js", - "browsers": [ - "ie/8..latest", - "firefox/17..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - }, - "version": "1.0.0" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/test.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/test.js deleted file mode 100644 index f7f7bcd19fec56..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/test.js +++ /dev/null @@ -1,19 +0,0 @@ -var isArray = require('./'); -var test = require('tape'); - -test('is array', function(t){ - t.ok(isArray([])); - t.notOk(isArray({})); - t.notOk(isArray(null)); - t.notOk(isArray(false)); - - var obj = {}; - obj[0] = true; - t.notOk(isArray(obj)); - - var arr = []; - arr.foo = 'bar'; - t.ok(isArray(arr)); - - t.end(); -}); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml deleted file mode 100644 index 36201b10017a5e..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" - - "0.11" - - "0.12" - - "1.7.1" - - 1 - - 2 - - 3 - - 4 - - 5 diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js deleted file mode 100644 index a4f40f845faa65..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js +++ 
/dev/null @@ -1,43 +0,0 @@ -'use strict'; - -if (!process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = nextTick; -} else { - module.exports = process.nextTick; -} - -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { - throw new TypeError('"callback" argument must be a function'); - } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); - }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; - } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json deleted file mode 100644 index c9d5f61669b5a8..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "process-nextick-args@>=1.0.6 <1.1.0", - "_id": "process-nextick-args@1.0.7", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/process-nextick-args", - "_nodeVersion": "5.11.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.8.6", - "_phantomChildren": {}, - "_requested": { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", - "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "_shrinkwrap": null, - "_spec": "process-nextick-args@~1.0.6", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": "", - "bugs": { - "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" - }, - "dependencies": {}, - "description": "process.nextTick but always with args", - "devDependencies": { - "tap": "~0.2.6" - }, - "directories": {}, - "dist": { - "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" - }, - "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39", 
- "homepage": "https://github.com/calvinmetcalf/process-nextick-args", - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "process-nextick-args", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" - }, - "scripts": { - "test": "node test.js" - }, - "version": "1.0.7" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md deleted file mode 100644 index 78e7cfaeb7acde..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md +++ /dev/null @@ -1,18 +0,0 @@ -process-nextick-args -===== - -[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) - -```bash -npm install --save process-nextick-args -``` - -Always be able to pass arguments to process.nextTick, no matter the platform - -```js -var nextTick = require('process-nextick-args'); - -nextTick(function (a, b, c) { - console.log(a, b, c); -}, 'step', 3, 'profit'); -``` diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js deleted file mode 100644 index ef15721584ac99..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js +++ /dev/null @@ -1,24 +0,0 @@ -var test = require("tap").test; -var nextTick = require('./'); - -test('should work', function (t) { - t.plan(5); - nextTick(function (a) { - t.ok(a); - nextTick(function (thing) { - t.equals(thing, 7); - }, 7); - }, true); - nextTick(function (a, b, c) { - t.equals(a, 'step'); - t.equals(b, 3); - t.equals(c, 'profit'); - }, 'step', 3, 'profit'); -}); - -test('correct number of arguments', function (t) { - t.plan(1); - nextTick(function () { - t.equals(2, arguments.length, 'correct number'); - }, 1, 2); -}); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore deleted file mode 100644 index 206320cc1d21b9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -build -test diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE deleted file mode 100644 index 6de584a48f5c89..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright 
Joyent, Inc. and other Node contributors. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md deleted file mode 100644 index 4d2aa001501107..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md +++ /dev/null @@ -1,7 +0,0 @@ -**string_decoder.js** (`require('string_decoder')`) from Node.js core - -Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. - -Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** - -The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js deleted file mode 100644 index b00e54fb790982..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var Buffer = require('buffer').Buffer; - -var isBufferEncoding = Buffer.isEncoding - || function(encoding) { - switch (encoding && encoding.toLowerCase()) { - case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; - default: return false; - } - } - - -function assertEncoding(encoding) { - if (encoding && !isBufferEncoding(encoding)) { - throw new Error('Unknown encoding: ' + encoding); - } -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. CESU-8 is handled as part of the UTF-8 encoding. -// -// @TODO Handling all encodings inside a single object makes it very difficult -// to reason about this code, so it should be split up in the future. -// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code -// points as used by CESU-8. -var StringDecoder = exports.StringDecoder = function(encoding) { - this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); - assertEncoding(encoding); - switch (this.encoding) { - case 'utf8': - // CESU-8 represents each of Surrogate Pair by 3-bytes - this.surrogateSize = 3; - break; - case 'ucs2': - case 'utf16le': - // UTF-16 represents each of Surrogate Pair by 2-bytes - this.surrogateSize = 2; - this.detectIncompleteChar = utf16DetectIncompleteChar; - break; - case 'base64': - // Base-64 stores 3 bytes in 4 chars, and pads the remainder. - this.surrogateSize = 3; - this.detectIncompleteChar = base64DetectIncompleteChar; - break; - default: - this.write = passThroughWrite; - return; - } - - // Enough space to store all bytes of a single character. UTF-8 needs 4 - // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). - this.charBuffer = new Buffer(6); - // Number of bytes received for the current incomplete multi-byte character. - this.charReceived = 0; - // Number of bytes expected for the current incomplete multi-byte character. - this.charLength = 0; -}; - - -// write decodes the given buffer and returns it as JS string that is -// guaranteed to not contain any partial multi-byte characters. Any partial -// character found at the end of the buffer is buffered up, and will be -// returned when calling write again with the remaining bytes. -// -// Note: Converting a Buffer containing an orphan surrogate to a String -// currently works, but converting a String to a Buffer (via `new Buffer`, or -// Buffer#write) will replace incomplete surrogates with the unicode -// replacement character. See https://codereview.chromium.org/121173009/ . -StringDecoder.prototype.write = function(buffer) { - var charStr = ''; - // if our last write ended with an incomplete multibyte character - while (this.charLength) { - // determine how many remaining bytes this buffer has to offer for this char - var available = (buffer.length >= this.charLength - this.charReceived) ? 
- this.charLength - this.charReceived : - buffer.length; - - // add the new bytes to the char buffer - buffer.copy(this.charBuffer, this.charReceived, 0, available); - this.charReceived += available; - - if (this.charReceived < this.charLength) { - // still not enough chars in this buffer? wait for more ... - return ''; - } - - // remove bytes belonging to the current character from the buffer - buffer = buffer.slice(available, buffer.length); - - // get the character that was split - charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); - - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - var charCode = charStr.charCodeAt(charStr.length - 1); - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - this.charLength += this.surrogateSize; - charStr = ''; - continue; - } - this.charReceived = this.charLength = 0; - - // if there are no more bytes in this buffer, just emit our char - if (buffer.length === 0) { - return charStr; - } - break; - } - - // determine and set charLength / charReceived - this.detectIncompleteChar(buffer); - - var end = buffer.length; - if (this.charLength) { - // buffer the incomplete character bytes we got - buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); - end -= this.charReceived; - } - - charStr += buffer.toString(this.encoding, 0, end); - - var end = charStr.length - 1; - var charCode = charStr.charCodeAt(end); - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - var size = this.surrogateSize; - this.charLength += size; - this.charReceived += size; - this.charBuffer.copy(this.charBuffer, size, 0, size); - buffer.copy(this.charBuffer, 0, 0, size); - return charStr.substring(0, end); - } - - // or just emit the charStr - return charStr; -}; - -// detectIncompleteChar determines if there is an incomplete UTF-8 character at -// the end of the given buffer. If so, it sets this.charLength to the byte -// length that character, and sets this.charReceived to the number of bytes -// that are available for this character. -StringDecoder.prototype.detectIncompleteChar = function(buffer) { - // determine how many bytes we have to check at the end of this buffer - var i = (buffer.length >= 3) ? 3 : buffer.length; - - // Figure out if one of the last i bytes of our buffer announces an - // incomplete char. - for (; i > 0; i--) { - var c = buffer[buffer.length - i]; - - // See http://en.wikipedia.org/wiki/UTF-8#Description - - // 110XXXXX - if (i == 1 && c >> 5 == 0x06) { - this.charLength = 2; - break; - } - - // 1110XXXX - if (i <= 2 && c >> 4 == 0x0E) { - this.charLength = 3; - break; - } - - // 11110XXX - if (i <= 3 && c >> 3 == 0x1E) { - this.charLength = 4; - break; - } - } - this.charReceived = i; -}; - -StringDecoder.prototype.end = function(buffer) { - var res = ''; - if (buffer && buffer.length) - res = this.write(buffer); - - if (this.charReceived) { - var cr = this.charReceived; - var buf = this.charBuffer; - var enc = this.encoding; - res += buf.slice(0, cr).toString(enc); - } - - return res; -}; - -function passThroughWrite(buffer) { - return buffer.toString(this.encoding); -} - -function utf16DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 2; - this.charLength = this.charReceived ? 2 : 0; -} - -function base64DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 3; - this.charLength = this.charReceived ? 
3 : 0; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json deleted file mode 100644 index 4bc4749854bd57..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "string_decoder@>=0.10.0 <0.11.0", - "_id": "string_decoder@0.10.31", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/string_decoder", - "_npmUser": { - "name": "rvagg", - "email": "rod@vagg.org" - }, - "_npmVersion": "1.4.23", - "_phantomChildren": {}, - "_requested": { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "_shrinkwrap": null, - "_spec": "string_decoder@~0.10.x", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "bugs": { - "url": "https://github.com/rvagg/string_decoder/issues" - }, - "dependencies": {}, - "description": "The string_decoder module from Node core", - "devDependencies": { - "tap": "~0.4.8" - }, - "directories": {}, - "dist": { - "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "tarball": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - }, - "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0", - "homepage": "https://github.com/rvagg/string_decoder", - "keywords": [ - "string", - "decoder", - "browser", - "browserify" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "substack", - "email": "mail@substack.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - } - ], - "name": "string_decoder", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/rvagg/string_decoder.git" - }, - "scripts": { - "test": "tap test/simple/*.js" - }, - "version": "0.10.31" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md deleted file mode 100644 index acc8675372e980..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md +++ /dev/null @@ -1,16 +0,0 @@ - -1.0.2 / 2015-10-07 -================== - - * use try/catch when checking `localStorage` (#3, @kumavis) - -1.0.1 / 2014-11-25 -================== - - * browser: use `console.warn()` for deprecation calls - 
* browser: more jsdocs - -1.0.0 / 2014-04-30 -================== - - * initial commit diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE deleted file mode 100644 index 6a60e8c225c9ba..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md deleted file mode 100644 index 75622fa7c250a6..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md +++ /dev/null @@ -1,53 +0,0 @@ -util-deprecate -============== -### The Node.js `util.deprecate()` function with browser support - -In Node.js, this module simply re-exports the `util.deprecate()` function. - -In the web browser (i.e. via browserify), a browser-specific implementation -of the `util.deprecate()` function is used. - - -## API - -A `deprecate()` function is the only thing exposed by this module. - -``` javascript -// setup: -exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); - - -// users see: -foo(); -// foo() is deprecated, use bar() instead -foo(); -foo(); -``` - - -## License - -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js deleted file mode 100644 index 549ae2f065ea5a..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js +++ /dev/null @@ -1,67 +0,0 @@ - -/** - * Module exports. - */ - -module.exports = deprecate; - -/** - * Mark that a method should not be used. - * Returns a modified function which warns once by default. - * - * If `localStorage.noDeprecation = true` is set, then it is a no-op. - * - * If `localStorage.throwDeprecation = true` is set, then deprecated functions - * will throw an Error when invoked. - * - * If `localStorage.traceDeprecation = true` is set, then deprecated functions - * will invoke `console.trace()` instead of `console.error()`. - * - * @param {Function} fn - the function to deprecate - * @param {String} msg - the string to print to the console when `fn` is invoked - * @returns {Function} a new "deprecated" version of `fn` - * @api public - */ - -function deprecate (fn, msg) { - if (config('noDeprecation')) { - return fn; - } - - var warned = false; - function deprecated() { - if (!warned) { - if (config('throwDeprecation')) { - throw new Error(msg); - } else if (config('traceDeprecation')) { - console.trace(msg); - } else { - console.warn(msg); - } - warned = true; - } - return fn.apply(this, arguments); - } - - return deprecated; -} - -/** - * Checks `localStorage` for boolean values for the given `name`. - * - * @param {String} name - * @returns {Boolean} - * @api private - */ - -function config (name) { - // accessing global.localStorage can trigger a DOMException in sandboxed iframes - try { - if (!global.localStorage) return false; - } catch (_) { - return false; - } - var val = global.localStorage[name]; - if (null == val) return false; - return String(val).toLowerCase() === 'true'; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js deleted file mode 100644 index 5e6fcff5ddd3fb..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js +++ /dev/null @@ -1,6 +0,0 @@ - -/** - * For Node.js, simply re-export the core `util.deprecate` function. 
- */ - -module.exports = require('util').deprecate; diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json deleted file mode 100644 index d350c79f1b75c0..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "util-deprecate@>=1.0.1 <1.1.0", - "_id": "util-deprecate@1.0.2", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/util-deprecate", - "_nodeVersion": "4.1.2", - "_npmUser": { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - "_npmVersion": "2.14.4", - "_phantomChildren": {}, - "_requested": { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "_shrinkwrap": null, - "_spec": "util-deprecate@~1.0.1", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://n8.io/" - }, - "browser": "browser.js", - "bugs": { - "url": "https://github.com/TooTallNate/util-deprecate/issues" - }, - "dependencies": {}, - "description": "The Node.js `util.deprecate()` function with browser support", - "devDependencies": {}, - "directories": {}, - "dist": { - "shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "tarball": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - }, - "gitHead": "475fb6857cd23fafff20c1be846c1350abf8e6d4", - "homepage": "https://github.com/TooTallNate/util-deprecate", - "keywords": [ - "util", - "deprecate", - "browserify", - "browser", - "node" - ], - "license": "MIT", - "main": "node.js", - "maintainers": [ - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - } - ], - "name": "util-deprecate", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/TooTallNate/util-deprecate.git" - }, - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/package.json deleted file mode 100644 index df563e83657298..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/package.json +++ /dev/null @@ -1,113 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "readable-stream@~2.0.0", - "scope": null, - 
"escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream" - ] - ], - "_from": "readable-stream@>=2.0.0 <2.1.0", - "_id": "readable-stream@2.0.6", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream", - "_nodeVersion": "5.7.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/readable-stream-2.0.6.tgz_1457893507709_0.369257491780445" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.6.0", - "_phantomChildren": {}, - "_requested": { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream" - ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", - "_shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "_shrinkwrap": null, - "_spec": "readable-stream@~2.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream", - "browser": { - "util": false - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" - }, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "~1.0.0", - "process-nextick-args": "~1.0.6", - "string_decoder": "~0.10.x", - "util-deprecate": "~1.0.1" - }, - "description": "Streams3, a user-land copy of the stream library from Node.js", - "devDependencies": { - "tap": "~0.2.6", - "tape": "~4.5.1", - "zuul": "~3.9.0" - }, - "directories": {}, - "dist": { - "shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz" - }, - "gitHead": "01fb5608a970b42c900b96746cadc13d27dd9d7e", - "homepage": "https://github.com/nodejs/readable-stream#readme", - "keywords": [ - "readable", - "stream", - "pipe" - ], - "license": "MIT", - "main": "readable.js", - "maintainers": [ - { - "name": "isaacs", - "email": "isaacs@npmjs.com" - }, - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - }, - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "readable-stream", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream.git" - }, - "scripts": { - "browser": "npm run write-zuul && zuul -- test/browser.js", - "test": "tap test/parallel/*.js test/ours/*.js", - "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" - }, - "version": "2.0.6" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/passthrough.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/passthrough.js deleted file mode 100644 index 27e8d8a55165f9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_passthrough.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/readable.js 
b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/readable.js deleted file mode 100644 index 6222a579864dd2..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,12 +0,0 @@ -var Stream = (function (){ - try { - return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify - } catch(_){} -}()); -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = Stream || exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/transform.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/transform.js deleted file mode 100644 index 5d482f0780e993..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_transform.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/writable.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/writable.js deleted file mode 100644 index e1e9efdf3c12e9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/writable.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_writable.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json index abbb3d43623df7..cd01702872c3fa 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json @@ -14,22 +14,20 @@ ] ], "_from": "concat-stream@>=1.5.0 <2.0.0", - "_id": "concat-stream@1.5.2", + "_id": "concat-stream@1.6.0", "_inCache": true, "_location": "/mississippi/concat-stream", - "_nodeVersion": "4.4.3", + "_nodeVersion": "4.6.2", "_npmOperationalInternal": { "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/concat-stream-1.5.2.tgz_1472715196934_0.010375389130786061" + "tmp": "tmp/concat-stream-1.6.0.tgz_1482162257023_0.2988202746491879" }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.15.9", - "_phantomChildren": { - "inherits": "2.0.3" - }, + "_npmVersion": "2.15.11", + "_phantomChildren": {}, "_requested": { "raw": "concat-stream@^1.5.0", "scope": null, @@ -42,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.5.2.tgz", - "_shasum": "708978624d856af41a5a741defdd261da752c266", + "_resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.0.tgz", + "_shasum": "0aac662fd52be78964d5532f694784e70110acf7", "_shrinkwrap": null, "_spec": "concat-stream@^1.5.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -55,18 +53,18 @@ "url": "http://github.com/maxogden/concat-stream/issues" }, "dependencies": { - "inherits": "~2.0.1", - "readable-stream": "~2.0.0", - "typedarray": "~0.0.5" + "inherits": 
"^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" }, "description": "writable stream that concatenates strings or binary data and calls a callback with the result", "devDependencies": { - "tape": "~2.3.2" + "tape": "^4.6.3" }, "directories": {}, "dist": { - "shasum": "708978624d856af41a5a741defdd261da752c266", - "tarball": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.5.2.tgz" + "shasum": "0aac662fd52be78964d5532f694784e70110acf7", + "tarball": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.0.tgz" }, "engines": [ "node >= 0.8" @@ -74,8 +72,8 @@ "files": [ "index.js" ], - "gitHead": "731fedd137eae89d066c249fdca070f8f16afbb8", - "homepage": "https://github.com/maxogden/concat-stream#readme", + "gitHead": "e482281642c1e011fc158f5749ef40a71c77a426", + "homepage": "https://github.com/maxogden/concat-stream", "license": "MIT", "main": "index.js", "maintainers": [ @@ -120,5 +118,5 @@ "android-browser/4.2..latest" ] }, - "version": "1.5.2" + "version": "1.6.0" } diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md index a2e1d2b151746d..94787219906507 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md +++ b/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md @@ -16,7 +16,7 @@ There are also `objectMode` streams that emit things other than Buffers, and you ## Related -`stream-each` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. +`concat-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. ### examples diff --git a/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md b/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md index 6ed497b4ad143e..27669f6b6b9003 100644 --- a/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md +++ b/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md @@ -3,7 +3,7 @@ Turn a writeable and readable stream into a single streams2 duplex stream. 
Similar to [duplexer2](https://github.com/deoxxa/duplexer2) except it supports both streams2 and streams1 as input -and it allows you to set the readable and writable part asynchroniously using `setReadable(stream)` and `setWritable(stream)` +and it allows you to set the readable and writable part asynchronously using `setReadable(stream)` and `setWritable(stream)` ``` npm install duplexify @@ -27,7 +27,7 @@ dup.on('data', function(data) { }) ``` -You can also set the readable and writable parts asynchroniously +You can also set the readable and writable parts asynchronously ``` js var dup = duplexify() diff --git a/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js b/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js index 377eb60ad78356..a04f124fa9362c 100644 --- a/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js @@ -158,7 +158,8 @@ Duplexify.prototype._forward = function() { var data - while ((data = shift(this._readable2)) !== null) { + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue this._drained = this.push(data) } diff --git a/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json b/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json index 485f0312ea86cb..a8de9a133f9a60 100644 --- a/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json @@ -14,13 +14,13 @@ ] ], "_from": "duplexify@>=3.4.2 <4.0.0", - "_id": "duplexify@3.4.5", + "_id": "duplexify@3.5.0", "_inCache": true, "_location": "/mississippi/duplexify", - "_nodeVersion": "4.4.3", + "_nodeVersion": "4.6.1", "_npmOperationalInternal": { "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/duplexify-3.4.5.tgz_1468011872327_0.44416941492818296" + "tmp": "tmp/duplexify-3.5.0.tgz_1477317448157_0.2257942291907966" }, "_npmUser": { "name": "mafintosh", @@ -43,8 +43,8 @@ "/mississippi", "/mississippi/pumpify" ], - "_resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.4.5.tgz", - "_shasum": "0e7e287a775af753bf57e6e7b7f21f183f6c3a53", + "_resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.5.0.tgz", + "_shasum": "1aa773002e1578457e9d9d4a50b0ccaaebcbd604", "_shrinkwrap": null, "_spec": "duplexify@^3.4.2", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -60,7 +60,7 @@ "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" }, - "description": "Turn a writeable and readable stream into a streams2 duplex stream with support for async initialization and streams1/streams2 input", + "description": "Turn a writable and readable stream into a streams2 duplex stream with support for async initialization and streams1/streams2 input", "devDependencies": { "concat-stream": "^1.4.6", "tape": "^2.13.3", @@ -68,10 +68,10 @@ }, "directories": {}, "dist": { - "shasum": "0e7e287a775af753bf57e6e7b7f21f183f6c3a53", - "tarball": "https://registry.npmjs.org/duplexify/-/duplexify-3.4.5.tgz" + "shasum": "1aa773002e1578457e9d9d4a50b0ccaaebcbd604", + "tarball": "https://registry.npmjs.org/duplexify/-/duplexify-3.5.0.tgz" }, - "gitHead": "338de6776ce9b25d7ab6e91d766166245a8f070a", + "gitHead": "97f525d36ce275e52435611d70b3a77a7234eaa1", "homepage": "https://github.com/mafintosh/duplexify", "keywords": [ "duplex", @@ -100,5 +100,5 @@ "scripts": { "test": "tape test.js" }, - "version": "3.4.5" + 
"version": "3.5.0" } diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore index 3c3629e647f5dd..3e70011a19a60e 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore @@ -1 +1,3 @@ node_modules +bundle.js +test.html diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/example.js b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/example.js new file mode 100644 index 00000000000000..fa6b5dab25578c --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/example.js @@ -0,0 +1,22 @@ +var writer = require('./') + +var ws = writer(write, flush) + +ws.on('finish', function () { + console.log('finished') +}) + +ws.write('hello') +ws.write('world') +ws.end() + +function write (data, enc, cb) { + // i am your normal ._write method + console.log('writing', data.toString()) + cb() +} + +function flush (cb) { + // i am called before finish is emitted + setTimeout(cb, 1000) // wait 1 sec +} diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js index d7715734b4d1d9..e82e126126fd24 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js @@ -1,5 +1,5 @@ var stream = require('readable-stream') -var util = require('util') +var inherits = require('inherits') var SIGNAL_FLUSH = new Buffer([0]) @@ -21,7 +21,7 @@ function WriteStream (opts, write, flush) { this._flush = flush || null } -util.inherits(WriteStream, stream.Writable) +inherits(WriteStream, stream.Writable) WriteStream.obj = function (opts, worker, flush) { if (typeof opts === 'function') return WriteStream.obj(null, opts, worker) @@ -47,6 +47,6 @@ WriteStream.prototype.end = function (data, enc, cb) { WriteStream.prototype.destroy = function (err) { if (this.destroyed) return this.destroyed = true - if (err) this.emit('error') + if (err) this.emit('error', err) this.emit('close') } diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json index a73eaeb36efeb0..cd239a22b51edd 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json @@ -14,15 +14,19 @@ ] ], "_from": "flush-write-stream@>=1.0.0 <2.0.0", - "_id": "flush-write-stream@1.0.0", + "_id": "flush-write-stream@1.0.2", "_inCache": true, "_location": "/mississippi/flush-write-stream", - "_nodeVersion": "4.1.1", + "_nodeVersion": "4.2.6", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/flush-write-stream-1.0.2.tgz_1476614807882_0.22224654001183808" + }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.14.4", + "_npmVersion": "2.14.12", "_phantomChildren": {}, "_requested": { "raw": "flush-write-stream@^1.0.0", @@ -36,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.0.tgz", - "_shasum": "cc4fc24f4b4c973f80027f27cc095841639965a7", + "_resolved": 
"https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.2.tgz", + "_shasum": "c81b90d8746766f1a609a46809946c45dd8ae417", "_shrinkwrap": null, "_spec": "flush-write-stream@^1.0.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -49,6 +53,7 @@ "url": "https://github.com/mafintosh/flush-write-stream/issues" }, "dependencies": { + "inherits": "^2.0.1", "readable-stream": "^2.0.4" }, "description": "A write stream constructor that supports a flush function that is called before finish is emitted", @@ -57,10 +62,10 @@ }, "directories": {}, "dist": { - "shasum": "cc4fc24f4b4c973f80027f27cc095841639965a7", - "tarball": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.0.tgz" + "shasum": "c81b90d8746766f1a609a46809946c45dd8ae417", + "tarball": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.2.tgz" }, - "gitHead": "50e81d8eeee8a9666c7d5105775a6c89b7ae9dfa", + "gitHead": "d35a4071dacbcc60fc40d798fa58fc425cba3efc", "homepage": "https://github.com/mafintosh/flush-write-stream", "license": "MIT", "main": "index.js", @@ -80,5 +85,5 @@ "scripts": { "test": "tape test.js" }, - "version": "1.0.0" + "version": "1.0.2" } diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js index 7383acede6f5db..6cd0c20e1f795a 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js @@ -70,3 +70,16 @@ tape('can pass options', function (t) { process.nextTick(cb) } }) + +tape('emits error on destroy', function (t) { + var expected = new Error() + + var ws = writer({objectMode: true}, function () {}) + + ws.on('error', function (err) { + t.equal(err, expected) + }) + ws.on('close', t.end) + + ws.destroy(expected) +}) diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.npmignore b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/.npmignore similarity index 100% rename from deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.npmignore rename to deps/npm/node_modules/mississippi/node_modules/parallel-transform/.npmignore diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/LICENSE b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/LICENSE new file mode 100644 index 00000000000000..4b30ed5d9509cb --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/LICENSE @@ -0,0 +1,20 @@ +Copyright 2013 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/README.md b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/README.md new file mode 100644 index 00000000000000..f53e130849328f --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/README.md @@ -0,0 +1,54 @@ +# parallel-transform + +[Transform stream](http://nodejs.org/api/stream.html#stream_class_stream_transform_1) for Node.js that allows you to run your transforms +in parallel without changing the order of the output. + + npm install parallel-transform + +It is easy to use + +``` js +var transform = require('parallel-transform'); + +var stream = transform(10, function(data, callback) { // 10 is the parallism level + setTimeout(function() { + callback(null, data); + }, Math.random() * 1000); +}); + +for (var i = 0; i < 10; i++) { + stream.write(''+i); +} +stream.end(); + +stream.on('data', function(data) { + console.log(data); // prints 0,1,2,... +}); +stream.on('end', function() { + console.log('stream has ended'); +}); +``` + +If you run the above example you'll notice that it runs in parallel +(does not take ~1 second between each print) and that the order is preserved + +## Stream options + +All transforms are Node 0.10 streams. Per default they are created with the options `{objectMode:true}`. +If you want to use your own stream options pass them as the second parameter + +``` js +var stream = transform(10, {objectMode:false}, function(data, callback) { + // data is now a buffer + callback(null, data); +}); + +fs.createReadStream('filename').pipe(stream).pipe(process.stdout); +``` + +### Unordered +Passing the option `{ordered:false}` will output the data as soon as it's processed by a transform, without waiting to respect the order. + +## License + +MIT \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/index.js b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/index.js new file mode 100644 index 00000000000000..77329e4ccfecf1 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/index.js @@ -0,0 +1,105 @@ +var Transform = require('readable-stream').Transform; +var inherits = require('inherits'); +var cyclist = require('cyclist'); +var util = require('util'); + +var ParallelTransform = function(maxParallel, opts, ontransform) { + if (!(this instanceof ParallelTransform)) return new ParallelTransform(maxParallel, opts, ontransform); + + if (typeof maxParallel === 'function') { + ontransform = maxParallel; + opts = null; + maxParallel = 1; + } + if (typeof opts === 'function') { + ontransform = opts; + opts = null; + } + + if (!opts) opts = {}; + if (!opts.highWaterMark) opts.highWaterMark = Math.max(maxParallel, 16); + if (opts.objectMode !== false) opts.objectMode = true; + + Transform.call(this, opts); + + this._maxParallel = maxParallel; + this._ontransform = ontransform; + this._destroyed = false; + this._flushed = false; + this._ordered = opts.ordered !== false; + this._buffer = this._ordered ? 
cyclist(maxParallel) : []; + this._top = 0; + this._bottom = 0; + this._ondrain = null; +}; + +inherits(ParallelTransform, Transform); + +ParallelTransform.prototype.destroy = function() { + if (this._destroyed) return; + this._destroyed = true; + this.emit('close'); +}; + +ParallelTransform.prototype._transform = function(chunk, enc, callback) { + var self = this; + var pos = this._top++; + + this._ontransform(chunk, function(err, data) { + if (self._destroyed) return; + if (err) { + self.emit('error', err); + self.push(null); + self.destroy(); + return; + } + if (self._ordered) { + self._buffer.put(pos, (data === undefined || data === null) ? null : data); + } + else { + self._buffer.push(data); + } + self._drain(); + }); + + if (this._top - this._bottom < this._maxParallel) return callback(); + this._ondrain = callback; +}; + +ParallelTransform.prototype._flush = function(callback) { + this._flushed = true; + this._ondrain = callback; + this._drain(); +}; + +ParallelTransform.prototype._drain = function() { + if (this._ordered) { + while (this._buffer.get(this._bottom) !== undefined) { + var data = this._buffer.del(this._bottom++); + if (data === null) continue; + this.push(data); + } + } + else { + while (this._buffer.length > 0) { + var data = this._buffer.pop(); + this._bottom++; + if (data === null) continue; + this.push(data); + } + } + + + if (!this._drained() || !this._ondrain) return; + + var ondrain = this._ondrain; + this._ondrain = null; + ondrain(); +}; + +ParallelTransform.prototype._drained = function() { + var diff = this._top - this._bottom; + return this._flushed ? !diff : diff < this._maxParallel; +}; + +module.exports = ParallelTransform; diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/.npmignore b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/.npmignore new file mode 100644 index 00000000000000..ba99195bae87cd --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/.npmignore @@ -0,0 +1 @@ +bench diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/README.md b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/README.md new file mode 100644 index 00000000000000..50c35cc5fd4573 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/README.md @@ -0,0 +1,39 @@ +# Cyclist + +Cyclist is an efficient [cyclic list](http://en.wikipedia.org/wiki/Circular_buffer) implemention for Javascript. +It is available through npm + + npm install cyclist + +## What? + +Cyclist allows you to create a list of fixed size that is cyclic. +In a cyclist list the element following the last one is the first one. +This property can be really useful when for example trying to order data +packets that can arrive out of order over a network stream. 
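The new parallel-transform stream above uses cyclist for exactly the out-of-order case this paragraph describes: each result is stored at the index it was dispatched from, and the buffer is drained sequentially from the lowest outstanding index. A minimal sketch of that reordering pattern (illustrative only, not part of the patch; the `finish` helper and the sample values are made up):

``` js
var cyclist = require('cyclist')

var buffer = cyclist(4) // fixed capacity; non-power-of-2 sizes round up
var bottom = 0          // next index the consumer expects

// Called when an async job completes, possibly out of order.
function finish (index, result) {
  buffer.put(index, result)
  // Emit everything that is now contiguous from the bottom.
  while (buffer.get(bottom) !== undefined) {
    console.log('emit', buffer.del(bottom++))
  }
}

finish(1, 'second') // arrives early, stays buffered
finish(0, 'first')  // fills the gap: emits 'first' then 'second', in order
```

This is the same bookkeeping parallel-transform's `_transform`/`_drain` pair performs with its `_top` and `_bottom` counters.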
+ +## Usage + +``` js +var cyclist = require('cyclist'); +var list = cyclist(4); // if size (4) is not a power of 2 it will be the follwing power of 2 + // this buffer can now hold 4 elements in total + +list.put(42, 'hello 42'); // store something and index 42 +list.put(43, 'hello 43'); // store something and index 43 + +console.log(list.get(42)); // prints hello 42 +console.log(list.get(46)); // prints hello 42 again since 46 - 42 == list.size +``` + +## API + +* `cyclist(size)` creates a new buffer +* `cyclist#get(index)` get an object stored in the buffer +* `cyclist#put(index,value)` insert an object into the buffer +* `cyclist#del(index)` delete an object from an index +* `cyclist#size` property containing current size of buffer + +## License + +MIT diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/index.js b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/index.js new file mode 100644 index 00000000000000..baf710c3a9a61c --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/index.js @@ -0,0 +1,33 @@ +var ensureTwoPower = function(n) { + if (n && !(n & (n - 1))) return n; + var p = 1; + while (p < n) p <<= 1; + return p; +}; + +var Cyclist = function(size) { + if (!(this instanceof Cyclist)) return new Cyclist(size); + size = ensureTwoPower(size); + this.mask = size-1; + this.size = size; + this.values = new Array(size); +}; + +Cyclist.prototype.put = function(index, val) { + var pos = index & this.mask; + this.values[pos] = val; + return pos; +}; + +Cyclist.prototype.get = function(index) { + return this.values[index & this.mask]; +}; + +Cyclist.prototype.del = function(index) { + var pos = index & this.mask; + var val = this.values[pos]; + this.values[pos] = undefined; + return val; +}; + +module.exports = Cyclist; \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/package.json b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/package.json new file mode 100644 index 00000000000000..ad045eb971434b --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/package.json @@ -0,0 +1,80 @@ +{ + "_args": [ + [ + { + "raw": "cyclist@~0.2.2", + "scope": null, + "escapedName": "cyclist", + "name": "cyclist", + "rawSpec": "~0.2.2", + "spec": ">=0.2.2 <0.3.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/parallel-transform" + ] + ], + "_from": "cyclist@>=0.2.2 <0.3.0", + "_id": "cyclist@0.2.2", + "_inCache": true, + "_location": "/mississippi/parallel-transform/cyclist", + "_npmUser": { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + }, + "_npmVersion": "1.3.5", + "_phantomChildren": {}, + "_requested": { + "raw": "cyclist@~0.2.2", + "scope": null, + "escapedName": "cyclist", + "name": "cyclist", + "rawSpec": "~0.2.2", + "spec": ">=0.2.2 <0.3.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/parallel-transform" + ], + "_resolved": "https://registry.npmjs.org/cyclist/-/cyclist-0.2.2.tgz", + "_shasum": "1b33792e11e914a2fd6d6ed6447464444e5fa640", + "_shrinkwrap": null, + "_spec": "cyclist@~0.2.2", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/parallel-transform", + "author": { + "name": "Mathias Buus Madsen", + "email": "mathiasbuus@gmail.com" + }, + "bugs": { + "url": 
"https://github.com/mafintosh/cyclist/issues" + }, + "dependencies": {}, + "description": "Cyclist is an efficient cyclic list implemention.", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "1b33792e11e914a2fd6d6ed6447464444e5fa640", + "tarball": "https://registry.npmjs.org/cyclist/-/cyclist-0.2.2.tgz" + }, + "homepage": "https://github.com/mafintosh/cyclist#readme", + "keywords": [ + "circular", + "buffer", + "ring", + "cyclic", + "data" + ], + "maintainers": [ + { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + } + ], + "name": "cyclist", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/cyclist.git" + }, + "version": "0.2.2" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/package.json b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/package.json new file mode 100644 index 00000000000000..357b4898a662b3 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/package.json @@ -0,0 +1,92 @@ +{ + "_args": [ + [ + { + "raw": "parallel-transform@^1.1.0", + "scope": null, + "escapedName": "parallel-transform", + "name": "parallel-transform", + "rawSpec": "^1.1.0", + "spec": ">=1.1.0 <2.0.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi" + ] + ], + "_from": "parallel-transform@>=1.1.0 <2.0.0", + "_id": "parallel-transform@1.1.0", + "_inCache": true, + "_location": "/mississippi/parallel-transform", + "_nodeVersion": "4.6.1", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/parallel-transform-1.1.0.tgz_1478596056784_0.9169374129269272" + }, + "_npmUser": { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + }, + "_npmVersion": "2.15.9", + "_phantomChildren": {}, + "_requested": { + "raw": "parallel-transform@^1.1.0", + "scope": null, + "escapedName": "parallel-transform", + "name": "parallel-transform", + "rawSpec": "^1.1.0", + "spec": ">=1.1.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi" + ], + "_resolved": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.1.0.tgz", + "_shasum": "d410f065b05da23081fcd10f28854c29bda33b06", + "_shrinkwrap": null, + "_spec": "parallel-transform@^1.1.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", + "author": { + "name": "Mathias Buus Madsen", + "email": "mathiasbuus@gmail.com" + }, + "bugs": { + "url": "https://github.com/mafintosh/parallel-transform/issues" + }, + "dependencies": { + "cyclist": "~0.2.2", + "inherits": "^2.0.3", + "readable-stream": "^2.1.5" + }, + "description": "Transform stream that allows you to run your transforms in parallel without changing the order", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "d410f065b05da23081fcd10f28854c29bda33b06", + "tarball": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.1.0.tgz" + }, + "gitHead": "1b4919bc318eb0cbd6e8ee08c4d56f405d74e643", + "homepage": "https://github.com/mafintosh/parallel-transform", + "keywords": [ + "transform", + "stream", + "parallel", + "preserve", + "order" + ], + "license": "MIT", + "maintainers": [ + { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + } + ], + "name": "parallel-transform", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": 
{ + "type": "git", + "url": "git://github.com/mafintosh/parallel-transform.git" + }, + "scripts": {}, + "version": "1.1.0" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/pump/index.js b/deps/npm/node_modules/mississippi/node_modules/pump/index.js index 843da727ef88a3..060ce5f4fd3662 100644 --- a/deps/npm/node_modules/mississippi/node_modules/pump/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/pump/index.js @@ -9,6 +9,7 @@ var isFn = function (fn) { } var isFS = function (stream) { + if (!fs) return false // browser return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) } diff --git a/deps/npm/node_modules/mississippi/node_modules/pump/package.json b/deps/npm/node_modules/mississippi/node_modules/pump/package.json index 2457d2f00d2cdf..2226426b52ab4c 100644 --- a/deps/npm/node_modules/mississippi/node_modules/pump/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/pump/package.json @@ -14,15 +14,19 @@ ] ], "_from": "pump@>=1.0.0 <2.0.0", - "_id": "pump@1.0.1", + "_id": "pump@1.0.2", "_inCache": true, "_location": "/mississippi/pump", - "_nodeVersion": "4.1.1", + "_nodeVersion": "4.6.2", + "_npmOperationalInternal": { + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/pump-1.0.2.tgz_1482243286673_0.09530888125300407" + }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.14.4", + "_npmVersion": "2.15.11", "_phantomChildren": {}, "_requested": { "raw": "pump@^1.0.0", @@ -37,8 +41,8 @@ "/mississippi", "/mississippi/pumpify" ], - "_resolved": "https://registry.npmjs.org/pump/-/pump-1.0.1.tgz", - "_shasum": "f1f1409fb9bd1085bbdb576b43b84ec4b5eadc1a", + "_resolved": "https://registry.npmjs.org/pump/-/pump-1.0.2.tgz", + "_shasum": "3b3ee6512f94f0e575538c17995f9f16990a5d51", "_shrinkwrap": null, "_spec": "pump@^1.0.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -46,6 +50,9 @@ "name": "Mathias Buus Madsen", "email": "mathiasbuus@gmail.com" }, + "browser": { + "fs": false + }, "bugs": { "url": "https://github.com/mafintosh/pump/issues" }, @@ -57,10 +64,10 @@ "devDependencies": {}, "directories": {}, "dist": { - "shasum": "f1f1409fb9bd1085bbdb576b43b84ec4b5eadc1a", - "tarball": "https://registry.npmjs.org/pump/-/pump-1.0.1.tgz" + "shasum": "3b3ee6512f94f0e575538c17995f9f16990a5d51", + "tarball": "https://registry.npmjs.org/pump/-/pump-1.0.2.tgz" }, - "gitHead": "6abb030191e1ccb12c5f735a4f39162307f93b90", + "gitHead": "90ed7ae8923ade7c7589e3db28c29fbc5c2d42ca", "homepage": "https://github.com/mafintosh/pump", "keywords": [ "streams", @@ -85,5 +92,5 @@ "scripts": { "test": "node test.js" }, - "version": "1.0.1" + "version": "1.0.2" } diff --git a/deps/npm/node_modules/mississippi/node_modules/pump/test-browser.js b/deps/npm/node_modules/mississippi/node_modules/pump/test-browser.js new file mode 100644 index 00000000000000..80e852c7dcb9d8 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/pump/test-browser.js @@ -0,0 +1,58 @@ +var stream = require('stream') +var pump = require('./index') + +var rs = new stream.Readable() +var ws = new stream.Writable() + +rs._read = function (size) { + this.push(Buffer(size).fill('abc')) +} + +ws._write = function (chunk, encoding, cb) { + setTimeout(function () { + cb() + }, 100) +} + +var toHex = function () { + var reverse = new (require('stream').Transform)() + + reverse._transform = function (chunk, enc, 
callback) { + reverse.push(chunk.toString('hex')) + callback() + } + + return reverse +} + +var wsClosed = false +var rsClosed = false +var callbackCalled = false + +var check = function () { + if (wsClosed && rsClosed && callbackCalled) console.log('done') +} + +ws.on('finish', function () { + wsClosed = true + check() +}) + +rs.on('end', function () { + rsClosed = true + check() +}) + +pump(rs, toHex(), toHex(), toHex(), ws, function () { + callbackCalled = true + check() +}) + +setTimeout(function () { + rs.push(null) + rs.emit('close') +}, 1000) + +setTimeout(function () { + if (!check()) throw new Error('timeout') +}, 5000) diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js b/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js index ea8d112f981f09..2c07e957a3beb2 100644 --- a/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js @@ -1,4 +1,5 @@ var eos = require('end-of-stream') +var shift = require('stream-shift') module.exports = each @@ -41,7 +42,7 @@ function each (stream, fn, cb) { if (ended || running) return want = false - var data = stream.read() + var data = shift(stream) if (!data) { want = true return diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.npmignore b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.npmignore similarity index 100% rename from deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.npmignore rename to deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.npmignore diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.travis.yml similarity index 58% rename from deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.travis.yml rename to deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.travis.yml index cc4dba29d959a2..ecd4193f6025e0 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.travis.yml +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.travis.yml @@ -1,4 +1,6 @@ language: node_js node_js: - - "0.8" - "0.10" + - "0.12" + - "4" + - "6" diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/license.md b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/LICENSE similarity index 79% rename from deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/license.md rename to deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/LICENSE index c67e3532b54245..bae9da7bfae2b5 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/license.md +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/LICENSE @@ -1,4 +1,6 @@ -# Copyright (c) 2015 Calvin Metcalf +The MIT License (MIT) + +Copyright (c) 2016 Mathias Buus Permission is hereby 
granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -7,13 +9,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. -**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.** +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/README.md b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/README.md new file mode 100644 index 00000000000000..d9cc2d945fa161 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/README.md @@ -0,0 +1,25 @@ +# stream-shift + +Returns the next buffer/object in a stream's readable queue + +``` +npm install stream-shift +``` + +[![build status](http://img.shields.io/travis/mafintosh/stream-shift.svg?style=flat)](http://travis-ci.org/mafintosh/stream-shift) + +## Usage + +``` js +var shift = require('stream-shift') + +console.log(shift(someStream)) // first item in its buffer +``` + +## Credit + +Thanks [@dignifiedquire](https://github.com/dignifiedquire) for making this work on node 6 + +## License + +MIT diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/index.js b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/index.js new file mode 100644 index 00000000000000..c4b18b9c2a5cf8 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/index.js @@ -0,0 +1,20 @@ +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return rs.objectMode ? 
stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } + + return state.buffer[0].length + } + + return state.length +} diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/package.json b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/package.json new file mode 100644 index 00000000000000..54bd23eb8e69a1 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/package.json @@ -0,0 +1,100 @@ +{ + "_args": [ + [ + { + "raw": "stream-shift@^1.0.0", + "scope": null, + "escapedName": "stream-shift", + "name": "stream-shift", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/duplexify" + ], + [ + { + "raw": "stream-shift@^1.0.0", + "scope": null, + "escapedName": "stream-shift", + "name": "stream-shift", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/stream-each" + ] + ], + "_from": "stream-shift@^1.0.0", + "_id": "stream-shift@1.0.0", + "_inCache": true, + "_location": "/mississippi/stream-each/stream-shift", + "_nodeVersion": "4.4.3", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/stream-shift-1.0.0.tgz_1468011662152_0.6510484367609024" + }, + "_npmUser": { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + }, + "_npmVersion": "2.15.9", + "_phantomChildren": {}, + "_requested": { + "raw": "stream-shift@^1.0.0", + "scope": null, + "escapedName": "stream-shift", + "name": "stream-shift", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/stream-each" + ], + "_resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", + "_shasum": "d5c752825e5367e786f78e18e445ea223a155952", + "_shrinkwrap": null, + "_spec": "stream-shift@^1.0.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/stream-each", + "author": { + "name": "Mathias Buus", + "url": "@mafintosh" + }, + "bugs": { + "url": "https://github.com/mafintosh/stream-shift/issues" + }, + "dependencies": {}, + "description": "Returns the next buffer/object in a stream's readable queue", + "devDependencies": { + "standard": "^7.1.2", + "tape": "^4.6.0", + "through2": "^2.0.1" + }, + "directories": {}, + "dist": { + "shasum": "d5c752825e5367e786f78e18e445ea223a155952", + "tarball": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz" + }, + "gitHead": "2ea5f7dcd8ac6babb08324e6e603a3269252a2c4", + "homepage": "https://github.com/mafintosh/stream-shift", + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + } + ], + "name": "stream-shift", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/mafintosh/stream-shift.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "1.0.0" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/test.js 
b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/test.js new file mode 100644 index 00000000000000..c0222c37d53fe5 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/test.js @@ -0,0 +1,48 @@ +var tape = require('tape') +var through = require('through2') +var stream = require('stream') +var shift = require('./') + +tape('shifts next', function (t) { + var passthrough = through() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer('hello')) + t.same(shift(passthrough), Buffer('world')) + t.end() +}) + +tape('shifts next with core', function (t) { + var passthrough = stream.PassThrough() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer('hello')) + t.same(shift(passthrough), Buffer('world')) + t.end() +}) + +tape('shifts next with object mode', function (t) { + var passthrough = through({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), {hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) + +tape('shifts next with object mode with core', function (t) { + var passthrough = stream.PassThrough({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), {hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json b/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json index d73cd30d829b82..f8594afda69674 100644 --- a/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json @@ -14,19 +14,19 @@ ] ], "_from": "stream-each@>=1.1.0 <2.0.0", - "_id": "stream-each@1.1.2", + "_id": "stream-each@1.2.0", "_inCache": true, "_location": "/mississippi/stream-each", - "_nodeVersion": "4.2.3", + "_nodeVersion": "4.6.1", "_npmOperationalInternal": { - "host": "packages-5-east.internal.npmjs.com", - "tmp": "tmp/stream-each-1.1.2.tgz_1454600601831_0.7560806761030108" + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/stream-each-1.2.0.tgz_1478427484733_0.5437065886799246" }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.14.7", + "_npmVersion": "2.15.9", "_phantomChildren": {}, "_requested": { "raw": "stream-each@^1.1.0", @@ -40,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/stream-each/-/stream-each-1.1.2.tgz", - "_shasum": "7d4f887f24c721ab0155b12a34263d8732ad8d39", + "_resolved": "https://registry.npmjs.org/stream-each/-/stream-each-1.2.0.tgz", + "_shasum": "1e95d47573f580d814dc0ff8cd0f66f1ce53c991", "_shrinkwrap": null, "_spec": "stream-each@^1.1.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -53,7 +53,8 @@ "url": "https://github.com/mafintosh/stream-each/issues" }, "dependencies": { - "end-of-stream": "^1.1.0" + "end-of-stream": "^1.1.0", + "stream-shift": "^1.0.0" }, "description": "Iterate all the data in a stream", "devDependencies": { @@ -63,10 +64,10 @@ }, "directories": {}, "dist": { - "shasum": "7d4f887f24c721ab0155b12a34263d8732ad8d39", - "tarball": "https://registry.npmjs.org/stream-each/-/stream-each-1.1.2.tgz" + "shasum": "1e95d47573f580d814dc0ff8cd0f66f1ce53c991", + "tarball": "https://registry.npmjs.org/stream-each/-/stream-each-1.2.0.tgz" 
}, - "gitHead": "f0ef91ddbe95688e376598b9959cab463c733b57", + "gitHead": "2968bf4f195641f5eda594c781a0b590776bc702", "homepage": "https://github.com/mafintosh/stream-each", "license": "MIT", "main": "index.js", @@ -94,5 +95,5 @@ "scripts": { "test": "standard && tape test.js" }, - "version": "1.1.2" + "version": "1.2.0" } diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE deleted file mode 100644 index f6a0029de11d71..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE +++ /dev/null @@ -1,39 +0,0 @@ -Copyright 2013, Rod Vagg (the "Original Author") -All rights reserved. - -MIT +no-false-attribs License - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -Distributions of all or part of the Software intended to be used -by the recipients as they would use the unmodified Software, -containing modifications that substantially alter, remove, or -disable functionality of the Software, outside of the documented -configuration mechanisms provided by the Software, shall be -modified such that the Original Author's bug reporting email -addresses and urls are either replaced with the contact information -of the parties responsible for the changes, or removed entirely. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. - - -Except where noted, this license applies to any and all software -programs and associated documentation files created by the -Original Author, when distributed with the Software. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.html b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.html new file mode 100644 index 00000000000000..ac478189ea2271 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.html @@ -0,0 +1,336 @@ + + + + + + + + + + + + + + +
+The MIT License (MIT)
+
+Copyright (c) 2016 Rod Vagg (the "Original Author") and additional contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ + \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.md b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.md new file mode 100644 index 00000000000000..7f0b93daaa4708 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.md @@ -0,0 +1,9 @@ +# The MIT License (MIT) + +**Copyright (c) 2016 Rod Vagg (the "Original Author") and additional contributors** + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/README.md index c84b3464ada4c9..a916f15ef5e330 100644 --- a/deps/npm/node_modules/mississippi/node_modules/through2/README.md +++ b/deps/npm/node_modules/mississippi/node_modules/through2/README.md @@ -20,6 +20,9 @@ fs.createReadStream('ex.txt') callback() })) .pipe(fs.createWriteStream('out.txt')) + .on('finish', function () { + doSomethingSpecial() + }) ``` Or object streams: diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.npmignore b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.npmignore deleted file mode 100644 index 38344f87a62766..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -build/ -test/ -examples/ -fs.js -zlib.js \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.travis.yml deleted file mode 100644 index 1b82118460cfe4..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.travis.yml +++ /dev/null @@ -1,52 +0,0 @@ -sudo: false -language: node_js -before_install: - - npm install -g npm@2 - - npm install -g npm -notifications: - email: false -matrix: - fast_finish: true - allow_failures: - - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - include: - - node_js: '0.8' - env: TASK=test - - node_js: '0.10' - env: TASK=test - - node_js: '0.11' - env: TASK=test - - node_js: '0.12' - env: TASK=test - - node_js: 1 - env: TASK=test - - node_js: 2 - env: TASK=test - - node_js: 3 - env: TASK=test - - node_js: 4 - env: TASK=test - - node_js: 5 - env: TASK=test - - node_js: 
5 - env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" -script: "npm run $TASK" -env: - global: - - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= - - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.zuul.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.zuul.yml deleted file mode 100644 index 96d9cfbd38662f..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.zuul.yml +++ /dev/null @@ -1 +0,0 @@ -ui: tape diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/LICENSE deleted file mode 100644 index e3d4e695a4cff2..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/README.md deleted file mode 100644 index 1a67c48cd031b5..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# readable-stream - -***Node-core v5.8.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) - -```bash -npm install --save readable-stream -``` - -***Node-core streams for userland*** - -This package is a mirror of the Streams2 and Streams3 implementations in -Node-core, including [documentation](doc/stream.markdown). - -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. - -# Streams WG Team Members - -* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> - - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> - - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D -* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/stream.markdown b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/stream.markdown deleted file mode 100644 index 0bc3819e63b025..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/stream.markdown +++ /dev/null @@ -1,1760 +0,0 @@ -# Stream - - Stability: 2 - Stable - -A stream is an abstract interface implemented by various objects in -Node.js. For example a [request to an HTTP server][http-incoming-message] is a -stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All -streams are instances of [`EventEmitter`][]. - -You can load the Stream base classes by doing `require('stream')`. -There are base classes provided for [Readable][] streams, [Writable][] -streams, [Duplex][] streams, and [Transform][] streams. - -This document is split up into 3 sections: - -1. The first section explains the parts of the API that you need to be - aware of to use streams in your programs. -2. 
The second section explains the parts of the API that you need to - use if you implement your own custom streams yourself. The API is designed to - make this easy for you to do. -3. The third section goes into more depth about how streams work, - including some of the internal mechanisms and functions that you - should probably not modify unless you definitely know what you are - doing. - - -## API for Stream Consumers - - - -Streams can be either [Readable][], [Writable][], or both ([Duplex][]). - -All streams are EventEmitters, but they also have other custom methods -and properties depending on whether they are Readable, Writable, or -Duplex. - -If a stream is both Readable and Writable, then it implements all of -the methods and events. So, a [Duplex][] or [Transform][] stream is -fully described by this API, though their implementation may be -somewhat different. - -It is not necessary to implement Stream interfaces in order to consume -streams in your programs. If you **are** implementing streaming -interfaces in your own program, please also refer to -[API for Stream Implementors][]. - -Almost all Node.js programs, no matter how simple, use Streams in some -way. Here is an example of using Streams in an Node.js program: - -```js -const http = require('http'); - -var server = http.createServer( (req, res) => { - // req is an http.IncomingMessage, which is a Readable Stream - // res is an http.ServerResponse, which is a Writable Stream - - var body = ''; - // we want to get the data as utf8 strings - // If you don't set an encoding, then you'll get Buffer objects - req.setEncoding('utf8'); - - // Readable streams emit 'data' events once a listener is added - req.on('data', (chunk) => { - body += chunk; - }); - - // the end event tells you that you have entire body - req.on('end', () => { - try { - var data = JSON.parse(body); - } catch (er) { - // uh oh! bad json! - res.statusCode = 400; - return res.end(`error: ${er.message}`); - } - - // write back something interesting to the user: - res.write(typeof data); - res.end(); - }); -}); - -server.listen(1337); - -// $ curl localhost:1337 -d '{}' -// object -// $ curl localhost:1337 -d '"foo"' -// string -// $ curl localhost:1337 -d 'not json' -// error: Unexpected token o -``` - -### Class: stream.Duplex - -Duplex streams are streams that implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Duplex streams include: - -* [TCP sockets][] -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Readable - - - -The Readable stream interface is the abstraction for a *source* of -data that you are reading from. In other words, data comes *out* of a -Readable stream. - -A Readable stream will not start emitting data until you indicate that -you are ready to receive it. - -Readable streams have two "modes": a **flowing mode** and a **paused -mode**. When in flowing mode, data is read from the underlying system -and provided to your program as fast as possible. In paused mode, you -must explicitly call [`stream.read()`][stream-read] to get chunks of data out. -Streams start out in paused mode. - -**Note**: If no data event handlers are attached, and there are no -[`stream.pipe()`][] destinations, and the stream is switched into flowing -mode, then data will be lost. - -You can switch to flowing mode by doing any of the following: - -* Adding a [`'data'`][] event handler to listen for data. -* Calling the [`stream.resume()`][stream-resume] method to explicitly open the - flow. 
-* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. - -You can switch back to paused mode by doing either of the following: - -* If there are no pipe destinations, by calling the - [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the - [`stream.unpipe()`][] method. - -Note that, for backwards compatibility reasons, removing [`'data'`][] -event handlers will **not** automatically pause the stream. Also, if -there are piped destinations, then calling [`stream.pause()`][stream-pause] will -not guarantee that the stream will *remain* paused once those -destinations drain and ask for more data. - -Examples of readable streams include: - -* [HTTP responses, on the client][http-incoming-message] -* [HTTP requests, on the server][http-incoming-message] -* [fs read streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdout and stderr][] -* [`process.stdin`][] - -#### Event: 'close' - -Emitted when the stream and any of its underlying resources (a file -descriptor, for example) have been closed. The event indicates that -no more events will be emitted, and no further computation will occur. - -Not all streams will emit the `'close'` event. - -#### Event: 'data' - -* `chunk` {Buffer|String} The chunk of data. - -Attaching a `'data'` event listener to a stream that has not been -explicitly paused will switch the stream into flowing mode. Data will -then be passed as soon as it is available. - -If you just want to get all the data out of the stream as fast as -possible, this is the best way to do so. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -``` - -#### Event: 'end' - -This event fires when there will be no more data to read. - -Note that the `'end'` event **will not fire** unless the data is -completely consumed. This can be done by switching into flowing mode, -or by calling [`stream.read()`][stream-read] repeatedly until you get to the -end. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -readable.on('end', () => { - console.log('there will be no more data.'); -}); -``` - -#### Event: 'error' - -* {Error Object} - -Emitted if there was an error receiving data. - -#### Event: 'readable' - -When a chunk of data can be read from the stream, it will emit a -`'readable'` event. - -In some cases, listening for a `'readable'` event will cause some data -to be read into the internal buffer from the underlying system, if it -hadn't already. - -```javascript -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - // there is some data to read now -}); -``` - -Once the internal buffer is drained, a `'readable'` event will fire -again when more data is available. - -The `'readable'` event is not emitted in the "flowing" mode with the -sole exception of the last one, on end-of-stream. - -The `'readable'` event indicates that the stream has new information: -either new data is available or the end of the stream has been reached. -In the former case, [`stream.read()`][stream-read] will return that data. In the -latter case, [`stream.read()`][stream-read] will return null. 
For instance, in -the following example, `foo.txt` is an empty file: - -```js -const fs = require('fs'); -var rr = fs.createReadStream('foo.txt'); -rr.on('readable', () => { - console.log('readable:', rr.read()); -}); -rr.on('end', () => { - console.log('end'); -}); -``` - -The output of running this script is: - -``` -$ node test.js -readable: null -end -``` - -#### readable.isPaused() - -* Return: {Boolean} - -This method returns whether or not the `readable` has been **explicitly** -paused by client code (using [`stream.pause()`][stream-pause] without a -corresponding [`stream.resume()`][stream-resume]). - -```js -var readable = new stream.Readable - -readable.isPaused() // === false -readable.pause() -readable.isPaused() // === true -readable.resume() -readable.isPaused() // === false -``` - -#### readable.pause() - -* Return: `this` - -This method will cause a stream in flowing mode to stop emitting -[`'data'`][] events, switching out of flowing mode. Any data that becomes -available will remain in the internal buffer. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); - readable.pause(); - console.log('there will be no more data for 1 second'); - setTimeout(() => { - console.log('now data will start flowing again'); - readable.resume(); - }, 1000); -}); -``` - -#### readable.pipe(destination[, options]) - -* `destination` {stream.Writable} The destination for writing data -* `options` {Object} Pipe options - * `end` {Boolean} End the writer when the reader ends. Default = `true` - -This method pulls all the data out of a readable stream, and writes it -to the supplied destination, automatically managing the flow so that -the destination is not overwhelmed by a fast readable stream. - -Multiple destinations can be piped to safely. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt' -readable.pipe(writable); -``` - -This function returns the destination stream, so you can set up pipe -chains like so: - -```js -var r = fs.createReadStream('file.txt'); -var z = zlib.createGzip(); -var w = fs.createWriteStream('file.txt.gz'); -r.pipe(z).pipe(w); -``` - -For example, emulating the Unix `cat` command: - -```js -process.stdin.pipe(process.stdout); -``` - -By default [`stream.end()`][stream-end] is called on the destination when the -source stream emits [`'end'`][], so that `destination` is no longer writable. -Pass `{ end: false }` as `options` to keep the destination stream open. - -This keeps `writer` open so that "Goodbye" can be written at the -end. - -```js -reader.pipe(writer, { end: false }); -reader.on('end', () => { - writer.end('Goodbye\n'); -}); -``` - -Note that [`process.stderr`][] and [`process.stdout`][] are never closed until -the process exits, regardless of the specified options. - -#### readable.read([size]) - -* `size` {Number} Optional argument to specify how much data to read. -* Return {String|Buffer|Null} - -The `read()` method pulls some data out of the internal buffer and -returns it. If there is no data available, then it will return -`null`. - -If you pass in a `size` argument, then it will return that many -bytes. If `size` bytes are not available, then it will return `null`, -unless we've ended, in which case it will return the data remaining -in the buffer. - -If you do not specify a `size` argument, then it will return all the -data in the internal buffer. 
- -This method should only be called in paused mode. In flowing mode, -this method is called automatically until the internal buffer is -drained. - -```js -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - var chunk; - while (null !== (chunk = readable.read())) { - console.log('got %d bytes of data', chunk.length); - } -}); -``` - -If this method returns a data chunk, then it will also trigger the -emission of a [`'data'`][] event. - -Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][] -event has been triggered will return `null`. No runtime error will be raised. - -#### readable.resume() - -* Return: `this` - -This method will cause the readable stream to resume emitting [`'data'`][] -events. - -This method will switch the stream into flowing mode. If you do *not* -want to consume the data from a stream, but you *do* want to get to -its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open -the flow of data. - -```js -var readable = getReadableStreamSomehow(); -readable.resume(); -readable.on('end', () => { - console.log('got to the end, but did not read anything'); -}); -``` - -#### readable.setEncoding(encoding) - -* `encoding` {String} The encoding to use. -* Return: `this` - -Call this function to cause the stream to return strings of the specified -encoding instead of Buffer objects. For example, if you do -`readable.setEncoding('utf8')`, then the output data will be interpreted as -UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`, -then the data will be encoded in hexadecimal string format. - -This properly handles multi-byte characters that would otherwise be -potentially mangled if you simply pulled the Buffers directly and -called [`buf.toString(encoding)`][] on them. If you want to read the data -as strings, always use this method. - -Also you can disable any encoding at all with `readable.setEncoding(null)`. -This approach is very useful if you deal with binary data or with large -multi-byte strings spread out over multiple chunks. - -```js -var readable = getReadableStreamSomehow(); -readable.setEncoding('utf8'); -readable.on('data', (chunk) => { - assert.equal(typeof chunk, 'string'); - console.log('got %d characters of string data', chunk.length); -}); -``` - -#### readable.unpipe([destination]) - -* `destination` {stream.Writable} Optional specific stream to unpipe - -This method will remove the hooks set up for a previous [`stream.pipe()`][] -call. - -If the destination is not specified, then all pipes are removed. - -If the destination is specified, but no pipe is set up for it, then -this is a no-op. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt', -// but only for the first second -readable.pipe(writable); -setTimeout(() => { - console.log('stop writing to file.txt'); - readable.unpipe(writable); - console.log('manually close the file stream'); - writable.end(); -}, 1000); -``` - -#### readable.unshift(chunk) - -* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue - -This is useful in certain cases where a stream is being consumed by a -parser, which needs to "un-consume" some data that it has -optimistically pulled out of the source, so that the stream can be -passed on to some other party. - -Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event -has been triggered; a runtime error will be raised. 
- -If you find that you must often call `stream.unshift(chunk)` in your -programs, consider implementing a [Transform][] stream instead. (See [API -for Stream Implementors][].) - -```js -// Pull off a header delimited by \n\n -// use unshift() if we get too much -// Call the callback with (error, header, stream) -const StringDecoder = require('string_decoder').StringDecoder; -function parseHeader(stream, callback) { - stream.on('error', callback); - stream.on('readable', onReadable); - var decoder = new StringDecoder('utf8'); - var header = ''; - function onReadable() { - var chunk; - while (null !== (chunk = stream.read())) { - var str = decoder.write(chunk); - if (str.match(/\n\n/)) { - // found the header boundary - var split = str.split(/\n\n/); - header += split.shift(); - var remaining = split.join('\n\n'); - var buf = new Buffer(remaining, 'utf8'); - if (buf.length) - stream.unshift(buf); - stream.removeListener('error', callback); - stream.removeListener('readable', onReadable); - // now the body of the message can be read from the stream. - callback(null, header, stream); - } else { - // still reading the header. - header += str; - } - } - } -} -``` - -Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` -will not end the reading process by resetting the internal reading state of the -stream. This can cause unexpected results if `unshift()` is called during a -read (i.e. from within a [`stream._read()`][stream-_read] implementation on a -custom stream). Following the call to `unshift()` with an immediate -[`stream.push('')`][stream-push] will reset the reading state appropriately, -however it is best to simply avoid calling `unshift()` while in the process of -performing a read. - -#### readable.wrap(stream) - -* `stream` {Stream} An "old style" readable stream - -Versions of Node.js prior to v0.10 had streams that did not implement the -entire Streams API as it is today. (See [Compatibility][] for -more information.) - -If you are using an older Node.js library that emits [`'data'`][] events and -has a [`stream.pause()`][stream-pause] method that is advisory only, then you -can use the `wrap()` method to create a [Readable][] stream that uses the old -stream as its data source. - -You will very rarely ever need to call this function, but it exists -as a convenience for interacting with old Node.js programs and libraries. - -For example: - -```js -const OldReader = require('./old-api-module.js').OldReader; -const Readable = require('stream').Readable; -const oreader = new OldReader; -const myReader = new Readable().wrap(oreader); - -myReader.on('readable', () => { - myReader.read(); // etc. -}); -``` - -### Class: stream.Transform - -Transform streams are [Duplex][] streams where the output is in some way -computed from the input. They implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Transform streams include: - -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Writable - - - -The Writable stream interface is an abstraction for a *destination* -that you are writing data *to*. 
- -Examples of writable streams include: - -* [HTTP requests, on the client][] -* [HTTP responses, on the server][] -* [fs write streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdin][] -* [`process.stdout`][], [`process.stderr`][] - -#### Event: 'drain' - -If a [`stream.write(chunk)`][stream-write] call returns `false`, then the -`'drain'` event will indicate when it is appropriate to begin writing more data -to the stream. - -```js -// Write the data to the supplied writable stream one million times. -// Be attentive to back-pressure. -function writeOneMillionTimes(writer, data, encoding, callback) { - var i = 1000000; - write(); - function write() { - var ok = true; - do { - i -= 1; - if (i === 0) { - // last time! - writer.write(data, encoding, callback); - } else { - // see if we should continue, or wait - // don't pass the callback, because we're not done yet. - ok = writer.write(data, encoding); - } - } while (i > 0 && ok); - if (i > 0) { - // had to stop early! - // write some more once it drains - writer.once('drain', write); - } - } -} -``` - -#### Event: 'error' - -* {Error} - -Emitted if there was an error when writing or piping data. - -#### Event: 'finish' - -When the [`stream.end()`][stream-end] method has been called, and all data has -been flushed to the underlying system, this event is emitted. - -```javascript -var writer = getWritableStreamSomehow(); -for (var i = 0; i < 100; i ++) { - writer.write('hello, #${i}!\n'); -} -writer.end('this is the end\n'); -writer.on('finish', () => { - console.error('all writes are now complete.'); -}); -``` - -#### Event: 'pipe' - -* `src` {stream.Readable} source stream that is piping to this writable - -This is emitted whenever the [`stream.pipe()`][] method is called on a readable -stream, adding this writable to its set of destinations. - -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('pipe', (src) => { - console.error('something is piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -``` - -#### Event: 'unpipe' - -* `src` {[Readable][] Stream} The source stream that - [unpiped][`stream.unpipe()`] this writable - -This is emitted whenever the [`stream.unpipe()`][] method is called on a -readable stream, removing this writable from its set of destinations. - -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('unpipe', (src) => { - console.error('something has stopped piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -reader.unpipe(writer); -``` - -#### writable.cork() - -Forces buffering of all writes. - -Buffered data will be flushed either at [`stream.uncork()`][] or at -[`stream.end()`][stream-end] call. - -#### writable.end([chunk][, encoding][, callback]) - -* `chunk` {String|Buffer} Optional data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Optional callback for when the stream is finished - -Call this method when no more data will be written to the stream. If supplied, -the callback is attached as a listener on the [`'finish'`][] event. - -Calling [`stream.write()`][stream-write] after calling -[`stream.end()`][stream-end] will raise an error. - -```js -// write 'hello, ' and then end with 'world!' -var file = fs.createWriteStream('example.txt'); -file.write('hello, '); -file.end('world!'); -// writing more now is not allowed! 
-``` - -#### writable.setDefaultEncoding(encoding) - -* `encoding` {String} The new default encoding - -Sets the default encoding for a writable stream. - -#### writable.uncork() - -Flush all data, buffered since [`stream.cork()`][] call. - -#### writable.write(chunk[, encoding][, callback]) - -* `chunk` {String|Buffer} The data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Callback for when this chunk of data is flushed -* Returns: {Boolean} `true` if the data was handled completely. - -This method writes some data to the underlying system, and calls the -supplied callback once the data has been fully handled. - -The return value indicates if you should continue writing right now. -If the data had to be buffered internally, then it will return -`false`. Otherwise, it will return `true`. - -This return value is strictly advisory. You MAY continue to write, -even if it returns `false`. However, writes will be buffered in -memory, so it is best not to do this excessively. Instead, wait for -the [`'drain'`][] event before writing more data. - - -## API for Stream Implementors - - - -To implement any sort of stream, the pattern is the same: - -1. Extend the appropriate parent class in your own subclass. (The - [`util.inherits()`][] method is particularly helpful for this.) -2. Call the appropriate parent class constructor in your constructor, - to be sure that the internal mechanisms are set up properly. -3. Implement one or more specific methods, as detailed below. - -The class to extend and the method(s) to implement depend on the sort -of stream class you are writing: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Use-case | Class | Method(s) to implement |
-|----------|-------|-------------------------|
-| Reading only | [Readable](#stream_class_stream_readable_1) | [_read][stream-_read] |
-| Writing only | [Writable](#stream_class_stream_writable_1) | [_write][stream-_write], [_writev][stream-_writev] |
-| Reading and writing | [Duplex](#stream_class_stream_duplex_1) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
-| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush] |
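Editor's note: as an illustration of the pattern this table summarizes (extend the class, call the parent constructor, implement the listed methods), here is a minimal sketch that is not part of the original patch; the class name `NullDuplex` is hypothetical.

```js
const Duplex = require('stream').Duplex;
const util = require('util');

util.inherits(NullDuplex, Duplex);

// A hypothetical Duplex that discards everything written to it
// and produces no readable data.
function NullDuplex(options) {
  // Call the parent constructor so buffering is initialized.
  Duplex.call(this, options);
}

// Implement the methods the table lists for a Duplex stream.
NullDuplex.prototype._read = function(size) {
  this.push(null); // signal end-of-stream immediately
};

NullDuplex.prototype._write = function(chunk, encoding, callback) {
  callback(); // accept and drop the chunk
};
```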
- -In your implementation code, it is very important to never call the methods -described in [API for Stream Consumers][]. Otherwise, you can potentially cause -adverse side effects in programs that consume your streaming interfaces. - -### Class: stream.Duplex - - - -A "duplex" stream is one that is both Readable and Writable, such as a TCP -socket connection. - -Note that `stream.Duplex` is an abstract class designed to be extended -with an underlying implementation of the [`stream._read(size)`][stream-_read] -and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you -would with a Readable or Writable stream class. - -Since JavaScript doesn't have multiple prototypal inheritance, this class -prototypally inherits from Readable, and then parasitically from Writable. It is -thus up to the user to implement both the low-level -[`stream._read(n)`][stream-_read] method as well as the low-level -[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension -duplex classes. - -#### new stream.Duplex(options) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then - the stream will automatically end the readable side when the - writable side ends and vice versa. - * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for readable side of the stream. Has no effect if `objectMode` - is `true`. - * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for writable side of the stream. Has no effect if `objectMode` - is `true`. - -In classes that extend the Duplex class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### Class: stream.PassThrough - -This is a trivial implementation of a [Transform][] stream that simply -passes the input bytes across to the output. Its purpose is mainly -for examples and testing, but there are occasionally use cases where -it can come in handy as a building block for novel sorts of streams. - -### Class: stream.Readable - - - -`stream.Readable` is an abstract class designed to be extended with an -underlying implementation of the [`stream._read(size)`][stream-_read] method. - -Please see [API for Stream Consumers][] for how to consume -streams in your programs. What follows is an explanation of how to -implement Readable streams in your programs. - -#### new stream.Readable([options]) - -* `options` {Object} - * `highWaterMark` {Number} The maximum number of bytes to store in - the internal buffer before ceasing to read from the underlying - resource. Default = `16384` (16kb), or `16` for `objectMode` streams - * `encoding` {String} If specified, then buffers will be decoded to - strings using the specified encoding. Default = `null` - * `objectMode` {Boolean} Whether this stream should behave - as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns - a single value instead of a Buffer of size n. Default = `false` - * `read` {Function} Implementation for the [`stream._read()`][stream-_read] - method. - -In classes that extend the Readable class, make sure to call the -Readable constructor so that the buffering settings can be properly -initialized. 
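Editor's note: a minimal sketch, not taken from the deleted file, of forwarding constructor options to the parent `Readable` constructor as described above; `ObjectSource` is a made-up name and the option handling is an assumption for illustration.

```js
const Readable = require('stream').Readable;
const util = require('util');

util.inherits(ObjectSource, Readable);

// Hypothetical object-mode source that reads from an in-memory array.
function ObjectSource(items, options) {
  // Forward options so highWaterMark/encoding settings take effect;
  // force objectMode for this particular source.
  Readable.call(this, Object.assign({ objectMode: true }, options));
  this._items = items.slice();
}

ObjectSource.prototype._read = function() {
  if (this._items.length === 0) {
    this.push(null); // no more data
  } else {
    this.push(this._items.shift());
  }
};

// Usage sketch:
// new ObjectSource(['a', 'b', 'c']).on('data', console.log);
```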
- -#### readable.\_read(size) - -* `size` {Number} Number of bytes to read asynchronously - -Note: **Implement this method, but do NOT call it directly.** - -This method is prefixed with an underscore because it is internal to the -class that defines it and should only be called by the internal Readable -class methods. All Readable stream implementations must provide a \_read -method to fetch data from the underlying resource. - -When `_read()` is called, if data is available from the resource, the `_read()` -implementation should start pushing that data into the read queue by calling -[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from -the resource and pushing data until push returns `false`, at which point it -should stop reading from the resource. Only when `_read()` is called again after -it has stopped should it start reading more data from the resource and pushing -that data onto the queue. - -Note: once the `_read()` method is called, it will not be called again until -the [`stream.push()`][stream-push] method is called. - -The `size` argument is advisory. Implementations where a "read" is a -single call that returns data can use this to know how much data to -fetch. Implementations where that is not relevant, such as TCP or -TLS, may ignore this argument, and simply provide data whenever it -becomes available. There is no need, for example to "wait" until -`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. - -#### readable.push(chunk[, encoding]) - - -* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue -* `encoding` {String} Encoding of String chunks. Must be a valid - Buffer encoding, such as `'utf8'` or `'ascii'` -* return {Boolean} Whether or not more pushes should be performed - -Note: **This method should be called by Readable implementors, NOT -by consumers of Readable streams.** - -If a value other than null is passed, The `push()` method adds a chunk of data -into the queue for subsequent stream processors to consume. If `null` is -passed, it signals the end of the stream (EOF), after which no more data -can be written. - -The data added with `push()` can be pulled out by calling the -[`stream.read()`][stream-read] method when the [`'readable'`][] event fires. - -This API is designed to be as flexible as possible. For example, -you may be wrapping a lower-level source which has some sort of -pause/resume mechanism, and a data callback. In those cases, you -could wrap the low-level source object by doing something like this: - -```js -// source is an object with readStop() and readStart() methods, -// and an `ondata` member that gets called when it has data, and -// an `onend` member that gets called when the data is over. - -util.inherits(SourceWrapper, Readable); - -function SourceWrapper(options) { - Readable.call(this, options); - - this._source = getLowlevelSourceObject(); - - // Every time there's data, we push it into the internal buffer. - this._source.ondata = (chunk) => { - // if push() returns false, then we need to stop reading from source - if (!this.push(chunk)) - this._source.readStop(); - }; - - // When the source ends, we push the EOF-signaling `null` chunk - this._source.onend = () => { - this.push(null); - }; -} - -// _read will be called when the stream wants to pull more data in -// the advisory size argument is ignored in this case. 
-SourceWrapper.prototype._read = function(size) { - this._source.readStart(); -}; -``` - -#### Example: A Counting Stream - - - -This is a basic example of a Readable stream. It emits the numerals -from 1 to 1,000,000 in ascending order, and then ends. - -```js -const Readable = require('stream').Readable; -const util = require('util'); -util.inherits(Counter, Readable); - -function Counter(opt) { - Readable.call(this, opt); - this._max = 1000000; - this._index = 1; -} - -Counter.prototype._read = function() { - var i = this._index++; - if (i > this._max) - this.push(null); - else { - var str = '' + i; - var buf = new Buffer(str, 'ascii'); - this.push(buf); - } -}; -``` - -#### Example: SimpleProtocol v1 (Sub-optimal) - -This is similar to the `parseHeader` function described -[here](#stream_readable_unshift_chunk), but implemented as a custom stream. -Also, note that this implementation does not convert the incoming data to a -string. - -However, this would be better implemented as a [Transform][] stream. See -[SimpleProtocol v2][] for a better implementation. - -```js -// A parser for a simple data protocol. -// The "header" is a JSON object, followed by 2 \n characters, and -// then a message body. -// -// NOTE: This can be done more simply as a Transform stream! -// Using Readable directly for this is sub-optimal. See the -// alternative example below under the Transform section. - -const Readable = require('stream').Readable; -const util = require('util'); - -util.inherits(SimpleProtocol, Readable); - -function SimpleProtocol(source, options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(source, options); - - Readable.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - - // source is a readable stream, such as a socket or file - this._source = source; - - var self = this; - source.on('end', () => { - self.push(null); - }); - - // give it a kick whenever the source is readable - // read(0) will not consume any bytes - source.on('readable', () => { - self.read(0); - }); - - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._read = function(n) { - if (!this._inBody) { - var chunk = this._source.read(); - - // if the source doesn't have data, we don't have data yet. - if (chunk === null) - return this.push(''); - - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. - this._rawHeader.push(chunk); - this.push(''); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // now, because we got some extra data, unshift the rest - // back into the read queue so that our consumer will see it. - var b = chunk.slice(split); - this.unshift(b); - // calling unshift by itself does not reset the reading state - // of the stream; since we're inside _read, doing an additional - // push('') will reset the state appropriately. - this.push(''); - - // and let them know that we are done parsing the header. 
- this.emit('header', this.header); - } - } else { - // from there on, just provide the data to our consumer. - // careful not to push(null), since that would indicate EOF. - var chunk = this._source.read(); - if (chunk) this.push(chunk); - } -}; - -// Usage: -// var parser = new SimpleProtocol(source); -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Transform - -A "transform" stream is a duplex stream where the output is causally -connected in some way to the input, such as a [zlib][] stream or a -[crypto][] stream. - -There is no requirement that the output be the same size as the input, -the same number of chunks, or arrive at the same time. For example, a -Hash stream will only ever have a single chunk of output which is -provided when the input is ended. A zlib stream will produce output -that is either much smaller or much larger than its input. - -Rather than implement the [`stream._read()`][stream-_read] and -[`stream._write()`][stream-_write] methods, Transform classes must implement the -[`stream._transform()`][stream-_transform] method, and may optionally -also implement the [`stream._flush()`][stream-_flush] method. (See below.) - -#### new stream.Transform([options]) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `transform` {Function} Implementation for the - [`stream._transform()`][stream-_transform] method. - * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] - method. - -In classes that extend the Transform class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### Events: 'finish' and 'end' - -The [`'finish'`][] and [`'end'`][] events are from the parent Writable -and Readable classes respectively. The `'finish'` event is fired after -[`stream.end()`][stream-end] is called and all chunks have been processed by -[`stream._transform()`][stream-_transform], `'end'` is fired after all data has -been output which is after the callback in [`stream._flush()`][stream-_flush] -has been called. - -#### transform.\_flush(callback) - -* `callback` {Function} Call this function (optionally with an error - argument) when you are done flushing any remaining data. - -Note: **This function MUST NOT be called directly.** It MAY be implemented -by child classes, and if so, will be called by the internal Transform -class methods only. - -In some cases, your transform operation may need to emit a bit more -data at the end of the stream. For example, a `Zlib` compression -stream will store up some internal state so that it can optimally -compress the output. At the end, however, it needs to do the best it -can with what is left, so that the data will be complete. - -In those cases, you can implement a `_flush()` method, which will be -called at the very end, after all the written data is consumed, but -before emitting [`'end'`][] to signal the end of the readable side. Just -like with [`stream._transform()`][stream-_transform], call -`transform.push(chunk)` zero or more times, as appropriate, and call `callback` -when the flush operation is complete. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. 
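Editor's note: the deleted documentation describes `_flush()` without a standalone example, so here is a minimal sketch (not part of the original file) of a Transform that buffers partial input and emits the remainder in `_flush()`; the class name `LineSplitter` is hypothetical.

```js
const Transform = require('stream').Transform;
const util = require('util');

util.inherits(LineSplitter, Transform);

// Splits incoming text into one line per output chunk.
function LineSplitter(options) {
  Transform.call(this, options);
  this._remainder = '';
}

LineSplitter.prototype._transform = function(chunk, encoding, callback) {
  var lines = (this._remainder + chunk.toString()).split('\n');
  this._remainder = lines.pop(); // keep the last partial line buffered
  for (var i = 0; i < lines.length; i++) {
    this.push(lines[i] + '\n');
  }
  callback();
};

LineSplitter.prototype._flush = function(callback) {
  // Emit whatever is left once all written data has been consumed.
  if (this._remainder) {
    this.push(this._remainder);
  }
  callback();
};
```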
- -#### transform.\_transform(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be transformed. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument and data) when you are done processing the supplied chunk. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Transform -class methods only. - -All Transform stream implementations must provide a `_transform()` -method to accept input and produce output. - -`_transform()` should do whatever has to be done in this specific -Transform class, to handle the bytes being written, and pass them off -to the readable portion of the interface. Do asynchronous I/O, -process things, and so on. - -Call `transform.push(outputChunk)` 0 or more times to generate output -from this input chunk, depending on how much data you want to output -as a result of this chunk. - -Call the callback function only when the current chunk is completely -consumed. Note that there may or may not be output as a result of any -particular input chunk. If you supply a second argument to the callback -it will be passed to the push method. In other words the following are -equivalent: - -```js -transform.prototype._transform = function (data, encoding, callback) { - this.push(data); - callback(); -}; - -transform.prototype._transform = function (data, encoding, callback) { - callback(null, data); -}; -``` - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### Example: `SimpleProtocol` parser v2 - -The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple -protocol parser can be implemented simply by using the higher level -[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol -v1` examples. - -In this example, rather than providing the input as an argument, it -would be piped into the parser, which is a more idiomatic Node.js stream -approach. - -```javascript -const util = require('util'); -const Transform = require('stream').Transform; -util.inherits(SimpleProtocol, Transform); - -function SimpleProtocol(options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(options); - - Transform.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._transform = function(chunk, encoding, done) { - if (!this._inBody) { - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. 
- this._rawHeader.push(chunk); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // and let them know that we are done parsing the header. - this.emit('header', this.header); - - // now, because we got some extra data, emit this first. - this.push(chunk.slice(split)); - } - } else { - // from there on, just provide the data to our consumer as-is. - this.push(chunk); - } - done(); -}; - -// Usage: -// var parser = new SimpleProtocol(); -// source.pipe(parser) -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Writable - - - -`stream.Writable` is an abstract class designed to be extended with an -underlying implementation of the -[`stream._write(chunk, encoding, callback)`][stream-_write] method. - -Please see [API for Stream Consumers][] for how to consume -writable streams in your programs. What follows is an explanation of -how to implement Writable streams in your programs. - -#### new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when - [`stream.write()`][stream-write] starts returning `false`. Default = `16384` - (16kb), or `16` for `objectMode` streams. - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to [`stream._write()`][stream-_write]. - Default = `true` - * `objectMode` {Boolean} Whether or not the - [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can - write arbitrary data instead of only `Buffer` / `String` data. - Default = `false` - * `write` {Function} Implementation for the - [`stream._write()`][stream-_write] method. - * `writev` {Function} Implementation for the - [`stream._writev()`][stream-_writev] method. - -In classes that extend the Writable class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be written. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunk. - -All Writable stream implementations must provide a -[`stream._write()`][stream-_write] method to send data to the underlying -resource. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Writable -class methods only. - -Call the callback using the standard `callback(error)` pattern to -signal that the write completed successfully or with an error. - -If the `decodeStrings` flag is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the sort of string that it is. This is to support -implementations that have an optimized handling for certain string -data encodings. If you do not explicitly set the `decodeStrings` -option to `false`, then you can safely ignore the `encoding` argument, -and assume that `chunk` will always be a Buffer. 
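Editor's note: the `_write()` description above has no accompanying example in the deleted file, so here is a hedged, minimal sketch of a Writable subclass whose `_write()` simply collects chunks in memory; `MemoryWriter` is a hypothetical name used only for illustration.

```js
const Writable = require('stream').Writable;
const util = require('util');

util.inherits(MemoryWriter, Writable);

// Collects every written chunk into an array for later inspection.
function MemoryWriter(options) {
  Writable.call(this, options);
  this.chunks = [];
}

MemoryWriter.prototype._write = function(chunk, encoding, callback) {
  this.chunks.push(chunk);
  callback(); // signal that this chunk has been fully handled
};

// Usage sketch:
// var w = new MemoryWriter();
// w.end('hello', () => console.log(w.chunks.length));
```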
- -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### writable.\_writev(chunks, callback) - -* `chunks` {Array} The chunks to be written. Each chunk has following - format: `{ chunk: ..., encoding: ... }`. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunks. - -Note: **This function MUST NOT be called directly.** It may be -implemented by child classes, and called by the internal Writable -class methods only. - -This function is completely optional to implement. In most cases it is -unnecessary. If implemented, it will be called with all the chunks -that are buffered in the write queue. - - -## Simplified Constructor API - - - -In simple cases there is now the added benefit of being able to construct a -stream without inheritance. - -This can be done by passing the appropriate methods as constructor options: - -Examples: - -### Duplex - -```js -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -### Readable - -```js -var readable = new stream.Readable({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - } -}); -``` - -### Transform - -```js -var transform = new stream.Transform({ - transform: function(chunk, encoding, next) { - // sets this._transform under the hood - - // generate output as many times as needed - // this.push(chunk); - - // call when the current chunk is consumed - next(); - }, - flush: function(done) { - // sets this._flush under the hood - - // generate output as many times as needed - // this.push(chunk); - - done(); - } -}); -``` - -### Writable - -```js -var writable = new stream.Writable({ - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var writable = new stream.Writable({ - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -## Streams: Under the Hood - - - -### Buffering - - - -Both Writable and Readable streams will buffer data on an internal -object which can be retrieved from `_writableState.getBuffer()` or -`_readableState.buffer`, respectively. - -The amount of data that will potentially be buffered depends on the -`highWaterMark` option which is passed into the constructor. - -Buffering in Readable streams happens when the implementation calls -[`stream.push(chunk)`][stream-push]. 
If the consumer of the Stream does not -call [`stream.read()`][stream-read], then the data will sit in the internal -queue until it is consumed. - -Buffering in Writable streams happens when the user calls -[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`. - -The purpose of streams, especially with the [`stream.pipe()`][] method, is to -limit the buffering of data to acceptable levels, so that sources and -destinations of varying speed will not overwhelm the available memory. - -### Compatibility with Older Node.js Versions - - - -In versions of Node.js prior to v0.10, the Readable stream interface was -simpler, but also less powerful and less useful. - -* Rather than waiting for you to call the [`stream.read()`][stream-read] method, - [`'data'`][] events would start emitting immediately. If you needed to do - some I/O to decide how to handle data, then you had to store the chunks - in some kind of buffer so that they would not be lost. -* The [`stream.pause()`][stream-pause] method was advisory, rather than - guaranteed. This meant that you still had to be prepared to receive - [`'data'`][] events even when the stream was in a paused state. - -In Node.js v0.10, the [Readable][] class was added. -For backwards compatibility with older Node.js programs, Readable streams -switch into "flowing mode" when a [`'data'`][] event handler is added, or -when the [`stream.resume()`][stream-resume] method is called. The effect is -that, even if you are not using the new [`stream.read()`][stream-read] method -and [`'readable'`][] event, you no longer have to worry about losing -[`'data'`][] chunks. - -Most programs will continue to function normally. However, this -introduces an edge case in the following conditions: - -* No [`'data'`][] event handler is added. -* The [`stream.resume()`][stream-resume] method is never called. -* The stream is not piped to any writable destination. - -For example, consider the following code: - -```js -// WARNING! BROKEN! -net.createServer((socket) => { - - // we add an 'end' method, but never consume the data - socket.on('end', () => { - // It will never get here. - socket.end('I got your message (but didnt read it)\n'); - }); - -}).listen(1337); -``` - -In versions of Node.js prior to v0.10, the incoming message data would be -simply discarded. However, in Node.js v0.10 and beyond, -the socket will remain paused forever. - -The workaround in this situation is to call the -[`stream.resume()`][stream-resume] method to start the flow of data: - -```js -// Workaround -net.createServer((socket) => { - - socket.on('end', () => { - socket.end('I got your message (but didnt read it)\n'); - }); - - // start the flow of data, discarding it. - socket.resume(); - -}).listen(1337); -``` - -In addition to new Readable streams switching into flowing mode, -pre-v0.10 style streams can be wrapped in a Readable class using the -[`stream.wrap()`][] method. - - -### Object Mode - - - -Normally, Streams operate on Strings and Buffers exclusively. - -Streams that are in **object mode** can emit generic JavaScript values -other than Buffers and Strings. - -A Readable stream in object mode will always return a single item from -a call to [`stream.read(size)`][stream-read], regardless of what the size -argument is. - -A Writable stream in object mode will always ignore the `encoding` -argument to [`stream.write(data, encoding)`][stream-write]. - -The special value `null` still retains its special value for object -mode streams. 
That is, for object mode readable streams, `null` as a -return value from [`stream.read()`][stream-read] indicates that there is no more -data, and [`stream.push(null)`][stream-push] will signal the end of stream data -(`EOF`). - -No streams in Node.js core are object mode streams. This pattern is only -used by userland streaming libraries. - -You should set `objectMode` in your stream child class constructor on -the options object. Setting `objectMode` mid-stream is not safe. - -For Duplex streams `objectMode` can be set exclusively for readable or -writable side with `readableObjectMode` and `writableObjectMode` -respectively. These options can be used to implement parsers and -serializers with Transform streams. - -```js -const util = require('util'); -const StringDecoder = require('string_decoder').StringDecoder; -const Transform = require('stream').Transform; -util.inherits(JSONParseStream, Transform); - -// Gets \n-delimited JSON string data, and emits the parsed objects -function JSONParseStream() { - if (!(this instanceof JSONParseStream)) - return new JSONParseStream(); - - Transform.call(this, { readableObjectMode : true }); - - this._buffer = ''; - this._decoder = new StringDecoder('utf8'); -} - -JSONParseStream.prototype._transform = function(chunk, encoding, cb) { - this._buffer += this._decoder.write(chunk); - // split on newlines - var lines = this._buffer.split(/\r?\n/); - // keep the last partial line buffered - this._buffer = lines.pop(); - for (var l = 0; l < lines.length; l++) { - var line = lines[l]; - try { - var obj = JSON.parse(line); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; - -JSONParseStream.prototype._flush = function(cb) { - // Just handle any leftover - var rem = this._buffer.trim(); - if (rem) { - try { - var obj = JSON.parse(rem); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; -``` - -### `stream.read(0)` - -There are some cases where you want to trigger a refresh of the -underlying readable stream mechanisms, without actually consuming any -data. In that case, you can call `stream.read(0)`, which will always -return null. - -If the internal read buffer is below the `highWaterMark`, and the -stream is not currently reading, then calling `stream.read(0)` will trigger -a low-level [`stream._read()`][stream-_read] call. - -There is almost never a need to do this. However, you will see some -cases in Node.js's internals where this is done, particularly in the -Readable stream class internals. - -### `stream.push('')` - -Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an -interesting side effect. Because it *is* a call to -[`stream.push()`][stream-push], it will end the `reading` process. However, it -does *not* add any data to the readable buffer, so there's nothing for -a user to consume. - -Very rarely, there are cases where you have no data to provide now, -but the consumer of your stream (or, perhaps, another bit of your own -code) will know when to check again, by calling [`stream.read(0)`][stream-read]. -In those cases, you *may* call `stream.push('')`. - -So far, the only use case for this functionality is in the -[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. 
If you -find that you have to use `stream.push('')`, please consider another -approach, because it almost certainly indicates that something is -horribly wrong. - -[`'data'`]: #stream_event_data -[`'drain'`]: #stream_event_drain -[`'end'`]: #stream_event_end -[`'finish'`]: #stream_event_finish -[`'readable'`]: #stream_event_readable -[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end -[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter -[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr -[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin -[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout -[`stream.cork()`]: #stream_writable_cork -[`stream.pipe()`]: #stream_readable_pipe_destination_options -[`stream.uncork()`]: #stream_writable_uncork -[`stream.unpipe()`]: #stream_readable_unpipe_destination -[`stream.wrap()`]: #stream_readable_wrap_stream -[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream -[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor -[API for Stream Consumers]: #stream_api_for_stream_consumers -[API for Stream Implementors]: #stream_api_for_stream_implementors -[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin -[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout -[Compatibility]: #stream_compatibility_with_older_node_js_versions -[crypto]: crypto.html -[Duplex]: #stream_class_stream_duplex -[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream -[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream -[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest -[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse -[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage -[Object mode]: #stream_object_mode -[Readable]: #stream_class_stream_readable -[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 -[stream-_flush]: #stream_transform_flush_callback -[stream-_read]: #stream_readable_read_size_1 -[stream-_transform]: #stream_transform_transform_chunk_encoding_callback -[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 -[stream-_writev]: #stream_writable_writev_chunks_callback -[stream-end]: #stream_writable_end_chunk_encoding_callback -[stream-pause]: #stream_readable_pause -[stream-push]: #stream_readable_push_chunk_encoding -[stream-read]: #stream_readable_read_size -[stream-resume]: #stream_readable_resume -[stream-write]: #stream_writable_write_chunk_encoding_callback -[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket -[Transform]: #stream_class_stream_transform -[Writable]: #stream_class_stream_writable -[zlib]: zlib.html diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md deleted file mode 100644 index c141a99c26c638..00000000000000 --- 
a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md +++ /dev/null @@ -1,58 +0,0 @@ -# streams WG Meeting 2015-01-30 - -## Links - -* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg -* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 -* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ - -## Agenda - -Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. - -* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) -* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) -* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) -* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) - -## Minutes - -### adopt a charter - -* group: +1's all around - -### What versioning scheme should be adopted? -* group: +1’s 3.0.0 -* domenic+group: pulling in patches from other sources where appropriate -* mikeal: version independently, suggesting versions for io.js -* mikeal+domenic: work with TC to notify in advance of changes -simpler stream creation - -### streamline creation of streams -* sam: streamline creation of streams -* domenic: nice simple solution posted - but, we lose the opportunity to change the model - may not be backwards incompatible (double check keys) - - **action item:** domenic will check - -### remove implicit flowing of streams on(‘data’) -* add isFlowing / isPaused -* mikeal: worrying that we’re documenting polyfill methods – confuses users -* domenic: more reflective API is probably good, with warning labels for users -* new section for mad scientists (reflective stream access) -* calvin: name the “third state” -* mikeal: maybe borrow the name from whatwg? -* domenic: we’re missing the “third state” -* consensus: kind of difficult to name the third state -* mikeal: figure out differences in states / compat -* mathias: always flow on data – eliminates third state - * explore what it breaks - -**action items:** -* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) -* ask rod/build for infrastructure -* **chris**: explore the “flow on data” approach -* add isPaused/isFlowing -* add new docs section -* move isPaused to that section diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js deleted file mode 100644 index ca807af87620dd..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_duplex.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js deleted file mode 100644 index 736693b8400fed..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js +++ /dev/null @@ -1,75 +0,0 @@ -// a duplex stream is just a stream that is both readable and writable. 
-// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. - -'use strict'; - -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ - -module.exports = Duplex; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -var Readable = require('./_stream_readable'); -var Writable = require('./_stream_writable'); - -util.inherits(Duplex, Readable); - -var keys = objectKeys(Writable.prototype); -for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - - if (options && options.writable === false) this.writable = false; - - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - - this.once('end', onend); -} - -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. - processNextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index d06f71f1868d77..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,26 +0,0 @@ -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
- -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 54a9d5c553d69e..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,880 +0,0 @@ -'use strict'; - -module.exports = Readable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var isArray = require('isarray'); -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Readable.ReadableState = ReadableState; - -var EE = require('events'); - -/**/ -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var debugUtil = require('util'); -var debug = undefined; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ - -var StringDecoder; - -util.inherits(Readable, Stream); - -var Duplex; -function ReadableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.buffer = []; - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. 
- // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // when piping, we only care about 'readable' events that happen - // after read()ing all the bytes and not getting any pushback. - this.ranOut = false; - - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -var Duplex; -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); - - // legacy - this.readable = true; - - if (options && typeof options.read === 'function') this._read = options.read; - - Stream.call(this); -} - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - - if (!state.objectMode && typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = new Buffer(chunk, encoding); - encoding = ''; - } - } - - return readableAddChunk(this, state, chunk, encoding, false); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - var state = this._readableState; - return readableAddChunk(this, state, chunk, '', true); -}; - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; - -function readableAddChunk(stream, state, chunk, encoding, addToFront) { - var er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (state.ended && !addToFront) { - var e = new Error('stream.push() after EOF'); - stream.emit('error', e); - } else if (state.endEmitted && addToFront) { - var e = new Error('stream.unshift() after end event'); - stream.emit('error', e); - } else { - var skipAdd; - if (state.decoder && !addToFront && !encoding) { - chunk = state.decoder.write(chunk); - skipAdd = !state.objectMode && chunk.length === 0; - } - - if (!addToFront) state.reading = false; - - // Don't add to the buffer if we've decoded to an empty string chunk and - // we're not in object mode - if (!skipAdd) { - // if we want the data now, just emit it. - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); - } - } - - maybeReadMore(stream, state); - } - } else if (!addToFront) { - state.reading = false; - } - - return needMoreData(state); -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. 
Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} - -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; - -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} - -function howMuchToRead(n, state) { - if (state.length === 0 && state.ended) return 0; - - if (state.objectMode) return n === 0 ? 0 : 1; - - if (n === null || isNaN(n)) { - // only flow one buffer at a time - if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length; - } - - if (n <= 0) return 0; - - // If we're asking for more than the target buffer level, - // then raise the water mark. Bump up to the next highest - // power of 2, to prevent increasing it excessively in tiny - // amounts. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - - // don't have that much. return null, unless we've ended. - if (n > state.length) { - if (!state.ended) { - state.needReadable = true; - return 0; - } else { - return state.length; - } - } - - return n; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - var state = this._readableState; - var nOrig = n; - - if (typeof n !== 'number' || n > 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. 
- // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); - - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } - - if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - } - - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (doRead && !state.reading) n = howMuchToRead(nOrig, state); - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } - - state.length -= n; - - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (state.length === 0 && !state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended && state.length === 0) endReadable(this); - - if (ret !== null) this.emit('data', ret); - - return ret; -}; - -function chunkInvalid(state, chunk) { - var er = null; - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); - } -} - -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} - -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. 
-function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - processNextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} - -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('not implemented')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - - var endFn = doEnd ? onend : cleanup; - if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); - - dest.on('unpipe', onunpipe); - function onunpipe(readable) { - debug('onunpipe'); - if (readable === src) { - cleanup(); - } - } - - function onend() { - debug('onend'); - dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', cleanup); - src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - if (false === ret) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - } - src.pause(); - } - } - - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. 
- function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - // This is a brutally ugly hack to make sure that our error handler - // is attached before any userland ones. NEVER DO THIS. - if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error]; - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } - - // tell the dest that it's being piped to - dest.emit('pipe', src); - - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - - if (!dest) dest = state.pipes; - - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this); - return this; - } - - // slow case. multiple pipe destinations. - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var _i = 0; _i < len; _i++) { - dests[_i].emit('unpipe', this); - }return this; - } - - // try to find the right one. - var i = indexOf(state.pipes, dest); - if (i === -1) return this; - - state.pipes.splice(i, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - - dest.emit('unpipe', this); - - return this; -}; - -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - - // If listening to data, and it has not explicitly been paused, - // then call resume to start the flow of data on the next tick. 
- if (ev === 'data' && false !== this._readableState.flowing) { - this.resume(); - } - - if (ev === 'readable' && !this._readableState.endEmitted) { - var state = this._readableState; - if (!state.readableListening) { - state.readableListening = true; - state.emittedReadable = false; - state.needReadable = true; - if (!state.reading) { - processNextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this, state); - } - } - } - - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - processNextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - if (state.flowing) { - do { - var chunk = stream.read(); - } while (null !== chunk && state.flowing); - } -} - -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - - var self = this; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) self.push(chunk); - } - - self.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = self.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - - // proxy certain important events. - var events = ['error', 'close', 'destroy', 'pause', 'resume']; - forEach(events, function (ev) { - stream.on(ev, self.emit.bind(self, ev)); - }); - - // when we try to consume some more bytes, simply unpause the - // underlying stream. - self._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - - return self; -}; - -// exposed for testing purposes only. -Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. 
-// Length is the combined lengths of all the buffers in the list. -function fromList(n, state) { - var list = state.buffer; - var length = state.length; - var stringMode = !!state.decoder; - var objectMode = !!state.objectMode; - var ret; - - // nothing in the list, definitely empty. - if (list.length === 0) return null; - - if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) { - // read it all, truncate the array. - if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length); - list.length = 0; - } else { - // read just some of it. - if (n < list[0].length) { - // just take a part of the first list item. - // slice is the same for buffers and strings. - var buf = list[0]; - ret = buf.slice(0, n); - list[0] = buf.slice(n); - } else if (n === list[0].length) { - // first list is a perfect match - ret = list.shift(); - } else { - // complex case. - // we have enough to cover it, but it spans past the first buffer. - if (stringMode) ret = '';else ret = new Buffer(n); - - var c = 0; - for (var i = 0, l = list.length; i < l && c < n; i++) { - var buf = list[0]; - var cpy = Math.min(n - c, buf.length); - - if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy); - - if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift(); - - c += cpy; - } - } - } - - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('endReadable called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - processNextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index 625cdc17698059..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,180 +0,0 @@ -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. 
When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - -'use strict'; - -module.exports = Transform; - -var Duplex = require('./_stream_duplex'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(Transform, Duplex); - -function TransformState(stream) { - this.afterTransform = function (er, data) { - return afterTransform(stream, er, data); - }; - - this.needTransform = false; - this.transforming = false; - this.writecb = null; - this.writechunk = null; - this.writeencoding = null; -} - -function afterTransform(stream, er, data) { - var ts = stream._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); - - ts.writechunk = null; - ts.writecb = null; - - if (data !== null && data !== undefined) stream.push(data); - - cb(er); - - var rs = stream._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - stream._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.call(this, options); - - this._transformState = new TransformState(this); - - // when the writable side finishes, then flush out anything remaining. - var stream = this; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; - - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - - if (typeof options.flush === 'function') this._flush = options.flush; - } - - this.once('prefinish', function () { - if (typeof this._flush === 'function') this._flush(function (er) { - done(stream, er); - });else done(stream); - }); -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; - -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -function done(stream, er) { - if (er) return stream.emit('error', er); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - var ws = stream._writableState; - var ts = stream._transformState; - - if (ws.length) throw new Error('calling transform done when ws.length != 0'); - - if (ts.transforming) throw new Error('calling transform done when still transforming'); - - return stream.push(null); -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index 95916c992a9507..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,516 +0,0 @@ -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - -'use strict'; - -module.exports = Writable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Writable.WritableState = WritableState; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -util.inherits(Writable, Stream); - -function nop() {} - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -var Duplex; -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. 
- this.writelen = 0; - - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // count buffered requests - this.bufferedRequestCount = 0; - - // create the two objects needed to store the corked requests - // they are not a linked list, as no new elements are inserted in there - this.corkedRequestsFree = new CorkedRequest(this); - this.corkedRequestsFree.next = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function writableStateGetBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') - }); - } catch (_) {} -})(); - -var Duplex; -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, though they're not - // instanceof Writable, they're instanceof Readable. - if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); - - this._writableState = new WritableState(options, this); - - // legacy. - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - - if (typeof options.writev === 'function') this._writev = options.writev; - } - - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe. Not readable.')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - processNextTick(cb, er); -} - -// If we get something that is not a buffer, string, null, or undefined, -// and we're not in objectMode, then that's an error. -// Otherwise stream chunks are all considered to be of length=1, and the -// watermarks determine how many objects to keep in the buffer, rather than -// how many bytes or characters. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - var er = new TypeError('Invalid non-string/buffer chunk'); - stream.emit('error', er); - processNextTick(cb, er); - valid = false; - } - return valid; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, chunk, encoding, cb); - } - - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; -}; - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = new Buffer(chunk, encoding); - } - return chunk; -} - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, cb) { - chunk = decodeChunk(state, chunk, encoding); - - if (Buffer.isBuffer(chunk)) encoding = 'buffer'; - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) processNextTick(cb, er);else cb(er); - - stream._writableState.errorEmitted = true; - stream.emit('error', er); -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - - onwriteStateUpdate(state); - - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - while (entry) { - buffer[count] = entry; - entry = entry.next; - count += 1; - } - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. 
- if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequestCount = 0; - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('not implemented')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } - - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function prefinish(stream, state) { - if (!state.prefinished) { - state.prefinished = true; - stream.emit('prefinish'); - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - if (state.pendingcb === 0) { - prefinish(stream, state); - state.finished = true; - stream.emit('finish'); - } else { - prefinish(stream, state); - } - } - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) processNextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function (err) { - var entry = _this.entry; - _this.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = _this; - } else { - state.corkedRequestsFree = _this; - } - }; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE deleted file mode 100644 index d8d7f9437dbf5a..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md deleted file mode 100644 index 5a76b4149c5eb5..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# core-util-is - -The `util.is*` functions introduced in Node v0.12. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch deleted file mode 100644 index a06d5c05f75fd5..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch +++ /dev/null @@ -1,604 +0,0 @@ -diff --git a/lib/util.js b/lib/util.js -index a03e874..9074e8e 100644 ---- a/lib/util.js -+++ b/lib/util.js -@@ -19,430 +19,6 @@ - // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE - // USE OR OTHER DEALINGS IN THE SOFTWARE. - --var formatRegExp = /%[sdj%]/g; --exports.format = function(f) { -- if (!isString(f)) { -- var objects = []; -- for (var i = 0; i < arguments.length; i++) { -- objects.push(inspect(arguments[i])); -- } -- return objects.join(' '); -- } -- -- var i = 1; -- var args = arguments; -- var len = args.length; -- var str = String(f).replace(formatRegExp, function(x) { -- if (x === '%%') return '%'; -- if (i >= len) return x; -- switch (x) { -- case '%s': return String(args[i++]); -- case '%d': return Number(args[i++]); -- case '%j': -- try { -- return JSON.stringify(args[i++]); -- } catch (_) { -- return '[Circular]'; -- } -- default: -- return x; -- } -- }); -- for (var x = args[i]; i < len; x = args[++i]) { -- if (isNull(x) || !isObject(x)) { -- str += ' ' + x; -- } else { -- str += ' ' + inspect(x); -- } -- } -- return str; --}; -- -- --// Mark that a method should not be used. --// Returns a modified function which warns once by default. --// If --no-deprecation is set, then it is a no-op. --exports.deprecate = function(fn, msg) { -- // Allow for deprecating things in the process of starting up. 
-- if (isUndefined(global.process)) { -- return function() { -- return exports.deprecate(fn, msg).apply(this, arguments); -- }; -- } -- -- if (process.noDeprecation === true) { -- return fn; -- } -- -- var warned = false; -- function deprecated() { -- if (!warned) { -- if (process.throwDeprecation) { -- throw new Error(msg); -- } else if (process.traceDeprecation) { -- console.trace(msg); -- } else { -- console.error(msg); -- } -- warned = true; -- } -- return fn.apply(this, arguments); -- } -- -- return deprecated; --}; -- -- --var debugs = {}; --var debugEnviron; --exports.debuglog = function(set) { -- if (isUndefined(debugEnviron)) -- debugEnviron = process.env.NODE_DEBUG || ''; -- set = set.toUpperCase(); -- if (!debugs[set]) { -- if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { -- var pid = process.pid; -- debugs[set] = function() { -- var msg = exports.format.apply(exports, arguments); -- console.error('%s %d: %s', set, pid, msg); -- }; -- } else { -- debugs[set] = function() {}; -- } -- } -- return debugs[set]; --}; -- -- --/** -- * Echos the value of a value. Trys to print the value out -- * in the best way possible given the different types. -- * -- * @param {Object} obj The object to print out. -- * @param {Object} opts Optional options object that alters the output. -- */ --/* legacy: obj, showHidden, depth, colors*/ --function inspect(obj, opts) { -- // default options -- var ctx = { -- seen: [], -- stylize: stylizeNoColor -- }; -- // legacy... -- if (arguments.length >= 3) ctx.depth = arguments[2]; -- if (arguments.length >= 4) ctx.colors = arguments[3]; -- if (isBoolean(opts)) { -- // legacy... -- ctx.showHidden = opts; -- } else if (opts) { -- // got an "options" object -- exports._extend(ctx, opts); -- } -- // set default options -- if (isUndefined(ctx.showHidden)) ctx.showHidden = false; -- if (isUndefined(ctx.depth)) ctx.depth = 2; -- if (isUndefined(ctx.colors)) ctx.colors = false; -- if (isUndefined(ctx.customInspect)) ctx.customInspect = true; -- if (ctx.colors) ctx.stylize = stylizeWithColor; -- return formatValue(ctx, obj, ctx.depth); --} --exports.inspect = inspect; -- -- --// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics --inspect.colors = { -- 'bold' : [1, 22], -- 'italic' : [3, 23], -- 'underline' : [4, 24], -- 'inverse' : [7, 27], -- 'white' : [37, 39], -- 'grey' : [90, 39], -- 'black' : [30, 39], -- 'blue' : [34, 39], -- 'cyan' : [36, 39], -- 'green' : [32, 39], -- 'magenta' : [35, 39], -- 'red' : [31, 39], -- 'yellow' : [33, 39] --}; -- --// Don't use 'blue' not visible on cmd.exe --inspect.styles = { -- 'special': 'cyan', -- 'number': 'yellow', -- 'boolean': 'yellow', -- 'undefined': 'grey', -- 'null': 'bold', -- 'string': 'green', -- 'date': 'magenta', -- // "name": intentionally not styling -- 'regexp': 'red' --}; -- -- --function stylizeWithColor(str, styleType) { -- var style = inspect.styles[styleType]; -- -- if (style) { -- return '\u001b[' + inspect.colors[style][0] + 'm' + str + -- '\u001b[' + inspect.colors[style][1] + 'm'; -- } else { -- return str; -- } --} -- -- --function stylizeNoColor(str, styleType) { -- return str; --} -- -- --function arrayToHash(array) { -- var hash = {}; -- -- array.forEach(function(val, idx) { -- hash[val] = true; -- }); -- -- return hash; --} -- -- --function formatValue(ctx, value, recurseTimes) { -- // Provide a hook for user-specified inspect functions. 
-- // Check that value is an object with an inspect function on it -- if (ctx.customInspect && -- value && -- isFunction(value.inspect) && -- // Filter out the util module, it's inspect function is special -- value.inspect !== exports.inspect && -- // Also filter out any prototype objects using the circular check. -- !(value.constructor && value.constructor.prototype === value)) { -- var ret = value.inspect(recurseTimes, ctx); -- if (!isString(ret)) { -- ret = formatValue(ctx, ret, recurseTimes); -- } -- return ret; -- } -- -- // Primitive types cannot have properties -- var primitive = formatPrimitive(ctx, value); -- if (primitive) { -- return primitive; -- } -- -- // Look up the keys of the object. -- var keys = Object.keys(value); -- var visibleKeys = arrayToHash(keys); -- -- if (ctx.showHidden) { -- keys = Object.getOwnPropertyNames(value); -- } -- -- // Some type of object without properties can be shortcutted. -- if (keys.length === 0) { -- if (isFunction(value)) { -- var name = value.name ? ': ' + value.name : ''; -- return ctx.stylize('[Function' + name + ']', 'special'); -- } -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } -- if (isDate(value)) { -- return ctx.stylize(Date.prototype.toString.call(value), 'date'); -- } -- if (isError(value)) { -- return formatError(value); -- } -- } -- -- var base = '', array = false, braces = ['{', '}']; -- -- // Make Array say that they are Array -- if (isArray(value)) { -- array = true; -- braces = ['[', ']']; -- } -- -- // Make functions say that they are functions -- if (isFunction(value)) { -- var n = value.name ? ': ' + value.name : ''; -- base = ' [Function' + n + ']'; -- } -- -- // Make RegExps say that they are RegExps -- if (isRegExp(value)) { -- base = ' ' + RegExp.prototype.toString.call(value); -- } -- -- // Make dates with properties first say the date -- if (isDate(value)) { -- base = ' ' + Date.prototype.toUTCString.call(value); -- } -- -- // Make error with message first say the error -- if (isError(value)) { -- base = ' ' + formatError(value); -- } -- -- if (keys.length === 0 && (!array || value.length == 0)) { -- return braces[0] + base + braces[1]; -- } -- -- if (recurseTimes < 0) { -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } else { -- return ctx.stylize('[Object]', 'special'); -- } -- } -- -- ctx.seen.push(value); -- -- var output; -- if (array) { -- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); -- } else { -- output = keys.map(function(key) { -- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); -- }); -- } -- -- ctx.seen.pop(); -- -- return reduceToSingleString(output, base, braces); --} -- -- --function formatPrimitive(ctx, value) { -- if (isUndefined(value)) -- return ctx.stylize('undefined', 'undefined'); -- if (isString(value)) { -- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') -- .replace(/'/g, "\\'") -- .replace(/\\"/g, '"') + '\''; -- return ctx.stylize(simple, 'string'); -- } -- if (isNumber(value)) { -- // Format -0 as '-0'. Strict equality won't distinguish 0 from -0, -- // so instead we use the fact that 1 / -0 < 0 whereas 1 / 0 > 0 . -- if (value === 0 && 1 / value < 0) -- return ctx.stylize('-0', 'number'); -- return ctx.stylize('' + value, 'number'); -- } -- if (isBoolean(value)) -- return ctx.stylize('' + value, 'boolean'); -- // For some reason typeof null is "object", so special case here. 
-- if (isNull(value)) -- return ctx.stylize('null', 'null'); --} -- -- --function formatError(value) { -- return '[' + Error.prototype.toString.call(value) + ']'; --} -- -- --function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { -- var output = []; -- for (var i = 0, l = value.length; i < l; ++i) { -- if (hasOwnProperty(value, String(i))) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- String(i), true)); -- } else { -- output.push(''); -- } -- } -- keys.forEach(function(key) { -- if (!key.match(/^\d+$/)) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- key, true)); -- } -- }); -- return output; --} -- -- --function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { -- var name, str, desc; -- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; -- if (desc.get) { -- if (desc.set) { -- str = ctx.stylize('[Getter/Setter]', 'special'); -- } else { -- str = ctx.stylize('[Getter]', 'special'); -- } -- } else { -- if (desc.set) { -- str = ctx.stylize('[Setter]', 'special'); -- } -- } -- if (!hasOwnProperty(visibleKeys, key)) { -- name = '[' + key + ']'; -- } -- if (!str) { -- if (ctx.seen.indexOf(desc.value) < 0) { -- if (isNull(recurseTimes)) { -- str = formatValue(ctx, desc.value, null); -- } else { -- str = formatValue(ctx, desc.value, recurseTimes - 1); -- } -- if (str.indexOf('\n') > -1) { -- if (array) { -- str = str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n').substr(2); -- } else { -- str = '\n' + str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n'); -- } -- } -- } else { -- str = ctx.stylize('[Circular]', 'special'); -- } -- } -- if (isUndefined(name)) { -- if (array && key.match(/^\d+$/)) { -- return str; -- } -- name = JSON.stringify('' + key); -- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { -- name = name.substr(1, name.length - 2); -- name = ctx.stylize(name, 'name'); -- } else { -- name = name.replace(/'/g, "\\'") -- .replace(/\\"/g, '"') -- .replace(/(^"|"$)/g, "'"); -- name = ctx.stylize(name, 'string'); -- } -- } -- -- return name + ': ' + str; --} -- -- --function reduceToSingleString(output, base, braces) { -- var numLinesEst = 0; -- var length = output.reduce(function(prev, cur) { -- numLinesEst++; -- if (cur.indexOf('\n') >= 0) numLinesEst++; -- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; -- }, 0); -- -- if (length > 60) { -- return braces[0] + -- (base === '' ? '' : base + '\n ') + -- ' ' + -- output.join(',\n ') + -- ' ' + -- braces[1]; -- } -- -- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; --} -- -- - // NOTE: These type checking functions intentionally don't use `instanceof` - // because it is fragile and can be easily faked with `Object.create()`. - function isArray(ar) { -@@ -522,166 +98,10 @@ function isPrimitive(arg) { - exports.isPrimitive = isPrimitive; - - function isBuffer(arg) { -- return arg instanceof Buffer; -+ return Buffer.isBuffer(arg); - } - exports.isBuffer = isBuffer; - - function objectToString(o) { - return Object.prototype.toString.call(o); --} -- -- --function pad(n) { -- return n < 10 ? 
'0' + n.toString(10) : n.toString(10); --} -- -- --var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', -- 'Oct', 'Nov', 'Dec']; -- --// 26 Feb 16:19:34 --function timestamp() { -- var d = new Date(); -- var time = [pad(d.getHours()), -- pad(d.getMinutes()), -- pad(d.getSeconds())].join(':'); -- return [d.getDate(), months[d.getMonth()], time].join(' '); --} -- -- --// log is just a thin wrapper to console.log that prepends a timestamp --exports.log = function() { -- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); --}; -- -- --/** -- * Inherit the prototype methods from one constructor into another. -- * -- * The Function.prototype.inherits from lang.js rewritten as a standalone -- * function (not on Function.prototype). NOTE: If this file is to be loaded -- * during bootstrapping this function needs to be rewritten using some native -- * functions as prototype setup using normal JavaScript does not work as -- * expected during bootstrapping (see mirror.js in r114903). -- * -- * @param {function} ctor Constructor function which needs to inherit the -- * prototype. -- * @param {function} superCtor Constructor function to inherit prototype from. -- */ --exports.inherits = function(ctor, superCtor) { -- ctor.super_ = superCtor; -- ctor.prototype = Object.create(superCtor.prototype, { -- constructor: { -- value: ctor, -- enumerable: false, -- writable: true, -- configurable: true -- } -- }); --}; -- --exports._extend = function(origin, add) { -- // Don't do anything if add isn't an object -- if (!add || !isObject(add)) return origin; -- -- var keys = Object.keys(add); -- var i = keys.length; -- while (i--) { -- origin[keys[i]] = add[keys[i]]; -- } -- return origin; --}; -- --function hasOwnProperty(obj, prop) { -- return Object.prototype.hasOwnProperty.call(obj, prop); --} -- -- --// Deprecated old stuff. 
-- --exports.p = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- console.error(exports.inspect(arguments[i])); -- } --}, 'util.p: Use console.error() instead'); -- -- --exports.exec = exports.deprecate(function() { -- return require('child_process').exec.apply(this, arguments); --}, 'util.exec is now called `child_process.exec`.'); -- -- --exports.print = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(String(arguments[i])); -- } --}, 'util.print: Use console.log instead'); -- -- --exports.puts = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(arguments[i] + '\n'); -- } --}, 'util.puts: Use console.log instead'); -- -- --exports.debug = exports.deprecate(function(x) { -- process.stderr.write('DEBUG: ' + x + '\n'); --}, 'util.debug: Use console.error instead'); -- -- --exports.error = exports.deprecate(function(x) { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stderr.write(arguments[i] + '\n'); -- } --}, 'util.error: Use console.error instead'); -- -- --exports.pump = exports.deprecate(function(readStream, writeStream, callback) { -- var callbackCalled = false; -- -- function call(a, b, c) { -- if (callback && !callbackCalled) { -- callback(a, b, c); -- callbackCalled = true; -- } -- } -- -- readStream.addListener('data', function(chunk) { -- if (writeStream.write(chunk) === false) readStream.pause(); -- }); -- -- writeStream.addListener('drain', function() { -- readStream.resume(); -- }); -- -- readStream.addListener('end', function() { -- writeStream.end(); -- }); -- -- readStream.addListener('close', function() { -- call(); -- }); -- -- readStream.addListener('error', function(err) { -- writeStream.end(); -- call(err); -- }); -- -- writeStream.addListener('error', function(err) { -- readStream.destroy(); -- call(err); -- }); --}, 'util.pump(): Use readableStream.pipe() instead'); -- -- --var uv; --exports._errnoException = function(err, syscall) { -- if (isUndefined(uv)) uv = process.binding('uv'); -- var errname = uv.errname(err); -- var e = new Error(syscall + ' ' + errname); -- e.code = errname; -- e.errno = errname; -- e.syscall = syscall; -- return e; --}; -+} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js deleted file mode 100644 index ff4c851c075a2f..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
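
The full `lib/util.js` stripped out by the float.patch above documents `inherits(ctor, superCtor)` at length but never shows it in use: it stores the parent constructor on `ctor.super_` and rebuilds `ctor.prototype` with `Object.create(superCtor.prototype)`. A small sketch of that contract (the `Animal`/`Dog` constructors are invented for illustration; the helper is copied here only to keep the sketch self-contained):

```js
// Same shape as the inherits() removed above.
function inherits(ctor, superCtor) {
  ctor.super_ = superCtor;
  ctor.prototype = Object.create(superCtor.prototype, {
    constructor: { value: ctor, enumerable: false, writable: true, configurable: true }
  });
}

function Animal(name) { this.name = name; }
Animal.prototype.speak = function () { return this.name + ' makes a sound'; };

function Dog(name) { Animal.call(this, name); }
inherits(Dog, Animal);

var rex = new Dog('Rex');
rex.speak();             // 'Rex makes a sound' via the prototype chain
rex instanceof Animal;   // true
Dog.super_ === Animal;   // true
```
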
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// NOTE: These type checking functions intentionally don't use `instanceof` -// because it is fragile and can be easily faked with `Object.create()`. - -function isArray(arg) { - if (Array.isArray) { - return Array.isArray(arg); - } - return objectToString(arg) === '[object Array]'; -} -exports.isArray = isArray; - -function isBoolean(arg) { - return typeof arg === 'boolean'; -} -exports.isBoolean = isBoolean; - -function isNull(arg) { - return arg === null; -} -exports.isNull = isNull; - -function isNullOrUndefined(arg) { - return arg == null; -} -exports.isNullOrUndefined = isNullOrUndefined; - -function isNumber(arg) { - return typeof arg === 'number'; -} -exports.isNumber = isNumber; - -function isString(arg) { - return typeof arg === 'string'; -} -exports.isString = isString; - -function isSymbol(arg) { - return typeof arg === 'symbol'; -} -exports.isSymbol = isSymbol; - -function isUndefined(arg) { - return arg === void 0; -} -exports.isUndefined = isUndefined; - -function isRegExp(re) { - return objectToString(re) === '[object RegExp]'; -} -exports.isRegExp = isRegExp; - -function isObject(arg) { - return typeof arg === 'object' && arg !== null; -} -exports.isObject = isObject; - -function isDate(d) { - return objectToString(d) === '[object Date]'; -} -exports.isDate = isDate; - -function isError(e) { - return (objectToString(e) === '[object Error]' || e instanceof Error); -} -exports.isError = isError; - -function isFunction(arg) { - return typeof arg === 'function'; -} -exports.isFunction = isFunction; - -function isPrimitive(arg) { - return arg === null || - typeof arg === 'boolean' || - typeof arg === 'number' || - typeof arg === 'string' || - typeof arg === 'symbol' || // ES6 symbol - typeof arg === 'undefined'; -} -exports.isPrimitive = isPrimitive; - -exports.isBuffer = Buffer.isBuffer; - -function objectToString(o) { - return Object.prototype.toString.call(o); -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json deleted file mode 100644 index 4f65c18e22abb3..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": 
"core-util-is@~1.0.0", - "_id": "core-util-is@1.0.2", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/core-util-is", - "_nodeVersion": "4.0.0", - "_npmUser": { - "name": "isaacs", - "email": "i@izs.me" - }, - "_npmVersion": "3.3.2", - "_phantomChildren": {}, - "_requested": { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "_shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "_shrinkwrap": null, - "_spec": "core-util-is@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/core-util-is/issues" - }, - "dependencies": {}, - "description": "The `util.is*` functions introduced in Node v0.12.", - "devDependencies": { - "tap": "^2.3.0" - }, - "directories": {}, - "dist": { - "shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "tarball": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" - }, - "gitHead": "a177da234df5638b363ddc15fa324619a38577c8", - "homepage": "https://github.com/isaacs/core-util-is#readme", - "keywords": [ - "util", - "isBuffer", - "isArray", - "isNumber", - "isString", - "isRegExp", - "isThis", - "isThat", - "polyfill" - ], - "license": "MIT", - "main": "lib/util.js", - "maintainers": [ - { - "name": "isaacs", - "email": "i@izs.me" - } - ], - "name": "core-util-is", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/core-util-is.git" - }, - "scripts": { - "test": "tap test.js" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js deleted file mode 100644 index 1a490c65ac8b5d..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js +++ /dev/null @@ -1,68 +0,0 @@ -var assert = require('tap'); - -var t = require('./lib/util'); - -assert.equal(t.isArray([]), true); -assert.equal(t.isArray({}), false); - -assert.equal(t.isBoolean(null), false); -assert.equal(t.isBoolean(true), true); -assert.equal(t.isBoolean(false), true); - -assert.equal(t.isNull(null), true); -assert.equal(t.isNull(undefined), false); -assert.equal(t.isNull(false), false); -assert.equal(t.isNull(), false); - -assert.equal(t.isNullOrUndefined(null), true); -assert.equal(t.isNullOrUndefined(undefined), true); -assert.equal(t.isNullOrUndefined(false), false); -assert.equal(t.isNullOrUndefined(), true); - -assert.equal(t.isNumber(null), false); -assert.equal(t.isNumber('1'), false); -assert.equal(t.isNumber(1), true); - -assert.equal(t.isString(null), false); -assert.equal(t.isString('1'), true); -assert.equal(t.isString(1), false); - -assert.equal(t.isSymbol(null), false); -assert.equal(t.isSymbol('1'), false); -assert.equal(t.isSymbol(1), false); -assert.equal(t.isSymbol(Symbol()), true); - 
-assert.equal(t.isUndefined(null), false); -assert.equal(t.isUndefined(undefined), true); -assert.equal(t.isUndefined(false), false); -assert.equal(t.isUndefined(), true); - -assert.equal(t.isRegExp(null), false); -assert.equal(t.isRegExp('1'), false); -assert.equal(t.isRegExp(new RegExp()), true); - -assert.equal(t.isObject({}), true); -assert.equal(t.isObject([]), true); -assert.equal(t.isObject(new RegExp()), true); -assert.equal(t.isObject(new Date()), true); - -assert.equal(t.isDate(null), false); -assert.equal(t.isDate('1'), false); -assert.equal(t.isDate(new Date()), true); - -assert.equal(t.isError(null), false); -assert.equal(t.isError({ err: true }), false); -assert.equal(t.isError(new Error()), true); - -assert.equal(t.isFunction(null), false); -assert.equal(t.isFunction({ }), false); -assert.equal(t.isFunction(function() {}), true); - -assert.equal(t.isPrimitive(null), true); -assert.equal(t.isPrimitive(''), true); -assert.equal(t.isPrimitive(0), true); -assert.equal(t.isPrimitive(new Date()), false); - -assert.equal(t.isBuffer(null), false); -assert.equal(t.isBuffer({}), false); -assert.equal(t.isBuffer(new Buffer(0)), true); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml deleted file mode 100644 index cc4dba29d959a2..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile deleted file mode 100644 index 0ecc29c402c243..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile +++ /dev/null @@ -1,5 +0,0 @@ - -test: - @node_modules/.bin/tape test.js - -.PHONY: test diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md deleted file mode 100644 index 16d2c59c6195f9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md +++ /dev/null @@ -1,60 +0,0 @@ - -# isarray - -`Array#isArray` for older browsers. - -[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) -[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) - -[![browser support](https://ci.testling.com/juliangruber/isarray.png) -](https://ci.testling.com/juliangruber/isarray) - -## Usage - -```js -var isArray = require('isarray'); - -console.log(isArray([])); // => true -console.log(isArray({})); // => false -``` - -## Installation - -With [npm](http://npmjs.org) do - -```bash -$ npm install isarray -``` - -Then bundle for the browser with -[browserify](https://github.com/substack/browserify). 
- -With [component](http://component.io) do - -```bash -$ component install juliangruber/isarray -``` - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json deleted file mode 100644 index 9e31b683889015..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name" : "isarray", - "description" : "Array#isArray for older browsers", - "version" : "0.0.1", - "repository" : "juliangruber/isarray", - "homepage": "https://github.com/juliangruber/isarray", - "main" : "index.js", - "scripts" : [ - "index.js" - ], - "dependencies" : {}, - "keywords": ["browser","isarray","array"], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js deleted file mode 100644 index a57f63495943a0..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js +++ /dev/null @@ -1,5 +0,0 @@ -var toString = {}.toString; - -module.exports = Array.isArray || function (arr) { - return toString.call(arr) == '[object Array]'; -}; diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json deleted file mode 100644 index d3a2bda3c6583e..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json +++ /dev/null @@ -1,116 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - 
"/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "isarray@~1.0.0", - "_id": "isarray@1.0.0", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/isarray", - "_nodeVersion": "5.1.0", - "_npmUser": { - "name": "juliangruber", - "email": "julian@juliangruber.com" - }, - "_npmVersion": "3.3.12", - "_phantomChildren": {}, - "_requested": { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "_shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "_shrinkwrap": null, - "_spec": "isarray@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "bugs": { - "url": "https://github.com/juliangruber/isarray/issues" - }, - "dependencies": {}, - "description": "Array#isArray for older browsers", - "devDependencies": { - "tape": "~2.13.4" - }, - "directories": {}, - "dist": { - "shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "tarball": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - }, - "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33", - "homepage": "https://github.com/juliangruber/isarray", - "keywords": [ - "browser", - "isarray", - "array" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "juliangruber", - "email": "julian@juliangruber.com" - } - ], - "name": "isarray", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/isarray.git" - }, - "scripts": { - "test": "tape test.js" - }, - "testling": { - "files": "test.js", - "browsers": [ - "ie/8..latest", - "firefox/17..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - }, - "version": "1.0.0" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js deleted file mode 100644 index f7f7bcd19fec56..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js +++ /dev/null @@ -1,19 +0,0 @@ -var isArray = require('./'); -var test = require('tape'); - -test('is array', function(t){ - t.ok(isArray([])); - t.notOk(isArray({})); - t.notOk(isArray(null)); - t.notOk(isArray(false)); - - var obj = {}; - obj[0] = true; - t.notOk(isArray(obj)); - - var arr = []; - arr.foo = 'bar'; - t.ok(isArray(arr)); - - t.end(); -}); diff --git 
a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml deleted file mode 100644 index 36201b10017a5e..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" - - "0.11" - - "0.12" - - "1.7.1" - - 1 - - 2 - - 3 - - 4 - - 5 diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js deleted file mode 100644 index a4f40f845faa65..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -if (!process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = nextTick; -} else { - module.exports = process.nextTick; -} - -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { - throw new TypeError('"callback" argument must be a function'); - } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); - }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; - } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md deleted file mode 100644 index c67e3532b54245..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2015 Calvin Metcalf - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.** diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json deleted file mode 100644 index f5d106537ad8dc..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json +++ /dev/null @@ -1,95 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "process-nextick-args@~1.0.6", - "_id": "process-nextick-args@1.0.7", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/process-nextick-args", - "_nodeVersion": "5.11.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.8.6", - "_phantomChildren": {}, - "_requested": { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", - "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "_shrinkwrap": null, - "_spec": "process-nextick-args@~1.0.6", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": "", - "bugs": { - "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" - }, - "dependencies": {}, - "description": "process.nextTick but always with args", - "devDependencies": { - "tap": "~0.2.6" - }, - "directories": {}, - "dist": { - "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" - }, - "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39", - "homepage": "https://github.com/calvinmetcalf/process-nextick-args", - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "process-nextick-args", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" - }, - 
"scripts": { - "test": "node test.js" - }, - "version": "1.0.7" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md deleted file mode 100644 index 78e7cfaeb7acde..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md +++ /dev/null @@ -1,18 +0,0 @@ -process-nextick-args -===== - -[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) - -```bash -npm install --save process-nextick-args -``` - -Always be able to pass arguments to process.nextTick, no matter the platform - -```js -var nextTick = require('process-nextick-args'); - -nextTick(function (a, b, c) { - console.log(a, b, c); -}, 'step', 3, 'profit'); -``` diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js deleted file mode 100644 index ef15721584ac99..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js +++ /dev/null @@ -1,24 +0,0 @@ -var test = require("tap").test; -var nextTick = require('./'); - -test('should work', function (t) { - t.plan(5); - nextTick(function (a) { - t.ok(a); - nextTick(function (thing) { - t.equals(thing, 7); - }, 7); - }, true); - nextTick(function (a, b, c) { - t.equals(a, 'step'); - t.equals(b, 3); - t.equals(c, 'profit'); - }, 'step', 3, 'profit'); -}); - -test('correct number of arguments', function (t) { - t.plan(1); - nextTick(function () { - t.equals(2, arguments.length, 'correct number'); - }, 1, 2); -}); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore deleted file mode 100644 index 206320cc1d21b9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -build -test diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE deleted file mode 100644 index 6de584a48f5c89..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md deleted file mode 100644 index 4d2aa001501107..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md +++ /dev/null @@ -1,7 +0,0 @@ -**string_decoder.js** (`require('string_decoder')`) from Node.js core - -Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. - -Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** - -The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js deleted file mode 100644 index b00e54fb790982..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var Buffer = require('buffer').Buffer; - -var isBufferEncoding = Buffer.isEncoding - || function(encoding) { - switch (encoding && encoding.toLowerCase()) { - case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; - default: return false; - } - } - - -function assertEncoding(encoding) { - if (encoding && !isBufferEncoding(encoding)) { - throw new Error('Unknown encoding: ' + encoding); - } -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. CESU-8 is handled as part of the UTF-8 encoding. -// -// @TODO Handling all encodings inside a single object makes it very difficult -// to reason about this code, so it should be split up in the future. -// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code -// points as used by CESU-8. -var StringDecoder = exports.StringDecoder = function(encoding) { - this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); - assertEncoding(encoding); - switch (this.encoding) { - case 'utf8': - // CESU-8 represents each of Surrogate Pair by 3-bytes - this.surrogateSize = 3; - break; - case 'ucs2': - case 'utf16le': - // UTF-16 represents each of Surrogate Pair by 2-bytes - this.surrogateSize = 2; - this.detectIncompleteChar = utf16DetectIncompleteChar; - break; - case 'base64': - // Base-64 stores 3 bytes in 4 chars, and pads the remainder. - this.surrogateSize = 3; - this.detectIncompleteChar = base64DetectIncompleteChar; - break; - default: - this.write = passThroughWrite; - return; - } - - // Enough space to store all bytes of a single character. UTF-8 needs 4 - // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). - this.charBuffer = new Buffer(6); - // Number of bytes received for the current incomplete multi-byte character. - this.charReceived = 0; - // Number of bytes expected for the current incomplete multi-byte character. - this.charLength = 0; -}; - - -// write decodes the given buffer and returns it as JS string that is -// guaranteed to not contain any partial multi-byte characters. Any partial -// character found at the end of the buffer is buffered up, and will be -// returned when calling write again with the remaining bytes. -// -// Note: Converting a Buffer containing an orphan surrogate to a String -// currently works, but converting a String to a Buffer (via `new Buffer`, or -// Buffer#write) will replace incomplete surrogates with the unicode -// replacement character. See https://codereview.chromium.org/121173009/ . -StringDecoder.prototype.write = function(buffer) { - var charStr = ''; - // if our last write ended with an incomplete multibyte character - while (this.charLength) { - // determine how many remaining bytes this buffer has to offer for this char - var available = (buffer.length >= this.charLength - this.charReceived) ? 
- this.charLength - this.charReceived : - buffer.length; - - // add the new bytes to the char buffer - buffer.copy(this.charBuffer, this.charReceived, 0, available); - this.charReceived += available; - - if (this.charReceived < this.charLength) { - // still not enough chars in this buffer? wait for more ... - return ''; - } - - // remove bytes belonging to the current character from the buffer - buffer = buffer.slice(available, buffer.length); - - // get the character that was split - charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); - - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - var charCode = charStr.charCodeAt(charStr.length - 1); - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - this.charLength += this.surrogateSize; - charStr = ''; - continue; - } - this.charReceived = this.charLength = 0; - - // if there are no more bytes in this buffer, just emit our char - if (buffer.length === 0) { - return charStr; - } - break; - } - - // determine and set charLength / charReceived - this.detectIncompleteChar(buffer); - - var end = buffer.length; - if (this.charLength) { - // buffer the incomplete character bytes we got - buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); - end -= this.charReceived; - } - - charStr += buffer.toString(this.encoding, 0, end); - - var end = charStr.length - 1; - var charCode = charStr.charCodeAt(end); - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - var size = this.surrogateSize; - this.charLength += size; - this.charReceived += size; - this.charBuffer.copy(this.charBuffer, size, 0, size); - buffer.copy(this.charBuffer, 0, 0, size); - return charStr.substring(0, end); - } - - // or just emit the charStr - return charStr; -}; - -// detectIncompleteChar determines if there is an incomplete UTF-8 character at -// the end of the given buffer. If so, it sets this.charLength to the byte -// length that character, and sets this.charReceived to the number of bytes -// that are available for this character. -StringDecoder.prototype.detectIncompleteChar = function(buffer) { - // determine how many bytes we have to check at the end of this buffer - var i = (buffer.length >= 3) ? 3 : buffer.length; - - // Figure out if one of the last i bytes of our buffer announces an - // incomplete char. - for (; i > 0; i--) { - var c = buffer[buffer.length - i]; - - // See http://en.wikipedia.org/wiki/UTF-8#Description - - // 110XXXXX - if (i == 1 && c >> 5 == 0x06) { - this.charLength = 2; - break; - } - - // 1110XXXX - if (i <= 2 && c >> 4 == 0x0E) { - this.charLength = 3; - break; - } - - // 11110XXX - if (i <= 3 && c >> 3 == 0x1E) { - this.charLength = 4; - break; - } - } - this.charReceived = i; -}; - -StringDecoder.prototype.end = function(buffer) { - var res = ''; - if (buffer && buffer.length) - res = this.write(buffer); - - if (this.charReceived) { - var cr = this.charReceived; - var buf = this.charBuffer; - var enc = this.encoding; - res += buf.slice(0, cr).toString(enc); - } - - return res; -}; - -function passThroughWrite(buffer) { - return buffer.toString(this.encoding); -} - -function utf16DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 2; - this.charLength = this.charReceived ? 2 : 0; -} - -function base64DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 3; - this.charLength = this.charReceived ? 
3 : 0; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json deleted file mode 100644 index 36fa27f82b498b..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "string_decoder@~0.10.x", - "_id": "string_decoder@0.10.31", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/string_decoder", - "_npmUser": { - "name": "rvagg", - "email": "rod@vagg.org" - }, - "_npmVersion": "1.4.23", - "_phantomChildren": {}, - "_requested": { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "_shrinkwrap": null, - "_spec": "string_decoder@~0.10.x", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "bugs": { - "url": "https://github.com/rvagg/string_decoder/issues" - }, - "dependencies": {}, - "description": "The string_decoder module from Node core", - "devDependencies": { - "tap": "~0.4.8" - }, - "directories": {}, - "dist": { - "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "tarball": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - }, - "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0", - "homepage": "https://github.com/rvagg/string_decoder", - "keywords": [ - "string", - "decoder", - "browser", - "browserify" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "substack", - "email": "mail@substack.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - } - ], - "name": "string_decoder", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/rvagg/string_decoder.git" - }, - "scripts": { - "test": "tap test/simple/*.js" - }, - "version": "0.10.31" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md deleted file mode 100644 index acc8675372e980..00000000000000 --- 
a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md +++ /dev/null @@ -1,16 +0,0 @@ - -1.0.2 / 2015-10-07 -================== - - * use try/catch when checking `localStorage` (#3, @kumavis) - -1.0.1 / 2014-11-25 -================== - - * browser: use `console.warn()` for deprecation calls - * browser: more jsdocs - -1.0.0 / 2014-04-30 -================== - - * initial commit diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE deleted file mode 100644 index 6a60e8c225c9ba..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md deleted file mode 100644 index 75622fa7c250a6..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md +++ /dev/null @@ -1,53 +0,0 @@ -util-deprecate -============== -### The Node.js `util.deprecate()` function with browser support - -In Node.js, this module simply re-exports the `util.deprecate()` function. - -In the web browser (i.e. via browserify), a browser-specific implementation -of the `util.deprecate()` function is used. - - -## API - -A `deprecate()` function is the only thing exposed by this module. 
- -``` javascript -// setup: -exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); - - -// users see: -foo(); -// foo() is deprecated, use bar() instead -foo(); -foo(); -``` - - -## License - -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js deleted file mode 100644 index 549ae2f065ea5a..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js +++ /dev/null @@ -1,67 +0,0 @@ - -/** - * Module exports. - */ - -module.exports = deprecate; - -/** - * Mark that a method should not be used. - * Returns a modified function which warns once by default. - * - * If `localStorage.noDeprecation = true` is set, then it is a no-op. - * - * If `localStorage.throwDeprecation = true` is set, then deprecated functions - * will throw an Error when invoked. - * - * If `localStorage.traceDeprecation = true` is set, then deprecated functions - * will invoke `console.trace()` instead of `console.error()`. - * - * @param {Function} fn - the function to deprecate - * @param {String} msg - the string to print to the console when `fn` is invoked - * @returns {Function} a new "deprecated" version of `fn` - * @api public - */ - -function deprecate (fn, msg) { - if (config('noDeprecation')) { - return fn; - } - - var warned = false; - function deprecated() { - if (!warned) { - if (config('throwDeprecation')) { - throw new Error(msg); - } else if (config('traceDeprecation')) { - console.trace(msg); - } else { - console.warn(msg); - } - warned = true; - } - return fn.apply(this, arguments); - } - - return deprecated; -} - -/** - * Checks `localStorage` for boolean values for the given `name`. 
- * - * @param {String} name - * @returns {Boolean} - * @api private - */ - -function config (name) { - // accessing global.localStorage can trigger a DOMException in sandboxed iframes - try { - if (!global.localStorage) return false; - } catch (_) { - return false; - } - var val = global.localStorage[name]; - if (null == val) return false; - return String(val).toLowerCase() === 'true'; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js deleted file mode 100644 index 5e6fcff5ddd3fb..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js +++ /dev/null @@ -1,6 +0,0 @@ - -/** - * For Node.js, simply re-export the core `util.deprecate` function. - */ - -module.exports = require('util').deprecate; diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json deleted file mode 100644 index 44061da89bcdfb..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "util-deprecate@~1.0.1", - "_id": "util-deprecate@1.0.2", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/util-deprecate", - "_nodeVersion": "4.1.2", - "_npmUser": { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - "_npmVersion": "2.14.4", - "_phantomChildren": {}, - "_requested": { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "_shrinkwrap": null, - "_spec": "util-deprecate@~1.0.1", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://n8.io/" - }, - "browser": "browser.js", - "bugs": { - "url": "https://github.com/TooTallNate/util-deprecate/issues" - }, - "dependencies": {}, - "description": "The Node.js `util.deprecate()` function with browser support", - "devDependencies": {}, - "directories": {}, - "dist": { - "shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "tarball": 
"https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - }, - "gitHead": "475fb6857cd23fafff20c1be846c1350abf8e6d4", - "homepage": "https://github.com/TooTallNate/util-deprecate", - "keywords": [ - "util", - "deprecate", - "browserify", - "browser", - "node" - ], - "license": "MIT", - "main": "node.js", - "maintainers": [ - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - } - ], - "name": "util-deprecate", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/TooTallNate/util-deprecate.git" - }, - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json deleted file mode 100644 index ae39f5f97d50ee..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json +++ /dev/null @@ -1,125 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream" - ], - [ - { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2" - ] - ], - "_from": "readable-stream@~2.0.0", - "_id": "readable-stream@2.0.6", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream", - "_nodeVersion": "5.7.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/readable-stream-2.0.6.tgz_1457893507709_0.369257491780445" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.6.0", - "_phantomChildren": {}, - "_requested": { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2" - ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", - "_shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "_shrinkwrap": null, - "_spec": "readable-stream@~2.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2", - "browser": { - "util": false - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" - }, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "~1.0.0", - "process-nextick-args": "~1.0.6", - "string_decoder": "~0.10.x", - "util-deprecate": "~1.0.1" - }, - "description": "Streams3, a user-land copy of the stream library from Node.js", - "devDependencies": { - "tap": "~0.2.6", - "tape": "~4.5.1", - "zuul": "~3.9.0" - }, - "directories": {}, - "dist": { - "shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz" - }, - "gitHead": "01fb5608a970b42c900b96746cadc13d27dd9d7e", - "homepage": 
"https://github.com/nodejs/readable-stream#readme", - "keywords": [ - "readable", - "stream", - "pipe" - ], - "license": "MIT", - "main": "readable.js", - "maintainers": [ - { - "name": "isaacs", - "email": "isaacs@npmjs.com" - }, - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - }, - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "readable-stream", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream.git" - }, - "scripts": { - "browser": "npm run write-zuul && zuul -- test/browser.js", - "test": "tap test/parallel/*.js test/ours/*.js", - "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" - }, - "version": "2.0.6" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js deleted file mode 100644 index 27e8d8a55165f9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_passthrough.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js deleted file mode 100644 index 6222a579864dd2..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,12 +0,0 @@ -var Stream = (function (){ - try { - return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify - } catch(_){} -}()); -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = Stream || exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js deleted file mode 100644 index 5d482f0780e993..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_transform.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js deleted file mode 100644 index e1e9efdf3c12e9..00000000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_writable.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/package.json index c7413b9f91c5df..a60bc8b192d22e 100644 --- a/deps/npm/node_modules/mississippi/node_modules/through2/package.json +++ 
b/deps/npm/node_modules/mississippi/node_modules/through2/package.json @@ -14,22 +14,20 @@ ] ], "_from": "through2@>=2.0.0 <3.0.0", - "_id": "through2@2.0.1", + "_id": "through2@2.0.3", "_inCache": true, "_location": "/mississippi/through2", - "_nodeVersion": "5.5.0", + "_nodeVersion": "7.2.0", "_npmOperationalInternal": { - "host": "packages-6-west.internal.npmjs.com", - "tmp": "tmp/through2-2.0.1.tgz_1454928418348_0.7339043114334345" + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/through2-2.0.3.tgz_1480373529377_0.264686161885038" }, "_npmUser": { "name": "rvagg", "email": "rod@vagg.org" }, - "_npmVersion": "3.6.0", - "_phantomChildren": { - "inherits": "2.0.3" - }, + "_npmVersion": "3.10.9", + "_phantomChildren": {}, "_requested": { "raw": "through2@^2.0.0", "scope": null, @@ -42,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/through2/-/through2-2.0.1.tgz", - "_shasum": "384e75314d49f32de12eebb8136b8eb6b5d59da9", + "_resolved": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz", + "_shasum": "0004569b37c7c74ba39c43f3ced78d1ad94140be", "_shrinkwrap": null, "_spec": "through2@^2.0.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -56,22 +54,22 @@ "url": "https://github.com/rvagg/through2/issues" }, "dependencies": { - "readable-stream": "~2.0.0", - "xtend": "~4.0.0" + "readable-stream": "^2.1.5", + "xtend": "~4.0.1" }, "description": "A tiny wrapper around Node streams2 Transform to avoid explicit subclassing noise", "devDependencies": { - "bl": "~0.9.4", + "bl": "~1.1.2", "faucet": "0.0.1", "stream-spigot": "~3.0.5", - "tape": "~4.0.0" + "tape": "~4.6.2" }, "directories": {}, "dist": { - "shasum": "384e75314d49f32de12eebb8136b8eb6b5d59da9", - "tarball": "https://registry.npmjs.org/through2/-/through2-2.0.1.tgz" + "shasum": "0004569b37c7c74ba39c43f3ced78d1ad94140be", + "tarball": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz" }, - "gitHead": "6d52a1b77db13a741f2708cd5854a198e4ae3072", + "gitHead": "4383b10b2cb6a32ae215760715b317513abe609f", "homepage": "https://github.com/rvagg/through2#readme", "keywords": [ "stream", @@ -102,5 +100,5 @@ "test": "node test/test.js | faucet", "test-local": "brtapsauce-local test/basic-test.js" }, - "version": "2.0.1" + "version": "2.0.3" } diff --git a/deps/npm/node_modules/mississippi/package.json b/deps/npm/node_modules/mississippi/package.json index 3595472180efc4..503a931104a573 100644 --- a/deps/npm/node_modules/mississippi/package.json +++ b/deps/npm/node_modules/mississippi/package.json @@ -2,50 +2,54 @@ "_args": [ [ { - "raw": "mississippi@~1.2.0", + "raw": "mississippi@1.3.0", "scope": null, "escapedName": "mississippi", "name": "mississippi", - "rawSpec": "~1.2.0", - "spec": ">=1.2.0 <1.3.0", - "type": "range" + "rawSpec": "1.3.0", + "spec": "1.3.0", + "type": "version" }, "/Users/zkat/Documents/code/npm" ] ], - "_from": "mississippi@>=1.2.0 <1.3.0", - "_id": "mississippi@1.2.0", + "_from": "mississippi@1.3.0", + "_id": "mississippi@1.3.0", "_inCache": true, "_location": "/mississippi", - "_nodeVersion": "4.2.3", + "_nodeVersion": "6.9.2", + "_npmOperationalInternal": { + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/mississippi-1.3.0.tgz_1484780252704_0.3479328122921288" + }, "_npmUser": { "name": "maxogden", "email": "max@maxogden.com" }, - "_npmVersion": "2.14.15", + "_npmVersion": "3.10.9", "_phantomChildren": { "inherits": "2.0.3", "once": "1.4.0", - "readable-stream": "2.1.5", + 
"readable-stream": "2.2.2", "wrappy": "1.0.2" }, "_requested": { - "raw": "mississippi@~1.2.0", + "raw": "mississippi@1.3.0", "scope": null, "escapedName": "mississippi", "name": "mississippi", - "rawSpec": "~1.2.0", - "spec": ">=1.2.0 <1.3.0", - "type": "range" + "rawSpec": "1.3.0", + "spec": "1.3.0", + "type": "version" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/mississippi/-/mississippi-1.2.0.tgz", - "_shasum": "cd51bb9bbad3ddb13dee3cf60f1d0929c7a7fa4c", + "_resolved": "https://registry.npmjs.org/mississippi/-/mississippi-1.3.0.tgz", + "_shasum": "d201583eb12327e3c5c1642a404a9cacf94e34f5", "_shrinkwrap": null, - "_spec": "mississippi@~1.2.0", + "_spec": "mississippi@1.3.0", "_where": "/Users/zkat/Documents/code/npm", "author": { "name": "max ogden" @@ -59,6 +63,7 @@ "end-of-stream": "^1.1.0", "flush-write-stream": "^1.0.0", "from2": "^2.1.0", + "parallel-transform": "^1.1.0", "pump": "^1.0.0", "pumpify": "^1.3.3", "stream-each": "^1.1.0", @@ -68,10 +73,10 @@ "devDependencies": {}, "directories": {}, "dist": { - "shasum": "cd51bb9bbad3ddb13dee3cf60f1d0929c7a7fa4c", - "tarball": "https://registry.npmjs.org/mississippi/-/mississippi-1.2.0.tgz" + "shasum": "d201583eb12327e3c5c1642a404a9cacf94e34f5", + "tarball": "https://registry.npmjs.org/mississippi/-/mississippi-1.3.0.tgz" }, - "gitHead": "4aab2a2d4d98fd5e300a329048eb02a12df44c60", + "gitHead": "dd64841b932d06baf7859ee80db78d139c90b4c7", "homepage": "https://github.com/maxogden/mississippi#readme", "license": "BSD-2-Clause", "main": "index.js", @@ -91,5 +96,5 @@ "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, - "version": "1.2.0" + "version": "1.3.0" } diff --git a/deps/npm/node_modules/mississippi/readme.md b/deps/npm/node_modules/mississippi/readme.md index 9013bb0dc53188..569803865c5e91 100644 --- a/deps/npm/node_modules/mississippi/readme.md +++ b/deps/npm/node_modules/mississippi/readme.md @@ -21,6 +21,7 @@ var miss = require('mississippi') - [to](#to) - [concat](#concat) - [finished](#finished) +- [parallel](#parallel) ### pipe @@ -28,13 +29,13 @@ var miss = require('mississippi') Pipes streams together and destroys all of them if one of them closes. Calls `cb` with `(error)` if there was an error in any of the streams. -When using standard `source.pipe(destination)` the source will _not_ be destroyed if the destination emits close or error. You are also not able to provide a callback to tell when then pipe has finished. +When using standard `source.pipe(destination)` the source will _not_ be destroyed if the destination emits close or error. You are also not able to provide a callback to tell when the pipe has finished. `miss.pipe` does these two things for you, ensuring you handle stream errors 100% of the time (unhandled errors are probably the most common bug in most node streams code) #### original module -`miss.pipe` is provided by [`require('pump')`](https://npmjs.org/pump) +`miss.pipe` is provided by [`require('pump')`](https://www.npmjs.com/package/pump) #### example @@ -56,13 +57,13 @@ miss.pipe(read, write, function (err) { ##### `miss.each(stream, each, [done])` -Iterate the data in `stream` one chunk at a time. Your `each` function will be called with with `(data, next)` where data is a data chunk and next is a callback. Call `next` when you are ready to consume the next chunk. +Iterate the data in `stream` one chunk at a time. Your `each` function will be called with `(data, next)` where data is a data chunk and next is a callback. 
Call `next` when you are ready to consume the next chunk. Optionally you can call `next` with an error to destroy the stream. You can also pass the optional third argument, `done`, which is a function that will be called with `(err)` when the stream ends. The `err` argument will be populated with an error if the stream emitted an error. #### original module -`miss.each` is provided by [`require('stream-each')`](https://npmjs.org/stream-each) +`miss.each` is provided by [`require('stream-each')`](https://www.npmjs.com/package/stream-each) #### example @@ -97,7 +98,7 @@ If any of the streams in the pipeline emits an error or gets destroyed, or you d #### original module -`miss.pipeline` is provided by [`require('pumpify')`](https://npmjs.org/pumpify) +`miss.pipeline` is provided by [`require('pumpify')`](https://www.npmjs.com/package/pumpify) #### example @@ -136,7 +137,7 @@ You can either choose to supply the writable and the readable at the time you cr #### original module -`miss.duplex` is provided by [`require('duplexify')`](https://npmjs.org/duplexify) +`miss.duplex` is provided by [`require('duplexify')`](https://www.npmjs.com/package/duplexify) #### example @@ -165,7 +166,7 @@ The `flushFunction`, with signature `(cb)`, is called just before the stream is #### original module -`miss.through` is provided by [`require('through2')`](https://npmjs.org/through2) +`miss.through` is provided by [`require('through2')`](https://www.npmjs.com/package/through2) #### example @@ -178,10 +179,10 @@ var write = fs.createWriteStream('./AWESOMECASE.TXT') // Leaving out the options object var uppercaser = miss.through( function (chunk, enc, cb) { - cb(chunk.toString().toUpperCase()) + cb(null, chunk.toString().toUpperCase()) }, function (cb) { - cb('ONE LAST BIT OF UPPERCASE') + cb(null, 'ONE LAST BIT OF UPPERCASE') } ) @@ -206,7 +207,7 @@ Returns a readable stream that calls `read(size, next)` when data is requested f #### original module -`miss.from` is provided by [`require('from2')`](https://npmjs.org/from2) +`miss.from` is provided by [`require('from2')`](https://www.npmjs.com/package/from2) #### example @@ -252,7 +253,7 @@ Returns a writable stream that calls `write(data, enc, cb)` when data is written #### original module -`miss.to` is provided by [`require('flush-write-stream')`](https://npmjs.org/flush-write-stream) +`miss.to` is provided by [`require('flush-write-stream')`](https://www.npmjs.com/package/flush-write-stream) #### example @@ -294,13 +295,13 @@ finished Returns a writable stream that concatenates all data written to the stream and calls a callback with the single result. -Calling `miss.concat(cb)` returns a writable stream. `cb` is called when the writable stream is finished, e.g. when all data is done being written to it. `cb` is called with a single argument, `(data)`, which will containe the result of concatenating all the data written to the stream. +Calling `miss.concat(cb)` returns a writable stream. `cb` is called when the writable stream is finished, e.g. when all data is done being written to it. `cb` is called with a single argument, `(data)`, which will contain the result of concatenating all the data written to the stream. Note that `miss.concat` will not handle stream errors for you. To handle errors, use `miss.pipe` or handle the `error` event manually. 
#### original module -`miss.concat` is provided by [`require('concat-stream')`](https://npmjs.org/concat-stream) +`miss.concat` is provided by [`require('concat-stream')`](https://www.npmjs.com/package/concat-stream) #### example @@ -335,7 +336,7 @@ This function is useful for simplifying stream handling code as it lets you hand #### original module -`miss.finished` is provided by [`require('end-of-stream')`](https://npmjs.org/end-of-stream) +`miss.finished` is provided by [`require('end-of-stream')`](https://www.npmjs.com/package/end-of-stream) #### example @@ -350,3 +351,44 @@ miss.finished(copyDest, function(err) { console.log('write success') }) ``` + +### parallel + +#####`miss.parallel(concurrency, each)` + +This works like `through` except you can process items in parallel, while still preserving the original input order. + +This is handy if you wanna take advantage of node's async I/O and process streams of items in batches. With this module you can build your very own streaming parallel job queue. + +Note that `miss.parallel` preserves input ordering, if you don't need that then you can use [through2-concurrent](https://github.com/almost/through2-concurrent) instead, which is very similar to this otherwise. + +#### original module + +`miss.parallel` is provided by [`require('parallel-transform')`](https://npmjs.org/parallel-transform) + +#### example + +This example fetches the GET HTTP headers for a stream of input URLs 5 at a time in parallel. + +```js +function getResponse (item, cb) { + var r = request(item.url) + r.on('error', function (err) { + cb(err) + }) + r.on('response', function (re) { + cb(null, {url: item.url, date: new Date(), status: re.statusCode, headers: re.headers}) + r.abort() + }) +} + +miss.pipe( + fs.createReadStream('./urls.txt'), // one url per line + split(), + miss.parallel(5, getResponse), + miss.through(function (row, enc, next) { + console.log(JSON.stringify(row)) + next() + }) +) +``` diff --git a/deps/npm/node_modules/request/node_modules/form-data/README.md b/deps/npm/node_modules/request/node_modules/form-data/README.md new file mode 100644 index 00000000000000..642a9d14a800b7 --- /dev/null +++ b/deps/npm/node_modules/request/node_modules/form-data/README.md @@ -0,0 +1,217 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.1.2.svg?label=linux:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v2.1.2.svg?label=macos:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/form-data/v2.1.2.svg?label=windows:0.12-6.x)](https://ci.appveyor.com/project/alexindigo/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.1.2.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) +[![bitHound Overall Score](https://www.bithound.io/github/form-data/form-data/badges/score.svg)](https://www.bithound.io/github/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', + contentType: 'image/jpg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. 
+ +## License + +Form-Data is released under the [MIT](License) license. diff --git a/deps/npm/package.json b/deps/npm/package.json index 4a15414a98d6b6..daed5a4747677b 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "4.1.2", + "version": "4.2.0", "name": "npm", "description": "a package manager for JavaScript", "keywords": [ @@ -32,6 +32,7 @@ "dependencies": { "JSONStream": "~1.3.0", "abbrev": "~1.0.9", + "ansi-regex": "~2.1.1", "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "aproba": "~1.0.4", @@ -62,7 +63,7 @@ "lodash.union": "~4.6.0", "lodash.uniq": "~4.5.0", "lodash.without": "~4.4.0", - "mississippi": "~1.2.0", + "mississippi": "~1.3.0", "mkdirp": "~0.5.1", "node-gyp": "~3.5.0", "nopt": "~4.0.1", diff --git a/deps/npm/test/tap/debug-logs.js b/deps/npm/test/tap/debug-logs.js new file mode 100644 index 00000000000000..fbd244446e1f6a --- /dev/null +++ b/deps/npm/test/tap/debug-logs.js @@ -0,0 +1,101 @@ +'use strict' +var path = require('path') +var test = require('tap').test +var Tacks = require('tacks') +var glob = require('glob') +var asyncMap = require('slide').asyncMap +var File = Tacks.File +var Dir = Tacks.Dir +var extend = Object.assign || require('util')._extend +var common = require('../common-tap.js') + +var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = path.join(basedir, 'testdir') +var cachedir = path.join(basedir, 'cache') +var globaldir = path.join(basedir, 'global') +var tmpdir = path.join(basedir, 'tmp') + +var conf = { + cwd: testdir, + env: extend(extend({}, process.env), { + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn' + }) +} + +var fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + 'package.json': File({ + name: 'debug-logs', + version: '1.0.0', + scripts: { + true: 'node -e "process.exit(0)"', + false: 'node -e "process.exit(1)"' + } + }) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + setup() + t.done() +}) + +test('example', function (t) { + common.npm(['run', 'false'], conf, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 1, 'command errored') + var matches = stderr.match(/Please include the following file with any support request:.*\nnpm ERR! 
{5,5}(.*)/) + t.ok(matches, 'debug log mentioned in error message') + if (matches) { + var logfile = matches[1] + t.matches(path.relative(cachedir, logfile), /^_logs/, 'debug log is inside the cache in _logs') + } + + // we run a bunch concurrently, this will actually create > than our limit as the check is done + // when the command starts + var todo = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + asyncMap(todo, function (num, next) { + common.npm(['run', '--logs-max=10', 'false'], conf, function (err, code) { + if (err) throw err + t.is(code, 1, 'run #' + num + ' errored as expected') + next() + }) + }, function () { + // now we do one more and that should clean up the list + common.npm(['run', '--logs-max=10', 'false'], conf, function (err, code) { + if (err) throw err + t.is(code, 1, 'final run errored as expected') + var files = glob.sync(path.join(cachedir, '_logs', '*')) + t.is(files.length, 10, 'there should never be more than 10 log files') + common.npm(['run', '--logs-max=5', 'true'], conf, function (err, code) { + if (err) throw err + t.is(code, 0, 'success run is ok') + var files = glob.sync(path.join(cachedir, '_logs', '*')) + t.is(files.length, 4, 'after success there should be logs-max - 1 log files') + t.done() + }) + }) + }) + }) +}) + +test('cleanup', function (t) { + cleanup() + t.done() +}) diff --git a/deps/npm/test/tap/gently-rm-overeager.js b/deps/npm/test/tap/gently-rm-overeager.js index c266f1c4dc1202..08fa72bc71c7ca 100644 --- a/deps/npm/test/tap/gently-rm-overeager.js +++ b/deps/npm/test/tap/gently-rm-overeager.js @@ -1,4 +1,4 @@ -var resolve = require('path').resolve +var path = require('path') var fs = require('graceful-fs') var test = require('tap').test var mkdirp = require('mkdirp') @@ -6,8 +6,8 @@ var rimraf = require('rimraf') var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'gently-rm-overeager') -var dep = resolve(__dirname, 'test-whoops') +var pkg = path.join(__dirname, 'gently-rm-overeager') +var dep = path.join(__dirname, 'test-whoops') var EXEC_OPTS = { cwd: pkg } @@ -32,8 +32,7 @@ test('cache add', function (t) { t.ok(c, 'test-whoops install also failed') fs.readdir(pkg, function (er, files) { t.ifError(er, 'package directory is still there') - t.deepEqual(files, ['npm-debug.log'], 'only debug log remains') - + t.deepEqual(files, [], 'no files remain') t.end() }) }) @@ -53,7 +52,7 @@ function cleanup () { function setup () { mkdirp.sync(pkg) // so it doesn't try to install into npm's own node_modules - mkdirp.sync(resolve(pkg, 'node_modules')) + mkdirp.sync(path.join(pkg, 'node_modules')) mkdirp.sync(dep) - fs.writeFileSync(resolve(dep, 'package.json'), JSON.stringify(fixture)) + fs.writeFileSync(path.join(dep, 'package.json'), JSON.stringify(fixture)) } diff --git a/deps/npm/test/tap/run-script.js b/deps/npm/test/tap/run-script.js index 3892337cee9f76..4dea9b83604bf5 100644 --- a/deps/npm/test/tap/run-script.js +++ b/deps/npm/test/tap/run-script.js @@ -67,6 +67,14 @@ var preversionOnly = { } } +var exitCode = { + name: 'scripted', + version: '1.2.3', + scripts: { + 'start': 'node -e "process.exit(7)"' + } +} + function testOutput (t, command, er, code, stdout, stderr) { var lines @@ -323,6 +331,17 @@ test('npm run-script no-params (direct only)', function (t) { }) }) +test('npm run-script keep non-zero exit code', function (t) { + writeMetadata(exitCode) + + common.npm(['run-script', 'start'], opts, function (err, code, stdout, stderr) { + t.ifError(err, 'ran run-script without parameters without crashing') + t.equal(code, 7, 'got 
expected exit code') + t.ok(stderr, 'should generate errors') + t.end() + }) +}) + test('cleanup', function (t) { cleanup() t.end() diff --git a/deps/npm/test/tap/search.all-package-search.js b/deps/npm/test/tap/search.all-package-search.js new file mode 100644 index 00000000000000..c70f4f8e7ef074 --- /dev/null +++ b/deps/npm/test/tap/search.all-package-search.js @@ -0,0 +1,201 @@ +var path = require('path') +var mkdirp = require('mkdirp') +var mr = require('npm-registry-mock') +var osenv = require('osenv') +var rimraf = require('rimraf') +var cacheFile = require('npm-cache-filename') +var test = require('tap').test +var Tacks = require('tacks') +var File = Tacks.File + +var common = require('../common-tap.js') + +var PKG_DIR = path.resolve(__dirname, 'search') +var CACHE_DIR = path.resolve(PKG_DIR, 'cache') +var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') +var cachePath = path.join(cacheBase, '.cache.json') + +var server + +test('setup', function (t) { + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + server = s + t.pass('all set up') + t.done() + }) +}) + +var searches = [ + { + term: 'cool', + description: 'non-regex search', + location: 103 + }, + { + term: '/cool/', + description: 'regex search', + location: 103 + }, + { + term: 'cool', + description: 'searches name field', + location: 103 + }, + { + term: 'ool', + description: 'excludes matches for --searchexclude', + location: 205, + inject: { + other: { name: 'other', description: 'this is a simple tool' } + }, + extraOpts: ['--searchexclude', 'cool'] + }, + { + term: 'neat lib', + description: 'searches description field', + location: 141, + inject: { + cool: { + name: 'cool', version: '5.0.0', description: 'this is a neat lib' + } + } + }, + { + term: 'foo', + description: 'skips description field with --no-description', + location: 80, + inject: { + cool: { + name: 'cool', version: '5.0.0', description: 'foo bar!' 
+ } + }, + extraOpts: ['--no-description'] + }, + { + term: 'zkat', + description: 'searches maintainers by name', + location: 155, + inject: { + cool: { + name: 'cool', + version: '5.0.0', + maintainers: [{ + name: 'zkat' + }] + } + } + }, + { + term: '=zkat', + description: 'searches maintainers unambiguously by =name', + location: 154, + inject: { + bar: { name: 'bar', description: 'zkat thing', version: '1.0.0' }, + cool: { + name: 'cool', + version: '5.0.0', + maintainers: [{ + name: 'zkat' + }] + } + } + }, + { + term: 'github.com', + description: 'searches projects by url', + location: 205, + inject: { + bar: { + name: 'bar', + url: 'gitlab.com/bar', + // For historical reasons, `url` is only present if `versions` is there + versions: ['1.0.0'], + version: '1.0.0' + }, + cool: { + name: 'cool', + version: '5.0.0', + versions: ['1.0.0'], + url: 'github.com/cool/cool' + } + } + }, + { + term: 'monad', + description: 'searches projects by keywords', + location: 197, + inject: { + cool: { + name: 'cool', + version: '5.0.0', + keywords: ['monads'] + } + } + } +] + +// These test classic hand-matched searches +searches.forEach(function (search) { + test(search.description, function (t) { + setup() + server.get('/-/v1/search?text=' + encodeURIComponent(search.term) + '&size=20').once().reply(404, {}) + var now = Date.now() + var cacheContents = { + '_updated': now, + bar: { name: 'bar', version: '1.0.0' }, + cool: { name: 'cool', version: '5.0.0' }, + foo: { name: 'foo', version: '2.0.0' }, + other: { name: 'other', version: '1.0.0' } + } + for (var k in search.inject) { + cacheContents[k] = search.inject[k] + } + var fixture = new Tacks(File(cacheContents)) + fixture.create(cachePath) + common.npm([ + 'search', search.term, + '--registry', common.registry, + '--cache', CACHE_DIR, + '--loglevel', 'error', + '--color', 'always' + ].concat(search.extraOpts || []), + {}, + function (err, code, stdout, stderr) { + t.equal(stderr, '', 'no error output') + t.notEqual(stdout, '', 'got output') + t.equal(code, 0, 'search finished successfully') + t.ifErr(err, 'search finished successfully') + // \033 == \u001B + var markStart = '\u001B\\[[0-9][0-9]m' + var markEnd = '\u001B\\[0m' + + var re = new RegExp(markStart + '.*?' 
+ markEnd) + + var cnt = stdout.search(re) + t.equal( + cnt, + search.location, + search.description + ' search for ' + search.term + ) + t.end() + }) + }) +}) + +test('cleanup', function (t) { + cleanup() + server.close() + t.end() +}) + +function setup () { + cleanup() + mkdirp.sync(cacheBase) +} + +function cleanup () { + server.done() + process.chdir(osenv.tmpdir()) + rimraf.sync(PKG_DIR) +} diff --git a/deps/npm/test/tap/search.esearch.js b/deps/npm/test/tap/search.esearch.js new file mode 100644 index 00000000000000..d892aec95759c3 --- /dev/null +++ b/deps/npm/test/tap/search.esearch.js @@ -0,0 +1,192 @@ +var common = require('../common-tap.js') +var finished = require('mississippi').finished +var mkdirp = require('mkdirp') +var mr = require('npm-registry-mock') +var npm = require('../../') +var osenv = require('osenv') +var path = require('path') +var rimraf = require('rimraf') +var test = require('tap').test + +var SEARCH = '/-/v1/search' +var PKG_DIR = path.resolve(__dirname, 'create-entry-update-stream') +var CACHE_DIR = path.resolve(PKG_DIR, 'cache') + +var esearch = require('../../lib/search/esearch') + +var server + +function setup () { + cleanup() + mkdirp.sync(CACHE_DIR) + process.chdir(CACHE_DIR) +} + +function cleanup () { + process.chdir(osenv.tmpdir()) + rimraf.sync(PKG_DIR) +} + +test('setup', function (t) { + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) { + t.ifError(err, 'npm loaded successfully') + server = s + t.pass('all set up') + t.done() + }) + }) +}) + +test('basic test', function (t) { + setup() + server.get(SEARCH + '?text=oo&size=1').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'foo', version: '2.0.0' } } + ] + }) + var results = [] + var s = esearch({ + include: ['oo'], + limit: 1 + }) + t.ok(s, 'got a stream') + s.on('data', function (d) { + results.push(d) + }) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'stream finished without error') + t.deepEquals(results, [{ + name: 'cool', + version: '1.0.0', + description: null, + maintainers: null, + keywords: null, + date: null + }, { + name: 'foo', + version: '2.0.0', + description: null, + maintainers: null, + keywords: null, + date: null + }]) + server.done() + t.done() + }) +}) + +test('only returns certain fields for each package', function (t) { + setup() + var date = new Date() + server.get(SEARCH + '?text=oo&size=1').once().reply(200, { + objects: [{ + package: { + name: 'cool', + version: '1.0.0', + description: 'desc', + maintainers: [ + {username: 'x', email: 'a@b.c'}, + {username: 'y', email: 'c@b.a'} + ], + keywords: ['a', 'b', 'c'], + date: date.toISOString(), + extra: 'lol' + } + }] + }) + var results = [] + var s = esearch({ + include: ['oo'], + limit: 1 + }) + t.ok(s, 'got a stream') + s.on('data', function (d) { + results.push(d) + }) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'stream finished without error') + t.deepEquals(results, [{ + name: 'cool', + version: '1.0.0', + description: 'desc', + maintainers: [ + {username: 'x', email: 'a@b.c'}, + {username: 'y', email: 'c@b.a'} + ], + keywords: ['a', 'b', 'c'], + date: date + }]) + server.done() + t.done() + }) +}) + +test('accepts a limit option', function (t) { + setup() + server.get(SEARCH + '?text=oo&size=3').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } 
}, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + var results = 0 + var s = esearch({ + include: ['oo'], + limit: 3 + }) + s.on('data', function () { results++ }) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'request sent with correct size') + t.equal(results, 2, 'behaves fine with fewer results than size') + server.done() + t.done() + }) +}) + +test('passes foo:bar syntax params directly', function (t) { + setup() + server.get(SEARCH + '?text=foo%3Abar&size=1').once().reply(200, { + objects: [] + }) + var s = esearch({ + include: ['foo:bar'], + limit: 1 + }) + s.on('data', function () {}) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'request sent with correct params') + server.done() + t.done() + }) +}) + +test('space-separates and URI-encodes multiple search params', function (t) { + setup() + server.get(SEARCH + '?text=foo%20bar%3Abaz%20quux%3F%3D&size=1').once().reply(200, { + objects: [] + }) + var s = esearch({ + include: ['foo', 'bar:baz', 'quux?='], + limit: 1 + }) + s.on('data', function () {}) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'request sent with correct params') + server.done() + t.done() + }) +}) + +test('cleanup', function (t) { + server.close() + cleanup() + t.done() +}) diff --git a/deps/npm/test/tap/search.js b/deps/npm/test/tap/search.js index dcc46df78e3049..78436e1e29a496 100644 --- a/deps/npm/test/tap/search.js +++ b/deps/npm/test/tap/search.js @@ -1,5 +1,6 @@ var path = require('path') var mkdirp = require('mkdirp') +var mr = require('npm-registry-mock') var osenv = require('osenv') var rimraf = require('rimraf') var cacheFile = require('npm-cache-filename') @@ -14,28 +15,26 @@ var CACHE_DIR = path.resolve(PKG_DIR, 'cache') var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') var cachePath = path.join(cacheBase, '.cache.json') +var server + test('setup', function (t) { - t.pass('all set up') - t.done() + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + server = s + t.pass('all set up') + t.done() + }) }) test('notifies when there are no results', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=none&size=20').once().reply(200, { + objects: [] + }) common.npm([ - 'search', 'nomatcheswhatsoeverfromthis', + 'search', 'none', '--registry', common.registry, - '--loglevel', 'error', - '--cache', CACHE_DIR + '--loglevel', 'error' ], {}, function (err, code, stdout, stderr) { if (err) throw err t.equal(stderr, '', 'no error output') @@ -47,6 +46,8 @@ test('notifies when there are no results', function (t) { test('spits out a useful error when no cache nor network', function (t) { setup() + server.get('/-/v1/search?text=foo&size=20').once().reply(404, {}) + server.get('/-/all').many().reply(404, {}) var cacheContents = {} var fixture = new Tacks(File(cacheContents)) fixture.create(cachePath) @@ -54,6 +55,7 @@ test('spits out a useful error when no cache nor network', function (t) { 'search', 'foo', '--registry', common.registry, '--loglevel', 'silly', + '--json', '--fetch-retry-mintimeout', 0, '--fetch-retry-maxtimeout', 0, '--cache', CACHE_DIR @@ -68,16 +70,12 @@ test('spits out a 
useful error when no cache nor network', function (t) { test('can switch to JSON mode', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'foo', version: '2.0.0' } } + ] + }) common.npm([ 'search', 'oo', '--json', @@ -86,30 +84,23 @@ test('can switch to JSON mode', function (t) { '--cache', CACHE_DIR ], {}, function (err, code, stdout, stderr) { if (err) throw err - t.deepEquals(JSON.parse(stdout), [ - { name: 'cool', version: '1.0.0' }, - { name: 'foo', version: '2.0.0' } - ], 'results returned as valid json') t.equal(stderr, '', 'no error output') t.equal(code, 0, 'search gives 0 error code even if no matches') + t.deepEquals(JSON.parse(stdout), [ + { name: 'cool', version: '1.0.0', date: null }, + { name: 'foo', version: '2.0.0', date: null } + ], 'results returned as valid json') t.done() }) }) test('JSON mode does not notify on empty', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [] + }) common.npm([ - 'search', 'nomatcheswhatsoeverfromthis', + 'search', 'oo', '--json', '--registry', common.registry, '--loglevel', 'error', @@ -125,16 +116,12 @@ test('JSON mode does not notify on empty', function (t) { test('can switch to tab separated mode', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', description: 'this\thas\ttabs', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'foo', description: 'this\thas\ttabs', version: '2.0.0' } } + ] + }) common.npm([ 'search', 'oo', '--parseable', @@ -152,18 +139,11 @@ test('can switch to tab separated mode', function (t) { test('tab mode does not notify on empty', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [] + }) common.npm([ - 'search', 'nomatcheswhatsoeverfromthis', + 'search', 'oo', '--parseable', '--registry', common.registry, '--loglevel', 'error', @@ -192,163 +172,9 @@ test('no arguments provided should error', function (t) { }) }) -var searches = [ - { - term: 'cool', - description: 'non-regex search', - location: 103 - }, - { - term: '/cool/', - description: 'regex search', - location: 103 - 
}, - { - term: 'cool', - description: 'searches name field', - location: 103 - }, - { - term: 'ool', - description: 'excludes matches for --searchexclude', - location: 205, - inject: { - other: { name: 'other', description: 'this is a simple tool' } - }, - extraOpts: ['--searchexclude', 'cool'] - }, - { - term: 'neat lib', - description: 'searches description field', - location: 141, - inject: { - cool: { - name: 'cool', version: '5.0.0', description: 'this is a neat lib' - } - } - }, - { - term: 'foo', - description: 'skips description field with --no-description', - location: 80, - inject: { - cool: { - name: 'cool', version: '5.0.0', description: 'foo bar!' - } - }, - extraOpts: ['--no-description'] - }, - { - term: 'zkat', - description: 'searches maintainers by name', - location: 155, - inject: { - cool: { - name: 'cool', - version: '5.0.0', - maintainers: [{ - name: 'zkat' - }] - } - } - }, - { - term: '=zkat', - description: 'searches maintainers unambiguously by =name', - location: 154, - inject: { - bar: { name: 'bar', description: 'zkat thing', version: '1.0.0' }, - cool: { - name: 'cool', - version: '5.0.0', - maintainers: [{ - name: 'zkat' - }] - } - } - }, - { - term: 'github.com', - description: 'searches projects by url', - location: 205, - inject: { - bar: { - name: 'bar', - url: 'gitlab.com/bar', - // For historical reasons, `url` is only present if `versions` is there - versions: ['1.0.0'], - version: '1.0.0' - }, - cool: { - name: 'cool', - version: '5.0.0', - versions: ['1.0.0'], - url: 'github.com/cool/cool' - } - } - }, - { - term: 'monad', - description: 'searches projects by keywords', - location: 197, - inject: { - cool: { - name: 'cool', - version: '5.0.0', - keywords: ['monads'] - } - } - } -] - -searches.forEach(function (search) { - test(search.description, function (t) { - setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '5.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - for (var k in search.inject) { - cacheContents[k] = search.inject[k] - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) - common.npm([ - 'search', search.term, - '--registry', common.registry, - '--cache', CACHE_DIR, - '--loglevel', 'error', - '--color', 'always' - ].concat(search.extraOpts || []), - {}, - function (err, code, stdout, stderr) { - t.equal(stderr, '', 'no error output') - t.notEqual(stdout, '', 'got output') - t.equal(code, 0, 'search finished successfully') - t.ifErr(err, 'search finished successfully') - // \033 == \u001B - var markStart = '\u001B\\[[0-9][0-9]m' - var markEnd = '\u001B\\[0m' - - var re = new RegExp(markStart + '.*?' + markEnd) - - var cnt = stdout.search(re) - t.equal( - cnt, - search.location, - search.description + ' search for ' + search.term - ) - t.end() - }) - }) -}) - test('cleanup', function (t) { cleanup() + server.close() t.end() }) @@ -358,6 +184,7 @@ function setup () { } function cleanup () { + server.done() process.chdir(osenv.tmpdir()) rimraf.sync(PKG_DIR) } From 924f34606d82476cc1a65d90394ac2569d57c137 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 22 Mar 2017 16:11:58 -0700 Subject: [PATCH 28/67] doc: update collaborator email address Per email conversation with Shigeki Ohtsu, updating email address in docs. The current listed email address does not work anymore. 
PR-URL: https://github.com/nodejs/node/pull/11996 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Shigeki Ohtsu --- .mailmap | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.mailmap b/.mailmap index 36324c6aa2ae9b..722995b6b04b63 100644 --- a/.mailmap +++ b/.mailmap @@ -143,7 +143,7 @@ San-Tai Hsu Scott Blomquist Sergey Kryzhanovsky Shannen Saez -Shigeki Ohtsu +Shigeki Ohtsu Siddharth Mahendraker Simon Willison Stanislav Opichal diff --git a/README.md b/README.md index e2d66b90be14b4..fdbbb16f820e19 100644 --- a/README.md +++ b/README.md @@ -185,7 +185,7 @@ more information about the governance of the Node.js project, see * [rvagg](https://github.com/rvagg) - **Rod Vagg** <rod@vagg.org> * [shigeki](https://github.com/shigeki) - -**Shigeki Ohtsu** <ohtsu@iij.ad.jp> (he/him) +**Shigeki Ohtsu** <ohtsu@ohtsu.org> (he/him) * [targos](https://github.com/targos) - **Michaël Zasso** <targos@protonmail.com> (he/him) * [thefourtheye](https://github.com/thefourtheye) - From bb2de4a5a1fb93c00247d6b608d579750b950d90 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 21 Mar 2017 02:58:54 +0200 Subject: [PATCH 29/67] test: do not use `more` command on Windows MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/11953 Fixes: https://github.com/nodejs/node/issues/11469 Reviewed-By: João Reis Reviewed-By: Gibson Fahnestock Reviewed-By: Michael Dawson Reviewed-By: Rich Trott --- test/README.md | 12 ---------- test/common.js | 22 ------------------- .../test-child-process-spawn-shell.js | 2 +- .../test-child-process-spawnsync-shell.js | 2 +- test/parallel/test-child-process-stdin.js | 8 ++----- .../test-child-process-stdio-inherit.js | 4 ++-- test/parallel/test-child-process-stdio.js | 3 ++- 7 files changed, 8 insertions(+), 45 deletions(-) diff --git a/test/README.md b/test/README.md index 653f78cc6d2eb5..ae549420624ccc 100644 --- a/test/README.md +++ b/test/README.md @@ -376,24 +376,12 @@ Path to the 'root' directory. either `/` or `c:\\` (windows) Logs '1..0 # Skipped: ' + `msg` -### spawnCat(options) -* `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) -* return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) - -Platform normalizes the `cat` command. - ### spawnPwd(options) * `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) * return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) Platform normalizes the `pwd` command. -### spawnSyncCat(options) -* `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) -* return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) - -Synchronous version of `spawnCat`. 
- ### spawnSyncPwd(options) * `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) * return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) diff --git a/test/common.js b/test/common.js index 99ea679114c038..85b2393c559a23 100644 --- a/test/common.js +++ b/test/common.js @@ -237,28 +237,6 @@ exports.ddCommand = function(filename, kilobytes) { }; -exports.spawnCat = function(options) { - const spawn = require('child_process').spawn; - - if (exports.isWindows) { - return spawn('more', [], options); - } else { - return spawn('cat', [], options); - } -}; - - -exports.spawnSyncCat = function(options) { - const spawnSync = require('child_process').spawnSync; - - if (exports.isWindows) { - return spawnSync('more', [], options); - } else { - return spawnSync('cat', [], options); - } -}; - - exports.spawnPwd = function(options) { const spawn = require('child_process').spawn; diff --git a/test/parallel/test-child-process-spawn-shell.js b/test/parallel/test-child-process-spawn-shell.js index 444d1eb063ca4a..e9dd0e07efaa0c 100644 --- a/test/parallel/test-child-process-spawn-shell.js +++ b/test/parallel/test-child-process-spawn-shell.js @@ -34,7 +34,7 @@ echo.on('close', common.mustCall((code, signal) => { })); // Verify that shell features can be used -const cmd = common.isWindows ? 'echo bar | more' : 'echo bar | cat'; +const cmd = 'echo bar | cat'; const command = cp.spawn(cmd, { encoding: 'utf8', shell: true diff --git a/test/parallel/test-child-process-spawnsync-shell.js b/test/parallel/test-child-process-spawnsync-shell.js index 1d92767a8b5ba5..9e03ee126f087b 100644 --- a/test/parallel/test-child-process-spawnsync-shell.js +++ b/test/parallel/test-child-process-spawnsync-shell.js @@ -23,7 +23,7 @@ assert.strictEqual(echo.args[echo.args.length - 1].replace(/"/g, ''), assert.strictEqual(echo.stdout.toString().trim(), 'foo'); // Verify that shell features can be used -const cmd = common.isWindows ? 'echo bar | more' : 'echo bar | cat'; +const cmd = 'echo bar | cat'; const command = cp.spawnSync(cmd, {shell: true}); assert.strictEqual(command.stdout.toString().trim(), 'bar'); diff --git a/test/parallel/test-child-process-stdin.js b/test/parallel/test-child-process-stdin.js index 8782e46f8f7229..a4c8e359494696 100644 --- a/test/parallel/test-child-process-stdin.js +++ b/test/parallel/test-child-process-stdin.js @@ -4,7 +4,7 @@ const assert = require('assert'); const spawn = require('child_process').spawn; -const cat = spawn(common.isWindows ? 'more' : 'cat'); +const cat = spawn('cat'); cat.stdin.write('hello'); cat.stdin.write(' '); cat.stdin.write('world'); @@ -33,9 +33,5 @@ cat.on('exit', common.mustCall(function(status) { })); cat.on('close', common.mustCall(function() { - if (common.isWindows) { - assert.strictEqual('hello world\r\n', response); - } else { - assert.strictEqual('hello world', response); - } + assert.strictEqual('hello world', response); })); diff --git a/test/parallel/test-child-process-stdio-inherit.js b/test/parallel/test-child-process-stdio-inherit.js index d2b64aeb30dd31..068d525d70ff32 100644 --- a/test/parallel/test-child-process-stdio-inherit.js +++ b/test/parallel/test-child-process-stdio-inherit.js @@ -1,5 +1,5 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const spawn = require('child_process').spawn; @@ -31,5 +31,5 @@ function grandparent() { function parent() { // should not immediately exit. 
- common.spawnCat({ stdio: 'inherit' }); + spawn('cat', [], { stdio: 'inherit' }); } diff --git a/test/parallel/test-child-process-stdio.js b/test/parallel/test-child-process-stdio.js index 3c67b3c3fe37a0..0fb7909b024f51 100644 --- a/test/parallel/test-child-process-stdio.js +++ b/test/parallel/test-child-process-stdio.js @@ -1,6 +1,7 @@ 'use strict'; const common = require('../common'); const assert = require('assert'); +const spawnSync = require('child_process').spawnSync; let options = {stdio: ['pipe']}; let child = common.spawnPwd(options); @@ -15,7 +16,7 @@ assert.strictEqual(child.stdout, null); assert.strictEqual(child.stderr, null); options = {stdio: 'ignore'}; -child = common.spawnSyncCat(options); +child = spawnSync('cat', [], options); assert.deepStrictEqual(options, {stdio: 'ignore'}); assert.throws(() => { From f6755182e54490951f3a53d13e7ea479176087d4 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Mon, 20 Mar 2017 14:18:37 +0800 Subject: [PATCH 30/67] url: show input in parse error message PR-URL: https://github.com/nodejs/node/pull/11934 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Timothy Gu Reviewed-By: Anna Henningsen Reviewed-By: Daijiro Wachi --- lib/internal/url.js | 24 ++++------- src/node_url.cc | 52 ++++++++++++++---------- src/node_url.h | 10 +++++ test/parallel/test-whatwg-url-parsing.js | 31 ++++++++++---- 4 files changed, 71 insertions(+), 46 deletions(-) diff --git a/lib/internal/url.js b/lib/internal/url.js index ad3f0d6f0bbab0..a474ed30b4d6e0 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -83,8 +83,6 @@ class TupleOrigin { function onParseComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - throw new TypeError('Invalid URL'); var ctx = this[context]; ctx.flags = flags; ctx.scheme = protocol; @@ -102,6 +100,12 @@ function onParseComplete(flags, protocol, username, password, initSearchParams(this[searchParams], query); } +function onParseError(flags, input) { + const error = new TypeError('Invalid URL: ' + input); + error.input = input; + throw error; +} + // Reused by URL constructor and URL#href setter. 
function parse(url, input, base) { input = String(input); @@ -109,13 +113,11 @@ function parse(url, input, base) { url[context] = new StorageObject(); binding.parse(input.trim(), -1, base_context, undefined, - onParseComplete.bind(url)); + onParseComplete.bind(url), onParseError); } function onParseProtocolComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const newIsSpecial = (flags & binding.URL_FLAGS_SPECIAL) !== 0; const s = this[special]; const ctx = this[context]; @@ -144,8 +146,6 @@ function onParseProtocolComplete(flags, protocol, username, password, function onParseHostComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (host) { ctx.host = host; @@ -159,8 +159,6 @@ function onParseHostComplete(flags, protocol, username, password, function onParseHostnameComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (host) { ctx.host = host; @@ -172,15 +170,11 @@ function onParseHostnameComplete(flags, protocol, username, password, function onParsePortComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; this[context].port = port; } function onParsePathComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (path) { ctx.path = path; @@ -192,16 +186,12 @@ function onParsePathComplete(flags, protocol, username, password, function onParseSearchComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; ctx.query = query; } function onParseHashComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (fragment) { ctx.fragment = fragment; diff --git a/src/node_url.cc b/src/node_url.cc index daae20813918c6..a013380b75839e 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -1246,7 +1246,8 @@ namespace url { enum url_parse_state state_override, Local base_obj, Local context_obj, - Local cb) { + Local cb, + Local error_cb) { Isolate* isolate = env->isolate(); Local context = env->context(); HandleScope handle_scope(isolate); @@ -1267,20 +1268,19 @@ namespace url { // Define the return value placeholders const Local undef = Undefined(isolate); - Local argv[9] = { - undef, - undef, - undef, - undef, - undef, - undef, - undef, - undef, - undef, - }; - - argv[ARG_FLAGS] = Integer::NewFromUnsigned(isolate, url.flags); if (!(url.flags & URL_FLAGS_FAILED)) { + Local argv[9] = { + undef, + undef, + undef, + undef, + undef, + undef, + undef, + undef, + undef, + }; + argv[ARG_FLAGS] = Integer::NewFromUnsigned(isolate, url.flags); if (url.flags & URL_FLAGS_HAS_SCHEME) argv[ARG_PROTOCOL] = OneByteString(isolate, url.scheme.c_str()); if (url.flags & URL_FLAGS_HAS_USERNAME) @@ -1297,22 +1297,31 @@ namespace url { argv[ARG_PORT] = Integer::New(isolate, url.port); if (url.flags & URL_FLAGS_HAS_PATH) argv[ARG_PATH] = Copy(env, url.path); + (void)cb->Call(context, recv, arraysize(argv), argv); + } else if (error_cb->IsFunction()) { + Local argv[2] = { undef, undef }; + argv[ERR_ARG_FLAGS] = Integer::NewFromUnsigned(isolate, 
url.flags); + argv[ERR_ARG_INPUT] = + String::NewFromUtf8(env->isolate(), + input, + v8::NewStringType::kNormal).ToLocalChecked(); + (void)error_cb.As()->Call(context, recv, arraysize(argv), argv); } - - (void)cb->Call(context, recv, 9, argv); } static void Parse(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); CHECK_GE(args.Length(), 5); - CHECK(args[0]->IsString()); - CHECK(args[2]->IsUndefined() || + CHECK(args[0]->IsString()); // input + CHECK(args[2]->IsUndefined() || // base context args[2]->IsNull() || args[2]->IsObject()); - CHECK(args[3]->IsUndefined() || + CHECK(args[3]->IsUndefined() || // context args[3]->IsNull() || args[3]->IsObject()); - CHECK(args[4]->IsFunction()); + CHECK(args[4]->IsFunction()); // complete callback + CHECK(args[5]->IsUndefined() || args[5]->IsFunction()); // error callback + Utf8Value input(env->isolate(), args[0]); enum url_parse_state state_override = kUnknownState; if (args[1]->IsNumber()) { @@ -1325,7 +1334,8 @@ namespace url { state_override, args[2], args[3], - args[4].As()); + args[4].As(), + args[5]); } static void EncodeAuthSet(const FunctionCallbackInfo& args) { diff --git a/src/node_url.h b/src/node_url.h index 49f6de866da501..b9d91782be9e59 100644 --- a/src/node_url.h +++ b/src/node_url.h @@ -463,6 +463,10 @@ static inline void PercentDecode(const char* input, XX(ARG_QUERY) \ XX(ARG_FRAGMENT) +#define ERR_ARGS(XX) \ + XX(ERR_ARG_FLAGS) \ + XX(ERR_ARG_INPUT) \ + static const char kEOL = -1; enum url_parse_state { @@ -484,6 +488,12 @@ enum url_cb_args { #undef XX }; +enum url_error_cb_args { +#define XX(name) name, + ERR_ARGS(XX) +#undef XX +} url_error_cb_args; + static inline bool IsSpecial(std::string scheme) { #define XX(name, _) if (scheme == name) return true; SPECIALS(XX); diff --git a/test/parallel/test-whatwg-url-parsing.js b/test/parallel/test-whatwg-url-parsing.js index e4f8306ead8d87..4d04c3194e1f1a 100644 --- a/test/parallel/test-whatwg-url-parsing.js +++ b/test/parallel/test-whatwg-url-parsing.js @@ -13,15 +13,30 @@ if (!common.hasIntl) { // Tests below are not from WPT. 
const tests = require(path.join(common.fixturesDir, 'url-tests')); +const failureTests = tests.filter((test) => test.failure).concat([ + { input: '' }, + { input: 'test' }, + { input: undefined }, + { input: 0 }, + { input: true }, + { input: false }, + { input: null }, + { input: new Date() }, + { input: new RegExp() }, + { input: () => {} } +]); -for (const test of tests) { - if (typeof test === 'string') - continue; - - if (test.failure) { - assert.throws(() => new URL(test.input, test.base), - /^TypeError: Invalid URL$/); - } +for (const test of failureTests) { + assert.throws( + () => new URL(test.input, test.base), + (error) => { + // The input could be processed, so we don't do strict matching here + const match = (error + '').match(/^TypeError: Invalid URL: (.*)$/); + if (!match) { + return false; + } + return error.input === match[1]; + }); } const additional_tests = require( From 1f7fe55c9740734ab4f20b19fad2642a4dbf400f Mon Sep 17 00:00:00 2001 From: Richard Lau Date: Fri, 24 Mar 2017 08:27:38 -0400 Subject: [PATCH 31/67] doc: add richardlau to collaborators PR-URL: https://github.com/nodejs/node/pull/12020 Reviewed-By: Ben Noordhuis Reviewed-By: Daniel Bevenius Reviewed-By: Evan Lucas Reviewed-By: Gibson Fahnestock Reviewed-By: Yuta Hiroto --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index fdbbb16f820e19..2144742ea78cdb 100644 --- a/README.md +++ b/README.md @@ -299,6 +299,8 @@ more information about the governance of the Node.js project, see **Prince John Wesley** <princejohnwesley@gmail.com> * [qard](https://github.com/qard) - **Stephen Belanger** <admin@stephenbelanger.com> (he/him) +* [richardlau](https://github.com/richardlau) - +**Richard Lau** <riclau@uk.ibm.com> * [rlidwka](https://github.com/rlidwka) - **Alex Kocharin** <alex@kocharin.ru> * [rmg](https://github.com/rmg) - From fb41ee3983033f40dec5f4d02ecee56c1b8482aa Mon Sep 17 00:00:00 2001 From: Morgan Brenner Date: Wed, 22 Mar 2017 15:23:00 -0400 Subject: [PATCH 32/67] build: add lint option to vcbuild.bat help PR-URL: https://github.com/nodejs/node/pull/11992 Fixes: https://github.com/nodejs/node/issues/11971 Reviewed-By: Gibson Fahnestock Reviewed-By: Vse Mozhet Byt Reviewed-By: James M Snell Reviewed-By: Richard Lau Reviewed-By: Colin Ihrig --- vcbuild.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vcbuild.bat b/vcbuild.bat index 6a9d1d728b1529..01750a4a004819 100644 --- a/vcbuild.bat +++ b/vcbuild.bat @@ -416,7 +416,7 @@ echo Failed to create vc project files. 
goto exit :help -echo vcbuild.bat [debug/release] [msi] [test-all/test-uv/test-inspector/test-internet/test-pummel/test-simple/test-message] [clean] [noprojgen] [small-icu/full-icu/without-intl] [nobuild] [nosign] [x86/x64] [vc2015] [download-all] [enable-vtune] +echo vcbuild.bat [debug/release] [msi] [test-all/test-uv/test-inspector/test-internet/test-pummel/test-simple/test-message] [clean] [noprojgen] [small-icu/full-icu/without-intl] [nobuild] [nosign] [x86/x64] [vc2015] [download-all] [enable-vtune] [lint/lint-ci] echo Examples: echo vcbuild.bat : builds release build echo vcbuild.bat debug : builds debug build From 764a00e6e5f9d3431120a6ca68ce23b9eb559f92 Mon Sep 17 00:00:00 2001 From: Yuta Hiroto Date: Thu, 23 Mar 2017 10:57:26 +0900 Subject: [PATCH 33/67] test: add test for url PR-URL: https://github.com/nodejs/node/pull/11999 Reviewed-By: James M Snell Reviewed-By: Joyee Cheung Reviewed-By: Prince John Wesley --- test/parallel/test-url-relative.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/parallel/test-url-relative.js b/test/parallel/test-url-relative.js index cf379e591d6088..f40981de62201e 100644 --- a/test/parallel/test-url-relative.js +++ b/test/parallel/test-url-relative.js @@ -4,6 +4,9 @@ const assert = require('assert'); const inspect = require('util').inspect; const url = require('url'); +// when source is false +assert.strictEqual(url.resolveObject('', 'foo'), 'foo'); + /* [from, path, expected] */ From b48f13af953905cbc34df977b36ac91a725c9f2e Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 21 Mar 2017 17:07:23 -0700 Subject: [PATCH 34/67] test: add minimal test for net benchmarks Currently, benchmark code is not exercised at all in CI. This adds a minimal test for net benchmarks. If this is deemed acceptable, similar minimal tests for other benchmarks can be written. Additionally, as issues and edge cases are uncovered, checks for them can be added. PR-URL: https://github.com/nodejs/node/pull/11979 Reviewed-By: James M Snell Reviewed-By: Joyee Cheung --- test/sequential/test-benchmark-net.js | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 test/sequential/test-benchmark-net.js diff --git a/test/sequential/test-benchmark-net.js b/test/sequential/test-benchmark-net.js new file mode 100644 index 00000000000000..4bb91451e9b254 --- /dev/null +++ b/test/sequential/test-benchmark-net.js @@ -0,0 +1,22 @@ +'use strict'; + +require('../common'); + +// Minimal test for net benchmarks. This makes sure the benchmarks aren't +// horribly broken but nothing more than that. + +// Because the net benchmarks use hardcoded ports, this should be in sequential +// rather than parallel to make sure it does not conflict with tests that choose +// random available ports. 
+ +const assert = require('assert'); +const fork = require('child_process').fork; +const path = require('path'); + +const runjs = path.join(__dirname, '..', '..', 'benchmark', 'run.js'); + +const child = fork(runjs, ['--set', 'dur=0', 'net']); +child.on('exit', (code, signal) => { + assert.strictEqual(code, 0); + assert.strictEqual(signal, null); +}); From e70ed3cb313b07837847e1eeaecec77712919f57 Mon Sep 17 00:00:00 2001 From: Nick Peleh Date: Tue, 21 Mar 2017 19:50:05 +0200 Subject: [PATCH 35/67] test: improve test-vm-cached-data.js * verify error message by adding 2nd argument to throws in test-assert PR-URL: https://github.com/nodejs/node/pull/11974 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Daijiro Wachi Reviewed-By: Luigi Pinca --- test/parallel/test-vm-cached-data.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/parallel/test-vm-cached-data.js b/test/parallel/test-vm-cached-data.js index 3f74f398b814ca..a09a41fe986012 100644 --- a/test/parallel/test-vm-cached-data.js +++ b/test/parallel/test-vm-cached-data.js @@ -89,4 +89,4 @@ assert.throws(() => { new vm.Script('function abc() {}', { cachedData: 'ohai' }); -}); +}, /^TypeError: options.cachedData must be a Buffer instance$/); From 3d21bfe6b94b6214563bbc5a9dc25da241c8e9d8 Mon Sep 17 00:00:00 2001 From: Luca Maraschi Date: Tue, 21 Mar 2017 15:47:25 +0100 Subject: [PATCH 36/67] test: invalid chars in http client path This test adds coverage for all the characters which are considered invalid in a http path. PR-URL: https://github.com/nodejs/node/pull/11964 Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- test/parallel/test-http-invalid-path-chars.js | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 test/parallel/test-http-invalid-path-chars.js diff --git a/test/parallel/test-http-invalid-path-chars.js b/test/parallel/test-http-invalid-path-chars.js new file mode 100644 index 00000000000000..462e0bc12a0a3f --- /dev/null +++ b/test/parallel/test-http-invalid-path-chars.js @@ -0,0 +1,20 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); + +const expectedError = /^TypeError: Request path contains unescaped characters$/; +const theExperimentallyDeterminedNumber = 39; + +function fail(path) { + assert.throws(() => { + http.request({ path }, common.fail); + }, expectedError); +} + +for (let i = 0; i <= theExperimentallyDeterminedNumber; i++) { + const prefix = 'a'.repeat(i); + for (let i = 0; i <= 32; i++) { + fail(prefix + String.fromCodePoint(i)); + } +} From 6c803db7b9d204aad4fa9ac3a1d12cdabd799fe6 Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Tue, 21 Mar 2017 11:30:59 +0100 Subject: [PATCH 37/67] lib: fix event race condition with -e Commit c5b07d4 ("lib: fix beforeExit not working with -e") runs the to-be-evaluated code at a later time than before because it switches from `process.nextTick()` to `setImmediate()`. It affects `-e 'process.on("message", ...)'` because there is now a larger time gap between startup and attaching the event listener, increasing the chances of missing early messages. I'm reasonably sure `process.nextTick()` was also susceptible to that, only less pronounced. Avoid the problem altogether by evaluating the code synchronously. Harmonizes the logic with `Module.runMain()` from lib/module.js which also calls `process._tickCallback()` afterwards. 
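A stripped-down sketch of the race (plain EventEmitter, not the actual
bootstrap code): when the listener is only attached on a later turn of the
event loop, anything emitted in between is dropped, because EventEmitter does
not queue events for listeners added afterwards.

    const EventEmitter = require('events');
    const channel = new EventEmitter();

    // Old behaviour: the user's `-e` code, and with it the 'message'
    // listener, only runs once the setImmediate callback fires.
    setImmediate(() => {
      channel.on('message', (msg) => console.log('received', msg));
    });

    // A message emitted before that point has no listener and is lost.
    channel.emit('message', 'ping');  // prints nothing

Evaluating the `-e` code synchronously closes that window.
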
PR-URL: https://github.com/nodejs/node/pull/11958 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Jeremiah Senkpiel Reviewed-By: Michael Dawson Reviewed-By: Daijiro Wachi --- lib/internal/bootstrap_node.js | 10 ++----- test/message/eval_messages.out | 53 ++++++++++++++++++--------------- test/message/stdin_messages.out | 37 +++++++++++++---------- test/parallel/test-cli-eval.js | 19 ++++++++++++ 4 files changed, 72 insertions(+), 47 deletions(-) diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js index 1dd51c01ff7a96..2ad6ad63bb490b 100644 --- a/lib/internal/bootstrap_node.js +++ b/lib/internal/bootstrap_node.js @@ -385,13 +385,9 @@ 'return require("vm").runInThisContext(' + `${JSON.stringify(body)}, { filename: ` + `${JSON.stringify(name)}, displayErrors: true });\n`; - // Defer evaluation for a tick. This is a workaround for deferred - // events not firing when evaluating scripts from the command line, - // see https://github.com/nodejs/node/issues/1600. - setImmediate(function() { - const result = module._compile(script, `${name}-wrapper`); - if (process._print_eval) console.log(result); - }); + const result = module._compile(script, `${name}-wrapper`); + if (process._print_eval) console.log(result); + process._tickCallback(); } // Load preload modules diff --git a/test/message/eval_messages.out b/test/message/eval_messages.out index ba0c35431c6907..340e760dc696b7 100644 --- a/test/message/eval_messages.out +++ b/test/message/eval_messages.out @@ -3,14 +3,15 @@ with(this){__filename} ^^^^ SyntaxError: Strict mode code may not include a with statement - at createScript (vm.js:*) - at Object.runInThisContext (vm.js:*) + at createScript (vm.js:*:*) + at Object.runInThisContext (vm.js:*:*) at Object. ([eval]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. (bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* 42 42 [eval]:1 @@ -19,28 +20,31 @@ throw new Error("hello") Error: hello at [eval]:1:7 - at ContextifyScript.Script.runInThisContext (vm.js:*) - at Object.runInThisContext (vm.js:*) + at ContextifyScript.Script.runInThisContext (vm.js:*:*) + at Object.runInThisContext (vm.js:*:*) at Object. ([eval]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. (bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* + [eval]:1 throw new Error("hello") ^ Error: hello at [eval]:1:7 - at ContextifyScript.Script.runInThisContext (vm.js:*) - at Object.runInThisContext (vm.js:*) + at ContextifyScript.Script.runInThisContext (vm.js:*:*) + at Object.runInThisContext (vm.js:*:*) at Object. ([eval]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. 
(bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* 100 [eval]:1 var x = 100; y = x; @@ -48,14 +52,15 @@ var x = 100; y = x; ReferenceError: y is not defined at [eval]:1:16 - at ContextifyScript.Script.runInThisContext (vm.js:*) - at Object.runInThisContext (vm.js:*) + at ContextifyScript.Script.runInThisContext (vm.js:*:*) + at Object.runInThisContext (vm.js:*:*) at Object. ([eval]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. (bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at run (bootstrap_node.js:*:*) + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* [eval]:1 var ______________________________________________; throw 10 diff --git a/test/message/stdin_messages.out b/test/message/stdin_messages.out index b4c51d7ad567f0..ad1688f15d09b8 100644 --- a/test/message/stdin_messages.out +++ b/test/message/stdin_messages.out @@ -7,10 +7,12 @@ SyntaxError: Strict mode code may not include a with statement at Object.runInThisContext (vm.js:*) at Object. ([stdin]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. (bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at Socket. (bootstrap_node.js:*:*) + at emitNone (events.js:*:*) + at Socket.emit (events.js:*:*) + at endReadableNT (_stream_readable.js:*:*) + at _combinedTickCallback (internal/process/next_tick.js:*:*) 42 42 [stdin]:1 @@ -23,10 +25,11 @@ Error: hello at Object.runInThisContext (vm.js:*) at Object. ([stdin]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. (bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at Socket. (bootstrap_node.js:*:*) + at emitNone (events.js:*:*) + at Socket.emit (events.js:*:*) + at endReadableNT (_stream_readable.js:*:*) [stdin]:1 throw new Error("hello") ^ @@ -37,10 +40,11 @@ Error: hello at Object.runInThisContext (vm.js:*) at Object. ([stdin]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. (bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at Socket. (bootstrap_node.js:*:*) + at emitNone (events.js:*:*) + at Socket.emit (events.js:*:*) + at endReadableNT (_stream_readable.js:*:*) 100 [stdin]:1 var x = 100; y = x; @@ -52,10 +56,11 @@ ReferenceError: y is not defined at Object.runInThisContext (vm.js:*) at Object. ([stdin]-wrapper:*:*) at Module._compile (module.js:*:*) - at Immediate. (bootstrap_node.js:*:*) - at runCallback (timers.js:*:*) - at tryOnImmediate (timers.js:*:*) - at processImmediate [as _immediateCallback] (timers.js:*:*) + at evalScript (bootstrap_node.js:*:*) + at Socket. 
(bootstrap_node.js:*:*) + at emitNone (events.js:*:*) + at Socket.emit (events.js:*:*) + at endReadableNT (_stream_readable.js:*:*) [stdin]:1 var ______________________________________________; throw 10 diff --git a/test/parallel/test-cli-eval.js b/test/parallel/test-cli-eval.js index 9f781642a9724e..93de319a56a58e 100644 --- a/test/parallel/test-cli-eval.js +++ b/test/parallel/test-cli-eval.js @@ -150,6 +150,25 @@ child.exec(`${nodejs} --use-strict -p process.execArgv`, assert.strictEqual(proc.stdout, 'start\nbeforeExit\nexit\n'); } +// Regression test for https://github.com/nodejs/node/issues/11948. +{ + const script = ` + process.on('message', (message) => { + if (message === 'ping') process.send('pong'); + if (message === 'exit') process.disconnect(); + }); + `; + const proc = child.fork('-e', [script]); + proc.on('exit', common.mustCall((exitCode, signalCode) => { + assert.strictEqual(exitCode, 0); + assert.strictEqual(signalCode, null); + })); + proc.on('message', (message) => { + if (message === 'pong') proc.send('exit'); + }); + proc.send('ping'); +} + [ '-arg1', '-arg1 arg2 --arg3', '--', From ade64e61cd925aeecb479fdc09d60508465ec29d Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 21 Mar 2017 21:48:21 -0700 Subject: [PATCH 38/67] test: refactor test-cluster-disconnect MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace `process.once('exit', ...)` with `common.mustCall()`. Remove unneeded variable in loop declaration. PR-URL: https://github.com/nodejs/node/pull/11981 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: Santiago Gimeno Reviewed-By: Luigi Pinca --- test/parallel/test-cluster-disconnect.js | 35 ++++++------------------ 1 file changed, 8 insertions(+), 27 deletions(-) diff --git a/test/parallel/test-cluster-disconnect.js b/test/parallel/test-cluster-disconnect.js index 0fc1611e2d9577..62ad2956fec4eb 100644 --- a/test/parallel/test-cluster-disconnect.js +++ b/test/parallel/test-cluster-disconnect.js @@ -21,7 +21,7 @@ if (cluster.isWorker) { const socket = net.connect(port, '127.0.0.1', () => { // buffer result let result = ''; - socket.on('data', common.mustCall((chunk) => { result += chunk; })); + socket.on('data', (chunk) => { result += chunk; }); // check result socket.on('end', common.mustCall(() => { @@ -34,7 +34,7 @@ if (cluster.isWorker) { const testCluster = function(cb) { let done = 0; - for (let i = 0, l = servers; i < l; i++) { + for (let i = 0; i < servers; i++) { testConnection(common.PORT + i, (success) => { assert.ok(success); done += 1; @@ -60,40 +60,21 @@ if (cluster.isWorker) { } }; - - const results = { - start: 0, - test: 0, - disconnect: 0 - }; - const test = function(again) { //1. start cluster - startCluster(() => { - results.start += 1; - + startCluster(common.mustCall(() => { //2. test cluster - testCluster(() => { - results.test += 1; - + testCluster(common.mustCall(() => { //3. 
disconnect cluster - cluster.disconnect(() => { - results.disconnect += 1; - + cluster.disconnect(common.mustCall(() => { // run test again to confirm cleanup if (again) { test(); } - }); - }); - }); + })); + })); + })); }; test(true); - - process.once('exit', () => { - assert.strictEqual(results.start, 2); - assert.strictEqual(results.test, 2); - assert.strictEqual(results.disconnect, 2); - }); } From 7aeeee3276f2bb0138c5d5a00b09fb2d44f70f92 Mon Sep 17 00:00:00 2001 From: Sorin Baltateanu Date: Thu, 21 Jul 2016 14:08:28 +0300 Subject: [PATCH 39/67] benchmark: repair the fs/readfile benchmark PR-URL: https://github.com/nodejs/node/pull/7818 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- benchmark/fs/readfile.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/benchmark/fs/readfile.js b/benchmark/fs/readfile.js index f58550fae83f96..9fc8316743e285 100644 --- a/benchmark/fs/readfile.js +++ b/benchmark/fs/readfile.js @@ -22,8 +22,10 @@ function main(conf) { data = null; var reads = 0; + var bench_ended = false; bench.start(); setTimeout(function() { + bench_ended = true; bench.end(reads); try { fs.unlinkSync(filename); } catch (e) {} process.exit(0); @@ -41,7 +43,8 @@ function main(conf) { throw new Error('wrong number of bytes returned'); reads++; - read(); + if (!bench_ended) + read(); } var cur = +conf.concurrent; From 2462fd800903184461f367b550a886c8f93f46e1 Mon Sep 17 00:00:00 2001 From: Bryan English Date: Wed, 26 Oct 2016 15:21:36 -0700 Subject: [PATCH 40/67] process: maintain constructor descriptor MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Use the original property descriptor instead of just taking the value, which would, by default, be non-writable and non-configurable. 
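The difference is easy to reproduce outside of the bootstrap code; a small
stand-alone sketch (not taken from the patch):

    const assert = require('assert');

    function Base() {}
    const origDesc =
      Object.getOwnPropertyDescriptor(Base.prototype, 'constructor');
    assert.strictEqual(origDesc.writable, true);      // original is writable
    assert.strictEqual(origDesc.configurable, true);  // ...and configurable

    // Wrapping only the value falls back to the defaults, all false:
    const lossy = Object.create(Object.prototype, {
      constructor: { value: Base }
    });
    assert.strictEqual(
      Object.getOwnPropertyDescriptor(lossy, 'constructor').writable, false);

    // Copying the whole descriptor keeps the original attributes, which is
    // what this change does for `process`:
    const faithful = Object.create(Object.prototype, {
      constructor: origDesc
    });
    assert.strictEqual(
      Object.getOwnPropertyDescriptor(faithful, 'constructor').writable, true);
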
PR-URL: https://github.com/nodejs/node/pull/9306 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Michaël Zasso --- lib/internal/bootstrap_node.js | 5 ++--- test/parallel/test-process-prototype.js | 15 +++++++++++++++ 2 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 test/parallel/test-process-prototype.js diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js index 2ad6ad63bb490b..0827326fad7782 100644 --- a/lib/internal/bootstrap_node.js +++ b/lib/internal/bootstrap_node.js @@ -13,10 +13,9 @@ const EventEmitter = NativeModule.require('events'); process._eventsCount = 0; + const origProcProto = Object.getPrototypeOf(process); Object.setPrototypeOf(process, Object.create(EventEmitter.prototype, { - constructor: { - value: process.constructor - } + constructor: Object.getOwnPropertyDescriptor(origProcProto, 'constructor') })); EventEmitter.call(process); diff --git a/test/parallel/test-process-prototype.js b/test/parallel/test-process-prototype.js new file mode 100644 index 00000000000000..0a0de8123d127d --- /dev/null +++ b/test/parallel/test-process-prototype.js @@ -0,0 +1,15 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const EventEmitter = require('events'); + +const proto = Object.getPrototypeOf(process); + +assert(proto instanceof EventEmitter); + +const desc = Object.getOwnPropertyDescriptor(proto, 'constructor'); + +assert.strictEqual(desc.value, process.constructor); +assert.strictEqual(desc.writable, true); +assert.strictEqual(desc.enumerable, false); +assert.strictEqual(desc.configurable, true); From 210250465a678bd1520fc1c1359e23209be03c8c Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 24 Mar 2017 12:42:50 -0700 Subject: [PATCH 41/67] benchmark: update obsolete information pointer A doc suggested in an error message is no longer the place to get the information about required http benchmarkers. Update the error message to point to the current location for the information. PR-URL: https://github.com/nodejs/node/pull/12026 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Franziska Hinkelmann --- benchmark/_http-benchmarkers.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index e096efc71ba162..f81335a20fc7a0 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -88,9 +88,11 @@ exports.run = function(options, callback) { benchmarker: exports.default_http_benchmarker }, options); if (!options.benchmarker) { - callback(new Error('Could not locate any of the required http ' + - 'benchmarkers. Check benchmark/README.md for further ' + - 'instructions.')); + callback(new Error('Could not locate required http benchmarker. 
See ' + + 'https://github.com/nodejs/node/blob/master/doc/' + + 'guides/writing-and-running-benchmarks.md##http-' + + 'benchmark-requirements ' + + 'for further instructions.')); return; } const benchmarker = benchmarkers[options.benchmarker]; From 837ff4ba597255ce83dda895f8c93eb0a6b9e030 Mon Sep 17 00:00:00 2001 From: Jeremiah Senkpiel Date: Fri, 24 Mar 2017 12:13:13 -0400 Subject: [PATCH 42/67] lib: remove an unnecessary coverage check PR-URL: https://github.com/nodejs/node/pull/12023 Fixes: https://github.com/nodejs/node/issues/11445 Refs: f65a48fc8f89071dc326e4d84646b5cbcfa8eca7 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Franziska Hinkelmann --- lib/internal/process/write-coverage.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/internal/process/write-coverage.js b/lib/internal/process/write-coverage.js index 6bbc59a6e981be..50c55200d2185f 100644 --- a/lib/internal/process/write-coverage.js +++ b/lib/internal/process/write-coverage.js @@ -5,12 +5,10 @@ const fs = require('fs'); const mkdirSync = fs.mkdirSync; const writeFileSync = fs.writeFileSync; -var isWritingCoverage = false; function writeCoverage() { - if (isWritingCoverage || !global.__coverage__) { + if (!global.__coverage__) { return; } - isWritingCoverage = true; const dirname = path.join(path.dirname(process.execPath), '.coverage'); const filename = `coverage-${process.pid}-${Date.now()}.json`; From a73dea9499b85b16379a63e12028ede8d0c45585 Mon Sep 17 00:00:00 2001 From: Rj Bernaldo Date: Fri, 24 Mar 2017 15:32:50 +0800 Subject: [PATCH 43/67] test: add second argument to assert.throws() MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds a second argument to the assert.throws() test to implicitly specify expected error message. PR-URL: https://github.com/nodejs/node/pull/12016 Reviewed-By: Michaël Zasso Reviewed-By: Vse Mozhet Byt Reviewed-By: Colin Ihrig Reviewed-By: Richard Lau Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Rich Trott Reviewed-By: Franziska Hinkelmann --- test/parallel/test-util-inspect-proxy.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/parallel/test-util-inspect-proxy.js b/test/parallel/test-util-inspect-proxy.js index 744bf235266e86..a6afc9f9bf9bad 100644 --- a/test/parallel/test-util-inspect-proxy.js +++ b/test/parallel/test-util-inspect-proxy.js @@ -31,7 +31,8 @@ assert.strictEqual(processUtil.getProxyDetails({}), undefined); // and the get function on the handler object defined above // is actually invoked. assert.throws( - () => util.inspect(proxyObj) + () => util.inspect(proxyObj), + /^Error: Getter should not be called$/ ); // Yo dawg, I heard you liked Proxy so I put a Proxy From 1c0435b1f3c06262c4b20b3c448d3302d1af2bdd Mon Sep 17 00:00:00 2001 From: "John F. Mercer" Date: Thu, 23 Mar 2017 19:07:58 -0400 Subject: [PATCH 44/67] test: add regex for expected error message Provide a regex to validate the error message. 
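As in the other assert.throws() changes in this series, the point of the
second argument is that without it any thrown error satisfies the assertion.
A minimal stand-alone illustration (hypothetical error message):

    const assert = require('assert');

    function mustFail() {
      throw new Error('key values mismatch');
    }

    assert.throws(mustFail);                          // passes for any error
    assert.throws(mustFail, /key values mismatch$/);  // also checks the message
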
PR-URL: https://github.com/nodejs/node/pull/12011 Reviewed-By: Sam Roberts Reviewed-By: Rich Trott Reviewed-By: Yuta Hiroto Reviewed-By: James M Snell Reviewed-By: Daniel Bevenius Reviewed-By: Colin Ihrig Reviewed-By: Franziska Hinkelmann --- test/parallel/test-tls-key-mismatch.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/parallel/test-tls-key-mismatch.js b/test/parallel/test-tls-key-mismatch.js index 65cac6f07a296c..93c7e70d8c61f7 100644 --- a/test/parallel/test-tls-key-mismatch.js +++ b/test/parallel/test-tls-key-mismatch.js @@ -8,6 +8,8 @@ if (!common.hasCrypto) { const assert = require('assert'); const tls = require('tls'); const fs = require('fs'); +const errorMessageRegex = new RegExp('^Error: error:0B080074:x509 ' + + 'certificate routines:X509_check_private_key:key values mismatch$'); const options = { key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), @@ -16,4 +18,4 @@ const options = { assert.throws(function() { tls.createSecureContext(options); -}); +}, errorMessageRegex); From 414df6c93bc8adc1e457693cf94bf247ac3bf2af Mon Sep 17 00:00:00 2001 From: Luca Maraschi Date: Thu, 23 Mar 2017 16:38:58 -0400 Subject: [PATCH 45/67] test: test validity of prefix in mkdtempSync This test is checking for the validity of the path used as parameter for mkdtempSync. PR-URL: https://github.com/nodejs/node/pull/12009 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Franziska Hinkelmann --- test/parallel/test-mkdtemp-sync-prefix-check.js | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 test/parallel/test-mkdtemp-sync-prefix-check.js diff --git a/test/parallel/test-mkdtemp-sync-prefix-check.js b/test/parallel/test-mkdtemp-sync-prefix-check.js new file mode 100644 index 00000000000000..825f622f069b5e --- /dev/null +++ b/test/parallel/test-mkdtemp-sync-prefix-check.js @@ -0,0 +1,13 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const fs = require('fs'); + +const assertValues = [undefined, null, 0, true, false, 1]; + +assertValues.forEach((assertValue) => { + assert.throws( + () => fs.mkdtempSync(assertValue, {}), + /^TypeError: filename prefix is required$/ + ); +}); From ac92d0249b39b337a63f27e016d50f176791fcf4 Mon Sep 17 00:00:00 2001 From: Claudio Rodriguez Date: Sun, 5 Mar 2017 17:15:38 +0000 Subject: [PATCH 46/67] net: refactor net module to module.exports Refactor net module to use the more efficient module.exports = {} pattern. Also renames internal "connect" function to "internalConnect" to avoid collision with exported "connect". 
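A schematic sketch of the pattern (hypothetical module, not the actual
lib/net.js contents): definitions stay plain module-scope functions, one
object literal is exported at the end, and the internal helper is renamed so
that the shorthand `connect` property refers unambiguously to the public
function.

    // Before: exports.connect = function connect() { ... };

    function internalConnect(self, address, port) {
      // low-level connection logic would live here
    }

    function connect(...args) {
      // public API; eventually calls internalConnect(...)
    }

    module.exports = {
      connect,
      createConnection: connect,  // legacy alias kept under a second name
    };
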
PR-URL: https://github.com/nodejs/node/pull/11698 Reviewed-By: Joyee Cheung Reviewed-By: James M Snell Reviewed-By: Franziska Hinkelmann --- lib/net.js | 66 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/lib/net.js b/lib/net.js index 309f38c360736b..2b5e5baeb28812 100644 --- a/lib/net.js +++ b/lib/net.js @@ -41,9 +41,9 @@ function isPipeName(s) { return typeof s === 'string' && toNumber(s) === false; } -exports.createServer = function(options, connectionListener) { +function createServer(options, connectionListener) { return new Server(options, connectionListener); -}; +} // Target API: @@ -58,7 +58,7 @@ exports.createServer = function(options, connectionListener) { // connect(port, [host], [cb]) // connect(path, [cb]); // -exports.connect = exports.createConnection = function() { +function connect() { const args = new Array(arguments.length); for (var i = 0; i < arguments.length; i++) args[i] = arguments[i]; @@ -74,7 +74,7 @@ exports.connect = exports.createConnection = function() { } return Socket.prototype.connect.call(socket, options, cb); -}; +} // Returns an array [options, cb], where options is an object, @@ -114,7 +114,6 @@ function normalizeArgs(args) { else return [options, cb]; } -exports._normalizeArgs = normalizeArgs; // called when creating new Socket, or when re-using a closed Socket @@ -312,9 +311,6 @@ function writeAfterFIN(chunk, encoding, cb) { } } -exports.Socket = Socket; -exports.Stream = Socket; // Legacy naming. - Socket.prototype.read = function(n) { if (n === 0) return stream.Readable.prototype.read.call(this, n); @@ -829,7 +825,8 @@ function afterWrite(status, handle, req, err) { } -function connect(self, address, port, addressType, localAddress, localPort) { +function internalConnect( + self, address, port, addressType, localAddress, localPort) { // TODO return promise from Socket.prototype.connect which // wraps _connectReq. @@ -943,7 +940,7 @@ Socket.prototype.connect = function() { this.writable = true; if (pipe) { - connect(this, options.path); + internalConnect(this, options.path); } else { lookupAndConnect(this, options); } @@ -958,7 +955,7 @@ function lookupAndConnect(self, options) { var localAddress = options.localAddress; var localPort = options.localPort; - if (localAddress && !exports.isIP(localAddress)) + if (localAddress && !cares.isIP(localAddress)) throw new TypeError('"localAddress" option must be a valid IP: ' + localAddress); @@ -975,11 +972,11 @@ function lookupAndConnect(self, options) { port |= 0; // If host is an IP, skip performing a lookup - var addressType = exports.isIP(host); + var addressType = cares.isIP(host); if (addressType) { process.nextTick(function() { if (self.connecting) - connect(self, host, port, addressType, localAddress, localPort); + internalConnect(self, host, port, addressType, localAddress, localPort); }); return; } @@ -1019,12 +1016,12 @@ function lookupAndConnect(self, options) { process.nextTick(connectErrorNT, self, err); } else { self._unrefTimer(); - connect(self, - ip, - port, - addressType, - localAddress, - localPort); + internalConnect(self, + ip, + port, + addressType, + localAddress, + localPort); } }); } @@ -1155,7 +1152,6 @@ function Server(options, connectionListener) { this.pauseOnConnect = !!options.pauseOnConnect; } util.inherits(Server, EventEmitter); -exports.Server = Server; function toNumber(x) { return (x = Number(x)) >= 0 ? 
x : false; } @@ -1222,7 +1218,6 @@ function createServerHandle(address, port, addressType, fd) { return handle; } -exports._createServerHandle = createServerHandle; Server.prototype._listen2 = function(address, port, addressType, backlog, fd) { @@ -1595,20 +1590,12 @@ Server.prototype.unref = function() { return this; }; - -exports.isIP = cares.isIP; - - -exports.isIPv4 = cares.isIPv4; - - -exports.isIPv6 = cares.isIPv6; - +var _setSimultaneousAccepts; if (process.platform === 'win32') { var simultaneousAccepts; - exports._setSimultaneousAccepts = function(handle) { + _setSimultaneousAccepts = function(handle) { if (handle === undefined) { return; } @@ -1624,5 +1611,20 @@ if (process.platform === 'win32') { } }; } else { - exports._setSimultaneousAccepts = function(handle) {}; + _setSimultaneousAccepts = function(handle) {}; } + +module.exports = { + _createServerHandle: createServerHandle, + _normalizeArgs: normalizeArgs, + _setSimultaneousAccepts, + connect, + createConnection: connect, + createServer, + isIP: cares.isIP, + isIPv4: cares.isIPv4, + isIPv6: cares.isIPv6, + Server, + Socket, + Stream: Socket, // Legacy naming +}; From 7347860966b61e792ddf1367dd112a911c668002 Mon Sep 17 00:00:00 2001 From: Jackson Tian Date: Sat, 4 Feb 2017 00:50:12 +0800 Subject: [PATCH 47/67] lib: clarify the usage of 'else' The keyword 'else' is unnecessary after 'throw' statements. PR-URL: https://github.com/nodejs/node/pull/11148 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Luigi Pinca Reviewed-By: Franziska Hinkelmann --- lib/child_process.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/child_process.js b/lib/child_process.js index b7fdb5ce8bb53f..cb2cc18440568e 100644 --- a/lib/child_process.js +++ b/lib/child_process.js @@ -30,9 +30,9 @@ exports.fork = function(modulePath /*, args, options*/) { if (pos < arguments.length && arguments[pos] != null) { if (typeof arguments[pos] !== 'object') { throw new TypeError('Incorrect value of args option'); - } else { - options = util._extend({}, arguments[pos++]); } + + options = util._extend({}, arguments[pos++]); } // Prepare arguments for fork: @@ -502,8 +502,8 @@ function execFileSync(/*command, args, options*/) { if (err) throw err; - else - return ret.stdout; + + return ret.stdout; } exports.execFileSync = execFileSync; @@ -522,7 +522,7 @@ function execSync(command /*, options*/) { if (err) throw err; - else - return ret.stdout; + + return ret.stdout; } exports.execSync = execSync; From 41284fbc5bc0d25d6fb2710e0fd6b3c475dd87ee Mon Sep 17 00:00:00 2001 From: cjihrig Date: Fri, 27 Jan 2017 15:04:25 -0500 Subject: [PATCH 48/67] test: cover thrown errors from exec() kill This commit adds code coverage for the scenario where exec() kills a child process, but the call to ChildProcess#kill() throws an exception. 
PR-URL: https://github.com/nodejs/node/pull/11038 Reviewed-By: Sam Roberts Reviewed-By: James M Snell Reviewed-By: Franziska Hinkelmann --- .../test-child-process-exec-kill-throws.js | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 test/parallel/test-child-process-exec-kill-throws.js diff --git a/test/parallel/test-child-process-exec-kill-throws.js b/test/parallel/test-child-process-exec-kill-throws.js new file mode 100644 index 00000000000000..d9473222a4551a --- /dev/null +++ b/test/parallel/test-child-process-exec-kill-throws.js @@ -0,0 +1,29 @@ +'use strict'; +// Flags: --expose_internals +const common = require('../common'); +const assert = require('assert'); +const cp = require('child_process'); +const internalCp = require('internal/child_process'); + +if (process.argv[2] === 'child') { + // Keep the process alive and printing to stdout. + setInterval(() => { console.log('foo'); }, 1); +} else { + // Monkey patch ChildProcess#kill() to kill the process and then throw. + const kill = internalCp.ChildProcess.prototype.kill; + + internalCp.ChildProcess.prototype.kill = function() { + kill.apply(this, arguments); + throw new Error('mock error'); + }; + + const cmd = `${process.execPath} ${__filename} child`; + const options = { maxBuffer: 0 }; + const child = cp.exec(cmd, options, common.mustCall((err, stdout, stderr) => { + // Verify that if ChildProcess#kill() throws, the error is reported. + assert(/^Error: mock error$/.test(err)); + assert.strictEqual(stdout, ''); + assert.strictEqual(stderr, ''); + assert.strictEqual(child.killed, true); + })); +} From 126dcb76affc15a51962d79095a0ae52416c6d32 Mon Sep 17 00:00:00 2001 From: Pedro lima Date: Fri, 21 Oct 2016 18:57:24 +0000 Subject: [PATCH 49/67] url: name anonymous functions in url MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Name anonymous functions in url.js. 
PR-URL: https://github.com/nodejs/node/pull/9225 Ref: https://github.com/nodejs/node/issues/8913 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Luigi Pinca Reviewed-By: Franziska Hinkelmann --- lib/url.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/url.js b/lib/url.js index cc098593a5f238..8314a82427dc04 100644 --- a/lib/url.js +++ b/lib/url.js @@ -76,7 +76,7 @@ function urlParse(url, parseQueryString, slashesDenoteHost) { return u; } -Url.prototype.parse = function(url, parseQueryString, slashesDenoteHost) { +Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { if (typeof url !== 'string') { throw new TypeError('Parameter "url" must be a string, not ' + typeof url); } @@ -557,7 +557,7 @@ function urlFormat(obj, options) { return obj.format(); } -Url.prototype.format = function() { +Url.prototype.format = function format() { var auth = this.auth || ''; if (auth) { auth = encodeAuth(auth); @@ -642,7 +642,7 @@ function urlResolve(source, relative) { return urlParse(source, false, true).resolve(relative); } -Url.prototype.resolve = function(relative) { +Url.prototype.resolve = function resolve(relative) { return this.resolveObject(urlParse(relative, false, true)).format(); }; @@ -651,7 +651,7 @@ function urlResolveObject(source, relative) { return urlParse(source, false, true).resolveObject(relative); } -Url.prototype.resolveObject = function(relative) { +Url.prototype.resolveObject = function resolveObject(relative) { if (typeof relative === 'string') { var rel = new Url(); rel.parse(relative, false, true); @@ -929,7 +929,7 @@ Url.prototype.resolveObject = function(relative) { return result; }; -Url.prototype.parseHost = function() { +Url.prototype.parseHost = function parseHost() { var host = this.host; var port = portPattern.exec(host); if (port) { From d112aad78bcc792e184a344bf93f04d7a9dc2222 Mon Sep 17 00:00:00 2001 From: "Dejon \"DJ\" Gill" Date: Sat, 19 Nov 2016 14:31:17 -0800 Subject: [PATCH 50/67] test: replace throw with common.fail Replace anonymous functions with arrow functions. Replace throw new Error with common.fail. PR-URL: https://github.com/nodejs/node/pull/9700 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Italo A. 
Casas Reviewed-By: Franziska Hinkelmann --- test/parallel/test-fs-empty-readStream.js | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/test/parallel/test-fs-empty-readStream.js b/test/parallel/test-fs-empty-readStream.js index 858d07e4f0b982..cc09517590cdc7 100644 --- a/test/parallel/test-fs-empty-readStream.js +++ b/test/parallel/test-fs-empty-readStream.js @@ -7,29 +7,32 @@ const fs = require('fs'); const emptyFile = path.join(common.fixturesDir, 'empty.txt'); fs.open(emptyFile, 'r', common.mustCall((error, fd) => { + assert.ifError(error); - const read = fs.createReadStream(emptyFile, { 'fd': fd }); + const read = fs.createReadStream(emptyFile, { fd }); read.once('data', () => { - throw new Error('data event should not emit'); + common.fail('data event should not emit'); }); read.once('end', common.mustCall(function endEvent1() {})); })); fs.open(emptyFile, 'r', common.mustCall((error, fd) => { + assert.ifError(error); - const read = fs.createReadStream(emptyFile, { 'fd': fd }); + const read = fs.createReadStream(emptyFile, { fd }); + read.pause(); read.once('data', () => { - throw new Error('data event should not emit'); + common.fail('data event should not emit'); }); read.once('end', function endEvent2() { - throw new Error('end event should not emit'); + common.fail('end event should not emit'); }); setTimeout(common.mustCall(() => { From 4051184106713091335934fbca56f691c9196d97 Mon Sep 17 00:00:00 2001 From: Trevor Norris Date: Mon, 31 Oct 2016 16:48:14 -0600 Subject: [PATCH 51/67] stream_base,tls_wrap: notify on destruct The TLSWrap constructor is passed a StreamBase* which it stores as TLSWrap::stream_, and is used to receive/send data along the pipeline (e.g. tls -> tcp). Problem is the lifetime of the instance that stream_ points to is independent of the lifetime of the TLSWrap instance. So it's possible for stream_ to be delete'd while the TLSWrap instance is still alive, allowing potential access to a then invalid pointer. Fix by having the StreamBase destructor null out TLSWrap::stream_; allowing all TLSWrap methods that rely on stream_ to do a check to see if it's available. While the test provided is fixed by this commit, it was also previously fixed by 478fabf. Regardless, leave the test in for better testing. 
PR-URL: https://github.com/nodejs/node/pull/11947 Reviewed-By: Franziska Hinkelmann Reviewed-By: James M Snell Reviewed-By: Anna Henningsen --- src/stream_base.h | 9 +++- src/tls_wrap.cc | 7 ++++ src/tls_wrap.h | 3 ++ .../test-tls-retain-handle-no-abort.js | 42 +++++++++++++++++++ 4 files changed, 60 insertions(+), 1 deletion(-) create mode 100644 test/parallel/test-tls-retain-handle-no-abort.js diff --git a/src/stream_base.h b/src/stream_base.h index faddee88c82786..35929750bfbc54 100644 --- a/src/stream_base.h +++ b/src/stream_base.h @@ -146,10 +146,14 @@ class StreamResource { const uv_buf_t* buf, uv_handle_type pending, void* ctx); + typedef void (*DestructCb)(void* ctx); StreamResource() : bytes_read_(0) { } - virtual ~StreamResource() = default; + virtual ~StreamResource() { + if (!destruct_cb_.is_empty()) + destruct_cb_.fn(destruct_cb_.ctx); + } virtual int DoShutdown(ShutdownWrap* req_wrap) = 0; virtual int DoTryWrite(uv_buf_t** bufs, size_t* count); @@ -186,15 +190,18 @@ class StreamResource { inline void set_alloc_cb(Callback c) { alloc_cb_ = c; } inline void set_read_cb(Callback c) { read_cb_ = c; } + inline void set_destruct_cb(Callback c) { destruct_cb_ = c; } inline Callback after_write_cb() { return after_write_cb_; } inline Callback alloc_cb() { return alloc_cb_; } inline Callback read_cb() { return read_cb_; } + inline Callback destruct_cb() { return destruct_cb_; } private: Callback after_write_cb_; Callback alloc_cb_; Callback read_cb_; + Callback destruct_cb_; uint64_t bytes_read_; friend class StreamBase; diff --git a/src/tls_wrap.cc b/src/tls_wrap.cc index e2bf06d5fb8429..3614abe392430b 100644 --- a/src/tls_wrap.cc +++ b/src/tls_wrap.cc @@ -64,6 +64,7 @@ TLSWrap::TLSWrap(Environment* env, stream_->set_after_write_cb({ OnAfterWriteImpl, this }); stream_->set_alloc_cb({ OnAllocImpl, this }); stream_->set_read_cb({ OnReadImpl, this }); + stream_->set_destruct_cb({ OnDestructImpl, this }); set_alloc_cb({ OnAllocSelf, this }); set_read_cb({ OnReadSelf, this }); @@ -666,6 +667,12 @@ void TLSWrap::OnReadImpl(ssize_t nread, } +void TLSWrap::OnDestructImpl(void* ctx) { + TLSWrap* wrap = static_cast(ctx); + wrap->clear_stream(); +} + + void TLSWrap::OnAllocSelf(size_t suggested_size, uv_buf_t* buf, void* ctx) { buf->base = node::Malloc(suggested_size); buf->len = suggested_size; diff --git a/src/tls_wrap.h b/src/tls_wrap.h index 0efdbf0e3991d3..8132d66876f26f 100644 --- a/src/tls_wrap.h +++ b/src/tls_wrap.h @@ -54,6 +54,8 @@ class TLSWrap : public AsyncWrap, size_t self_size() const override { return sizeof(*this); } + void clear_stream() { stream_ = nullptr; } + protected: static const int kClearOutChunkSize = 16384; @@ -121,6 +123,7 @@ class TLSWrap : public AsyncWrap, const uv_buf_t* buf, uv_handle_type pending, void* ctx); + static void OnDestructImpl(void* ctx); void DoRead(ssize_t nread, const uv_buf_t* buf, uv_handle_type pending); diff --git a/test/parallel/test-tls-retain-handle-no-abort.js b/test/parallel/test-tls-retain-handle-no-abort.js new file mode 100644 index 00000000000000..43b3709fd5f85b --- /dev/null +++ b/test/parallel/test-tls-retain-handle-no-abort.js @@ -0,0 +1,42 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +if (!common.hasCrypto) { + common.skip('missing crypto'); + return; +} +const tls = require('tls'); +const fs = require('fs'); +const util = require('util'); + +const sent = 'hello world'; +const serverOptions = { + isServer: true, + key: fs.readFileSync(common.fixturesDir + 
'/keys/agent1-key.pem'), + cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') +}; + +let ssl = null; + +process.on('exit', function() { + assert.ok(ssl !== null); + // If the internal pointer to stream_ isn't cleared properly then this + // will abort. + util.inspect(ssl); +}); + +const server = tls.createServer(serverOptions, function(s) { + s.on('data', function() { }); + s.on('end', function() { + server.close(); + s.destroy(); + }); +}).listen(0, function() { + const c = new tls.TLSSocket(); + ssl = c.ssl; + c.connect(this.address().port, function() { + c.end(sent); + }); +}); From 51c8d8088a6289238c3b863d0e8f1520bd39c20b Mon Sep 17 00:00:00 2001 From: Trevor Norris Date: Mon, 20 Mar 2017 16:26:10 -0600 Subject: [PATCH 52/67] Partial revert "tls: keep track of stream that is closed" This partially reverts commit 4cdb0e89d8daf7e1371c3b8d3f057940aa327d4a. A nullptr check in TLSWrap::IsAlive() and the added test were kept. PR-URL: https://github.com/nodejs/node/pull/11947 Reviewed-By: Franziska Hinkelmann Reviewed-By: James M Snell Reviewed-By: Anna Henningsen --- lib/_tls_wrap.js | 6 ------ src/tls_wrap.cc | 9 --------- src/tls_wrap.h | 1 - 3 files changed, 16 deletions(-) diff --git a/lib/_tls_wrap.js b/lib/_tls_wrap.js index 4ad79cf7a28d15..5fb6746a2c131c 100644 --- a/lib/_tls_wrap.js +++ b/lib/_tls_wrap.js @@ -378,12 +378,6 @@ TLSSocket.prototype._wrapHandle = function(wrap) { res = null; }); - if (wrap) { - wrap.on('close', function() { - res.onStreamClose(); - }); - } - return res; }; diff --git a/src/tls_wrap.cc b/src/tls_wrap.cc index 3614abe392430b..d506a69295b3c2 100644 --- a/src/tls_wrap.cc +++ b/src/tls_wrap.cc @@ -794,14 +794,6 @@ void TLSWrap::EnableSessionCallbacks( } -void TLSWrap::OnStreamClose(const FunctionCallbackInfo& args) { - TLSWrap* wrap; - ASSIGN_OR_RETURN_UNWRAP(&wrap, args.Holder()); - - wrap->stream_ = nullptr; -} - - void TLSWrap::DestroySSL(const FunctionCallbackInfo& args) { TLSWrap* wrap; ASSIGN_OR_RETURN_UNWRAP(&wrap, args.Holder()); @@ -932,7 +924,6 @@ void TLSWrap::Initialize(Local target, env->SetProtoMethod(t, "enableSessionCallbacks", EnableSessionCallbacks); env->SetProtoMethod(t, "destroySSL", DestroySSL); env->SetProtoMethod(t, "enableCertCb", EnableCertCb); - env->SetProtoMethod(t, "onStreamClose", OnStreamClose); StreamBase::AddMethods(env, t, StreamBase::kFlagHasWritev); SSLWrap::AddMethods(env, t); diff --git a/src/tls_wrap.h b/src/tls_wrap.h index 8132d66876f26f..fbf664edd530c0 100644 --- a/src/tls_wrap.h +++ b/src/tls_wrap.h @@ -140,7 +140,6 @@ class TLSWrap : public AsyncWrap, static void EnableCertCb( const v8::FunctionCallbackInfo& args); static void DestroySSL(const v8::FunctionCallbackInfo& args); - static void OnStreamClose(const v8::FunctionCallbackInfo& args); #ifdef SSL_CTRL_SET_TLSEXT_SERVERNAME_CB static void GetServername(const v8::FunctionCallbackInfo& args); From 7f6b03fd0f06bbee8ce8df423c815c4cba24f0d6 Mon Sep 17 00:00:00 2001 From: Fumiya KARASAWA Date: Mon, 27 Mar 2017 11:21:30 +0900 Subject: [PATCH 53/67] doc: fix process.stdout fd number it should be 1 for the stdout fd number PR-URL: https://github.com/nodejs/node/pull/12055 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Richard Lau --- doc/api/process.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/process.md b/doc/api/process.md index f158a1bc8efa14..4739ffb27ad588 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -1539,7 +1539,7 @@
must call `process.stdin.resume()` to read from it. Note also that calling * {Stream} The `process.stdout` property returns a [Writable][] stream connected to -`stdout` (fd `2`). +`stdout` (fd `1`). For example, to copy process.stdin to process.stdout: From 5e3d429613e6d1ebf3078af94f04cb97838971b4 Mon Sep 17 00:00:00 2001 From: Sebastian Van Sande Date: Mon, 6 Feb 2017 10:56:47 +0100 Subject: [PATCH 54/67] doc: stdout/err/in are all Duplex streams stdout, stderr and stdin are all Duplex streams but documentation states otherwise Fixes https://github.com/nodejs/node/issues/9201 PR-URL: https://github.com/nodejs/node/pull/11194 Reviewed-By: Sam Roberts Reviewed-By: James M Snell Reviewed-By: Jeremiah Senkpiel Reviewed-By: Franziska Hinkelmann --- doc/api/process.md | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/doc/api/process.md b/doc/api/process.md index 4739ffb27ad588..0d0615e2fa7697 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -1496,8 +1496,10 @@ Android) * {Stream} -The `process.stderr` property returns a [Writable][] stream connected to -`stderr` (fd `2`). +The `process.stderr` property returns a stream connected to +`stderr` (fd `2`). It is a [`net.Socket`][] (which is a [Duplex][] +stream) unless fd `2` refers to a file, in which case it is +a [Writable][] stream. Note: `process.stderr` differs from other Node.js streams in important ways, see [note on process I/O][] for more information. @@ -1506,8 +1508,10 @@ see [note on process I/O][] for more information. * {Stream} -The `process.stdin` property returns a [Readable][] stream equivalent to or -associated with `stdin` (fd `0`). +The `process.stdin` property returns a stream connected to +`stdin` (fd `0`). It is a [`net.Socket`][] (which is a [Duplex][] +stream) unless fd `0` refers to a file, in which case it is +a [Readable][] stream. For example: @@ -1526,7 +1530,7 @@ process.stdin.on('end', () => { }); ``` -As a [Readable][] stream, `process.stdin` can also be used in "old" mode that +As a [Duplex][] stream, `process.stdin` can also be used in "old" mode that is compatible with scripts written for Node.js prior to v0.10. For more information see [Stream compatibility][]. @@ -1538,8 +1542,10 @@ must call `process.stdin.resume()` to read from it. Note also that calling * {Stream} -The `process.stdout` property returns a [Writable][] stream connected to -`stdout` (fd `1`). +The `process.stdout` property returns a stream connected to +`stdout` (fd `1`). It is a [`net.Socket`][] (which is a [Duplex][] +stream) unless fd `1` refers to a file, in which case it is +a [Writable][] stream. For example, to copy process.stdin to process.stdout: @@ -1780,6 +1786,7 @@ cases: [TTY]: tty.html#tty_tty [Writable]: stream.html#stream_writable_streams [Readable]: stream.html#stream_readable_streams +[Duplex]: stream.html#stream_duplex_and_transform_streams [Child Process]: child_process.html [Cluster]: cluster.html [`process.exitCode`]: #process_process_exitcode From 18a586a278d30dcbf74c58cf9411d2c72881ece1 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 25 Mar 2017 11:01:06 -0700 Subject: [PATCH 55/67] doc: edit the benchmark guide These are minor typographical and style improvements to the guide for writing and running benchmarks. 
PR-URL: https://github.com/nodejs/node/pull/12041 Reviewed-By: Nikolai Vavilov Reviewed-By: Joyee Cheung Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- doc/guides/writing-and-running-benchmarks.md | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/doc/guides/writing-and-running-benchmarks.md b/doc/guides/writing-and-running-benchmarks.md index a20f321b7c2408..94b09dd234dfbb 100644 --- a/doc/guides/writing-and-running-benchmarks.md +++ b/doc/guides/writing-and-running-benchmarks.md @@ -22,21 +22,20 @@ which need to be included in the global Windows `PATH`. ### HTTP Benchmark Requirements -Most of the HTTP benchmarks require a benchmarker to be installed, this can be +Most of the HTTP benchmarks require a benchmarker to be installed. This can be either [`wrk`][wrk] or [`autocannon`][autocannon]. `Autocannon` is a Node.js script that can be installed using `npm install -g autocannon`. It will use the Node.js executable that is in the -path, hence if you want to compare two HTTP benchmark runs make sure that the +path. Hence if you want to compare two HTTP benchmark runs, make sure that the Node.js version in the path is not altered. `wrk` may be available through your preferred package manager. If not, you can easily build it [from source][wrk] via `make`. -By default `wrk` will be used as benchmarker. If it is not available -`autocannon` will be used in it its place. When creating a HTTP benchmark you -can specify which benchmarker should be used. You can force a specific -benchmarker to be used by providing it as an argument, e. g.: +By default, `wrk` will be used as the benchmarker. If it is not available, +`autocannon` will be used in its place. When creating an HTTP benchmark, you can +specify which benchmarker should be used by providing it as an argument: `node benchmark/run.js --set benchmarker=autocannon http` @@ -44,7 +43,7 @@ benchmarker to be used by providing it as an argument, e. g.: ### Benchmark Analysis Requirements -To analyze the results `R` should be installed. Check you package manager or +To analyze the results, `R` should be installed. Use your package manager or download it from https://www.r-project.org/. The R packages `ggplot2` and `plyr` are also used and can be installed using @@ -56,9 +55,8 @@ install.packages("ggplot2") install.packages("plyr") ``` -In the event you get a message that you need to select a CRAN mirror first. - -You can specify a mirror by adding in the repo parameter. +In the event you get a message that you need to select a CRAN mirror first, you +can specify a mirror by adding in the repo parameter. If we used the "http://cran.us.r-project.org" mirror, it could look something like this: From 948b99deabb017653616f284d042d4c0a241eaa9 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Sat, 25 Mar 2017 19:39:12 +0200 Subject: [PATCH 56/67] test: fix broken tests in test-buffer-includes Some of the tests for `buffer.includes()` functionality introduced in https://github.com/nodejs/node/pull/3567 have been broken in a way that caused them to always pass regardless of the result of the tested method. This behavior was caused by two reasons: * These tests were written as though `buffer.includes()` was supposed to return the same value that `buffer.indexOf()` does, i.e., used indices or -1 as expected return values instead of true and false. * `assert()` was used as the assertion function to do that instead of `assert.strictEqual()`. 
Thus `assert()` was called with a non-zero number as the first argument effectively causing these tests to pass. This commit changes the tests to use `assert.ok()` and removes redundant indices. PR-URL: https://github.com/nodejs/node/pull/12040 Ref: https://github.com/nodejs/node/pull/3567 Reviewed-By: Rich Trott Reviewed-By: Anna Henningsen Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Yuta Hiroto Reviewed-By: Santiago Gimeno Reviewed-By: James M Snell --- test/parallel/test-buffer-includes.js | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/test/parallel/test-buffer-includes.js b/test/parallel/test-buffer-includes.js index 9b0aef0ad7652d..2b65ce41889cdc 100644 --- a/test/parallel/test-buffer-includes.js +++ b/test/parallel/test-buffer-includes.js @@ -155,14 +155,12 @@ assert(mixedByteStringUcs2.includes('bc', 0, 'ucs2')); assert(mixedByteStringUcs2.includes('\u03a3', 0, 'ucs2')); assert(!mixedByteStringUcs2.includes('\u0396', 0, 'ucs2')); -assert( - 6, mixedByteStringUcs2.includes(Buffer.from('bc', 'ucs2'), 0, 'ucs2')); -assert( - 10, mixedByteStringUcs2.includes(Buffer.from('\u03a3', 'ucs2'), - 0, 'ucs2')); -assert( - -1, mixedByteStringUcs2.includes(Buffer.from('\u0396', 'ucs2'), - 0, 'ucs2')); +assert.ok( + mixedByteStringUcs2.includes(Buffer.from('bc', 'ucs2'), 0, 'ucs2')); +assert.ok( + mixedByteStringUcs2.includes(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2')); +assert.ok( + !mixedByteStringUcs2.includes(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2')); twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2'); @@ -266,12 +264,12 @@ for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { const patternBufferUcs2 = allCharsBufferUcs2.slice(index, index + length); - assert( - index, allCharsBufferUcs2.includes(patternBufferUcs2, 0, 'ucs2')); + assert.ok( + allCharsBufferUcs2.includes(patternBufferUcs2, 0, 'ucs2')); const patternStringUcs2 = patternBufferUcs2.toString('ucs2'); - assert( - index, allCharsBufferUcs2.includes(patternStringUcs2, 0, 'ucs2')); + assert.ok( + allCharsBufferUcs2.includes(patternStringUcs2, 0, 'ucs2')); } } From af051f65283ce01685d911b14fb633b4fccdf1ac Mon Sep 17 00:00:00 2001 From: Ahmad Nassri Date: Sat, 25 Mar 2017 13:28:59 -0400 Subject: [PATCH 57/67] doc: fix http properties documented as methods * at 9772fb928263562a755f1aeef6a65620e7e51bf2 [`maxHeadersCount`][maxheaderscount] and [`timeout`][timeout] were erroneously changed to methods * `maxHeadersCount` was also listed to default to `1000` where it actually is [`2000`][default] [maxheaderscount]: https://github.com/nodejs/node/blob/e0a9ad1af244f8756a228a6d087b3a55ee4c0d14/lib/_http_server.js#L276 [timeout]: https://github.com/nodejs/node/blob/e0a9ad1af244f8756a228a6d087b3a55ee4c0d14/lib/_http_server.js#L273 [default]: https://github.com/nodejs/node/blob/e0a9ad1af244f8756a228a6d087b3a55ee4c0d14/lib/_http_server.js#L312 PR-URL: https://github.com/nodejs/node/pull/12039 Reviewed-By: Luigi Pinca Reviewed-By: Franziska Hinkelmann Reviewed-By: James M Snell --- doc/api/http.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/api/http.md b/doc/api/http.md index a407c76a5e9550..88d83382ff070d 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -794,14 +794,14 @@ added: v5.7.0 A Boolean indicating whether or not the server is listening for connections. 
-### server.maxHeadersCount([limit]) +### server.maxHeadersCount -* `limit` {number} Defaults to 1000. +* {number} Defaults to 2000. -Limits maximum incoming headers count, equal to 1000 by default. If set to 0 - +Limits maximum incoming headers count, equal to 2000 by default. If set to 0 - no limit will be applied. ### server.setTimeout([msecs][, callback]) @@ -825,12 +825,12 @@ to the Server's `'timeout'` event, timeouts must be handled explicitly. Returns `server`. -### server.timeout([msecs]) +### server.timeout -* `msecs` {number} Defaults to 120000 (2 minutes). +* {number} Defaults to 120000 (2 minutes). The number of milliseconds of inactivity before a socket is presumed to have timed out. From 4b5f177e3dfcbb4b1a1589c03528dee49f9fb219 Mon Sep 17 00:00:00 2001 From: Ahmad Nassri Date: Sat, 25 Mar 2017 14:26:11 -0400 Subject: [PATCH 58/67] doc: fix https.timeout docs `server.timeout` is a property, not a method PR-URL: https://github.com/nodejs/node/pull/12039 Reviewed-By: Luigi Pinca Reviewed-By: Franziska Hinkelmann Reviewed-By: James M Snell --- doc/api/https.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/https.md b/doc/api/https.md index 515a4362effc85..7cbb5fd3429a81 100644 --- a/doc/api/https.md +++ b/doc/api/https.md @@ -30,11 +30,11 @@ added: v0.11.2 See [`http.Server#setTimeout()`][]. -### server.timeout([msecs]) +### server.timeout -- `msecs` {number} Defaults to 120000 (2 minutes). +- {number} Defaults to 120000 (2 minutes). See [`http.Server#timeout`][]. From 63a19c701237b57da3ca4480195f2eb6dd38fb7b Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Sat, 25 Mar 2017 15:33:03 +0200 Subject: [PATCH 59/67] doc: update and modernize examples in fs.ms * unify quotes in fs.md * avoid quote escaping in fs.md * simplify logics in fs.md * concatenation -> template literal in fs.md * add missing callback in fs.md * fix typo in fs.md * update output example in fs.md PR-URL: https://github.com/nodejs/node/pull/12035 Reviewed-By: Joyee Cheung Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- doc/api/fs.md | 50 +++++++++++++++++++++++++------------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index c6046d27313791..dfe4f55b52b5a6 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -218,7 +218,7 @@ For a regular file [`util.inspect(stats)`][] would return a string very similar to this: ```js -{ +Stats { dev: 2114, ino: 48064969, mode: 33188, @@ -232,8 +232,7 @@ similar to this: atime: Mon, 10 Oct 2011 23:24:11 GMT, mtime: Mon, 10 Oct 2011 23:24:11 GMT, ctime: Mon, 10 Oct 2011 23:24:11 GMT, - birthtime: Mon, 10 Oct 2011 23:24:11 GMT -} + birthtime: Mon, 10 Oct 2011 23:24:11 GMT } ``` Please note that `atime`, `mtime`, `birthtime`, and `ctime` are @@ -377,12 +376,12 @@ fs.access('myfile', (err) => { ```js fs.open('myfile', 'wx', (err, fd) => { if (err) { - if (err.code === "EEXIST") { + if (err.code === 'EEXIST') { console.error('myfile already exists'); return; - } else { - throw err; } + + throw err; } writeMyData(fd); @@ -394,12 +393,12 @@ fs.open('myfile', 'wx', (err, fd) => { ```js fs.access('myfile', (err) => { if (err) { - if (err.code === "ENOENT") { + if (err.code === 'ENOENT') { console.error('myfile does not exist'); return; - } else { - throw err; } + + throw err; } fs.open('myfile', 'r', (err, fd) => { @@ -414,12 +413,12 @@ fs.access('myfile', (err) => { ```js fs.open('myfile', 'r', (err, fd) => { if (err) { - if 
(err.code === "ENOENT") { + if (err.code === 'ENOENT') { console.error('myfile does not exist'); return; - } else { - throw err; } + + throw err; } readMyData(fd); @@ -779,13 +778,14 @@ fs.exists('myfile', (exists) => { ```js fs.open('myfile', 'wx', (err, fd) => { if (err) { - if (err.code === "EEXIST") { + if (err.code === 'EEXIST') { console.error('myfile already exists'); return; - } else { - throw err; } + + throw err; } + writeMyData(fd); }); ``` @@ -809,15 +809,15 @@ fs.exists('myfile', (exists) => { ```js fs.open('myfile', 'r', (err, fd) => { if (err) { - if (err.code === "ENOENT") { + if (err.code === 'ENOENT') { console.error('myfile does not exist'); return; - } else { - throw err; } - } else { - readMyData(fd); + + throw err; } + + readMyData(fd); }); ``` @@ -1025,7 +1025,7 @@ const fd = fs.openSync('temp.txt', 'r+'); // truncate the file to 10 bytes, whereas the actual size is 7 bytes fs.ftruncate(fd, 10, (err) => { - assert.ifError(!err); + assert.ifError(err); console.log(fs.readFileSync('temp.txt')); }); // Prints: @@ -1281,8 +1281,8 @@ fs.mkdtemp(tmpDir, (err, folder) => { }); // This method is *CORRECT*: -const path = require('path'); -fs.mkdtemp(tmpDir + path.sep, (err, folder) => { +const { sep } = require('path'); +fs.mkdtemp(`${tmpDir}${sep}`, (err, folder) => { if (err) throw err; console.log(folder); // Will print something similar to `/tmp/abc123`. @@ -1763,7 +1763,7 @@ argument will automatically be normalized to absolute path. Here is an example below: ```js -fs.symlink('./foo', './new-port'); +fs.symlink('./foo', './new-port', callback); ``` It creates a symbolic link named "new-port" that points to "foo". @@ -2174,7 +2174,7 @@ Example: ```js fs.writeFile('message.txt', 'Hello Node.js', (err) => { if (err) throw err; - console.log('It\'s saved!'); + console.log('The file has been saved!'); }); ``` From 8b2b93f148bf5a96b6af83921907152ce97d2ba5 Mon Sep 17 00:00:00 2001 From: Gibson Fahnestock Date: Sun, 26 Mar 2017 23:36:50 +0100 Subject: [PATCH 60/67] test: mark child-process-exec-kill-throws flaky PR-URL: https://github.com/nodejs/node/pull/12054 Ref: https://github.com/nodejs/node/issues/12053 Ref: https://github.com/nodejs/node/pull/11038 Reviewed-By: Colin Ihrig Reviewed-By: Santiago Gimeno Reviewed-By: James M Snell Reviewed-By: Richard Lau --- test/parallel/parallel.status | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index 4703fdd05e2dae..e01e1428955ca7 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -7,6 +7,9 @@ prefix parallel [true] # This section applies to all platforms [$system==win32] +# See https://github.com/nodejs/node/issues/12053, this test may be exposing a +# genuine bug with kill() on Windows. 
+test-child-process-exec-kill-throws : PASS,FLAKY [$system==linux] From 751c1153a4ebe35b3c0ae1786eb68934d7bf3aae Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Sat, 25 Mar 2017 01:12:51 +0200 Subject: [PATCH 61/67] benchmark: check end() argument to be > 0 PR-URL: https://github.com/nodejs/node/pull/12030 Ref: https://github.com/nodejs/node/issues/11972 Reviewed-By: James M Snell Reviewed-By: Rich Trott --- benchmark/common.js | 3 +++ test/sequential/test-benchmark-net.js | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/benchmark/common.js b/benchmark/common.js index 24369f2f543b67..77ebe0bd1dc37f 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -193,6 +193,9 @@ Benchmark.prototype.end = function(operations) { if (typeof operations !== 'number') { throw new Error('called end() without specifying operation count'); } + if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED && operations <= 0) { + throw new Error('called end() with operation count <= 0'); + } const time = elapsed[0] + elapsed[1] / 1e9; const rate = operations / time; diff --git a/test/sequential/test-benchmark-net.js b/test/sequential/test-benchmark-net.js index 4bb91451e9b254..826d891fd76621 100644 --- a/test/sequential/test-benchmark-net.js +++ b/test/sequential/test-benchmark-net.js @@ -15,7 +15,8 @@ const path = require('path'); const runjs = path.join(__dirname, '..', '..', 'benchmark', 'run.js'); -const child = fork(runjs, ['--set', 'dur=0', 'net']); +const child = fork(runjs, ['--set', 'dur=0', 'net'], + {env: {NODEJS_BENCHMARK_ZERO_ALLOWED: 1}}); child.on('exit', (code, signal) => { assert.strictEqual(code, 0); assert.strictEqual(signal, null); From fade55b025d46ba055de43de94d720741a7e6510 Mon Sep 17 00:00:00 2001 From: Nikolai Vavilov Date: Fri, 10 Feb 2017 15:30:51 +0200 Subject: [PATCH 62/67] doc: clarify out-of-bounds behavior of buf[index] PR-URL: https://github.com/nodejs/node/pull/11286 Fixes: https://github.com/nodejs/node/issues/11244 Reviewed-By: Anna Henningsen Reviewed-By: Gibson Fahnestock Reviewed-By: Colin Ihrig Reviewed-By: Ron Korving --- doc/api/buffer.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/api/buffer.md b/doc/api/buffer.md index b5c1e9895d1fdc..7ca33c0b8f14a7 100644 --- a/doc/api/buffer.md +++ b/doc/api/buffer.md @@ -939,6 +939,10 @@ The index operator `[index]` can be used to get and set the octet at position `index` in `buf`. The values refer to individual bytes, so the legal value range is between `0x00` and `0xFF` (hex) or `0` and `255` (decimal). +This operator is inherited from `Uint8Array`, so its behavior on out-of-bounds +access is the same as `Uint8Array` - that is, getting returns `undefined` and +setting does nothing. + Example: Copy an ASCII string into a `Buffer`, one byte at a time ```js From d8b71be1836db18c18793841f2debdf5bde494eb Mon Sep 17 00:00:00 2001 From: Franziska Hinkelmann Date: Sun, 26 Mar 2017 11:16:27 +0200 Subject: [PATCH 63/67] test: fix misleading comment MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The comment is outdated; function declarations have nothing to do with defineProperties.
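For context, a minimal sketch of the behavior the test exercises (not part of this patch): after code runs in a contextified sandbox, both the function expression assigned to `a` and the function declaration `b` appear as properties of the sandbox object.

```js
// Illustrative sketch only, not part of the patch.
const vm = require('vm');
const sandbox = vm.createContext({});
vm.runInContext('var a = function() {}; function b() {}', sandbox);
console.log(typeof sandbox.a, typeof sandbox.b);  // 'function' 'function'
```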
PR-URL: https://github.com/nodejs/node/pull/12048 Reviewed-By: Colin Ihrig Reviewed-By: Gibson Fahnestock Reviewed-By: Michaël Zasso Reviewed-By: Yuta Hiroto Reviewed-By: James M Snell --- test/parallel/test-vm-function-declaration.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/test/parallel/test-vm-function-declaration.js b/test/parallel/test-vm-function-declaration.js index 668b29885c44cf..c2aff80f02b524 100644 --- a/test/parallel/test-vm-function-declaration.js +++ b/test/parallel/test-vm-function-declaration.js @@ -5,11 +5,9 @@ const assert = require('assert'); const vm = require('vm'); const o = vm.createContext({ console: console }); -// This triggers the setter callback in node_contextify.cc +// Function declaration and expression should both be copied to the +// sandboxed context. let code = 'var a = function() {};\n'; - -// but this does not, since function decls are defineProperties, -// not simple sets. code += 'function b(){}\n'; // Grab the global b function as the completion value, to ensure that From a64aa442c18e7ea498bb9a03d150a5e0a56be509 Mon Sep 17 00:00:00 2001 From: Tom Atkinson Date: Tue, 28 Mar 2017 14:14:05 +0200 Subject: [PATCH 64/67] crypto: fix memory leak if certificate is revoked The additional validity checks applied to StartCom and WoSign certificates failed to free memory before returning. Refs: https://github.com/nodejs/node/pull/9469 Fixes: https://github.com/nodejs/node/issues/12033 PR-URL: https://github.com/nodejs/node/pull/12089 Reviewed-By: Sam Roberts Reviewed-By: Fedor Indutny Reviewed-By: Ben Noordhuis Reviewed-By: Myles Borins Reviewed-By: Shigeki Ohtsu Reviewed-By: Colin Ihrig --- src/node_crypto.cc | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/node_crypto.cc b/src/node_crypto.cc index 85d030312ca7ac..f00d75761c63c7 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -2760,7 +2760,9 @@ inline bool CertIsStartComOrWoSign(X509_NAME* name) { startcom_wosign_data = dn.data; startcom_wosign_name = d2i_X509_NAME(nullptr, &startcom_wosign_data, dn.len); - if (X509_NAME_cmp(name, startcom_wosign_name) == 0) + int cmp = X509_NAME_cmp(name, startcom_wosign_name); + X509_NAME_free(startcom_wosign_name); + if (cmp == 0) return true; } @@ -2805,8 +2807,10 @@ inline CheckResult CheckWhitelistedServerCert(X509_STORE_CTX* ctx) { } X509* leaf_cert = sk_X509_value(chain, 0); - if (!CheckStartComOrWoSign(root_name, leaf_cert)) + if (!CheckStartComOrWoSign(root_name, leaf_cert)) { + sk_X509_pop_free(chain, X509_free); return CHECK_CERT_REVOKED; + } // When the cert is issued from either CNNNIC ROOT CA or CNNNIC EV // ROOT CA, check a hash of its leaf cert if it is in the whitelist. From fd93622f8a69fc403f751f9f2166bad39ef233d8 Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Fri, 17 Mar 2017 11:35:28 +0100 Subject: [PATCH 65/67] tls: fix SecurePair external memory reporting Ensure that AdjustAmountOfExternalAllocatedMemory() is called when the SecurePair is destroyed. Not doing so is not an actual memory leak but it makes `process.memoryUsage().external` wildly inaccurate and can cause performance problems due to excessive garbage collection. 
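The regression test added below measures exactly this effect; a condensed sketch of the same measurement pattern (illustrative only, assumes node is run with `--expose-gc`):

```js
// Illustrative only: observe how much external memory V8 believes is live.
const before = process.memoryUsage().external;
// ... create and destroy many externally backed objects here ...
global.gc();  // available because of --expose-gc
const after = process.memoryUsage().external;
console.log(`external delta: ${after - before} bytes`);
```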
PR-URL: https://github.com/nodejs/node/pull/11896 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen --- src/node_crypto.cc | 6 +---- test/parallel/test-tls-securepair-leak.js | 29 +++++++++++++++++++++++ 2 files changed, 30 insertions(+), 5 deletions(-) create mode 100644 test/parallel/test-tls-securepair-leak.js diff --git a/src/node_crypto.cc b/src/node_crypto.cc index f00d75761c63c7..a463a4667a63be 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -3227,11 +3227,7 @@ void Connection::Start(const FunctionCallbackInfo& args) { void Connection::Close(const FunctionCallbackInfo& args) { Connection* conn; ASSIGN_OR_RETURN_UNWRAP(&conn, args.Holder()); - - if (conn->ssl_ != nullptr) { - SSL_free(conn->ssl_); - conn->ssl_ = nullptr; - } + conn->DestroySSL(); } diff --git a/test/parallel/test-tls-securepair-leak.js b/test/parallel/test-tls-securepair-leak.js new file mode 100644 index 00000000000000..b513bcd4c7c73a --- /dev/null +++ b/test/parallel/test-tls-securepair-leak.js @@ -0,0 +1,29 @@ +// Flags: --expose-gc --no-deprecation +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +if (!common.hasCrypto) { + common.skip('missing crypto'); + return; +} + +const { createSecureContext } = require('tls'); +const { createSecurePair } = require('_tls_legacy'); + +const before = process.memoryUsage().external; +{ + const context = createSecureContext(); + const options = {}; + for (let i = 0; i < 1e4; i += 1) + createSecurePair(context, false, false, false, options).destroy(); +} +global.gc(); +const after = process.memoryUsage().external; + +// It's not an exact science but a SecurePair grows .external by about 45 kB. +// Unless AdjustAmountOfExternalAllocatedMemory() is called on destruction, +// 10,000 instances make it grow by well over 400 MB. Allow for some slop +// because objects like buffers also affect the external limit. +assert(after - before < 25 << 20); From 5bdd54925acecfa3af4fb65bce037cc95193ee17 Mon Sep 17 00:00:00 2001 From: Gibson Fahnestock Date: Sun, 26 Mar 2017 16:01:31 +0100 Subject: [PATCH 66/67] lib: add comment to script eval _tickCallback Add a comment to match lib/module.js, missed in #11958. PR-URL: https://github.com/nodejs/node/pull/12050 Ref: https://github.com/nodejs/node/pull/11958 Reviewed-By: Colin Ihrig Reviewed-By: Yuta Hiroto Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell Reviewed-By: Jeremiah Senkpiel Reviewed-By: Richard Lau --- lib/internal/bootstrap_node.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js index 0827326fad7782..1aa2f8c23b442a 100644 --- a/lib/internal/bootstrap_node.js +++ b/lib/internal/bootstrap_node.js @@ -386,6 +386,7 @@ `${JSON.stringify(name)}, displayErrors: true });\n`; const result = module._compile(script, `${name}-wrapper`); if (process._print_eval) console.log(result); + // Handle any nextTicks added in the first tick of the program. 
process._tickCallback(); } From f04524e0c90c3834f1499f51c8da2eb6016286b9 Mon Sep 17 00:00:00 2001 From: Myles Borins Date: Tue, 28 Mar 2017 19:01:47 -0400 Subject: [PATCH 67/67] 2017-03-28, Version 7.8.0 (Current) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Notable changes: * buffer: - do not segfault on out-of-range index (Timothy Gu) https://github.com/nodejs/node/pull/11927 * crypto: - Fix memory leak if certificate is revoked (Tom Atkinson) https://github.com/nodejs/node/pull/12089 * deps: * upgrade npm to 4.2.0 (Kat Marchán) https://github.com/nodejs/node/pull/11389 * fix async await desugaring in V8 (Michaël Zasso) https://github.com/nodejs/node/pull/12004 * readline: - add option to stop duplicates in history (Danny Nemer) https://github.com/nodejs/node/pull/2982 * src: - add native URL class (James M Snell) https://github.com/nodejs/node/pull/11801 PR-URL: https://github.com/nodejs/node/pull/12104 --- CHANGELOG.md | 3 +- doc/changelogs/CHANGELOG_V7.md | 85 ++++++++++++++++++++++++++++++++++ src/node_version.h | 6 +-- 3 files changed, 90 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 22116f1960d480..ff24baf55faa72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,7 +28,8 @@ release. - 7.7.4
+ 7.8.0
+ 7.7.4
7.7.3
7.7.2
7.7.1
diff --git a/doc/changelogs/CHANGELOG_V7.md b/doc/changelogs/CHANGELOG_V7.md index b0edcc826381e6..e9b1ab03ff630d 100644 --- a/doc/changelogs/CHANGELOG_V7.md +++ b/doc/changelogs/CHANGELOG_V7.md @@ -6,6 +6,7 @@ +7.8.0
7.7.4
7.7.3
7.7.2
@@ -32,6 +33,90 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2017-03-28, Version 7.8.0 (Current), @MylesBorins + +### Notable Changes + +* **buffer**: + - do not segfault on out-of-range index (Timothy Gu) [#11927](https://github.com/nodejs/node/pull/11927) +* **crypto**: + - Fix memory leak if certificate is revoked (Tom Atkinson) [#12089](https://github.com/nodejs/node/pull/12089) +* **deps**: + * upgrade npm to 4.2.0 (Kat Marchán) [#11389](https://github.com/nodejs/node/pull/11389) + * fix async await desugaring in V8 (Michaël Zasso) [#12004](https://github.com/nodejs/node/pull/12004) +* **readline**: + - add option to stop duplicates in history (Danny Nemer) [#2982](https://github.com/nodejs/node/pull/2982) + +### Commits + +* [[`51c8d8088a`](https://github.com/nodejs/node/commit/51c8d8088a)] - Partial revert "tls: keep track of stream that is closed" (Trevor Norris) [#11947](https://github.com/nodejs/node/pull/11947) +* [[`751c1153a4`](https://github.com/nodejs/node/commit/751c1153a4)] - **benchmark**: check end() argument to be \> 0 (Vse Mozhet Byt) [#12030](https://github.com/nodejs/node/pull/12030) +* [[`210250465a`](https://github.com/nodejs/node/commit/210250465a)] - **benchmark**: update obsolete information pointer (Rich Trott) [#12026](https://github.com/nodejs/node/pull/12026) +* [[`7aeeee3276`](https://github.com/nodejs/node/commit/7aeeee3276)] - **benchmark**: repair the fs/readfile benchmark (Sorin Baltateanu) [#7818](https://github.com/nodejs/node/pull/7818) +* [[`90acb773be`](https://github.com/nodejs/node/commit/90acb773be)] - **benchmark**: allow multiple values for same config (Nikolai Vavilov) [#11819](https://github.com/nodejs/node/pull/11819) +* [[`2f4ad6fea2`](https://github.com/nodejs/node/commit/2f4ad6fea2)] - **benchmark**: harmonize progress bar + stderr output (Vse Mozhet Byt) [#11925](https://github.com/nodejs/node/pull/11925) +* [[`d62ddbe145`](https://github.com/nodejs/node/commit/d62ddbe145)] - **benchmark**: fix fs\bench-realpathSync.js (Vse Mozhet Byt) [#11904](https://github.com/nodejs/node/pull/11904) +* [[`85eb1bc0a9`](https://github.com/nodejs/node/commit/85eb1bc0a9)] - **benchmark**: remove v8ForceOptimization calls (Lucas Lago) [#11908](https://github.com/nodejs/node/pull/11908) +* [[`17d16e8f3d`](https://github.com/nodejs/node/commit/17d16e8f3d)] - **buffer**: remove unneeded eslint-disable comment (Rich Trott) [#11906](https://github.com/nodejs/node/pull/11906) +* [[`fb41ee3983`](https://github.com/nodejs/node/commit/fb41ee3983)] - **build**: add lint option to vcbuild.bat help (Morgan Brenner) [#11992](https://github.com/nodejs/node/pull/11992) +* [[`3e4ecca0be`](https://github.com/nodejs/node/commit/3e4ecca0be)] - **build**: don't create directory for NDK toolchain (TheBeastOfCaerbannog) [#11916](https://github.com/nodejs/node/pull/11916) +* [[`a64aa442c1`](https://github.com/nodejs/node/commit/a64aa442c1)] - **crypto**: fix memory leak if certificate is revoked (Tom Atkinson) [#12089](https://github.com/nodejs/node/pull/12089) +* 
[[`2767e2d3cc`](https://github.com/nodejs/node/commit/2767e2d3cc)] - **(SEMVER-MINOR)** **deps**: upgrade npm to 4.2.0 (Kat Marchán) [#11389](https://github.com/nodejs/node/pull/11389) +* [[`d22346de40`](https://github.com/nodejs/node/commit/d22346de40)] - **deps**: fix async await desugaring in V8 (Michaël Zasso) [#12004](https://github.com/nodejs/node/pull/12004) +* [[`fade55b025`](https://github.com/nodejs/node/commit/fade55b025)] - **doc**: clarify out-of-bounds behavior of buf\[index\] (Nikolai Vavilov) [#11286](https://github.com/nodejs/node/pull/11286) +* [[`63a19c7012`](https://github.com/nodejs/node/commit/63a19c7012)] - **doc**: update and modernize examples in fs.ms (Vse Mozhet Byt) [#12035](https://github.com/nodejs/node/pull/12035) +* [[`4b5f177e3d`](https://github.com/nodejs/node/commit/4b5f177e3d)] - **doc**: fix https.timeout docs (Ahmad Nassri) [#12039](https://github.com/nodejs/node/pull/12039) +* [[`af051f6528`](https://github.com/nodejs/node/commit/af051f6528)] - **doc**: fix http properties documented as methods (Ahmad Nassri) [#12039](https://github.com/nodejs/node/pull/12039) +* [[`18a586a278`](https://github.com/nodejs/node/commit/18a586a278)] - **doc**: edit the benchmark guide (Rich Trott) [#12041](https://github.com/nodejs/node/pull/12041) +* [[`5e3d429613`](https://github.com/nodejs/node/commit/5e3d429613)] - **doc**: stdout/err/in are all Duplex streams (Sebastian Van Sande) [#11194](https://github.com/nodejs/node/pull/11194) +* [[`7f6b03fd0f`](https://github.com/nodejs/node/commit/7f6b03fd0f)] - **doc**: fix process.stdout fd number (Fumiya KARASAWA) [#12055](https://github.com/nodejs/node/pull/12055) +* [[`1f7fe55c97`](https://github.com/nodejs/node/commit/1f7fe55c97)] - **doc**: add richardlau to collaborators (Richard Lau) [#12020](https://github.com/nodejs/node/pull/12020) +* [[`924f34606d`](https://github.com/nodejs/node/commit/924f34606d)] - **doc**: update collaborator email address (Rich Trott) [#11996](https://github.com/nodejs/node/pull/11996) +* [[`41bec5cff4`](https://github.com/nodejs/node/commit/41bec5cff4)] - **doc**: correct info in child_process.md (Vse Mozhet Byt) [#11949](https://github.com/nodejs/node/pull/11949) +* [[`96ad336d9e`](https://github.com/nodejs/node/commit/96ad336d9e)] - **doc**: remove superfluous sample assert code (Rich Trott) [#11933](https://github.com/nodejs/node/pull/11933) +* [[`486bd1bd9b`](https://github.com/nodejs/node/commit/486bd1bd9b)] - **doc**: require uses fs root for '/' prefix (Bradley Farias) [#11897](https://github.com/nodejs/node/pull/11897) +* [[`04fa28e6dc`](https://github.com/nodejs/node/commit/04fa28e6dc)] - **doc**: fix gitter badge in README (Roman Reiss) [#11944](https://github.com/nodejs/node/pull/11944) +* [[`68b23be51f`](https://github.com/nodejs/node/commit/68b23be51f)] - **doc**: add missing word in stream.md (Jyotman Singh) [#11914](https://github.com/nodejs/node/pull/11914) +* [[`0f2642ee36`](https://github.com/nodejs/node/commit/0f2642ee36)] 
- **errors**: remove needless lazyAssert (DavidCai) [#11891](https://github.com/nodejs/node/pull/11891) +* [[`5bdd54925a`](https://github.com/nodejs/node/commit/5bdd54925a)] - **lib**: add comment to script eval _tickCallback (Gibson Fahnestock) [#12050](https://github.com/nodejs/node/pull/12050) +* [[`7347860966`](https://github.com/nodejs/node/commit/7347860966)] - **lib**: clarify the usage of 'else' (Jackson Tian) [#11148](https://github.com/nodejs/node/pull/11148) +* [[`837ff4ba59`](https://github.com/nodejs/node/commit/837ff4ba59)] - **lib**: remove an unnecessary coverage check (Jeremiah Senkpiel) [#12023](https://github.com/nodejs/node/pull/12023) +* [[`6c803db7b9`](https://github.com/nodejs/node/commit/6c803db7b9)] - **lib**: fix event race condition with -e (Ben Noordhuis) [#11958](https://github.com/nodejs/node/pull/11958) +* [[`ac92d0249b`](https://github.com/nodejs/node/commit/ac92d0249b)] - **net**: refactor net module to module.exports (Claudio Rodriguez) [#11698](https://github.com/nodejs/node/pull/11698) +* [[`2462fd8009`](https://github.com/nodejs/node/commit/2462fd8009)] - **process**: maintain constructor descriptor (Bryan English) [#9306](https://github.com/nodejs/node/pull/9306) +* [[`91a2700721`](https://github.com/nodejs/node/commit/91a2700721)] - **readline**: rename `deDupeHistory` option (Danny Nemer) [#11950](https://github.com/nodejs/node/pull/11950) +* [[`8ab26cf508`](https://github.com/nodejs/node/commit/8ab26cf508)] - **(SEMVER-MINOR)** **readline**: add option to stop duplicates in history (Danny Nemer) [#2982](https://github.com/nodejs/node/pull/2982) +* [[`6a6c431eec`](https://github.com/nodejs/node/commit/6a6c431eec)] - **src**: use persistent strings from node::Environment (Ben Noordhuis) [#11945](https://github.com/nodejs/node/pull/11945) +* [[`d0c2d67083`](https://github.com/nodejs/node/commit/d0c2d67083)] - **src**: add native URL class (James M Snell) [#11801](https://github.com/nodejs/node/pull/11801) +* [[`019a20adb5`](https://github.com/nodejs/node/commit/019a20adb5)] - **src**: make PercentDecode return void (Timothy Gu) [#11922](https://github.com/nodejs/node/pull/11922) +* [[`d6da1705cd`](https://github.com/nodejs/node/commit/d6da1705cd)] - **src**: ensure that fd 0-2 are valid on windows (Bartosz Sosnowski) [#11863](https://github.com/nodejs/node/pull/11863) +* [[`59f71f5661`](https://github.com/nodejs/node/commit/59f71f5661)] - **src, buffer**: do not segfault on out-of-range index (Timothy Gu) [#11927](https://github.com/nodejs/node/pull/11927) +* [[`4051184106`](https://github.com/nodejs/node/commit/4051184106)] - **stream_base,tls_wrap**: notify on destruct (Trevor Norris) [#11947](https://github.com/nodejs/node/pull/11947) +* [[`d8b71be183`](https://github.com/nodejs/node/commit/d8b71be183)] - **test**: fix misleading comment (Franziska Hinkelmann) [#12048](https://github.com/nodejs/node/pull/12048) +* [[`8b2b93f148`](https://github.com/nodejs/node/commit/8b2b93f148)] - **test**: mark 
child-process-exec-kill-throws flaky (Gibson Fahnestock) [#12054](https://github.com/nodejs/node/pull/12054) +* [[`948b99deab`](https://github.com/nodejs/node/commit/948b99deab)] - **test**: fix broken tests in test-buffer-includes (Alexey Orlenko) [#12040](https://github.com/nodejs/node/pull/12040) +* [[`d112aad78b`](https://github.com/nodejs/node/commit/d112aad78b)] - **test**: replace throw with common.fail (Dejon "DJ" Gill) [#9700](https://github.com/nodejs/node/pull/9700) +* [[`41284fbc5b`](https://github.com/nodejs/node/commit/41284fbc5b)] - **test**: cover thrown errors from exec() kill (cjihrig) [#11038](https://github.com/nodejs/node/pull/11038) +* [[`414df6c93b`](https://github.com/nodejs/node/commit/414df6c93b)] - **test**: test validity of prefix in mkdtempSync (Luca Maraschi) [#12009](https://github.com/nodejs/node/pull/12009) +* [[`1c0435b1f3`](https://github.com/nodejs/node/commit/1c0435b1f3)] - **test**: add regex for expected error message (John F. Mercer) [#12011](https://github.com/nodejs/node/pull/12011) +* [[`a73dea9499`](https://github.com/nodejs/node/commit/a73dea9499)] - **test**: add second argument to assert.throws() (Rj Bernaldo) [#12016](https://github.com/nodejs/node/pull/12016) +* [[`ade64e61cd`](https://github.com/nodejs/node/commit/ade64e61cd)] - **test**: refactor test-cluster-disconnect (Rich Trott) [#11981](https://github.com/nodejs/node/pull/11981) +* [[`3d21bfe6b9`](https://github.com/nodejs/node/commit/3d21bfe6b9)] - **test**: invalid chars in http client path (Luca Maraschi) [#11964](https://github.com/nodejs/node/pull/11964) +* [[`e70ed3cb31`](https://github.com/nodejs/node/commit/e70ed3cb31)] - **test**: improve test-vm-cached-data.js (Nick Peleh) [#11974](https://github.com/nodejs/node/pull/11974) +* [[`b48f13af95`](https://github.com/nodejs/node/commit/b48f13af95)] - **test**: add minimal test for net benchmarks (Rich Trott) [#11979](https://github.com/nodejs/node/pull/11979) +* [[`764a00e6e5`](https://github.com/nodejs/node/commit/764a00e6e5)] - **test**: add test for url (Yuta Hiroto) [#11999](https://github.com/nodejs/node/pull/11999) +* [[`bb2de4a5a1`](https://github.com/nodejs/node/commit/bb2de4a5a1)] - **test**: do not use `more` command on Windows (Vse Mozhet Byt) [#11953](https://github.com/nodejs/node/pull/11953) +* [[`55a112689a`](https://github.com/nodejs/node/commit/55a112689a)] - **test**: add test for child_process.execFile() (Rich Trott) [#11929](https://github.com/nodejs/node/pull/11929) +* [[`9ba551f7e3`](https://github.com/nodejs/node/commit/9ba551f7e3)] - **test**: fix flaky test-tls-socket-close (Rich Trott) [#11921](https://github.com/nodejs/node/pull/11921) +* [[`114f9d619d`](https://github.com/nodejs/node/commit/114f9d619d)] - **test**: add hasCrypto check to tls-socket-close (Daniel Bevenius) [#11911](https://github.com/nodejs/node/pull/11911) +* [[`169f187f16`](https://github.com/nodejs/node/commit/169f187f16)] - **test**: synchronize WPT url setters tests data (Daijiro Wachi) 
[#11887](https://github.com/nodejs/node/pull/11887) +* [[`4b1b6b85a9`](https://github.com/nodejs/node/commit/4b1b6b85a9)] - **timers**: fix not to close reused timer handle (Shigeki Ohtsu) [#11646](https://github.com/nodejs/node/pull/11646) +* [[`fd93622f8a`](https://github.com/nodejs/node/commit/fd93622f8a)] - **tls**: fix SecurePair external memory reporting (Ben Noordhuis) [#11896](https://github.com/nodejs/node/pull/11896) +* [[`126dcb76af`](https://github.com/nodejs/node/commit/126dcb76af)] - **url**: name anonymous functions in url (Pedro lima) [#9225](https://github.com/nodejs/node/pull/9225) +* [[`f6755182e5`](https://github.com/nodejs/node/commit/f6755182e5)] - **url**: show input in parse error message (Joyee Cheung) [#11934](https://github.com/nodejs/node/pull/11934) +* [[`c51d925c84`](https://github.com/nodejs/node/commit/c51d925c84)] - **url**: restrict setting protocol to "file" (Daijiro Wachi) [#11887](https://github.com/nodejs/node/pull/11887) + ## 2017-03-21, Version 7.7.4 (Current), @cjihrig diff --git a/src/node_version.h b/src/node_version.h index 80706f8caa992b..a01f39a534bd65 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -2,10 +2,10 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 7 -#define NODE_MINOR_VERSION 7 -#define NODE_PATCH_VERSION 5 +#define NODE_MINOR_VERSION 8 +#define NODE_PATCH_VERSION 0 -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)