Commit a01dd710 authored by Jérémy Lal

Imported Upstream version 5.11.0~dfsg

parent 08f10022
lib/internal/v8_prof_polyfill.js
lib/internal/v8_prof_processor.js
lib/punycode.js
test/addons/??_*/
test/fixtures
......
......@@ -13,7 +13,7 @@ rules:
no-duplicate-case: 2
no-empty-character-class: 2
no-ex-assign: 2
no-extra-boolean-cast : 2
no-extra-boolean-cast: 2
no-extra-parens: [2, "functions"]
no-extra-semi: 2
no-func-assign: 2
......@@ -85,8 +85,9 @@ rules:
prefer-const: 2
# Custom rules in tools/eslint-rules
assert-fail-single-argument: 2
new-with-error: [2, "Error", "RangeError", "TypeError", "SyntaxError", "ReferenceError"]
align-multiline-assignment: 2
# Global scoped method and vars
globals:
......
......@@ -41,6 +41,8 @@ _UpgradeReport_Files/
ipch/
*.sdf
*.opensdf
*.VC.opendb
.vs/
/config.mk
/config.gypi
......
......@@ -87,7 +87,14 @@ To test if Node.js was built correctly:
$ node -e "console.log('Hello from Node.js ' + process.version)"
```
### Android / Android based devices, aka. Firefox OS
### Android / Android-based devices (e.g., Firefox OS)
Although these instructions for building on Android are provided, please note
that Android is not an officially supported platform at this time. Patches to
improve the Android build are accepted. However, there is no testing on Android
in the current continuous integration environment. The participation of people
dedicated and determined to improve Android building, testing, and support is
encouraged.
Be sure you have downloaded and extracted [Android NDK]
(https://developer.android.com/tools/sdk/ndk/index.html)
......
This diff is collapsed.
......@@ -49,7 +49,7 @@ and built upon.
#### Respect the stability index
The rules for the master branch are less strict; consult the
[stability index](./doc/api/documentation.markdown#stability-index) for details.
[stability index](./doc/api/documentation.md#stability-index) for details.
In a nutshell, modules are at varying levels of API stability. Bug fixes are
always welcome but API or behavioral changes to modules at stability level 3
......@@ -173,6 +173,7 @@ to address, apply your changes in a separate commit and push that to your
feature branch. Post a comment in the pull request afterwards; GitHub does
not send out notifications when you add commits.
<a id="developers-certificate-of-origin"></a>
## Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
......
This diff is collapsed.
......@@ -8,9 +8,12 @@ PREFIX ?= /usr/local
FLAKY_TESTS ?= run
TEST_CI_ARGS ?=
STAGINGSERVER ?= node-www
OSTYPE := $(shell uname -s | tr '[A-Z]' '[a-z]')
ifdef JOBS
PARALLEL_ARGS = -j $(JOBS)
endif
ifdef QUICKCHECK
QUICKCHECK_ARG := --quickcheck
endif
......@@ -128,7 +131,7 @@ test/gc/node_modules/weak/build/Release/weakref.node: $(NODE_EXE)
--nodedir="$(shell pwd)"
# Implicitly depends on $(NODE_EXE), see the build-addons rule for rationale.
test/addons/.docbuildstamp: doc/api/addons.markdown
test/addons/.docbuildstamp: doc/api/addons.md
$(RM) -r test/addons/??_*/
$(NODE) tools/doc/addon-verify.js
touch $@
......@@ -168,7 +171,8 @@ test-all-valgrind: test-build
$(PYTHON) tools/test.py --mode=debug,release --valgrind
test-ci: | build-addons
$(PYTHON) tools/test.py -p tap --logfile test.tap --mode=release --flaky-tests=$(FLAKY_TESTS) \
$(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \
--mode=release --flaky-tests=$(FLAKY_TESTS) \
$(TEST_CI_ARGS) addons message parallel sequential
test-release: test-build
......@@ -235,9 +239,9 @@ test-v8-benchmarks:
test-v8-all: test-v8 test-v8-intl test-v8-benchmarks
# runs all v8 tests
apidoc_sources = $(wildcard doc/api/*.markdown)
apidocs = $(addprefix out/,$(apidoc_sources:.markdown=.html)) \
$(addprefix out/,$(apidoc_sources:.markdown=.json))
apidoc_sources = $(wildcard doc/api/*.md)
apidocs = $(addprefix out/,$(apidoc_sources:.md=.html)) \
$(addprefix out/,$(apidoc_sources:.md=.json))
apidoc_dirs = out/doc out/doc/api/ out/doc/api/assets
......@@ -254,10 +258,10 @@ out/doc/api/assets/%: doc/api_assets/% out/doc/api/assets/
out/doc/%: doc/%
cp -r $< $@
out/doc/api/%.json: doc/api/%.markdown $(NODE_EXE)
out/doc/api/%.json: doc/api/%.md $(NODE_EXE)
$(NODE) tools/doc/generate.js --format=json $< > $@
out/doc/api/%.html: doc/api/%.markdown $(NODE_EXE)
out/doc/api/%.html: doc/api/%.md $(NODE_EXE)
$(NODE) tools/doc/generate.js --format=html --template=doc/template.html $< > $@
docopen: out/doc/api/all.html
......@@ -469,7 +473,7 @@ doc-upload: tar
scp -pr out/doc/ $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/docs/
ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/docs.done"
$(TARBALL)-headers: config.gypi release-only
$(TARBALL)-headers: release-only
$(PYTHON) ./configure \
--prefix=/ \
--dest-cpu=$(DESTCPU) \
......@@ -477,7 +481,7 @@ $(TARBALL)-headers: config.gypi release-only
--release-urlbase=$(RELEASE_URLBASE) \
$(CONFIG_FLAGS) $(BUILD_RELEASE_FLAGS)
HEADERS_ONLY=1 $(PYTHON) tools/install.py install '$(TARNAME)' '/'
find $(TARNAME)/ -type l | xargs rm # annoying on windows
find $(TARNAME)/ -type l | xargs rm -f
tar -cf $(TARNAME)-headers.tar $(TARNAME)
rm -rf $(TARNAME)
gzip -c -f -9 $(TARNAME)-headers.tar > $(TARNAME)-headers.tar.gz
......@@ -592,8 +596,13 @@ bench-idle:
$(NODE) benchmark/idle_clients.js &
jslint:
$(NODE) tools/eslint/bin/eslint.js benchmark lib src test tools/doc \
tools/eslint-rules --rulesdir tools/eslint-rules
$(NODE) tools/jslint.js -J benchmark lib src test tools/doc \
tools/eslint-rules tools/jslint.js
jslint-ci:
$(NODE) tools/jslint.js $(PARALLEL_ARGS) -f tap -o test-eslint.tap \
benchmark lib src test tools/doc \
tools/eslint-rules tools/jslint.js
CPPLINT_EXCLUDE ?=
CPPLINT_EXCLUDE += src/node_lttng.cc
......@@ -621,8 +630,7 @@ cpplint:
@$(PYTHON) tools/cpplint.py $(CPPLINT_FILES)
lint: jslint cpplint
lint-ci: lint
lint-ci: jslint-ci cpplint
.PHONY: lint cpplint jslint bench clean docopen docclean doc dist distclean \
check uninstall install install-includes install-bin all staticlib \
......@@ -630,4 +638,5 @@ lint-ci: lint
blog blogclean tar binary release-only bench-http-simple bench-idle \
bench-all bench bench-misc bench-array bench-buffer bench-net \
bench-http bench-fs bench-tls cctest run-ci test-v8 test-v8-intl \
test-v8-benchmarks test-v8-all v8 lint-ci bench-ci
test-v8-benchmarks test-v8-all v8 lint-ci bench-ci jslint-ci \
$(TARBALL)-headers
......@@ -70,7 +70,15 @@ documentation of the latest stable version.
Stable, LTS and Nightly download directories all contain a *SHASUMS256.txt*
file that lists the SHA checksums for each file available for
download. To check that a downloaded file matches the checksum, run
download.
The *SHASUMS256.txt* file can be downloaded using curl.
```
$ curl -O https://nodejs.org/dist/vx.y.z/SHASUMS256.txt
```
To check that a downloaded file matches the checksum, run
it through `sha256sum` with a command such as:
```
......@@ -159,6 +167,7 @@ information about the governance of the Node.js project, see
### Collaborators
* [addaleax](https://github.com/addaleax) - **Anna Henningsen** &lt;anna@addaleax.net&gt;
* [AndreasMadsen](https://github.com/AndreasMadsen) - **Andreas Madsen** &lt;amwebdk@gmail.com&gt;
* [benjamingr](https://github.com/benjamingr) - **Benjamin Gruenbaum** &lt;benjamingr@gmail.com&gt;
* [brendanashworth](https://github.com/brendanashworth) - **Brendan Ashworth** &lt;brendan.ashworth@me.com&gt;
......@@ -168,6 +177,7 @@ information about the governance of the Node.js project, see
* [geek](https://github.com/geek) - **Wyatt Preul** &lt;wpreul@gmail.com&gt;
* [iarna](https://github.com/iarna) - **Rebecca Turner** &lt;me@re-becca.org&gt;
* [isaacs](https://github.com/isaacs) - **Isaac Z. Schlueter** &lt;i@izs.me&gt;
* [iWuzHere](https://github.com/iWuzHere) - **Imran Iqbal** &lt;imran@imraniqbal.org&gt;
* [jbergstroem](https://github.com/jbergstroem) - **Johan Bergström** &lt;bugs@bergstroem.nu&gt;
* [joaocgreis](https://github.com/joaocgreis) - **João Reis** &lt;reis@janeasystems.com&gt;
* [julianduque](https://github.com/julianduque) - **Julian Duque** &lt;julianduquej@gmail.com&gt;
......@@ -188,9 +198,11 @@ information about the governance of the Node.js project, see
* [romankl](https://github.com/romankl) - **Roman Klauke** &lt;romaaan.git@gmail.com&gt;
* [saghul](https://github.com/saghul) - **Saúl Ibarra Corretgé** &lt;saghul@gmail.com&gt;
* [sam-github](https://github.com/sam-github) - **Sam Roberts** &lt;vieuxtech@gmail.com&gt;
* [santigimeno](https://github.com/santigimeno) - **Santiago Gimeno** &lt;santiago.gimeno@gmail.com&gt;
* [seishun](https://github.com/seishun) - **Nikolai Vavilov** &lt;vvnicholas@gmail.com&gt;
* [silverwind](https://github.com/silverwind) - **Roman Reiss** &lt;me@silverwind.io&gt;
* [srl295](https://github.com/srl295) - **Steven R Loomis** &lt;srloomis@us.ibm.com&gt;
* [stefanmb](https://github.com/stefanmb) - **Stefan Budeanu** &lt;stefan@budeanu.com&gt;
* [targos](https://github.com/targos) - **Michaël Zasso** &lt;mic.besace@gmail.com&gt;
* [tellnes](https://github.com/tellnes) - **Christian Tellnes** &lt;christian@tellnes.no&gt;
* [thealphanerd](https://github.com/thealphanerd) - **Myles Borins** &lt;myles.borins@gmail.com&gt;
......
'use strict';
const common = require('../common.js');
const v8 = require('v8');
const bench = common.createBenchmark(main, {
method: ['offset', 'slice'],
size: [16, 512, 1024, 4096, 16386],
millions: [1]
});
function compareUsingSlice(b0, b1, len, iter) {
// Force optimization before starting the benchmark
Buffer.compare(b0.slice(1, len), b1.slice(1, len));
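// %OptimizeFunctionOnNextCall is V8 native syntax; it only becomes usable once
// --allow_natives_syntax is enabled, hence the setFlagsFromString() and eval() calls below.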
v8.setFlagsFromString('--allow_natives_syntax');
eval('%OptimizeFunctionOnNextCall(Buffer.compare)');
eval('%OptimizeFunctionOnNextCall(b0.slice)');
eval('%OptimizeFunctionOnNextCall(b1.slice)');
Buffer.compare(b0.slice(1, len), b1.slice(1, len));
doCompareUsingSlice(b0, b1, len, iter);
}
function doCompareUsingSlice(b0, b1, len, iter) {
var i;
bench.start();
for (i = 0; i < iter; i++)
Buffer.compare(b0.slice(1, len), b1.slice(1, len));
bench.end(iter / 1e6);
}
function compareUsingOffset(b0, b1, len, iter) {
len = len + 1;
// Force optimization before starting the benchmark
b0.compare(b1, 1, len, 1, len);
v8.setFlagsFromString('--allow_natives_syntax');
eval('%OptimizeFunctionOnNextCall(b0.compare)');
b0.compare(b1, 1, len, 1, len);
doCompareUsingOffset(b0, b1, len, iter);
}
function doCompareUsingOffset(b0, b1, len, iter) {
var i;
bench.start();
for (i = 0; i < iter; i++)
b0.compare(b1, 1, len, 1, len);
bench.end(iter / 1e6);
}
function main(conf) {
const iter = (conf.millions >>> 0) * 1e6;
const size = (conf.size >>> 0);
const method = conf.method === 'slice' ?
compareUsingSlice : compareUsingOffset;
method(Buffer.alloc(size, 'a'),
Buffer.alloc(size, 'b'),
size >> 1,
iter);
}
......@@ -7,15 +7,17 @@ var tmpDirectory = path.join(__dirname, '..', 'tmp');
var benchmarkDirectory = path.join(tmpDirectory, 'nodejs-benchmark-module');
var bench = common.createBenchmark(main, {
thousands: [50]
thousands: [50],
fullPath: ['true', 'false']
});
function main(conf) {
var n = +conf.thousands * 1e3;
rmrf(tmpDirectory);
try { fs.mkdirSync(tmpDirectory); } catch (e) {}
try { fs.mkdirSync(benchmarkDirectory); } catch (e) {}
var n = +conf.thousands * 1e3;
for (var i = 0; i <= n; i++) {
fs.mkdirSync(benchmarkDirectory + i);
fs.writeFileSync(
......@@ -28,10 +30,21 @@ function main(conf) {
);
}
measure(n);
if (conf.fullPath === 'true')
measureFull(n);
else
measureDir(n);
}
function measureFull(n) {
bench.start();
for (var i = 0; i <= n; i++) {
require(benchmarkDirectory + i + '/index.js');
}
bench.end(n / 1e3);
}
function measure(n) {
function measureDir(n) {
bench.start();
for (var i = 0; i <= n; i++) {
require(benchmarkDirectory + i);
......
......@@ -95,7 +95,7 @@
'variables': {
'v8_enable_handle_zapping': 0,
},
'cflags': [ '-O3', '-ffunction-sections', '-fdata-sections' ],
'cflags': [ '-O3' ],
'conditions': [
['target_arch=="x64"', {
'msvs_configuration_platform': 'x64',
......
......@@ -22,14 +22,14 @@ PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"')
HELPER ?=
BINEXT ?=
ifeq (darwin,$(PLATFORM))
SONAME ?= libhttp_parser.2.6.2.dylib
SONAME ?= libhttp_parser.2.7.0.dylib
SOEXT ?= dylib
else ifeq (wine,$(PLATFORM))
CC = winegcc
BINEXT = .exe.so
HELPER = wine
else
SONAME ?= libhttp_parser.so.2.6.2
SONAME ?= libhttp_parser.so.2.7.0
SOEXT ?= so
endif
......
......@@ -1812,6 +1812,9 @@ reexecute:
case 0:
break;
case 2:
parser->upgrade = 1;
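/* fall through: a return value of 2 also sets F_SKIPBODY, like 1 */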
case 1:
parser->flags |= F_SKIPBODY;
break;
......
......@@ -26,8 +26,8 @@ extern "C" {
/* Also update SONAME in the Makefile whenever you change these. */
#define HTTP_PARSER_VERSION_MAJOR 2
#define HTTP_PARSER_VERSION_MINOR 6
#define HTTP_PARSER_VERSION_PATCH 2
#define HTTP_PARSER_VERSION_MINOR 7
#define HTTP_PARSER_VERSION_PATCH 0
#include <sys/types.h>
#if defined(_WIN32) && !defined(__MINGW32__) && \
......@@ -77,6 +77,11 @@ typedef struct http_parser_settings http_parser_settings;
* HEAD request which may contain 'Content-Length' or 'Transfer-Encoding:
* chunked' headers that indicate the presence of a body.
*
* Returning `2` from on_headers_complete will tell the parser that it should
* expect neither a body nor any further responses on this connection. This is
* useful for handling responses to a CONNECT request, which may not contain
* `Upgrade` or `Connection: upgrade` headers.
*
* http_data_cb does not return data chunks. It will be called arbitrarily
* many times for each string. E.G. you might get 10 callbacks for "on_url"
* each providing just a few characters more data.
......
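A minimal sketch of how a caller might use the return value `2` documented above, assuming it already knows the pending request was a CONNECT. The names `on_connect_headers_complete`, `tunnel_settings`, and `parse_connect_response` are illustrative only; `http_parser_settings`, `on_headers_complete`, `http_parser_execute`, and the `upgrade` field are the library's own API.
```
#include <string.h>
#include "http_parser.h"

/* Hypothetical callback: the application knows the outstanding request was
 * CONNECT, so it returns 2 to tell the parser to expect neither a body nor
 * any further responses on this connection. */
static int on_connect_headers_complete(http_parser *p) {
  (void) p;
  return 2;
}

/* Only on_headers_complete is set; the remaining callbacks stay NULL. */
static http_parser_settings tunnel_settings = {
  .on_headers_complete = on_connect_headers_complete
};

/* Feed the CONNECT response to the parser. After http_parser_execute()
 * returns, parser->upgrade is set and any remaining bytes belong to the
 * tunnel rather than to HTTP. */
static size_t parse_connect_response(http_parser *parser, const char *buf) {
  return http_parser_execute(parser, &tunnel_settings, buf, strlen(buf));
}
```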
......@@ -2173,6 +2173,20 @@ pause_chunk_complete_cb (http_parser *p)
return chunk_complete_cb(p);
}
int
connect_headers_complete_cb (http_parser *p)
{
headers_complete_cb(p);
return 1;
}
int
connect_message_complete_cb (http_parser *p)
{
messages[num_messages].should_keep_alive = http_should_keep_alive(parser);
return message_complete_cb(p);
}
static http_parser_settings settings_pause =
{.on_message_begin = pause_message_begin_cb
,.on_header_field = pause_header_field_cb
......@@ -2212,6 +2226,19 @@ static http_parser_settings settings_count_body =
,.on_chunk_complete = chunk_complete_cb
};
static http_parser_settings settings_connect =
{.on_message_begin = message_begin_cb
,.on_header_field = header_field_cb
,.on_header_value = header_value_cb
,.on_url = request_url_cb
,.on_status = response_status_cb
,.on_body = dontcall_body_cb
,.on_headers_complete = connect_headers_complete_cb
,.on_message_complete = connect_message_complete_cb
,.on_chunk_header = chunk_header_cb
,.on_chunk_complete = chunk_complete_cb
};
static http_parser_settings settings_null =
{.on_message_begin = 0
,.on_header_field = 0
......@@ -2275,6 +2302,14 @@ size_t parse_pause (const char *buf, size_t len)
return nparsed;
}
size_t parse_connect (const char *buf, size_t len)
{
size_t nparsed;
currently_parsing_eof = (len == 0);
nparsed = http_parser_execute(parser, &settings_connect, buf, len);
return nparsed;
}
static inline int
check_str_eq (const struct message *m,
const char *prop,
......@@ -2331,7 +2366,7 @@ do { \
} while(0)
int
message_eq (int index, const struct message *expected)
message_eq (int index, int connect, const struct message *expected)
{
int i;
struct message *m = &messages[index];
......@@ -2346,8 +2381,10 @@ message_eq (int index, const struct message *expected)
MESSAGE_CHECK_STR_EQ(expected, m, response_status);
}
MESSAGE_CHECK_NUM_EQ(expected, m, should_keep_alive);
MESSAGE_CHECK_NUM_EQ(expected, m, message_complete_on_eof);
if (!connect) {
MESSAGE_CHECK_NUM_EQ(expected, m, should_keep_alive);
MESSAGE_CHECK_NUM_EQ(expected, m, message_complete_on_eof);
}
assert(m->message_begin_cb_called);
assert(m->headers_complete_cb_called);
......@@ -2385,16 +2422,22 @@ message_eq (int index, const struct message *expected)
MESSAGE_CHECK_NUM_EQ(expected, m, port);
}
if (expected->body_size) {
if (connect) {
check_num_eq(m, "body_size", 0, m->body_size);
} else if (expected->body_size) {
MESSAGE_CHECK_NUM_EQ(expected, m, body_size);
} else {
MESSAGE_CHECK_STR_EQ(expected, m, body);
}
assert(m->num_chunks == m->num_chunks_complete);
MESSAGE_CHECK_NUM_EQ(expected, m, num_chunks_complete);
for (i = 0; i < m->num_chunks && i < MAX_CHUNKS; i++) {
MESSAGE_CHECK_NUM_EQ(expected, m, chunk_lengths[i]);
if (connect) {
check_num_eq(m, "num_chunks_complete", 0, m->num_chunks_complete);
} else {
assert(m->num_chunks == m->num_chunks_complete);
MESSAGE_CHECK_NUM_EQ(expected, m, num_chunks_complete);
for (i = 0; i < m->num_chunks && i < MAX_CHUNKS; i++) {
MESSAGE_CHECK_NUM_EQ(expected, m, chunk_lengths[i]);
}
}
MESSAGE_CHECK_NUM_EQ(expected, m, num_headers);
......@@ -3201,7 +3244,7 @@ test_message (const struct message *message)
abort();
}
if(!message_eq(0, message)) abort();
if(!message_eq(0, 0, message)) abort();
parser_free();
}
......@@ -3238,7 +3281,7 @@ test_message_count_body (const struct message *message)
abort();
}
if(!message_eq(0, message)) abort();
if(!message_eq(0, 0, message)) abort();
parser_free();
}
......@@ -3589,9 +3632,9 @@ test:
abort();
}
if (!message_eq(0, r1)) abort();
if (message_count > 1 && !message_eq(1, r2)) abort();
if (message_count > 2 && !message_eq(2, r3)) abort();
if (!message_eq(0, 0, r1)) abort();
if (message_count > 1 && !message_eq(1, 0, r2)) abort();
if (message_count > 2 && !message_eq(2, 0, r3)) abort();
parser_free();
}
......@@ -3687,17 +3730,17 @@ test:
goto error;
}
if (!message_eq(0, r1)) {
if (!message_eq(0, 0, r1)) {
fprintf(stderr, "\n\nError matching messages[0] in test_scan.\n");
goto error;
}
if (message_count > 1 && !message_eq(1, r2)) {
if (message_count > 1 && !message_eq(1, 0, r2)) {
fprintf(stderr, "\n\nError matching messages[1] in test_scan.\n");
goto error;
}
if (message_count > 2 && !message_eq(2, r3)) {
if (message_count > 2 && !message_eq(2, 0, r3)) {
fprintf(stderr, "\n\nError matching messages[2] in test_scan.\n");
goto error;
}
......@@ -3796,7 +3839,29 @@ test:
abort();
}
if(!message_eq(0, msg)) abort();
if(!message_eq(0, 0, msg)) abort();
parser_free();
}
/* Verify that body and next message won't be parsed in responses to CONNECT */
void
test_message_connect (const struct message *msg)
{
char *buf = (char*) msg->raw;
size_t buflen = strlen(msg->raw);
size_t nread;
parser_init(msg->type);
nread = parse_connect(buf, buflen);
if (num_messages != 1) {
printf("\n*** num_messages != 1 after testing '%s' ***\n\n", msg->name);
abort();
}
if(!message_eq(0, 1, msg)) abort();
parser_free();
}
......@@ -3867,6 +3932,10 @@ main (void)
test_message_pause(&responses[i]);
}
for (i = 0; i < response_count; i++) {
test_message_connect(&responses[i]);
}
for (i = 0; i < response_count; i++) {
if (!responses[i].should_keep_alive) continue;
for (j = 0; j < response_count; j++) {
......
......@@ -2416,7 +2416,7 @@ DebugCommandProcessor.prototype.restartFrameRequest_ = function(
frame_mirror = this.exec_state_.frame();
}
var result_description = Debug.LiveEdit.RestartFrame(frame_mirror);
var result_description = frame_mirror.restart();
response.body = {result: result_description};
};
......
......@@ -750,8 +750,11 @@ inherits(MacCppEntriesProvider, UnixCppEntriesProvider);
MacCppEntriesProvider.prototype.loadSymbols = function(libName) {
this.parsePos = 0;
libName = this.targetRootFS + libName;
// It seems that in OS X `nm` thinks that `-f` is a format option, not a
// "flat" display option flag.
try {
this.symbols = [os.system(this.nmExec, ['-n', '-f', libName], -1, -1), ''];
this.symbols = [os.system(this.nmExec, ['-n', libName], -1, -1), ''];
} catch (e) {
// If the library cannot be found on this system let's not panic.