+
+
diff --git a/test/fixtures/wpt/performance-timeline/po-takeRecords.any.js b/test/fixtures/wpt/performance-timeline/po-takeRecords.any.js
new file mode 100644
index 00000000000000..86ad397b0a5c37
--- /dev/null
+++ b/test/fixtures/wpt/performance-timeline/po-takeRecords.any.js
@@ -0,0 +1,34 @@
+// META: title=PerformanceObserver: takeRecords
+// META: script=performanceobservers.js
+
+async_test(function (t) {
+ const observer = new PerformanceObserver(function (entryList, observer) {
+ assert_unreached('This callback should not have been called.')
+ });
+ let entries = observer.takeRecords();
+ checkEntries(entries, [], 'No records before observe');
+ observer.observe({entryTypes: ['mark']});
+ assert_equals(typeof(observer.takeRecords), 'function');
+ entries = observer.takeRecords();
+ checkEntries(entries, [], 'No records just from observe');
+ performance.mark('a');
+ performance.mark('b');
+ entries = observer.takeRecords();
+ checkEntries(entries, [
+ {entryType: 'mark', name: 'a'},
+ {entryType: 'mark', name: 'b'}
+ ]);
+ performance.mark('c');
+ performance.mark('d');
+ performance.mark('e');
+ entries = observer.takeRecords();
+ checkEntries(entries, [
+ {entryType: 'mark', name: 'c'},
+ {entryType: 'mark', name: 'd'},
+ {entryType: 'mark', name: 'e'}
+ ]);
+ entries = observer.takeRecords();
+ checkEntries(entries, [], 'No entries right after takeRecords');
+ observer.disconnect();
+ t.done();
+ }, "Test PerformanceObserver's takeRecords()");
diff --git a/test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html b/test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html
new file mode 100644
index 00000000000000..ef5be73395b49d
--- /dev/null
+++ b/test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html
@@ -0,0 +1,17 @@
+
+
diff --git a/test/fixtures/wpt/performance-timeline/resources/square.png b/test/fixtures/wpt/performance-timeline/resources/square.png
new file mode 100644
index 0000000000000000000000000000000000000000..be211bc377180386eec7e891485e083a2c45841e
GIT binary patch
literal 249
zcmeAS@N?(olHy`uVBq!ia0vp^&Oq$I!2~4F*~E4MDaPU;cPEB*=VV?2IV|apzK#qG
z8~eHcB(eheoCO|{#S9F5he4R}c>anMpkSY;i(^QJ^V@3&IS(i>usHtKSF4Sf>dC%O
z>l*WB#=Z#>-@S`%S8CZt@7Oo5h)`6&={0M#tksJI@9JEj%NRUe
L{an^LB{Ts5 true);
+ postMessage("SUCCESS");
+} catch (ex) {
+ postMessage("FAILURE");
+}
diff --git a/test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js b/test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js
new file mode 100644
index 00000000000000..25f195939e7b69
--- /dev/null
+++ b/test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js
@@ -0,0 +1,19 @@
+test(() => {
+ if (typeof PerformanceObserver.supportedEntryTypes === "undefined")
+ assert_unreached("supportedEntryTypes is not supported.");
+ const types = PerformanceObserver.supportedEntryTypes;
+ assert_greater_than(types.length, 0,
+ "There should be at least one entry in supportedEntryTypes.");
+ for (let i = 1; i < types.length; i++) {
+ assert_true(types[i-1] < types[i],
+ "The strings '" + types[i-1] + "' and '" + types[i] +
+ "' are repeated or they are not in alphabetical order.")
+ }
+}, "supportedEntryTypes exists and returns entries in alphabetical order");
+
+test(() => {
+ if (typeof PerformanceObserver.supportedEntryTypes === "undefined")
+ assert_unreached("supportedEntryTypes is not supported.");
+ assert_true(PerformanceObserver.supportedEntryTypes ===
+ PerformanceObserver.supportedEntryTypes);
+}, "supportedEntryTypes caches result");
diff --git a/test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js b/test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js
new file mode 100644
index 00000000000000..d869c7c52d55d6
--- /dev/null
+++ b/test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js
@@ -0,0 +1,25 @@
+function testTimeResolution(highResTimeFunc, funcString) {
+ test(() => {
+ const t0 = highResTimeFunc();
+ let t1 = highResTimeFunc();
+ while (t0 == t1) {
+ t1 = highResTimeFunc();
+ }
+ const epsilon = 1e-5;
+ assert_greater_than_equal(t1 - t0, 0.005 - epsilon, 'The second ' + funcString + ' should be much greater than the first');
+ }, 'Verifies the resolution of ' + funcString + ' is at least 5 microseconds.');
+}
+
+function timeByPerformanceNow() {
+ return performance.now();
+}
+
+function timeByUserTiming() {
+ performance.mark('timer');
+ const time = performance.getEntriesByName('timer')[0].startTime;
+ performance.clearMarks('timer');
+ return time;
+}
+
+testTimeResolution(timeByPerformanceNow, 'performance.now()');
+testTimeResolution(timeByUserTiming, 'entry.startTime');
diff --git a/test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html b/test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html
new file mode 100644
index 00000000000000..fc92bc971003f2
--- /dev/null
+++ b/test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/META.yml b/test/fixtures/wpt/user-timing/META.yml
new file mode 100644
index 00000000000000..5cb2a789c09c89
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/META.yml
@@ -0,0 +1,4 @@
+spec: https://w3c.github.io/user-timing/
+suggested_reviewers:
+ - plehegar
+ - igrigorik
diff --git a/test/fixtures/wpt/user-timing/buffered-flag.any.js b/test/fixtures/wpt/user-timing/buffered-flag.any.js
new file mode 100644
index 00000000000000..f938c8522d829a
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/buffered-flag.any.js
@@ -0,0 +1,27 @@
+async_test(t => {
+ // First observer creates second in callback to ensure the entry has been dispatched by the time
+ // the second observer begins observing.
+ new PerformanceObserver(() => {
+ // Second observer requires 'buffered: true' to see an entry.
+ new PerformanceObserver(t.step_func_done(list => {
+ const entries = list.getEntries();
+ assert_equals(entries.length, 1, 'There should be 1 mark entry.');
+ assert_equals(entries[0].entryType, 'mark');
+ })).observe({type: 'mark', buffered: true});
+ }).observe({entryTypes: ['mark']});
+ performance.mark('foo');
+}, 'PerformanceObserver with buffered flag sees previous marks');
+
+async_test(t => {
+ // First observer creates second in callback to ensure the entry has been dispatched by the time
+ // the second observer begins observing.
+ new PerformanceObserver(() => {
+ // Second observer requires 'buffered: true' to see an entry.
+ new PerformanceObserver(t.step_func_done(list => {
+ const entries = list.getEntries();
+ assert_equals(entries.length, 1, 'There should be 1 measure entry.');
+ assert_equals(entries[0].entryType, 'measure');
+ })).observe({type: 'measure', buffered: true});
+ }).observe({entryTypes: ['measure']});
+ performance.measure('bar');
+}, 'PerformanceObserver with buffered flag sees previous measures');
diff --git a/test/fixtures/wpt/user-timing/case-sensitivity.any.js b/test/fixtures/wpt/user-timing/case-sensitivity.any.js
new file mode 100644
index 00000000000000..1c0b0dcac361fe
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/case-sensitivity.any.js
@@ -0,0 +1,25 @@
+ test(function () {
+ assert_equals(typeof self.performance, "object");
+ assert_equals(typeof self.performance.getEntriesByType, "function");
+
+ self.performance.mark("mark1");
+ self.performance.measure("measure1");
+
+ const type = [
+ 'mark',
+ 'measure',
+ ];
+ type.forEach(function(entryType) {
+ if (PerformanceObserver.supportedEntryTypes.includes(entryType)) {
+ const entryTypeUpperCased = entryType.toUpperCase();
+ const entryTypeCapitalized = entryType[0].toUpperCase() + entryType.substring(1);
+ const lowerList = self.performance.getEntriesByType(entryType);
+ const upperList = self.performance.getEntriesByType(entryTypeUpperCased);
+ const mixedList = self.performance.getEntriesByType(entryTypeCapitalized);
+
+ assert_greater_than(lowerList.length, 0, "Entries exist");
+ assert_equals(upperList.length, 0, "getEntriesByType('" + entryTypeCapitalized + "').length");
+ assert_equals(mixedList.length, 0, "getEntriesByType('" + entryTypeCapitalized + "').length");
+ }
+ });
+ }, "getEntriesByType values are case sensitive");
diff --git a/test/fixtures/wpt/user-timing/clearMarks.html b/test/fixtures/wpt/user-timing/clearMarks.html
new file mode 100644
index 00000000000000..92c60a3bbb856b
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clearMarks.html
@@ -0,0 +1,84 @@
+
+
+
+
+functionality test of window.performance.clearMarks
+
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates functionality of the interface window.performance.clearMarks.
+
+
+
diff --git a/test/fixtures/wpt/user-timing/clearMeasures.html b/test/fixtures/wpt/user-timing/clearMeasures.html
new file mode 100644
index 00000000000000..54d41005698305
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clearMeasures.html
@@ -0,0 +1,77 @@
+
+
+
+
+functionality test of window.performance.clearMeasures
+
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates functionality of the interface window.performance.clearMeasures.
+
+
+
diff --git a/test/fixtures/wpt/user-timing/clear_all_marks.any.js b/test/fixtures/wpt/user-timing/clear_all_marks.any.js
new file mode 100644
index 00000000000000..35cd2a04f61036
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clear_all_marks.any.js
@@ -0,0 +1,17 @@
+test(function() {
+ self.performance.mark("mark1");
+ self.performance.mark("mark2");
+
+ // test that two marks have been created
+ var entries = self.performance.getEntriesByType("mark");
+ assert_equals(entries.length, 2, "Two marks have been created for this test.");
+
+ // clear all marks
+ self.performance.clearMarks();
+
+ // test that all marks were cleared
+ entries = self.performance.getEntriesByType("mark");
+
+ assert_equals(entries.length, 0, "All marks have been cleared.");
+
+}, "Clearing all marks remove all of them.");
diff --git a/test/fixtures/wpt/user-timing/clear_all_measures.any.js b/test/fixtures/wpt/user-timing/clear_all_measures.any.js
new file mode 100644
index 00000000000000..32c993f2827a30
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clear_all_measures.any.js
@@ -0,0 +1,21 @@
+test(function()
+{
+ self.performance.mark("mark1");
+ self.performance.measure("measure1", "mark1");
+ self.performance.mark("mark2");
+ self.performance.measure("measure2", "mark2");
+
+ // test that two measures have been created
+ var entries = self.performance.getEntriesByType("measure");
+ assert_equals(entries.length, 2, "Two measures have been created for this test.");
+
+ // clear all measures
+ self.performance.clearMeasures();
+
+ // test that all measures were cleared
+ entries = self.performance.getEntriesByType("measure");
+ assert_equals(entries.length, 0,
+ "After a call to self.performance.clearMeasures(), " +
+ "self.performance.getEntriesByType(\"measure\") returns an empty object.");
+
+}, "Clearing all marks remove all of them.");
diff --git a/test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js b/test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js
new file mode 100644
index 00000000000000..c7d8b478613401
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js
@@ -0,0 +1,26 @@
+test(function() {
+ self.performance.mark("mark1");
+ self.performance.mark("mark2");
+
+ // test that two marks have been created
+ var entries = self.performance.getEntriesByType("mark");
+ assert_equals(entries.length, 2, "Two marks have been created for this test.");
+
+ // clear non-existent mark
+ self.performance.clearMarks("mark3");
+
+ // test that "mark1" still exists
+ entries = self.performance.getEntriesByName("mark1");
+ assert_equals(entries[0].name, "mark1",
+ "After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" +
+ "\" is a non-existent mark, self.performance.getEntriesByName(\"mark1\") " +
+ "returns an object containing the \"mark1\" mark.");
+
+ // test that "mark2" still exists
+ entries = self.performance.getEntriesByName("mark2");
+ assert_equals(entries[0].name, "mark2",
+ "After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" +
+ "\" is a non-existent mark, self.performance.getEntriesByName(\"mark2\") " +
+ "returns an object containing the \"mark2\" mark.");
+
+}, "Clearing a non-existent mark doesn't affect existing marks");
diff --git a/test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js b/test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js
new file mode 100644
index 00000000000000..9de0b5f266d4e2
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js
@@ -0,0 +1,29 @@
+test(function()
+{
+ self.performance.mark("mark1");
+ self.performance.measure("measure1", "mark1");
+ self.performance.mark("mark2");
+ self.performance.measure("measure2", "mark2");
+
+ // test that two measures have been created
+ var entries = self.performance.getEntriesByType("measure");
+ assert_equals(entries.length, 2, "Two measures have been created for this test.");
+
+ // clear non-existent measure
+ self.performance.clearMeasures("measure3");
+
+ // test that "measure1" still exists
+ entries = self.performance.getEntriesByName("measure1");
+ assert_equals(entries[0].name, "measure1",
+ "After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" +
+ "\" is a non-existent measure, self.performance.getEntriesByName(\"measure1\") " +
+ "returns an object containing the \"measure1\" measure.");
+
+ // test that "measure2" still exists
+ entries = self.performance.getEntriesByName("measure2");
+ assert_equals(entries[0].name, "measure2",
+ "After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" +
+ "\" is a non-existent measure, self.performance.getEntriesByName(\"measure2\") " +
+ "returns an object containing the \"measure2\" measure.");
+
+}, "Clearing a non-existent measure doesn't affect existing measures");
diff --git a/test/fixtures/wpt/user-timing/clear_one_mark.any.js b/test/fixtures/wpt/user-timing/clear_one_mark.any.js
new file mode 100644
index 00000000000000..c180199d8c9f92
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clear_one_mark.any.js
@@ -0,0 +1,26 @@
+test(function() {
+ self.performance.mark("mark1");
+ self.performance.mark("mark2");
+
+ // test that two marks have been created
+ var entries = self.performance.getEntriesByType("mark");
+ assert_equals(entries.length, 2, "Two marks have been created for this test.");
+
+ // clear existent mark
+ self.performance.clearMarks("mark1");
+
+ // test that "mark1" was cleared
+ entries = self.performance.getEntriesByName("mark1");
+
+ assert_equals(entries.length, 0,
+ "After a call to self.performance.clearMarks(\"mark1\"), " +
+ "window.performance.getEntriesByName(\"mark1\") returns an empty object.");
+
+ // test that "mark2" still exists
+ entries = self.performance.getEntriesByName("mark2");
+ assert_equals(entries[0].name, "mark2",
+ "After a call to self.performance.clearMarks(\"mark1\"), " +
+ "window.performance.getEntriesByName(\"mark2\") returns an object containing the " +
+ "\"mark2\" mark.");
+
+}, "Clearing an existent mark doesn't affect other existing marks");
diff --git a/test/fixtures/wpt/user-timing/clear_one_measure.any.js b/test/fixtures/wpt/user-timing/clear_one_measure.any.js
new file mode 100644
index 00000000000000..a5e663772c8bbe
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/clear_one_measure.any.js
@@ -0,0 +1,29 @@
+test(function()
+{
+ self.performance.mark("mark1");
+ self.performance.measure("measure1", "mark1");
+ self.performance.mark("mark2");
+ self.performance.measure("measure2", "mark2");
+
+ // test that two measures have been created
+ var entries = self.performance.getEntriesByType("measure");
+ assert_equals(entries.length, 2, "Two measures have been created for this test.");
+
+ // clear existent measure
+ self.performance.clearMeasures("measure1");
+
+ // test that "measure1" was cleared
+ entries = self.performance.getEntriesByName("measure1");
+
+ assert_equals(entries.length, 0,
+ "After a call to self.performance.clearMeasures(\"measure1\"), " +
+ "self.performance.getEntriesByName(\"measure1\") returns an empty object.");
+
+ // test that "measure2" still exists
+ entries = self.performance.getEntriesByName("measure2");
+ assert_equals(entries[0].name, "measure2",
+ "After a call to self.performance.clearMeasures(\"measure1\"), " +
+ "self.performance.getEntriesByName(\"measure2\") returns an object containing the " +
+ "\"measure2\" measure.");
+
+}, "Clearing an existent measure doesn't affect other existing measures");
diff --git a/test/fixtures/wpt/user-timing/entry_type.any.js b/test/fixtures/wpt/user-timing/entry_type.any.js
new file mode 100644
index 00000000000000..1e37453d09d42e
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/entry_type.any.js
@@ -0,0 +1,13 @@
+test(function () {
+ self.performance.mark('mark');
+ var mark_entry = self.performance.getEntriesByName('mark')[0];
+
+ assert_equals(Object.prototype.toString.call(mark_entry), '[object PerformanceMark]', 'Class name of mark entry should be PerformanceMark.');
+}, "Validate the user timing entry type PerformanceMark");
+
+test(function () {
+ self.performance.measure('measure');
+ var measure_entry = self.performance.getEntriesByName('measure')[0];
+
+ assert_equals(Object.prototype.toString.call(measure_entry), '[object PerformanceMeasure]', 'Class name of measure entry should be PerformanceMeasure.');
+}, "Validate the user timing entry type PerformanceMeasure");
diff --git a/test/fixtures/wpt/user-timing/idlharness.any.js b/test/fixtures/wpt/user-timing/idlharness.any.js
new file mode 100644
index 00000000000000..511f2d0455b833
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/idlharness.any.js
@@ -0,0 +1,33 @@
+// META: global=window,worker
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+// META: timeout=long
+
+// https://w3c.github.io/user-timing/
+
+'use strict';
+
+idl_test(
+ ['user-timing'],
+ ['hr-time', 'performance-timeline', 'dom'],
+ idl_array => {
+ try {
+ performance.mark('test');
+ performance.measure('test');
+ for (const m of performance.getEntriesByType('mark')) {
+ self.mark = m;
+ }
+ for (const m of performance.getEntriesByType('measure')) {
+ self.measure = m;
+ }
+ } catch (e) {
+ // Will be surfaced when mark is undefined below.
+ }
+
+ idl_array.add_objects({
+ Performance: ['performance'],
+ PerformanceMark: ['mark'],
+ PerformanceMeasure: ['measure'],
+ });
+ }
+);
diff --git a/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html
new file mode 100644
index 00000000000000..1df94a3006d7fb
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html
@@ -0,0 +1,35 @@
+
+
+
+
+ exception test of performance.mark and performance.measure
+
+
+
+
+
+
+
Description
+
This test validates exception scenarios of invoking mark() and measure() with timing attributes as value.
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js
new file mode 100644
index 00000000000000..32677c64d3bd5f
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js
@@ -0,0 +1,25 @@
+importScripts("/resources/testharness.js");
+importScripts("resources/webperftestharness.js");
+
+function emit_test(attrName) {
+ test(function() {
+ performance.mark(attrName);
+ performance.clearMarks(attrName);
+ }, "performance.mark should not throw if used with timing attribute " + attrName
+ + " in workers");
+}
+for (var i in timingAttributes) {
+ emit_test(timingAttributes[i]);
+}
+
+function emit_test2(attrName) {
+ test(function() {
+ performance.measure(attrName);
+ performance.clearMeasures(attrName);
+ }, "performance.measure should not throw if used with timing attribute " + attrName
+ + " in workers");
+}
+for (var i in timingAttributes) {
+ emit_test2(timingAttributes[i]);
+}
+done();
diff --git a/test/fixtures/wpt/user-timing/invoke_without_parameter.html b/test/fixtures/wpt/user-timing/invoke_without_parameter.html
new file mode 100644
index 00000000000000..114435e59befbb
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/invoke_without_parameter.html
@@ -0,0 +1,26 @@
+
+
+
+
+ exception test of performance.mark and performance.measure
+
+
+
+
+
+
+
+
Description
+
This test validates exception scenarios of invoking mark() and measure() without parameter.
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/mark-entry-constructor.any.js b/test/fixtures/wpt/user-timing/mark-entry-constructor.any.js
new file mode 100644
index 00000000000000..ef9c403dda6723
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark-entry-constructor.any.js
@@ -0,0 +1,40 @@
+// META: script=resources/user-timing-helper.js
+
+test(()=>{
+ const entry = new PerformanceMark("name");
+ assert_true(entry instanceof PerformanceMark);
+ checkEntry(entry, {name: "name", entryType: "mark"});
+}, "Mark entry can be created by 'new PerformanceMark(string)'.");
+
+test(()=>{
+ const entry = new PerformanceMark("name", {});
+ assert_true(entry instanceof PerformanceMark);
+ checkEntry(entry, {name: "name", entryType: "mark"});
+}, "Mark entry can be created by 'new PerformanceMark(string, {})'.");
+
+test(()=>{
+ const entry = new PerformanceMark("name", {startTime: 1});
+ assert_true(entry instanceof PerformanceMark);
+ checkEntry(entry, {name: "name", entryType: "mark", startTime: 1});
+}, "Mark entry can be created by 'new PerformanceMark(string, {startTime})'.");
+
+test(()=>{
+ const entry = new PerformanceMark("name", {detail: {info: "abc"}});
+ assert_true(entry instanceof PerformanceMark);
+ checkEntry(entry, {name: "name", entryType: "mark", detail: {info: "abc"}});
+}, "Mark entry can be created by 'new PerformanceMark(string, {detail})'.");
+
+test(()=>{
+ const entry =
+ new PerformanceMark("name", {startTime: 1, detail: {info: "abc"}});
+ assert_true(entry instanceof PerformanceMark);
+ checkEntry(entry, {name: "name", entryType: "mark", startTime: 1, detail: {info: "abc"}});
+}, "Mark entry can be created by " +
+ "'new PerformanceMark(string, {startTime, detail})'.");
+
+test(()=>{
+ const entry = new PerformanceMark("name");
+ assert_true(entry instanceof PerformanceMark);
+ checkEntry(entry, {name: "name", entryType: "mark"});
+ assert_equals(performance.getEntriesByName("name").length, 0);
+}, "Using new PerformanceMark() shouldn't add the entry to performance timeline.");
diff --git a/test/fixtures/wpt/user-timing/mark-errors.any.js b/test/fixtures/wpt/user-timing/mark-errors.any.js
new file mode 100644
index 00000000000000..dcd36695e22f2c
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark-errors.any.js
@@ -0,0 +1,15 @@
+test(function() {
+ assert_throws_js(TypeError, function() { self.performance.mark("mark1", 123); }, "Number passed as a dict argument should cause type-error.")
+}, "Number should be rejected as the mark-options.")
+
+test(function() {
+ assert_throws_js(TypeError, function() { self.performance.mark("mark1", NaN); }, "NaN passed as a dict argument should cause type-error.")
+}, "NaN should be rejected as the mark-options.")
+
+test(function() {
+ assert_throws_js(TypeError, function() { self.performance.mark("mark1", Infinity); }, "Infinity passed as a dict argument should cause type-error.")
+}, "Infinity should be rejected as the mark-options.")
+
+test(function() {
+ assert_throws_js(TypeError, function() { self.performance.mark("mark1", "string"); }, "String passed as a dict argument should cause type-error.")
+}, "String should be rejected as the mark-options.")
diff --git a/test/fixtures/wpt/user-timing/mark-l3.any.js b/test/fixtures/wpt/user-timing/mark-l3.any.js
new file mode 100644
index 00000000000000..407a5c8bba6a3c
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark-l3.any.js
@@ -0,0 +1,39 @@
+// META: script=resources/user-timing-helper.js
+
+async_test(function (t) {
+ let mark_entries = [];
+ const expected_entries =
+ [{ entryType: "mark", name: "mark1", detail: null},
+ { entryType: "mark", name: "mark2", detail: null},
+ { entryType: "mark", name: "mark3", detail: null},
+ { entryType: "mark", name: "mark4", detail: null},
+ { entryType: "mark", name: "mark5", detail: null},
+ { entryType: "mark", name: "mark6", detail: {}},
+ { entryType: "mark", name: "mark7", detail: {info: 'abc'}},
+ { entryType: "mark", name: "mark8", detail: null, startTime: 234.56},
+ { entryType: "mark", name: "mark9", detail: {count: 3}, startTime: 345.67}];
+ const observer = new PerformanceObserver(
+ t.step_func(function (entryList, obs) {
+ mark_entries =
+ mark_entries.concat(entryList.getEntries());
+ if (mark_entries.length >= expected_entries.length) {
+ checkEntries(mark_entries, expected_entries);
+ observer.disconnect();
+ t.done();
+ }
+ })
+ );
+ self.performance.clearMarks();
+ observer.observe({entryTypes: ["mark"]});
+ const returned_entries = [];
+ returned_entries.push(self.performance.mark("mark1"));
+ returned_entries.push(self.performance.mark("mark2", undefined));
+ returned_entries.push(self.performance.mark("mark3", null));
+ returned_entries.push(self.performance.mark("mark4", {}));
+ returned_entries.push(self.performance.mark("mark5", {detail: null}));
+ returned_entries.push(self.performance.mark("mark6", {detail: {}}));
+ returned_entries.push(self.performance.mark("mark7", {detail: {info: 'abc'}}));
+ returned_entries.push(self.performance.mark("mark8", {startTime: 234.56}));
+ returned_entries.push(self.performance.mark("mark9", {detail: {count: 3}, startTime: 345.67}));
+ checkEntries(returned_entries, expected_entries);
+}, "mark entries' detail and startTime are customizable.");
diff --git a/test/fixtures/wpt/user-timing/mark-measure-feature-detection.html b/test/fixtures/wpt/user-timing/mark-measure-feature-detection.html
new file mode 100644
index 00000000000000..6f1ad489e95680
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark-measure-feature-detection.html
@@ -0,0 +1,36 @@
+
+
+User Timing: L2 vs L3 feature detection
+
+
+
diff --git a/test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js b/test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js
new file mode 100644
index 00000000000000..bb15c5839818ba
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js
@@ -0,0 +1,37 @@
+async_test(function (t) {
+ self.performance.clearMeasures();
+ const measure = self.performance.measure("measure1");
+ assert_true(measure instanceof PerformanceMeasure);
+ t.done();
+}, "L3: performance.measure(name) should return an entry.");
+
+async_test(function (t) {
+ self.performance.clearMeasures();
+ const measure = self.performance.measure("measure2",
+ { start: 12, end: 23 });
+ assert_true(measure instanceof PerformanceMeasure);
+ t.done();
+}, "L3: performance.measure(name, param1) should return an entry.");
+
+async_test(function (t) {
+ self.performance.clearMeasures();
+ self.performance.mark("1");
+ self.performance.mark("2");
+ const measure = self.performance.measure("measure3", "1", "2");
+ assert_true(measure instanceof PerformanceMeasure);
+ t.done();
+}, "L3: performance.measure(name, param1, param2) should return an entry.");
+
+async_test(function (t) {
+ self.performance.clearMarks();
+ const mark = self.performance.mark("mark1");
+ assert_true(mark instanceof PerformanceMark);
+ t.done();
+}, "L3: performance.mark(name) should return an entry.");
+
+async_test(function (t) {
+ self.performance.clearMarks();
+ const mark = self.performance.mark("mark2", { startTime: 34 });
+ assert_true(mark instanceof PerformanceMark);
+ t.done();
+}, "L3: performance.mark(name, param) should return an entry.");
diff --git a/test/fixtures/wpt/user-timing/mark.any.js b/test/fixtures/wpt/user-timing/mark.any.js
new file mode 100644
index 00000000000000..7e814d2074ca8b
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark.any.js
@@ -0,0 +1,118 @@
+// test data
+var testThreshold = 20;
+
+var expectedTimes = new Array();
+
+function match_entries(entries, index)
+{
+ var entry = entries[index];
+ var match = self.performance.getEntriesByName("mark")[index];
+ assert_equals(entry.name, match.name, "entry.name");
+ assert_equals(entry.startTime, match.startTime, "entry.startTime");
+ assert_equals(entry.entryType, match.entryType, "entry.entryType");
+ assert_equals(entry.duration, match.duration, "entry.duration");
+}
+
+function filter_entries_by_type(entryList, entryType)
+{
+ var testEntries = new Array();
+
+ // filter entryList
+ for (var i in entryList)
+ {
+ if (entryList[i].entryType == entryType)
+ {
+ testEntries.push(entryList[i]);
+ }
+ }
+
+ return testEntries;
+}
+
+test(function () {
+ // create first mark
+ self.performance.mark("mark");
+
+ expectedTimes[0] = self.performance.now();
+
+ const entries = self.performance.getEntriesByName("mark");
+ assert_equals(entries.length, 1);
+}, "Entry 0 is properly created");
+
+test(function () {
+ // create second, duplicate mark
+ self.performance.mark("mark");
+
+ expectedTimes[1] = self.performance.now();
+
+ const entries = self.performance.getEntriesByName("mark");
+ assert_equals(entries.length, 2);
+
+}, "Entry 1 is properly created");
+
+function test_mark(index) {
+ test(function () {
+ const entries = self.performance.getEntriesByName("mark");
+ assert_equals(entries[index].name, "mark", "Entry has the proper name");
+ }, "Entry " + index + " has the proper name");
+
+ test(function () {
+ const entries = self.performance.getEntriesByName("mark");
+ assert_approx_equals(entries[index].startTime, expectedTimes[index], testThreshold);
+ }, "Entry " + index + " startTime is approximately correct (up to " + testThreshold +
+ "ms difference allowed)");
+
+ test(function () {
+ const entries = self.performance.getEntriesByName("mark");
+ assert_equals(entries[index].entryType, "mark");
+ }, "Entry " + index + " has the proper entryType");
+
+ test(function () {
+ const entries = self.performance.getEntriesByName("mark");
+ assert_equals(entries[index].duration, 0);
+ }, "Entry " + index + " duration == 0");
+
+ test(function () {
+ const entries = self.performance.getEntriesByName("mark", "mark");
+ assert_equals(entries[index].name, "mark");
+ }, "getEntriesByName(\"mark\", \"mark\")[" + index + "] returns an " +
+ "object containing a \"mark\" mark");
+
+ test(function () {
+ const entries = self.performance.getEntriesByName("mark", "mark");
+ match_entries(entries, index);
+ }, "The mark returned by getEntriesByName(\"mark\", \"mark\")[" + index
+ + "] matches the mark returned by " +
+ "getEntriesByName(\"mark\")[" + index + "]");
+
+ test(function () {
+ const entries = filter_entries_by_type(self.performance.getEntries(), "mark");
+ assert_equals(entries[index].name, "mark");
+ }, "getEntries()[" + index + "] returns an " +
+ "object containing a \"mark\" mark");
+
+ test(function () {
+ const entries = filter_entries_by_type(self.performance.getEntries(), "mark");
+ match_entries(entries, index);
+ }, "The mark returned by getEntries()[" + index
+ + "] matches the mark returned by " +
+ "getEntriesByName(\"mark\")[" + index + "]");
+
+ test(function () {
+ const entries = self.performance.getEntriesByType("mark");
+ assert_equals(entries[index].name, "mark");
+ }, "getEntriesByType(\"mark\")[" + index + "] returns an " +
+ "object containing a \"mark\" mark");
+
+ test(function () {
+ const entries = self.performance.getEntriesByType("mark");
+ match_entries(entries, index);
+ }, "The mark returned by getEntriesByType(\"mark\")[" + index
+ + "] matches the mark returned by " +
+ "getEntriesByName(\"mark\")[" + index + "]");
+
+}
+
+for (var i = 0; i < expectedTimes.length; i++) {
+ test_mark(i);
+}
diff --git a/test/fixtures/wpt/user-timing/mark.html b/test/fixtures/wpt/user-timing/mark.html
new file mode 100644
index 00000000000000..e03e9e6247adab
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark.html
@@ -0,0 +1,58 @@
+
+
+
+
+functionality test of window.performance.mark
+
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates functionality of the interface window.performance.mark.
+
+
+
diff --git a/test/fixtures/wpt/user-timing/mark_exceptions.html b/test/fixtures/wpt/user-timing/mark_exceptions.html
new file mode 100644
index 00000000000000..b445c6b8778ae7
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/mark_exceptions.html
@@ -0,0 +1,41 @@
+
+
+
+
+ window.performance User Timing mark() method is throwing the proper exceptions
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates that the performance.mark() method throws a SYNTAX_ERR exception whenever a navigation
+ timing attribute is provided for the name parameter.
+
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/measure-exceptions.html b/test/fixtures/wpt/user-timing/measure-exceptions.html
new file mode 100644
index 00000000000000..2836eaee2a86c1
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure-exceptions.html
@@ -0,0 +1,49 @@
+
+
+
+ This tests that 'performance.measure' throws exceptions with reasonable messages.
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/test/fixtures/wpt/user-timing/measure-l3.any.js b/test/fixtures/wpt/user-timing/measure-l3.any.js
new file mode 100644
index 00000000000000..24c27c483515ed
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure-l3.any.js
@@ -0,0 +1,35 @@
+// META: script=resources/user-timing-helper.js
+
+function endTime(entry) {
+ return entry.startTime + entry.duration;
+}
+
+test(function() {
+ performance.clearMarks();
+ performance.clearMeasures();
+ const markEntry = performance.mark("mark", {startTime: 123});
+ const measureEntry = performance.measure("A", undefined, "mark");
+ assert_equals(measureEntry.startTime, 0);
+ assert_equals(endTime(measureEntry), markEntry.startTime);
+}, "When the end mark is given and the start is unprovided, the end time of the measure entry should be the end mark's time, the start time should be 0.");
+
+test(function() {
+ performance.clearMarks();
+ performance.clearMeasures();
+ const markEntry = performance.mark("mark", {startTime: 123});
+ const endMin = performance.now();
+ const measureEntry = performance.measure("A", "mark", undefined);
+ const endMax = performance.now();
+ assert_equals(measureEntry.startTime, markEntry.startTime);
+ assert_greater_than_equal(endTime(measureEntry), endMin);
+ assert_greater_than_equal(endMax, endTime(measureEntry));
+}, "When the start mark is given and the end is unprovided, the start time of the measure entry should be the start mark's time, the end should be now.");
+
+test(function() {
+ performance.clearMarks();
+ performance.clearMeasures();
+ const markEntry = performance.mark("mark", {startTime: 123});
+ const measureEntry = performance.measure("A", "mark", "mark");
+ assert_equals(endTime(measureEntry), markEntry.startTime);
+ assert_equals(measureEntry.startTime, markEntry.startTime);
+}, "When start and end mark are both given, the start time and end time of the measure entry should be the marks' time, respectively");
diff --git a/test/fixtures/wpt/user-timing/measure-with-dict.any.js b/test/fixtures/wpt/user-timing/measure-with-dict.any.js
new file mode 100644
index 00000000000000..b452feb0de6fbb
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure-with-dict.any.js
@@ -0,0 +1,112 @@
+// META: script=resources/user-timing-helper.js
+
+function cleanupPerformanceTimeline() {
+ performance.clearMarks();
+ performance.clearMeasures();
+}
+
+async_test(function (t) {
+ this.add_cleanup(cleanupPerformanceTimeline);
+ let measureEntries = [];
+ const timeStamp1 = 784.4;
+ const timeStamp2 = 1234.5;
+ const timeStamp3 = 66.6;
+ const timeStamp4 = 5566;
+ const expectedEntries =
+ [{ entryType: "measure", name: "measure1", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure2", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure3", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure4", detail: null },
+ { entryType: "measure", name: "measure5", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure6", detail: null, startTime: timeStamp1 },
+ { entryType: "measure", name: "measure7", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
+ { entryType: "measure", name: "measure8", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure9", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure10", detail: null, startTime: timeStamp1 },
+ { entryType: "measure", name: "measure11", detail: null, startTime: timeStamp3 },
+ { entryType: "measure", name: "measure12", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure13", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure14", detail: null, startTime: timeStamp3, duration: timeStamp1 - timeStamp3 },
+ { entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
+ { entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 },
+ { entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 },
+ { entryType: "measure", name: "measure18", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
+ { entryType: "measure", name: "measure19", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
+ { entryType: "measure", name: "measure20", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure21", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure22", detail: null, startTime: 0 },
+ { entryType: "measure", name: "measure23", detail: null, startTime: 0 }];
+ const observer = new PerformanceObserver(
+ t.step_func(function (entryList, obs) {
+ measureEntries =
+ measureEntries.concat(entryList.getEntries());
+ if (measureEntries.length >= expectedEntries.length) {
+ checkEntries(measureEntries, expectedEntries);
+ observer.disconnect();
+ t.done();
+ }
+ })
+ );
+ observer.observe({ entryTypes: ["measure"] });
+ self.performance.mark("mark1", { detail: { randomInfo: 3 }, startTime: timeStamp1 });
+ self.performance.mark("mark2", { startTime: timeStamp2 });
+
+ const returnedEntries = [];
+ returnedEntries.push(self.performance.measure("measure1"));
+ returnedEntries.push(self.performance.measure("measure2", undefined));
+ returnedEntries.push(self.performance.measure("measure3", null));
+ returnedEntries.push(self.performance.measure("measure4", 'mark1'));
+ returnedEntries.push(
+ self.performance.measure("measure5", null, 'mark1'));
+ returnedEntries.push(
+ self.performance.measure("measure6", 'mark1', undefined));
+ returnedEntries.push(
+ self.performance.measure("measure7", 'mark1', 'mark2'));
+ returnedEntries.push(
+ self.performance.measure("measure8", {}));
+ returnedEntries.push(
+ self.performance.measure("measure9", { start: undefined }));
+ returnedEntries.push(
+ self.performance.measure("measure10", { start: 'mark1' }));
+ returnedEntries.push(
+ self.performance.measure("measure11", { start: timeStamp3 }));
+ returnedEntries.push(
+ self.performance.measure("measure12", { end: undefined }));
+ returnedEntries.push(
+ self.performance.measure("measure13", { end: 'mark1' }));
+ returnedEntries.push(
+ self.performance.measure("measure14", { start: timeStamp3, end: 'mark1' }));
+ returnedEntries.push(
+ self.performance.measure("measure15", { start: timeStamp1, end: timeStamp2, detail: undefined }));
+ returnedEntries.push(
+ self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null }));
+ returnedEntries.push(
+ self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }}));
+ returnedEntries.push(
+ self.performance.measure("measure18", { start: timeStamp1, duration: timeStamp2 - timeStamp1 }));
+ returnedEntries.push(
+ self.performance.measure("measure19", { duration: timeStamp2 - timeStamp1, end: timeStamp2 }));
+ // {}, null, undefined, invalid-dict passed to startOrOptions are interpreted as start time being 0.
+ returnedEntries.push(self.performance.measure("measure20", {}, 'mark1'));
+ returnedEntries.push(self.performance.measure("measure21", null, 'mark1'));
+ returnedEntries.push(self.performance.measure("measure22", undefined, 'mark1'));
+ returnedEntries.push(self.performance.measure("measure23", { invalidDict:1 }, 'mark1'));
+ checkEntries(returnedEntries, expectedEntries);
+}, "measure entries' detail and start/end are customizable");
+
+test(function() {
+ this.add_cleanup(cleanupPerformanceTimeline);
+ assert_throws_js(TypeError, function() {
+ self.performance.measure("optionsAndNumberEnd", {'start': 2}, 12);
+ }, "measure should throw a TypeError when passed an options object and an end time");
+ assert_throws_js(TypeError, function() {
+ self.performance.measure("optionsAndMarkEnd", {'start': 2}, 'mark1');
+ }, "measure should throw a TypeError when passed an options object and an end mark");
+ assert_throws_js(TypeError, function() {
+ self.performance.measure("negativeStartInOptions", {'start': -1});
+ }, "measure cannot have a negative time stamp.");
+ assert_throws_js(TypeError, function() {
+ self.performance.measure("negativeEndInOptions", {'end': -1});
+ }, "measure cannot have a negative time stamp for end.");
+}, "measure should throw a TypeError when passed an invalid argument combination");
+
diff --git a/test/fixtures/wpt/user-timing/measure.html b/test/fixtures/wpt/user-timing/measure.html
new file mode 100644
index 00000000000000..40f71a3362b581
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure.html
@@ -0,0 +1,362 @@
+
+
+
+
+
+ window.performance User Timing measure() method is working properly
+
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates that the performance.measure() method is working properly. This test creates the
+ following measures to test this method:
+
+
"measure_no_start_no_end": created using a measure() call without a startMark or endMark
+ provided
+
"measure_start_no_end": created using a measure() call with only the startMark provided
+
"measure_start_end": created using a measure() call with both a startMark or endMark provided
+
"measure_no_start_end": created using a measure() call with only the endMark provided
+
"measure_no_start_no_end": duplicate of the first measure, used to confirm names can be re-used
+
+ After creating each measure, the existence of these measures is validated by calling
+ performance.getEntriesByName() (both with and without the entryType parameter provided),
+ performance.getEntriesByType(), and performance.getEntries()
+
This test validates functionality of the interface window.performance.measure using keywords from the Navigation Timing spec.
+
+
+
diff --git a/test/fixtures/wpt/user-timing/measure_exception.html b/test/fixtures/wpt/user-timing/measure_exception.html
new file mode 100644
index 00000000000000..5c1aa086c0fc88
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure_exception.html
@@ -0,0 +1,34 @@
+
+
+
+
+exception test of window.performance.measure
+
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates all exception scenarios of method window.performance.measure in User Timing API
+
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html b/test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html
new file mode 100644
index 00000000000000..b1868b2cb6b3cb
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html
@@ -0,0 +1,70 @@
+
+
+
+
+ window.performance User Timing measure() method is throwing the proper exceptions
+
+
+
+
+
+
+
+
+
+
Description
+
+ window.performance.measure() method throws an InvalidAccessError
+ whenever a navigation timing attribute with a value of zero is provided as the startMark or endMark.
+
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/measure_navigation_timing.html b/test/fixtures/wpt/user-timing/measure_navigation_timing.html
new file mode 100644
index 00000000000000..d6480d27a261c9
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure_navigation_timing.html
@@ -0,0 +1,205 @@
+
+
+
+
+
+ window.performance User Timing clearMeasures() method is working properly with navigation timing
+ attributes
+
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates that the performance.measure() method is working properly when navigation timing
+ attributes are used in place of mark names. This test creates the following measures to test this method:
+
+
"measure_nav_start_no_end": created using a measure() call with a navigation timing attribute
+ provided as the startMark and nothing provided as the endMark
+
"measure_nav_start_mark_end": created using a measure() call with a navigation timing attribute
+ provided as the startMark and a mark name provided as the endMark
+
"measure_mark_start_nav_end": created using a measure() call with a mark name provided as the
+ startMark and a navigation timing attribute provided as the endMark
+
+ "measure_nav_start_nav_end": created using a measure() call with a navigation timing attribute
+ provided as both the startMark and endMark
+
+ After creating each measure, the existence of these measures is validated by calling
+ performance.getEntriesByName() with each measure name
+
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/measure_syntax_err.any.js b/test/fixtures/wpt/user-timing/measure_syntax_err.any.js
new file mode 100644
index 00000000000000..9b762a40906351
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measure_syntax_err.any.js
@@ -0,0 +1,33 @@
+test(function () {
+ self.performance.mark("existing_mark");
+ var entries = self.performance.getEntriesByName("existing_mark");
+ assert_equals(entries.length, 1);
+ self.performance.measure("measure", "existing_mark");
+}, "Create a mark \"existing_mark\"");
+test(function () {
+ assert_throws_dom("SyntaxError", function () {
+ self.performance.measure("measure", "mark");
+ });
+}, "self.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " +
+ "throws a SyntaxError exception.");
+
+test(function () {
+ assert_throws_dom("SyntaxError", function () {
+ self.performance.measure("measure", "mark", "existing_mark");
+ });
+}, "self.performance.measure(\"measure\", \"mark\", \"existing_mark\"), where \"mark\" is a " +
+ "non-existent mark, throws a SyntaxError exception.");
+
+test(function () {
+ assert_throws_dom("SyntaxError", function () {
+ self.performance.measure("measure", "existing_mark", "mark");
+ });
+}, "self.performance.measure(\"measure\", \"existing_mark\", \"mark\"), where \"mark\" " +
+ "is a non-existent mark, throws a SyntaxError exception.");
+
+test(function () {
+ assert_throws_dom("SyntaxError", function () {
+ self.performance.measure("measure", "mark", "mark");
+ });
+}, "self.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
+ "non-existent mark, throws a SyntaxError exception.");
diff --git a/test/fixtures/wpt/user-timing/measures.html b/test/fixtures/wpt/user-timing/measures.html
new file mode 100644
index 00000000000000..0de68965ddb9c7
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/measures.html
@@ -0,0 +1,66 @@
+
+
+
+
+functionality test of window.performance.measure
+
+
+
+
+
+
+
+
+
+
+
Description
+
This test validates functionality of the interface window.performance.measure.
+
+
+
diff --git a/test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js b/test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js
new file mode 100644
index 00000000000000..29efb729992cc6
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js
@@ -0,0 +1,9 @@
+importScripts("/resources/testharness.js");
+
+test(() => {
+ assert_throws_js(TypeError, () => {
+ performance.measure('name', 'navigationStart', 'navigationStart');
+ });
+}, "When converting 'navigationStart' to a timestamp, the global object has to be a Window object.");
+
+done();
diff --git a/test/fixtures/wpt/user-timing/resources/user-timing-helper.js b/test/fixtures/wpt/user-timing/resources/user-timing-helper.js
new file mode 100644
index 00000000000000..8d43768ec28196
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/resources/user-timing-helper.js
@@ -0,0 +1,30 @@
+// Compares a list of performance entries to a predefined one.
+// actualEntries is an array of performance entries from the user agent,
+// and expectedEntries is an array of performance entries minted by the test.
+// The comparison doesn't assert the order of the entries.
+function checkEntries(actualEntries, expectedEntries) {
+ assert_equals(actualEntries.length, expectedEntries.length,
+ `The length of actual and expected entries should match.
+ actual: ${JSON.stringify(actualEntries)},
+ expected: ${JSON.stringify(expectedEntries)}`);
+ const actualEntrySet = new Set(actualEntries.map(ae=>ae.name));
+ assert_equals(actualEntrySet.size, actualEntries.length, `Actual entry names are not unique: ${JSON.stringify(actualEntries)}`);
+ const expectedEntrySet = new Set(expectedEntries.map(ee=>ee.name));
+ assert_equals(expectedEntrySet.size, expectedEntries.length, `Expected entry names are not unique: ${JSON.stringify(expectedEntries)}`);
+ actualEntries.forEach(ae=>{
+ const expectedEntry = expectedEntries.find(e=>e.name === ae.name);
+ assert_true(!!expectedEntry, `Entry name '${ae.name}' was not found.`);
+ checkEntry(ae, expectedEntry);
+ });
+}
+
+function checkEntry(entry, {name, entryType, startTime, detail, duration}) {
+ assert_equals(entry.name, name);
+ assert_equals(entry.entryType, entryType);
+ if (startTime !== undefined)
+ assert_equals(entry.startTime, startTime);
+ if (detail !== undefined)
+ assert_equals(JSON.stringify(entry.detail), JSON.stringify(detail));
+ if (duration !== undefined)
+ assert_equals(entry.duration, duration);
+}
diff --git a/test/fixtures/wpt/user-timing/resources/webperftestharness.js b/test/fixtures/wpt/user-timing/resources/webperftestharness.js
new file mode 100644
index 00000000000000..2fbd0210de906d
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/resources/webperftestharness.js
@@ -0,0 +1,124 @@
+//
+// Helper functions for User Timing tests
+//
+
+var timingAttributes = [
+ "navigationStart",
+ "unloadEventStart",
+ "unloadEventEnd",
+ "redirectStart",
+ "redirectEnd",
+ "fetchStart",
+ "domainLookupStart",
+ "domainLookupEnd",
+ "connectStart",
+ "connectEnd",
+ "secureConnectionStart",
+ "requestStart",
+ "responseStart",
+ "responseEnd",
+ "domLoading",
+ "domInteractive",
+ "domContentLoadedEventStart",
+ "domContentLoadedEventEnd",
+ "domComplete",
+ "loadEventStart",
+ "loadEventEnd"
+];
+
+function has_required_interfaces()
+{
+ if (window.performance.mark == undefined ||
+ window.performance.clearMarks == undefined ||
+ window.performance.measure == undefined ||
+ window.performance.clearMeasures == undefined ||
+ window.performance.getEntriesByName == undefined ||
+ window.performance.getEntriesByType == undefined ||
+ window.performance.getEntries == undefined) {
+ return false;
+ }
+
+ return true;
+}
+
+function test_namespace(child_name, skip_root)
+{
+ if (skip_root === undefined) {
+ var msg = 'window.performance is defined';
+ wp_test(function () { assert_not_equals(performanceNamespace, undefined, msg); }, msg);
+ }
+
+ if (child_name !== undefined) {
+ var msg2 = 'window.performance.' + child_name + ' is defined';
+ wp_test(function() { assert_not_equals(performanceNamespace[child_name], undefined, msg2); }, msg2);
+ }
+}
+
+function test_attribute_exists(parent_name, attribute_name, properties)
+{
+ var msg = 'window.performance.' + parent_name + '.' + attribute_name + ' is defined.';
+ wp_test(function() { assert_not_equals(performanceNamespace[parent_name][attribute_name], undefined, msg); }, msg, properties);
+}
+
+function test_enum(parent_name, enum_name, value, properties)
+{
+ var msg = 'window.performance.' + parent_name + '.' + enum_name + ' is defined.';
+ wp_test(function() { assert_not_equals(performanceNamespace[parent_name][enum_name], undefined, msg); }, msg, properties);
+
+ msg = 'window.performance.' + parent_name + '.' + enum_name + ' = ' + value;
+ wp_test(function() { assert_equals(performanceNamespace[parent_name][enum_name], value, msg); }, msg, properties);
+}
+
+function test_timing_order(attribute_name, greater_than_attribute, properties)
+{
+ // ensure it's not 0 first
+ var msg = "window.performance.timing." + attribute_name + " > 0";
+ wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] > 0, msg); }, msg, properties);
+
+ // ensure it's in the right order
+ msg = "window.performance.timing." + attribute_name + " >= window.performance.timing." + greater_than_attribute;
+ wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] >= performanceNamespace.timing[greater_than_attribute], msg); }, msg, properties);
+}
+
+function test_timing_greater_than(attribute_name, greater_than, properties)
+{
+ var msg = "window.performance.timing." + attribute_name + " > " + greater_than;
+ test_greater_than(performanceNamespace.timing[attribute_name], greater_than, msg, properties);
+}
+
+function test_timing_equals(attribute_name, equals, msg, properties)
+{
+ var test_msg = msg || "window.performance.timing." + attribute_name + " == " + equals;
+ test_equals(performanceNamespace.timing[attribute_name], equals, test_msg, properties);
+}
+
+//
+// Non-test related helper functions
+//
+
+function sleep_milliseconds(n)
+{
+ var start = new Date().getTime();
+ while (true) {
+ if ((new Date().getTime() - start) >= n) break;
+ }
+}
+
+//
+// Common helper functions
+//
+
+function test_greater_than(value, greater_than, msg, properties)
+{
+ wp_test(function () { assert_true(value > greater_than, msg); }, msg, properties);
+}
+
+function test_greater_or_equals(value, greater_than, msg, properties)
+{
+ wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties);
+}
+
+function test_not_equals(value, notequals, msg, properties)
+{
+ wp_test(function() { assert_not_equals(value, notequals, msg); }, msg, properties);
+}
diff --git a/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js b/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js
new file mode 100644
index 00000000000000..8640918d4f255e
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js
@@ -0,0 +1,202 @@
+//
+// Helper functions for User Timing tests
+//
+
+var mark_names = [
+ '',
+ '1',
+ 'abc',
+];
+
+var measures = [
+ [''],
+ ['2', 1],
+ ['aaa', 'navigationStart', ''],
+];
+
+function test_method_exists(method, method_name, properties)
+{
+ var msg;
+ if (typeof method === 'function')
+ msg = 'performance.' + method.name + ' is supported!';
+ else
+ msg = 'performance.' + method_name + ' is supported!';
+ wp_test(function() { assert_equals(typeof method, 'function', msg); }, msg, properties);
+}
+
+function test_method_throw_exception(func_str, exception, msg)
+{
+ let exception_name;
+ let test_func;
+ if (typeof exception == "function") {
+ exception_name = exception.name;
+ test_func = assert_throws_js;
+ } else {
+ exception_name = exception;
+ test_func = assert_throws_dom;
+ }
+ var msg = 'Invocation of ' + func_str + ' should throw ' + exception_name + ' Exception.';
+ wp_test(function() { test_func(exception, function() {eval(func_str)}, msg); }, msg);
+}
+
+function test_noless_than(value, greater_than, msg, properties)
+{
+ wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties);
+}
+
+function test_fail(msg, properties)
+{
+ wp_test(function() { assert_unreached(); }, msg, properties);
+}
+
+function test_resource_entries(entries, expected_entries)
+{
+ // This is slightly convoluted so that we can sort the output.
+ var actual_entries = {};
+ var origin = window.location.protocol + "//" + window.location.host;
+
+ for (var i = 0; i < entries.length; ++i) {
+ var entry = entries[i];
+ var found = false;
+ for (var expected_entry in expected_entries) {
+ if (entry.name == origin + expected_entry) {
+ found = true;
+ if (expected_entry in actual_entries) {
+ test_fail(expected_entry + ' is not expected to have duplicate entries');
+ }
+ actual_entries[expected_entry] = entry;
+ break;
+ }
+ }
+ if (!found) {
+ test_fail(entries[i].name + ' is not expected to be in the Resource Timing buffer');
+ }
+ }
+
+ sorted_urls = [];
+ for (var i in actual_entries) {
+ sorted_urls.push(i);
+ }
+ sorted_urls.sort();
+ for (var i in sorted_urls) {
+ var url = sorted_urls[i];
+ test_equals(actual_entries[url].initiatorType,
+ expected_entries[url],
+ origin + url + ' is expected to have initiatorType ' + expected_entries[url]);
+ }
+ for (var j in expected_entries) {
+ if (!(j in actual_entries)) {
+ test_fail(origin + j + ' is expected to be in the Resource Timing buffer');
+ }
+ }
+}
+
+function performance_entrylist_checker(type)
+{
+ const entryType = type;
+
+ function entry_check(entry, expectedNames, testDescription = '')
+ {
+ const msg = testDescription + 'Entry \"' + entry.name + '\" should be one that we have set.';
+ wp_test(function() { assert_in_array(entry.name, expectedNames, msg); }, msg);
+ test_equals(entry.entryType, entryType, testDescription + 'entryType should be \"' + entryType + '\".');
+ if (type === "measure") {
+ test_true(isFinite(entry.startTime), testDescription + 'startTime should be a number.');
+ test_true(isFinite(entry.duration), testDescription + 'duration should be a number.');
+ } else if (type === "mark") {
+ test_greater_than(entry.startTime, 0, testDescription + 'startTime should be greater than 0.');
+ test_equals(entry.duration, 0, testDescription + 'duration of mark should be 0.');
+ }
+ }
+
+ function entrylist_order_check(entryList)
+ {
+ let inOrder = true;
+ for (let i = 0; i < entryList.length - 1; ++i)
+ {
+ if (entryList[i + 1].startTime < entryList[i].startTime) {
+ inOrder = false;
+ break;
+ }
+ }
+ return inOrder;
+ }
+
+ function entrylist_check(entryList, expectedLength, expectedNames, testDescription = '')
+ {
+ test_equals(entryList.length, expectedLength, testDescription + 'There should be ' + expectedLength + ' entries.');
+ test_true(entrylist_order_check(entryList), testDescription + 'Entries in entrylist should be in order.');
+ for (let i = 0; i < entryList.length; ++i)
+ {
+ entry_check(entryList[i], expectedNames, testDescription + 'Entry_list ' + i + '. ');
+ }
+ }
+
+ return{"entrylist_check":entrylist_check};
+}
+
+function PerformanceContext(context)
+{
+ this.performanceContext = context;
+}
+
+PerformanceContext.prototype =
+{
+
+ initialMeasures: function(item, index, array)
+ {
+ this.performanceContext.measure.apply(this.performanceContext, item);
+ },
+
+ mark: function()
+ {
+ this.performanceContext.mark.apply(this.performanceContext, arguments);
+ },
+
+ measure: function()
+ {
+ this.performanceContext.measure.apply(this.performanceContext, arguments);
+ },
+
+ clearMarks: function()
+ {
+ this.performanceContext.clearMarks.apply(this.performanceContext, arguments);
+ },
+
+ clearMeasures: function()
+ {
+ this.performanceContext.clearMeasures.apply(this.performanceContext, arguments);
+
+ },
+
+ getEntries: function()
+ {
+ return this.performanceContext.getEntries.apply(this.performanceContext, arguments);
+ },
+
+ getEntriesByType: function()
+ {
+ return this.performanceContext.getEntriesByType.apply(this.performanceContext, arguments);
+ },
+
+ getEntriesByName: function()
+ {
+ return this.performanceContext.getEntriesByName.apply(this.performanceContext, arguments);
+ },
+
+ setResourceTimingBufferSize: function()
+ {
+ return this.performanceContext.setResourceTimingBufferSize.apply(this.performanceContext, arguments);
+ },
+
+ registerResourceTimingBufferFullCallback: function(func)
+ {
+ this.performanceContext.onresourcetimingbufferfull = func;
+ },
+
+ clearResourceTimings: function()
+ {
+ this.performanceContext.clearResourceTimings.apply(this.performanceContext, arguments);
+ }
+
+};
diff --git a/test/fixtures/wpt/user-timing/structured-serialize-detail.any.js b/test/fixtures/wpt/user-timing/structured-serialize-detail.any.js
new file mode 100644
index 00000000000000..78771b2f7663d4
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/structured-serialize-detail.any.js
@@ -0,0 +1,64 @@
+test(function() {
+ performance.clearMarks();
+ const detail = { randomInfo: 123 }
+ const markEntry = new PerformanceMark("A", { detail });
+ assert_equals(markEntry.detail.randomInfo, detail.randomInfo);
+ assert_not_equals(markEntry.detail, detail);
+}, "The detail property in the mark constructor should be structured-clone.");
+
+test(function() {
+ performance.clearMarks();
+ const detail = { randomInfo: 123 }
+ const markEntry = performance.mark("A", { detail });
+ assert_not_equals(markEntry.detail, detail);
+}, "The detail property in the mark method should be structured-clone.");
+
+test(function() {
+ performance.clearMarks();
+ const markEntry = performance.mark("A");
+ assert_equals(markEntry.detail, null);
+}, "When accessing detail from a mark entry and the detail is not provided, just return a null value.");
+
+test(function() {
+ performance.clearMarks();
+ const detail = { unserializable: Symbol() };
+ assert_throws_dom("DataCloneError", ()=>{
+ new PerformanceMark("A", { detail });
+ }, "Trying to structured-serialize a Symbol.");
+}, "Mark: Throw an exception when the detail property cannot be structured-serialized.");
+
+test(function() {
+ performance.clearMeasures();
+ const detail = { randomInfo: 123 }
+ const measureEntry = performance.measure("A", { start: 0, detail });
+ assert_not_equals(measureEntry.detail, detail);
+}, "The detail property in the measure method should be structured-clone.");
+
+test(function() {
+ performance.clearMeasures();
+ const detail = { randomInfo: 123 }
+ const measureEntry = performance.measure("A", { start: 0, detail });
+ assert_equals(measureEntry.detail, measureEntry.detail);
+}, "The detail property in the measure method should be the same reference.");
+
+test(function() {
+ performance.clearMeasures();
+ const measureEntry = performance.measure("A");
+ assert_equals(measureEntry.detail, null);
+}, "When accessing detail from a measure entry and the detail is not provided, just return a null value.");
+
+test(function() {
+ performance.clearMeasures();
+ const detail = { unserializable: Symbol() };
+ assert_throws_dom("DataCloneError", ()=>{
+ performance.measure("A", { start: 0, detail });
+ }, "Trying to structured-serialize a Symbol.");
+}, "Measure: Throw an exception when the detail property cannot be structured-serialized.");
+
+test(function() {
+ const bar = { 1: 2 };
+ const detail = { foo: 1, bar };
+ const mark = performance.mark("m", { detail });
+ detail.foo = 2;
+ assert_equals(mark.detail.foo, 1);
+}, "The detail object is cloned when passed to mark API.");
diff --git a/test/fixtures/wpt/user-timing/supported-usertiming-types.any.js b/test/fixtures/wpt/user-timing/supported-usertiming-types.any.js
new file mode 100644
index 00000000000000..ea3b2fe9dc90f7
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/supported-usertiming-types.any.js
@@ -0,0 +1,37 @@
+test(() => {
+ if (typeof PerformanceObserver.supportedEntryTypes === "undefined")
+ assert_unreached("supportedEntryTypes is not supported.");
+ const types = PerformanceObserver.supportedEntryTypes;
+ assert_true(types.includes("mark"),
+ "There should be 'mark' in PerformanceObserver.supportedEntryTypes");
+ assert_true(types.includes("measure"),
+ "There should be 'measure' in PerformanceObserver.supportedEntryTypes");
+ assert_greater_than(types.indexOf("measure"), types.indexOf('mark'),
+ "The 'measure' entry should appear after the 'mark' entry");
+}, "supportedEntryTypes contains 'mark' and 'measure'.");
+
+if (typeof PerformanceObserver.supportedEntryTypes !== "undefined") {
+ const entryTypes = {
+ "mark": () => {
+ performance.mark('foo');
+ },
+ "measure": () => {
+ performance.measure('bar');
+ }
+ }
+ for (let entryType in entryTypes) {
+ if (PerformanceObserver.supportedEntryTypes.includes(entryType)) {
+ promise_test(async() => {
+ await new Promise((resolve) => {
+ new PerformanceObserver(function (list, observer) {
+ observer.disconnect();
+ resolve();
+ }).observe({entryTypes: [entryType]});
+
+ // Force the PerformanceEntry.
+ entryTypes[entryType]();
+ })
+ }, `'${entryType}' entries should be observable.`)
+ }
+ }
+}
diff --git a/test/fixtures/wpt/user-timing/user-timing-tojson.html b/test/fixtures/wpt/user-timing/user-timing-tojson.html
new file mode 100644
index 00000000000000..6aef7fa904ab95
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/user-timing-tojson.html
@@ -0,0 +1,44 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/test/fixtures/wpt/user-timing/user_timing_exists.any.js b/test/fixtures/wpt/user-timing/user_timing_exists.any.js
new file mode 100644
index 00000000000000..adf9052ebd58d8
--- /dev/null
+++ b/test/fixtures/wpt/user-timing/user_timing_exists.any.js
@@ -0,0 +1,12 @@
+test(function() {
+ assert_not_equals(self.performance.mark, undefined);
+}, "self.performance.mark is defined.");
+test(function() {
+ assert_not_equals(self.performance.clearMarks, undefined);
+}, "self.performance.clearMarks is defined.");
+test(function() {
+ assert_not_equals(self.performance.measure, undefined);
+}, "self.performance.measure is defined.");
+test(function() {
+ assert_not_equals(self.performance.clearMeasures, undefined);
+}, "self.performance.clearMeasures is defined.");
diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json
index fb10c7d403d730..fb49e36cc07747 100644
--- a/test/fixtures/wpt/versions.json
+++ b/test/fixtures/wpt/versions.json
@@ -36,9 +36,13 @@
"path": "html/webappapis/timers"
},
"interfaces": {
- "commit": "fcb671ed8b068b25cee87429d803833777f35c2c",
+ "commit": "80a417662387b6eda904607d78ad246c5d8bf191",
"path": "interfaces"
},
+ "performance-timeline": {
+ "commit": "17ebc3aea0d6321e69554067c39ab5855e6fb67e",
+ "path": "performance-timeline"
+ },
"resources": {
"commit": "972ca5b6693bffebebc5805e1b9da68a6876e1f6",
"path": "resources"
@@ -50,5 +54,9 @@
"url": {
"commit": "77d54aa9e0405f737987b59331f3584e3e1c26f9",
"path": "url"
+ },
+ "user-timing": {
+ "commit": "df24fb604e2d40528ac1d1b5dd970e32fc5c2978",
+ "path": "user-timing"
}
}
\ No newline at end of file
diff --git a/test/parallel/test-perf-hooks-usertiming.js b/test/parallel/test-perf-hooks-usertiming.js
index 401d0a6816481a..e7ef26889eae0f 100644
--- a/test/parallel/test-perf-hooks-usertiming.js
+++ b/test/parallel/test-perf-hooks-usertiming.js
@@ -29,7 +29,7 @@ assert(measure);
assert.strictEqual(m.entryType, 'mark');
assert.strictEqual(typeof m.startTime, 'number');
assert.strictEqual(m.duration, 0);
- assert.strictEqual(m.details, undefined);
+ assert.strictEqual(m.detail, null);
});
clearMarks();
@@ -38,11 +38,18 @@ assert.throws(() => mark(Symbol('a')), {
message: /Cannot convert a Symbol value to a string/
});
-[undefined, null, 1, 'any', {}, []].forEach((detail) => {
+[undefined, null].forEach((detail) => {
const m = mark('a', { detail });
assert.strictEqual(m.name, 'a');
assert.strictEqual(m.entryType, 'mark');
- assert.strictEqual(m.detail, detail);
+ assert.deepStrictEqual(m.detail, null);
+});
+[1, 'any', {}, []].forEach((detail) => {
+ const m = mark('a', { detail });
+ assert.strictEqual(m.name, 'a');
+ assert.strictEqual(m.entryType, 'mark');
+ // Value of detail is structured cloned.
+ assert.deepStrictEqual(m.detail, detail);
});
clearMarks();
diff --git a/test/wpt/status/performance-timeline.json b/test/wpt/status/performance-timeline.json
new file mode 100644
index 00000000000000..0967ef424bce67
--- /dev/null
+++ b/test/wpt/status/performance-timeline.json
@@ -0,0 +1 @@
+{}
diff --git a/test/wpt/status/user-timing.json b/test/wpt/status/user-timing.json
new file mode 100644
index 00000000000000..b1110e6a5e798d
--- /dev/null
+++ b/test/wpt/status/user-timing.json
@@ -0,0 +1,11 @@
+{
+ "invoke_with_timing_attributes.worker.js": {
+ "skip": "importScripts not supported"
+ },
+ "performance-measure-invalid.worker.js": {
+ "skip": "importScripts not supported"
+ },
+ "idlharness.any.js": {
+ "skip": "idlharness cannot recognize Node.js environment"
+ }
+}
diff --git a/test/wpt/test-performance-timeline.js b/test/wpt/test-performance-timeline.js
new file mode 100644
index 00000000000000..36d13297ba57cc
--- /dev/null
+++ b/test/wpt/test-performance-timeline.js
@@ -0,0 +1,27 @@
+'use strict';
+require('../common');
+const { WPTRunner } = require('../common/wpt');
+
+const runner = new WPTRunner('user-timing');
+
+// Needed to access to DOMException.
+runner.setFlags(['--expose-internals']);
+
+runner.setInitScript(`
+ const {
+ PerformanceMark,
+ PerformanceMeasure,
+ PerformanceObserver,
+ performance,
+ } = require('perf_hooks');
+ global.PerformanceMark = performance;
+ global.PerformanceMeasure = performance;
+ global.PerformanceObserver = PerformanceObserver;
+ global.performance = performance;
+
+ const { internalBinding } = require('internal/test/binding');
+ const { DOMException } = internalBinding('messaging');
+ global.DOMException = DOMException;
+`);
+
+runner.runJsTests();
diff --git a/test/wpt/test-user-timing.js b/test/wpt/test-user-timing.js
new file mode 100644
index 00000000000000..36d13297ba57cc
--- /dev/null
+++ b/test/wpt/test-user-timing.js
@@ -0,0 +1,27 @@
+'use strict';
+require('../common');
+const { WPTRunner } = require('../common/wpt');
+
+const runner = new WPTRunner('user-timing');
+
+// Needed to access to DOMException.
+runner.setFlags(['--expose-internals']);
+
+runner.setInitScript(`
+ const {
+ PerformanceMark,
+ PerformanceMeasure,
+ PerformanceObserver,
+ performance,
+ } = require('perf_hooks');
+ global.PerformanceMark = performance;
+ global.PerformanceMeasure = performance;
+ global.PerformanceObserver = PerformanceObserver;
+ global.performance = performance;
+
+ const { internalBinding } = require('internal/test/binding');
+ const { DOMException } = internalBinding('messaging');
+ global.DOMException = DOMException;
+`);
+
+runner.runJsTests();
From 7d0c869cfa29458e2790f9d9d773a45dc55c1564 Mon Sep 17 00:00:00 2001
From: legendecas
Date: Sun, 25 Jul 2021 23:53:21 +0800
Subject: [PATCH 002/119] doc: add PerformanceObserver `buffered` document
The option buffered is not about queueing the PerformanceEntrys with
an event loop task or not. The option buffered in the spec is about
filling the observer with the global PerformanceEntry buffer. The
current (and the spec) behavior is different with Node.js
version <= v16.0.0.
PR-URL: https://github.com/nodejs/node/pull/39514
Refs: https://w3c.github.io/performance-timeline/#observe-method
Refs: https://nodejs.org/dist/latest-v14.x/docs/api/perf_hooks.html#perf_hooks_performanceobserver_observe_options
Refs: https://github.com/nodejs/node/pull/39297
Reviewed-By: Antoine du Hamel
Reviewed-By: Benjamin Gruenbaum
---
doc/api/perf_hooks.md | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/doc/api/perf_hooks.md b/doc/api/perf_hooks.md
index 80a3c46f36a281..750c2bbc692097 100644
--- a/doc/api/perf_hooks.md
+++ b/doc/api/perf_hooks.md
@@ -585,6 +585,10 @@ Disconnects the `PerformanceObserver` instance from all notifications.
## Technology Sponsors
diff --git a/tools/node_modules/eslint/lib/rules/consistent-return.js b/tools/node_modules/eslint/lib/rules/consistent-return.js
index a250430cb766b5..0e20209af56b7f 100644
--- a/tools/node_modules/eslint/lib/rules/consistent-return.js
+++ b/tools/node_modules/eslint/lib/rules/consistent-return.js
@@ -104,18 +104,18 @@ module.exports = {
} else if (node.type === "ArrowFunctionExpression") {
// `=>` token
- loc = context.getSourceCode().getTokenBefore(node.body, astUtils.isArrowToken).loc.start;
+ loc = context.getSourceCode().getTokenBefore(node.body, astUtils.isArrowToken).loc;
} else if (
node.parent.type === "MethodDefinition" ||
(node.parent.type === "Property" && node.parent.method)
) {
// Method name.
- loc = node.parent.key.loc.start;
+ loc = node.parent.key.loc;
} else {
// Function name or `function` keyword.
- loc = (node.id || node).loc.start;
+ loc = (node.id || context.getSourceCode().getFirstToken(node)).loc;
}
if (!name) {
diff --git a/tools/node_modules/eslint/lib/rules/no-mixed-operators.js b/tools/node_modules/eslint/lib/rules/no-mixed-operators.js
index 15eb20bed2a49e..5a2e139a620519 100644
--- a/tools/node_modules/eslint/lib/rules/no-mixed-operators.js
+++ b/tools/node_modules/eslint/lib/rules/no-mixed-operators.js
@@ -117,7 +117,7 @@ module.exports = {
],
messages: {
- unexpectedMixedOperator: "Unexpected mix of '{{leftOperator}}' and '{{rightOperator}}'."
+ unexpectedMixedOperator: "Unexpected mix of '{{leftOperator}}' and '{{rightOperator}}'. Use parentheses to clarify the intended order of operations."
}
},
diff --git a/tools/node_modules/eslint/lib/rules/operator-assignment.js b/tools/node_modules/eslint/lib/rules/operator-assignment.js
index fdb0884922b654..a48d2725197055 100644
--- a/tools/node_modules/eslint/lib/rules/operator-assignment.js
+++ b/tools/node_modules/eslint/lib/rules/operator-assignment.js
@@ -76,8 +76,8 @@ module.exports = {
fixable: "code",
messages: {
- replaced: "Assignment can be replaced with operator assignment.",
- unexpected: "Unexpected operator assignment shorthand."
+ replaced: "Assignment (=) can be replaced with operator assignment ({{operator}}=).",
+ unexpected: "Unexpected operator assignment ({{operator}}=) shorthand."
}
},
@@ -113,6 +113,7 @@ module.exports = {
context.report({
node,
messageId: "replaced",
+ data: { operator },
fix(fixer) {
if (canBeFixed(left) && canBeFixed(expr.left)) {
const equalsToken = getOperatorToken(node);
@@ -139,7 +140,8 @@ module.exports = {
*/
context.report({
node,
- messageId: "replaced"
+ messageId: "replaced",
+ data: { operator }
});
}
}
@@ -155,6 +157,7 @@ module.exports = {
context.report({
node,
messageId: "unexpected",
+ data: { operator: node.operator },
fix(fixer) {
if (canBeFixed(node.left)) {
const firstToken = sourceCode.getFirstToken(node);
diff --git a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js
index c7ff6a09a93881..406601c24714ac 100644
--- a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js
+++ b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js
@@ -495,7 +495,7 @@ class ConfigArrayFactory {
basePath
);
- if (fs.existsSync(ctx.filePath)) {
+ if (fs.existsSync(ctx.filePath) && fs.statSync(ctx.filePath).isFile()) {
let configData;
try {
diff --git a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json
index e79857f5553e99..35f5129707338c 100644
--- a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json
+++ b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json
@@ -1,6 +1,6 @@
{
"name": "@eslint/eslintrc",
- "version": "0.4.2",
+ "version": "0.4.3",
"description": "The legacy ESLintRC config file format for ESLint",
"main": "lib/index.js",
"files": [
@@ -40,7 +40,7 @@
"eslint-plugin-jsdoc": "^32.2.0",
"eslint-plugin-node": "^11.1.0",
"eslint-release": "^3.1.2",
- "fs-teardown": "^0.1.0",
+ "fs-teardown": "0.1.1",
"mocha": "^8.1.1",
"shelljs": "^0.8.4",
"sinon": "^9.2.0",
diff --git a/tools/node_modules/eslint/node_modules/acorn-jsx/package.json b/tools/node_modules/eslint/node_modules/acorn-jsx/package.json
index f42a7ea1437c8c..6debde9caa30ba 100644
--- a/tools/node_modules/eslint/node_modules/acorn-jsx/package.json
+++ b/tools/node_modules/eslint/node_modules/acorn-jsx/package.json
@@ -2,7 +2,7 @@
"name": "acorn-jsx",
"description": "Modern, fast React.js JSX parser",
"homepage": "https://github.com/acornjs/acorn-jsx",
- "version": "5.3.1",
+ "version": "5.3.2",
"maintainers": [
{
"name": "Ingvar Stepanyan",
diff --git a/tools/node_modules/eslint/node_modules/debug/package.json b/tools/node_modules/eslint/node_modules/debug/package.json
index da809d2b8d28b2..b7d70acb9bee82 100644
--- a/tools/node_modules/eslint/node_modules/debug/package.json
+++ b/tools/node_modules/eslint/node_modules/debug/package.json
@@ -1,6 +1,6 @@
{
"name": "debug",
- "version": "4.3.1",
+ "version": "4.3.2",
"repository": {
"type": "git",
"url": "git://github.com/visionmedia/debug.git"
diff --git a/tools/node_modules/eslint/node_modules/debug/src/common.js b/tools/node_modules/eslint/node_modules/debug/src/common.js
index 392a8e005a063a..50ce2925101d73 100644
--- a/tools/node_modules/eslint/node_modules/debug/src/common.js
+++ b/tools/node_modules/eslint/node_modules/debug/src/common.js
@@ -60,6 +60,8 @@ function setup(env) {
function createDebug(namespace) {
let prevTime;
let enableOverride = null;
+ let namespacesCache;
+ let enabledCache;
function debug(...args) {
// Disabled?
@@ -120,7 +122,17 @@ function setup(env) {
Object.defineProperty(debug, 'enabled', {
enumerable: true,
configurable: false,
- get: () => enableOverride === null ? createDebug.enabled(namespace) : enableOverride,
+ get: () => {
+ if (enableOverride !== null) {
+ return enableOverride;
+ }
+ if (namespacesCache !== createDebug.namespaces) {
+ namespacesCache = createDebug.namespaces;
+ enabledCache = createDebug.enabled(namespace);
+ }
+
+ return enabledCache;
+ },
set: v => {
enableOverride = v;
}
@@ -149,6 +161,7 @@ function setup(env) {
*/
function enable(namespaces) {
createDebug.save(namespaces);
+ createDebug.namespaces = namespaces;
createDebug.names = [];
createDebug.skips = [];
diff --git a/tools/node_modules/eslint/node_modules/flatted/README.md b/tools/node_modules/eslint/node_modules/flatted/README.md
index 8fd5b4d82f4459..1f52cc5b3877ff 100644
--- a/tools/node_modules/eslint/node_modules/flatted/README.md
+++ b/tools/node_modules/eslint/node_modules/flatted/README.md
@@ -1,6 +1,6 @@
# flatted
-[![Downloads](https://img.shields.io/npm/dm/flatted.svg)](https://www.npmjs.com/package/flatted) [![Coverage Status](https://coveralls.io/repos/github/WebReflection/flatted/badge.svg?branch=master)](https://coveralls.io/github/WebReflection/flatted?branch=master) [![Build Status](https://travis-ci.org/WebReflection/flatted.svg?branch=master)](https://travis-ci.org/WebReflection/flatted) [![License: ISC](https://img.shields.io/badge/License-ISC-yellow.svg)](https://opensource.org/licenses/ISC) ![WebReflection status](https://offline.report/status/webreflection.svg)
+[![Downloads](https://img.shields.io/npm/dm/flatted.svg)](https://www.npmjs.com/package/flatted) [![Coverage Status](https://coveralls.io/repos/github/WebReflection/flatted/badge.svg?branch=main)](https://coveralls.io/github/WebReflection/flatted?branch=main) [![Build Status](https://travis-ci.com/WebReflection/flatted.svg?branch=main)](https://travis-ci.com/WebReflection/flatted) [![License: ISC](https://img.shields.io/badge/License-ISC-yellow.svg)](https://opensource.org/licenses/ISC) ![WebReflection status](https://offline.report/status/webreflection.svg)
![snow flake](./flatted.jpg)
diff --git a/tools/node_modules/eslint/node_modules/flatted/package.json b/tools/node_modules/eslint/node_modules/flatted/package.json
index 013728929103d3..405ccfd799b098 100644
--- a/tools/node_modules/eslint/node_modules/flatted/package.json
+++ b/tools/node_modules/eslint/node_modules/flatted/package.json
@@ -1,6 +1,6 @@
{
"name": "flatted",
- "version": "3.2.0",
+ "version": "3.2.1",
"description": "A super light and fast circular JSON parser.",
"unpkg": "min.js",
"types": "types.d.ts",
@@ -12,8 +12,9 @@
"rollup:babel": "rollup --config rollup/babel.config.js && sed -i.bck 's/^var /self./' index.js && rm -rf index.js.bck && drop-babel-typeof index.js",
"min": "terser index.js -c -m -o min.js",
"size": "cat index.js | wc -c;cat min.js | wc -c;gzip -c9 min.js | wc -c;cat min.js | brotli | wc -c; cat es.js | brotli | wc -c",
- "coveralls": "nyc report --reporter=text-lcov | coveralls",
- "test": "nyc node test/index.js"
+ "coveralls": "c8 report --reporter=text-lcov | coveralls",
+ "test": "c8 node test/index.js",
+ "test:php": "php php/test.php"
},
"repository": {
"type": "git",
@@ -36,13 +37,13 @@
"@babel/core": "^7.14.6",
"@babel/preset-env": "^7.14.7",
"ascjs": "^5.0.1",
+ "c8": "^7.7.3",
"circular-json": "^0.5.9",
"circular-json-es6": "^2.0.2",
"coveralls": "^3.1.1",
"drop-babel-typeof": "^1.0.3",
"jsan": "^3.1.13",
- "nyc": "^15.1.0",
- "rollup": "^2.52.6",
+ "rollup": "^2.52.8",
"rollup-plugin-babel": "^4.4.0",
"rollup-plugin-node-resolve": "^5.2.0",
"rollup-plugin-terser": "^7.0.2",
diff --git a/tools/node_modules/eslint/node_modules/flatted/php/test.php b/tools/node_modules/eslint/node_modules/flatted/php/test.php
deleted file mode 100644
index 8b49e924842035..00000000000000
--- a/tools/node_modules/eslint/node_modules/flatted/php/test.php
+++ /dev/null
@@ -1,118 +0,0 @@
-o = &$o;
-
-console::assert(Flatted::stringify($a) === '[["0"]]', 'recursive Array');
-console::assert(Flatted::stringify($o) === '[{"o":"0"}]', 'recursive Object');
-
-$b = Flatted::parse(Flatted::stringify($a));
-console::assert(is_array($b) && $b[0] === $b, 'restoring recursive Array');
-
-$a[] = 1;
-$a[] = 'two';
-$a[] = true;
-$o->one = 1;
-$o->two = 'two';
-$o->three = true;
-
-console::assert(Flatted::stringify($a) === '[["0",1,"1",true],"two"]', 'values in Array');
-console::assert(Flatted::stringify($o) === '[{"o":"0","one":1,"two":"1","three":true},"two"]', 'values in Object');
-
-$a[] = &$o;
-$o->a = &$a;
-
-console::assert(Flatted::stringify($a) === '[["0",1,"1",true,"2"],"two",{"o":"2","one":1,"two":"1","three":true,"a":"0"}]', 'object in Array');
-console::assert(Flatted::stringify($o) === '[{"o":"0","one":1,"two":"1","three":true,"a":"2"},"two",["2",1,"1",true,"0"]]', 'array in Object');
-
-$a[] = array('test' => 'OK');
-$a[] = [1, 2, 3];
-
-$o->test = array('test' => 'OK');
-$o->array = [1, 2, 3];
-
-console::assert(Flatted::stringify($a) === '[["0",1,"1",true,"2","3","4"],"two",{"o":"2","one":1,"two":"1","three":true,"a":"0","test":"3","array":"4"},{"test":"5"},[1,2,3],"OK"]', 'objects in Array');
-console::assert(Flatted::stringify($o) === '[{"o":"0","one":1,"two":"1","three":true,"a":"2","test":"3","array":"4"},"two",["2",1,"1",true,"0","3","4"],{"test":"5"},[1,2,3],"OK"]', 'objects in Object');
-
-$a2 = Flatted::parse(Flatted::stringify($a));
-$o2 = Flatted::parse(Flatted::stringify($o));
-
-console::assert($a2[0] === $a2, 'parsed Array');
-console::assert($o2->o === $o2, 'parsed Object');
-
-console::assert(
- $a2[1] === 1 &&
- $a2[2] === 'two' &&
- $a2[3] === true &&
- $a2[4] instanceof stdClass &&
- json_encode($a2[5]) === json_encode(array('test' => 'OK')) &&
- json_encode($a2[6]) === json_encode([1, 2, 3]),
- 'array values are all OK'
-);
-
-console::assert($a2[4] === $a2[4]->o && $a2 === $a2[4]->o->a, 'array recursive values are OK');
-
-console::assert(
- $o2->one === 1 &&
- $o2->two === 'two' &&
- $o2->three === true &&
- is_array($o2->a) &&
- json_encode($o2->test) === json_encode(array('test' => 'OK')) &&
- json_encode($o2->array) === json_encode([1, 2, 3]),
- 'object values are all OK'
-);
-
-console::assert($o2->a === $o2->a[0] && $o2 === $o2->a[4], 'object recursive values are OK');
-
-console::assert(Flatted::parse(Flatted::stringify(1)) === 1, 'numbers can be parsed too');
-console::assert(Flatted::parse(Flatted::stringify(false)) === false, 'booleans can be parsed too');
-console::assert(Flatted::parse(Flatted::stringify(null)) === null, 'null can be parsed too');
-console::assert(Flatted::parse(Flatted::stringify('test')) === 'test', 'strings can be parsed too');
-
-$str = Flatted::parse('[{"prop":"1","a":"2","b":"3"},{"value":123},["4","5"],{"e":"6","t":"7","p":4},{},{"b":"8"},"f",{"a":"9"},["10"],"sup",{"a":1,"d":2,"c":"7","z":"11","h":1},{"g":2,"a":"7","b":"12","f":6},{"r":4,"u":"7","c":5}]');
-
-console::assert(
- $str->b->t->a === 'sup' &&
- $str->a[1]->b[0]->c === $str->b->t,
- 'str is fine'
-);
-
-$oo = Flatted::parse('[{"a":"1","b":"0","c":"2"},{"aa":"3"},{"ca":"4","cb":"5","cc":"6","cd":"7","ce":"8","cf":"9"},{"aaa":"10"},{"caa":"4"},{"cba":"5"},{"cca":"2"},{"cda":"4"},"value2","value3","value1"]');
-
-console::assert(
- $oo->a->aa->aaa = 'value1'
- && $oo === $oo->b
- && $oo === $oo->b
- && $oo->c->ca->caa === $oo->c->ca
- && $oo->c->cb->cba === $oo->c->cb
- && $oo->c->cc->cca === $oo->c
- && $oo->c->cd->cda === $oo->c->ca->caa
- && $oo->c->ce === 'value2'
- && $oo->c->cf === 'value3',
- 'parse is correct'
-);
-
-echo "OK\n";
-
-?>
\ No newline at end of file
diff --git a/tools/node_modules/eslint/node_modules/globals/globals.json b/tools/node_modules/eslint/node_modules/globals/globals.json
index 1119914e203e2c..334ff3e01e9cc0 100644
--- a/tools/node_modules/eslint/node_modules/globals/globals.json
+++ b/tools/node_modules/eslint/node_modules/globals/globals.json
@@ -442,15 +442,24 @@
"CSSImportRule": false,
"CSSKeyframeRule": false,
"CSSKeyframesRule": false,
+ "CSSMatrixComponent": false,
"CSSMediaRule": false,
"CSSNamespaceRule": false,
"CSSPageRule": false,
+ "CSSPerspective": false,
+ "CSSRotate": false,
"CSSRule": false,
"CSSRuleList": false,
+ "CSSScale": false,
+ "CSSSkew": false,
+ "CSSSkewX": false,
+ "CSSSkewY": false,
"CSSStyleDeclaration": false,
"CSSStyleRule": false,
"CSSStyleSheet": false,
"CSSSupportsRule": false,
+ "CSSTransformValue": false,
+ "CSSTranslate": false,
"CustomElementRegistry": false,
"customElements": false,
"CustomEvent": false,
diff --git a/tools/node_modules/eslint/node_modules/globals/package.json b/tools/node_modules/eslint/node_modules/globals/package.json
index 66d027e5f30e8c..a646c1ded41493 100644
--- a/tools/node_modules/eslint/node_modules/globals/package.json
+++ b/tools/node_modules/eslint/node_modules/globals/package.json
@@ -1,6 +1,6 @@
{
"name": "globals",
- "version": "13.9.0",
+ "version": "13.10.0",
"description": "Global identifiers from different JavaScript environments",
"license": "MIT",
"repository": "sindresorhus/globals",
diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md
index f1ff6731d37e2d..25cc428c27f613 100644
--- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md
+++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md
@@ -105,8 +105,8 @@ Performance of different validators by [json-schema-benchmark](https://github.co
- Ajv implements JSON Schema [draft-06/07/2019-09/2020-12](http://json-schema.org/) standards (draft-04 is supported in v6):
- all validation keywords (see [JSON Schema validation keywords](https://ajv.js.org/json-schema.html))
- [OpenAPI](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.3.md) extensions:
- - NEW: keyword [discriminator](https://ajv.js.org/json-schema.md#discriminator).
- - keyword [nullable](https://ajv.js.org/json-schema.md#nullable).
+ - NEW: keyword [discriminator](https://ajv.js.org/json-schema.html#discriminator).
+ - keyword [nullable](https://ajv.js.org/json-schema.html#nullable).
- full support of remote references (remote schemas have to be added with `addSchema` or compiled to be available)
- support of recursive references between schemas
- correct string lengths for strings with unicode pairs
@@ -161,6 +161,11 @@ const schema = {
additionalProperties: false,
}
+const data = {
+ foo: 1,
+ bar: "abc"
+}
+
const validate = ajv.compile(schema)
const valid = validate(data)
if (!valid) console.log(validate.errors)
diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/errors.js b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/errors.js
index 0dcf28bc5cd491..27b2091762c20f 100644
--- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/errors.js
+++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/errors.js
@@ -5,7 +5,7 @@ const codegen_1 = require("./codegen");
const util_1 = require("./util");
const names_1 = require("./names");
exports.keywordError = {
- message: ({ keyword }) => codegen_1.str `should pass "${keyword}" keyword validation`,
+ message: ({ keyword }) => codegen_1.str `must pass "${keyword}" keyword validation`,
};
exports.keyword$DataError = {
message: ({ keyword, schemaType }) => schemaType
diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/jtd/serialize.js b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/jtd/serialize.js
index 69f0f563c7f128..7ca512d4399f7a 100644
--- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/jtd/serialize.js
+++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/jtd/serialize.js
@@ -180,7 +180,7 @@ function serializeType(cxt) {
serializeString(cxt);
break;
case "timestamp":
- gen.if(codegen_1._ `${data} instanceof Date`, () => gen.add(names_1.default.json, codegen_1._ `${data}.toISOString()`), () => serializeString(cxt));
+ gen.if(codegen_1._ `${data} instanceof Date`, () => gen.add(names_1.default.json, codegen_1._ `'"' + ${data}.toISOString() + '"'`), () => serializeString(cxt));
break;
default:
serializeNumber(cxt);
diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/index.js b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/index.js
index 43371e7c7b51b8..a43053e228b6ca 100644
--- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/index.js
+++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/index.js
@@ -303,7 +303,10 @@ class KeywordCxt {
}
}
result(condition, successAction, failAction) {
- this.gen.if(codegen_1.not(condition));
+ this.failResult(codegen_1.not(condition), successAction, failAction);
+ }
+ failResult(condition, successAction, failAction) {
+ this.gen.if(condition);
if (failAction)
failAction();
else
@@ -322,7 +325,7 @@ class KeywordCxt {
}
}
pass(condition, failAction) {
- this.result(condition, undefined, failAction);
+ this.failResult(codegen_1.not(condition), undefined, failAction);
}
fail(condition) {
if (condition === undefined) {
diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/vocabularies/applicator/not.js b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/vocabularies/applicator/not.js
index 60ad2b9a7c4dab..72f9f85a0ac8c2 100644
--- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/vocabularies/applicator/not.js
+++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/vocabularies/applicator/not.js
@@ -18,7 +18,7 @@ const def = {
createErrors: false,
allErrors: false,
}, valid);
- cxt.result(valid, () => cxt.error(), () => cxt.reset());
+ cxt.failResult(valid, () => cxt.reset(), () => cxt.error());
},
error: { message: "must NOT be valid" },
};
diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json
index 357ede01961155..6bf862d66f44ec 100644
--- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json
+++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json
@@ -1,6 +1,6 @@
{
"name": "ajv",
- "version": "8.6.0",
+ "version": "8.6.2",
"description": "Another JSON Schema Validator",
"main": "dist/ajv.js",
"types": "dist/ajv.d.ts",
@@ -70,7 +70,7 @@
"@rollup/plugin-typescript": "^8.2.1",
"@types/chai": "^4.2.12",
"@types/mocha": "^8.0.3",
- "@types/node": "^15.0.2",
+ "@types/node": "^16.3.2",
"@types/require-from-string": "^1.2.0",
"@typescript-eslint/eslint-plugin": "^3.8.0",
"@typescript-eslint/parser": "^3.8.0",
@@ -83,7 +83,7 @@
"eslint": "^7.8.1",
"eslint-config-prettier": "^7.0.0",
"glob": "^7.0.0",
- "husky": "^6.0.0",
+ "husky": "^7.0.1",
"if-node-version": "^1.0.0",
"jimp": "^0.16.1",
"js-beautify": "^1.7.3",
diff --git a/tools/node_modules/eslint/package.json b/tools/node_modules/eslint/package.json
index ac3a78cc3b0f30..0e64b624b966e6 100644
--- a/tools/node_modules/eslint/package.json
+++ b/tools/node_modules/eslint/package.json
@@ -1,6 +1,6 @@
{
"name": "eslint",
- "version": "7.30.0",
+ "version": "7.31.0",
"author": "Nicholas C. Zakas ",
"description": "An AST-based pattern checker for JavaScript.",
"bin": {
@@ -44,7 +44,7 @@
"bugs": "https://github.com/eslint/eslint/issues/",
"dependencies": {
"@babel/code-frame": "7.12.11",
- "@eslint/eslintrc": "^0.4.2",
+ "@eslint/eslintrc": "^0.4.3",
"@humanwhocodes/config-array": "^0.5.0",
"ajv": "^6.10.0",
"chalk": "^4.0.0",
@@ -103,7 +103,7 @@
"eslint-release": "^2.0.0",
"eslump": "^3.0.0",
"esprima": "^4.0.1",
- "fs-teardown": "^0.1.0",
+ "fs-teardown": "0.1.1",
"glob": "^7.1.6",
"jsdoc": "^3.5.5",
"karma": "^6.1.1",
From 23bc4cfb21f139b349d7f35608f228a1508c2c68 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Sun, 25 Jul 2021 20:16:47 -0700
Subject: [PATCH 005/119] meta: update collaborator email in AUTHORS/.mailmap
Update email for Zeyu Yang.
PR-URL: https://github.com/nodejs/node/pull/39521
Reviewed-By: Zeyu Yang
Reviewed-By: Ujjwal Sharma
Reviewed-By: James M Snell
Reviewed-By: Luigi Pinca
---
.mailmap | 4 ++--
AUTHORS | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/.mailmap b/.mailmap
index 83c9c5cab55ce1..a3495e44f5d637 100644
--- a/.mailmap
+++ b/.mailmap
@@ -158,8 +158,8 @@ Hannes Magnusson
Hendrik Schwalm
Henry Chin
Herbert Vojčík
-himself65
-himself65
+himself65
+himself65
Hitesh Kanwathirtha
Icer Liang
Igor Savin
diff --git a/AUTHORS b/AUTHORS
index d9ae717436d206..987f5267ea59f4 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -2681,7 +2681,7 @@ Alexander Sattelmaier
Avi ד
Thomas
Aymen Naghmouchi
-himself65
+himself65
Geir Hauge
Patrick Gansterer
Nicolas Moteau
From 72af147bb58e79a8a908f056b68f826916563149 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Sun, 25 Jul 2021 20:18:38 -0700
Subject: [PATCH 006/119] meta: update collaborator email in README
Update email for Shelley Vohr to correspond to what is specified in
.mailmap and what is used in more recently authored commits.
PR-URL: https://github.com/nodejs/node/pull/39521
Reviewed-By: Zeyu Yang
Reviewed-By: Ujjwal Sharma
Reviewed-By: James M Snell
Reviewed-By: Luigi Pinca
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index bcbffcb9ed3465..4408ef5126e709 100644
--- a/README.md
+++ b/README.md
@@ -168,7 +168,7 @@ For information about the governance of the Node.js project, see
* [cjihrig](https://github.com/cjihrig) -
**Colin Ihrig** <cjihrig@gmail.com> (he/him)
* [codebytere](https://github.com/codebytere) -
-**Shelley Vohr** <codebytere@gmail.com> (she/her)
+**Shelley Vohr** <shelley.vohr@gmail.com> (she/her)
* [danbev](https://github.com/danbev) -
**Daniel Bevenius** <daniel.bevenius@gmail.com> (he/him)
* [danielleadams](https://github.com/danielleadams) -
@@ -288,7 +288,7 @@ For information about the governance of the Node.js project, see
* [cjihrig](https://github.com/cjihrig) -
**Colin Ihrig** <cjihrig@gmail.com> (he/him)
* [codebytere](https://github.com/codebytere) -
-**Shelley Vohr** <codebytere@gmail.com> (she/her)
+**Shelley Vohr** <shelley.vohr@gmail.com> (she/her)
* [danbev](https://github.com/danbev) -
**Daniel Bevenius** <daniel.bevenius@gmail.com> (he/him)
* [danielleadams](https://github.com/danielleadams) -
From 110c088f0241c30f5a07ae4835ebfce94d09444f Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Sun, 25 Jul 2021 20:19:44 -0700
Subject: [PATCH 007/119] meta: update collaborator email in AUTHORS/.mailmap
Update email for Ujjwal Sharma.
PR-URL: https://github.com/nodejs/node/pull/39521
Reviewed-By: Zeyu Yang
Reviewed-By: Ujjwal Sharma
Reviewed-By: James M Snell
Reviewed-By: Luigi Pinca
---
.mailmap | 2 +-
AUTHORS | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/.mailmap b/.mailmap
index a3495e44f5d637..a7a36916790903 100644
--- a/.mailmap
+++ b/.mailmap
@@ -421,7 +421,7 @@ Travis Meisenheimer
Trevor Burnham
Trivikram Kamat <16024985+trivikr@users.noreply.github.com>
Tyler Larson
-Ujjwal Sharma
+Ujjwal Sharma
Viktor Karpov
Vincent Voyer
Vladimir de Turckheim
diff --git a/AUTHORS b/AUTHORS
index 987f5267ea59f4..fb27f493b7a252 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -2116,7 +2116,7 @@ dustinnewman98
Oluwaseun Omoyajowo
Wilson Lin
Eric Bickle
-Ujjwal Sharma
+Ujjwal Sharma
Wei-Wei Wu
Prateek Singh
Ken Lin
From 3ff5e153ef86b5ca3144e141c73b9293aad1421a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juan=20Jos=C3=A9=20Arboleda?=
Date: Mon, 19 Jul 2021 16:44:29 -0500
Subject: [PATCH 008/119] doc: add code example to `http.createServer` method
PR-URL: https://github.com/nodejs/node/pull/39455
Reviewed-By: James M Snell
---
doc/api/http.md | 31 +++++++++++++++++++++++++++++++
1 file changed, 31 insertions(+)
diff --git a/doc/api/http.md b/doc/api/http.md
index 2ab4b86ee86976..4b4f758287f516 100644
--- a/doc/api/http.md
+++ b/doc/api/http.md
@@ -2687,6 +2687,37 @@ Returns a new instance of [`http.Server`][].
The `requestListener` is a function which is automatically
added to the [`'request'`][] event.
+```cjs
+const http = require('http');
+
+// Create a local server to receive data from
+const server = http.createServer((req, res) => {
+ res.writeHead(200, { 'Content-Type': 'application/json' });
+ res.end(JSON.stringify({
+ data: 'Hello World!'
+ }));
+});
+
+server.listen(8000);
+```
+
+```cjs
+const http = require('http');
+
+// Create a local server to receive data from
+const server = http.createServer();
+
+// Listen to the request event
+server.on('request', (request, res) => {
+ res.writeHead(200, { 'Content-Type': 'application/json' });
+ res.end(JSON.stringify({
+ data: 'Hello World!'
+ }));
+});
+
+server.listen(8000);
+```
+
## `http.get(options[, callback])`
## `http.get(url[, options][, callback])`
+
> Stability: 2 - Stable
diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md
index f34cd155e48204..42f1481e3ff15f 100644
--- a/doc/api/webstreams.md
+++ b/doc/api/webstreams.md
@@ -1,5 +1,7 @@
# Web Streams API
+
+
> Stability: 1 - Experimental
An implementation of the [WHATWG Streams Standard][].
From 75144054564f5105a0660b47879be23f17f26fd9 Mon Sep 17 00:00:00 2001
From: Ash Cripps
Date: Fri, 30 Jul 2021 11:05:50 +0100
Subject: [PATCH 030/119] doc: update min mac ver + move mac arm64 to tier 1
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Update the minimum macos version that can compile to match the
xcode requirements.
Also move mac arm64 to tier 1.
refs: https://github.com/nodejs/node/issues/39584#issuecomment-889701855
PR-URL: https://github.com/nodejs/node/pull/39586
Reviewed-By: Antoine du Hamel
Reviewed-By: Beth Griggs
Reviewed-By: James M Snell
Reviewed-By: Richard Lau
Reviewed-By: Jiawen Geng
Reviewed-By: Michaël Zasso
---
BUILDING.md | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/BUILDING.md b/BUILDING.md
index 6f54601b5a078c..06cfaff6052bb4 100644
--- a/BUILDING.md
+++ b/BUILDING.md
@@ -113,8 +113,8 @@ platforms. This is true regardless of entries in the table below.
| Windows | x86 (native) | >= Windows 8.1/2012 R2 | Tier 1 (running) / Experimental (compiling) [6](#fn6) | |
| Windows | x64, x86 | Windows Server 2012 (not R2) | Experimental | |
| Windows | arm64 | >= Windows 10 | Tier 2 (compiling) / Experimental (running) | |
-| macOS | x64 | >= 10.13 | Tier 1 | |
-| macOS | arm64 | >= 11 | Experimental | |
+| macOS | x64 | >= 10.13 | Tier 1 | For notes about compilation see [8](#fn8) |
+| macOS | arm64 | >= 11 | Tier 1 | |
| SmartOS | x64 | >= 18 | Tier 2 | |
| AIX | ppc64be >=power7 | >= 7.2 TL04 | Tier 2 | |
| FreeBSD | x64 | >= 11 | Experimental | Downgraded as of Node.js 12 [7](#fn7) |
@@ -155,6 +155,10 @@ may not be possible.
FreeBSD 12.1 upgrades to 8.0.1. Other Clang/LLVM versions are available
via the system's package manager, including Clang 9.0.
+8: Our macOS x64 Binaries are compiled with 10.13 as a target.
+However there is no guarantee compiling on 10.13 will work as Xcode11 is
+required to compile.
+
### Supported toolchains
Depending on the host platform, the selection of toolchains may vary.
From cff2aea5df3d65caee17148891a7c751be7e5110 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Tue, 27 Jul 2021 19:10:42 -0700
Subject: [PATCH 031/119] test: add known issues test for debugger heap
snapshot race
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Refs: https://github.com/nodejs/node/issues/39555
PR-URL: https://github.com/nodejs/node/pull/39557
Reviewed-By: James M Snell
Reviewed-By: Michaël Zasso
---
.../test-debugger-takeHeapSnapshot-race.js | 48 +++++++++++++++++++
1 file changed, 48 insertions(+)
create mode 100644 test/known_issues/test-debugger-takeHeapSnapshot-race.js
diff --git a/test/known_issues/test-debugger-takeHeapSnapshot-race.js b/test/known_issues/test-debugger-takeHeapSnapshot-race.js
new file mode 100644
index 00000000000000..c1974135046671
--- /dev/null
+++ b/test/known_issues/test-debugger-takeHeapSnapshot-race.js
@@ -0,0 +1,48 @@
+'use strict';
+const common = require('../common');
+
+// Refs: https://github.com/nodejs/node/issues/39555
+
+// After this issue is fixed, this can perhaps be integrated into
+// test/sequential/test-debugger-heap-profiler.js as it shares almost all
+// the same code.
+
+// These skips should be uncommented once the issue is fixed.
+// common.skipIfInspectorDisabled();
+
+// if (!common.isMainThread) {
+// common.skip('process.chdir() is not available in workers');
+// }
+
+// This assert.fail() can be removed once the issue is fixed.
+if (!common.hasCrypto || !process.features.inspector) {
+ require('assert').fail('crypto is not available');
+}
+
+const fixtures = require('../common/fixtures');
+const startCLI = require('../common/debugger');
+const tmpdir = require('../common/tmpdir');
+
+tmpdir.refresh();
+process.chdir(tmpdir.path);
+
+const { readFileSync } = require('fs');
+
+const filename = 'node.heapsnapshot';
+
+// Check that two simultaneous snapshots don't step all over each other.
+{
+ const cli = startCLI([fixtures.path('debugger/empty.js')]);
+
+ function onFatal(error) {
+ cli.quit();
+ throw error;
+ }
+
+ return cli.waitForInitialBreak()
+ .then(() => cli.waitForPrompt())
+ .then(() => cli.command('takeHeapSnapshot(); takeHeapSnapshot()'))
+ .then(() => JSON.parse(readFileSync(filename, 'utf8')))
+ .then(() => cli.quit())
+ .then(null, onFatal);
+}
From 058e882a2a09b436586e1ae2a2a81f27e1bb61f6 Mon Sep 17 00:00:00 2001
From: Mestery
Date: Wed, 28 Jul 2021 03:41:18 +0200
Subject: [PATCH 032/119] lib: use ERR_ILLEGAL_CONSTRUCTOR
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Use ERR_ILLEGAL_CONSTRUCTOR error instead of `illegal constructor` or
`Illegal constructor` TypeError.
PR-URL: https://github.com/nodejs/node/pull/39556
Reviewed-By: Michaël Zasso
Reviewed-By: Antoine du Hamel
Reviewed-By: James M Snell
Reviewed-By: Luigi Pinca
Reviewed-By: Darshan Sen
---
lib/internal/abort_controller.js | 5 ++---
lib/internal/crypto/keys.js | 6 +++---
lib/internal/histogram.js | 5 ++---
lib/internal/perf/event_loop_delay.js | 10 +++++++---
lib/internal/perf/performance.js | 10 +++++++---
lib/internal/perf/performance_entry.js | 10 +++++++---
test/parallel/test-abortcontroller.js | 7 +++----
test/parallel/test-perf-hooks-histogram.js | 5 +----
test/parallel/test-webcrypto-keygen.js | 4 +---
9 files changed, 33 insertions(+), 29 deletions(-)
diff --git a/lib/internal/abort_controller.js b/lib/internal/abort_controller.js
index e6ee07052617d5..c24963b823c683 100644
--- a/lib/internal/abort_controller.js
+++ b/lib/internal/abort_controller.js
@@ -10,7 +10,6 @@ const {
ObjectDefineProperty,
Symbol,
SymbolToStringTag,
- TypeError,
} = primordials;
const {
@@ -25,6 +24,7 @@ const {
const { inspect } = require('internal/util/inspect');
const {
codes: {
+ ERR_ILLEGAL_CONSTRUCTOR,
ERR_INVALID_THIS,
}
} = require('internal/errors');
@@ -49,8 +49,7 @@ function validateAbortSignal(obj) {
class AbortSignal extends EventTarget {
constructor() {
- // eslint-disable-next-line no-restricted-syntax
- throw new TypeError('Illegal constructor');
+ throw new ERR_ILLEGAL_CONSTRUCTOR();
}
get aborted() {
diff --git a/lib/internal/crypto/keys.js b/lib/internal/crypto/keys.js
index 27e28942fdaa8a..1127217d6b9dee 100644
--- a/lib/internal/crypto/keys.js
+++ b/lib/internal/crypto/keys.js
@@ -34,11 +34,11 @@ const {
codes: {
ERR_CRYPTO_INCOMPATIBLE_KEY_OPTIONS,
ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE,
+ ERR_CRYPTO_INVALID_JWK,
+ ERR_ILLEGAL_CONSTRUCTOR,
ERR_INVALID_ARG_TYPE,
ERR_INVALID_ARG_VALUE,
ERR_OUT_OF_RANGE,
- ERR_OPERATION_FAILED,
- ERR_CRYPTO_INVALID_JWK,
}
} = require('internal/errors');
@@ -631,7 +631,7 @@ function isKeyObject(obj) {
// would be fantastic if we could find a way of making those interop.
class CryptoKey extends JSTransferable {
constructor() {
- throw new ERR_OPERATION_FAILED('Illegal constructor');
+ throw new ERR_ILLEGAL_CONSTRUCTOR();
}
[kInspect](depth, options) {
diff --git a/lib/internal/histogram.js b/lib/internal/histogram.js
index 2df321300fb87e..f437bfd4d791ac 100644
--- a/lib/internal/histogram.js
+++ b/lib/internal/histogram.js
@@ -7,7 +7,6 @@ const {
ObjectSetPrototypeOf,
SafeMap,
Symbol,
- TypeError,
} = primordials;
const {
@@ -22,6 +21,7 @@ const { inspect } = require('util');
const {
codes: {
+ ERR_ILLEGAL_CONSTRUCTOR,
ERR_INVALID_ARG_VALUE,
ERR_INVALID_ARG_TYPE,
ERR_OUT_OF_RANGE,
@@ -130,8 +130,7 @@ class Histogram extends JSTransferable {
class RecordableHistogram extends Histogram {
constructor() {
- // eslint-disable-next-line no-restricted-syntax
- throw new TypeError('illegal constructor');
+ throw new ERR_ILLEGAL_CONSTRUCTOR();
}
record(val) {
diff --git a/lib/internal/perf/event_loop_delay.js b/lib/internal/perf/event_loop_delay.js
index f90bb9e4de7d58..f5d0eb74d588e4 100644
--- a/lib/internal/perf/event_loop_delay.js
+++ b/lib/internal/perf/event_loop_delay.js
@@ -1,9 +1,14 @@
'use strict';
const {
Symbol,
- TypeError,
} = primordials;
+const {
+ codes: {
+ ERR_ILLEGAL_CONSTRUCTOR,
+ }
+} = require('internal/errors');
+
const {
ELDHistogram: _ELDHistogram,
} = internalBinding('performance');
@@ -23,8 +28,7 @@ const kEnabled = Symbol('kEnabled');
class ELDHistogram extends Histogram {
constructor(i) {
if (!(i instanceof _ELDHistogram)) {
- // eslint-disable-next-line no-restricted-syntax
- throw new TypeError('illegal constructor');
+ throw new ERR_ILLEGAL_CONSTRUCTOR();
}
super(i);
this[kEnabled] = false;
diff --git a/lib/internal/perf/performance.js b/lib/internal/perf/performance.js
index 2f75eb143a6ffe..38dac0ee32397c 100644
--- a/lib/internal/perf/performance.js
+++ b/lib/internal/perf/performance.js
@@ -4,9 +4,14 @@ const {
ObjectDefineProperty,
ObjectDefineProperties,
ObjectSetPrototypeOf,
- TypeError,
} = primordials;
+const {
+ codes: {
+ ERR_ILLEGAL_CONSTRUCTOR,
+ }
+} = require('internal/errors');
+
const {
EventTarget,
} = require('internal/event_target');
@@ -35,8 +40,7 @@ const {
class Performance extends EventTarget {
constructor() {
- // eslint-disable-next-line no-restricted-syntax
- throw new TypeError('Illegal constructor');
+ throw new ERR_ILLEGAL_CONSTRUCTOR();
}
[kInspect](depth, options) {
diff --git a/lib/internal/perf/performance_entry.js b/lib/internal/perf/performance_entry.js
index 8fcb0ca3fcdc0c..d8eedb9fb8f85b 100644
--- a/lib/internal/perf/performance_entry.js
+++ b/lib/internal/perf/performance_entry.js
@@ -3,9 +3,14 @@
const {
ObjectSetPrototypeOf,
Symbol,
- TypeError,
} = primordials;
+const {
+ codes: {
+ ERR_ILLEGAL_CONSTRUCTOR,
+ }
+} = require('internal/errors');
+
const {
customInspectSymbol: kInspect,
} = require('internal/util');
@@ -25,8 +30,7 @@ function isPerformanceEntry(obj) {
class PerformanceEntry {
constructor() {
- // eslint-disable-next-line no-restricted-syntax
- throw new TypeError('illegal constructor');
+ throw new ERR_ILLEGAL_CONSTRUCTOR();
}
get name() { return this[kName]; }
diff --git a/test/parallel/test-abortcontroller.js b/test/parallel/test-abortcontroller.js
index f7a70fbddbc89e..630a1c5708d765 100644
--- a/test/parallel/test-abortcontroller.js
+++ b/test/parallel/test-abortcontroller.js
@@ -56,10 +56,9 @@ const { ok, strictEqual, throws } = require('assert');
{
// Tests that AbortSignal is impossible to construct manually
const ac = new AbortController();
- throws(
- () => new ac.signal.constructor(),
- /^TypeError: Illegal constructor$/
- );
+ throws(() => new ac.signal.constructor(), {
+ code: 'ERR_ILLEGAL_CONSTRUCTOR',
+ });
}
{
// Symbol.toStringTag
diff --git a/test/parallel/test-perf-hooks-histogram.js b/test/parallel/test-perf-hooks-histogram.js
index 323dbeb153633a..a60d3a94bbc136 100644
--- a/test/parallel/test-perf-hooks-histogram.js
+++ b/test/parallel/test-perf-hooks-histogram.js
@@ -78,8 +78,5 @@ const { inspect } = require('util');
{
// Tests that RecordableHistogram is impossible to construct manually
const h = createHistogram();
- assert.throws(
- () => new h.constructor(),
- /^TypeError: illegal constructor$/
- );
+ assert.throws(() => new h.constructor(), { code: 'ERR_ILLEGAL_CONSTRUCTOR' });
}
diff --git a/test/parallel/test-webcrypto-keygen.js b/test/parallel/test-webcrypto-keygen.js
index e94a7b4488222a..502c86cf32abf3 100644
--- a/test/parallel/test-webcrypto-keygen.js
+++ b/test/parallel/test-webcrypto-keygen.js
@@ -657,9 +657,7 @@ const vectors = {
})().then(common.mustCall());
// End user code cannot create CryptoKey directly
-assert.throws(() => new CryptoKey(), {
- code: 'ERR_OPERATION_FAILED'
-});
+assert.throws(() => new CryptoKey(), { code: 'ERR_ILLEGAL_CONSTRUCTOR' });
{
const buffer = Buffer.from('Hello World');
From 6c375e18b64e479734c36baf8b9d9af098498352 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 28 Jul 2021 20:55:50 -0700
Subject: [PATCH 033/119] debugger: remove undefined parameter
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
The data parameter of unpackError() is typically undefined.
PR-URL: https://github.com/nodejs/node/pull/39570
Refs: https://github.com/nodejs/node-inspect/issues/101
Reviewed-By: Jan Krems
Reviewed-By: Michaël Zasso
---
lib/internal/debugger/inspect_client.js | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/internal/debugger/inspect_client.js b/lib/internal/debugger/inspect_client.js
index 419b984cc9f473..5c72304ba285c3 100644
--- a/lib/internal/debugger/inspect_client.js
+++ b/lib/internal/debugger/inspect_client.js
@@ -40,8 +40,8 @@ const kMaskingKeyWidthInBytes = 4;
// https://tools.ietf.org/html/rfc6455#section-1.3
const WEBSOCKET_HANDSHAKE_GUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11';
-function unpackError({ code, message, data }) {
- const err = new ERR_DEBUGGER_ERROR(`${message} - ${data}`);
+function unpackError({ code, message }) {
+ const err = new ERR_DEBUGGER_ERROR(`${message}`);
err.code = code;
ErrorCaptureStackTrace(err, unpackError);
return err;
From 635e1a0274e89dfaa76e3704f102d3c70feaa037 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 28 Jul 2021 20:56:11 -0700
Subject: [PATCH 034/119] test: add test-debugger-breakpoint-exists
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This adds test coverage to `unpackError()` in
`lib/internal/debugger/inspect_client.js`. That function previously was
untested.
PR-URL: https://github.com/nodejs/node/pull/39570
Refs: https://github.com/nodejs/node-inspect/issues/101
Reviewed-By: Jan Krems
Reviewed-By: Michaël Zasso
---
.../test-debugger-breakpoint-exists.js | 27 +++++++++++++++++++
1 file changed, 27 insertions(+)
create mode 100644 test/sequential/test-debugger-breakpoint-exists.js
diff --git a/test/sequential/test-debugger-breakpoint-exists.js b/test/sequential/test-debugger-breakpoint-exists.js
new file mode 100644
index 00000000000000..7be0ba657fa981
--- /dev/null
+++ b/test/sequential/test-debugger-breakpoint-exists.js
@@ -0,0 +1,27 @@
+'use strict';
+
+const common = require('../common');
+
+common.skipIfInspectorDisabled();
+
+const fixtures = require('../common/fixtures');
+const startCLI = require('../common/debugger');
+
+// Test for "Breakpoint at specified location already exists" error.
+{
+ const script = fixtures.path('debugger', 'three-lines.js');
+ const cli = startCLI([script]);
+
+ function onFatal(error) {
+ cli.quit();
+ throw error;
+ }
+
+ cli.waitForInitialBreak()
+ .then(() => cli.waitForPrompt())
+ .then(() => cli.command('setBreakpoint(1)'))
+ .then(() => cli.command('setBreakpoint(1)'))
+ .then(() => cli.waitFor(/Breakpoint at specified location already exists/))
+ .then(() => cli.quit())
+ .then(null, onFatal);
+}
From 016b7ba6168a4a59024af92f10109325231c96c6 Mon Sep 17 00:00:00 2001
From: James M Snell
Date: Tue, 27 Jul 2021 10:06:13 -0700
Subject: [PATCH 035/119] perf_hooks: fix PerformanceObserver gc crash
Signed-off-by: James M Snell
Fixes: https://github.com/nodejs/node/issues/39548
PR-URL: https://github.com/nodejs/node/pull/39550
Reviewed-By: Bryan English
Reviewed-By: Chengzhong Wu
---
lib/internal/perf/observe.js | 1 +
test/parallel/test-perf-gc-crash.js | 25 +++++++++++++++++++++++++
2 files changed, 26 insertions(+)
create mode 100644 test/parallel/test-perf-gc-crash.js
diff --git a/lib/internal/perf/observe.js b/lib/internal/perf/observe.js
index 67a37175a179b3..eadc617452e952 100644
--- a/lib/internal/perf/observe.js
+++ b/lib/internal/perf/observe.js
@@ -132,6 +132,7 @@ function maybeDecrementObserverCounts(entryTypes) {
if (observerType === NODE_PERFORMANCE_ENTRY_TYPE_GC &&
observerCounts[observerType] === 0) {
removeGarbageCollectionTracking();
+ gcTrackingInstalled = false;
}
}
}
diff --git a/test/parallel/test-perf-gc-crash.js b/test/parallel/test-perf-gc-crash.js
new file mode 100644
index 00000000000000..d980e91a2f2799
--- /dev/null
+++ b/test/parallel/test-perf-gc-crash.js
@@ -0,0 +1,25 @@
+'use strict';
+
+require('../common');
+
+// Refers to https://github.com/nodejs/node/issues/39548
+
+// The test fails if this crashes. If it closes normally,
+// then all is good.
+
+const {
+ PerformanceObserver,
+} = require('perf_hooks');
+
+// We don't actually care if the observer callback is called here.
+const gcObserver = new PerformanceObserver(() => {});
+
+gcObserver.observe({ entryTypes: ['gc'] });
+
+gcObserver.disconnect();
+
+const gcObserver2 = new PerformanceObserver(() => {});
+
+gcObserver2.observe({ entryTypes: ['gc'] });
+
+gcObserver2.disconnect();
From 4df59bc7276cc7fe280cb7a25c4561ae8e6f7f32 Mon Sep 17 00:00:00 2001
From: Antoine du Hamel
Date: Fri, 23 Jul 2021 13:25:45 +0200
Subject: [PATCH 036/119] module: add some typings to
`internal/modules/esm/resolve`
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
PR-URL: https://github.com/nodejs/node/pull/39504
Reviewed-By: James M Snell
Reviewed-By: Michaël Zasso
---
lib/internal/modules/esm/resolve.js | 131 +++++++++++++++++++++++++---
1 file changed, 121 insertions(+), 10 deletions(-)
diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js
index 8b6f23bb485d8b..0a0da107682469 100644
--- a/lib/internal/modules/esm/resolve.js
+++ b/lib/internal/modules/esm/resolve.js
@@ -59,7 +59,26 @@ const userConditions = getOptionValue('--conditions');
const DEFAULT_CONDITIONS = ObjectFreeze(['node', 'import', ...userConditions]);
const DEFAULT_CONDITIONS_SET = new SafeSet(DEFAULT_CONDITIONS);
+/**
+ * @typedef {string | string[] | Record} Exports
+ * @typedef {'module' | 'commonjs'} PackageType
+ * @typedef {{
+ * exports?: ExportConfig;
+ * name?: string;
+ * main?: string;
+ * type?: PackageType;
+ * }} PackageConfig
+ */
+
const emittedPackageWarnings = new SafeSet();
+
+/**
+ * @param {string} match
+ * @param {URL} pjsonUrl
+ * @param {boolean} isExports
+ * @param {string | URL | undefined} base
+ * @returns {void}
+ */
function emitFolderMapDeprecation(match, pjsonUrl, isExports, base) {
const pjsonPath = fileURLToPath(pjsonUrl);
@@ -76,6 +95,13 @@ function emitFolderMapDeprecation(match, pjsonUrl, isExports, base) {
);
}
+/**
+ * @param {URL} url
+ * @param {URL} packageJSONUrl
+ * @param {string | URL | undefined} base
+ * @param {string} main
+ * @returns
+ */
function emitLegacyIndexDeprecation(url, packageJSONUrl, base, main) {
const { format } = defaultGetFormat(url);
if (format !== 'module')
@@ -104,6 +130,10 @@ function emitLegacyIndexDeprecation(url, packageJSONUrl, base, main) {
);
}
+/**
+ * @param {string[]} [conditions]
+ * @returns {Set}
+ */
function getConditionsSet(conditions) {
if (conditions !== undefined && conditions !== DEFAULT_CONDITIONS) {
if (!ArrayIsArray(conditions)) {
@@ -118,9 +148,19 @@ function getConditionsSet(conditions) {
const realpathCache = new SafeMap();
const packageJSONCache = new SafeMap(); /* string -> PackageConfig */
+/**
+ * @param {string | URL} path
+ * @returns {import('fs').Stats}
+ */
const tryStatSync =
(path) => statSync(path, { throwIfNoEntry: false }) ?? new Stats();
+/**
+ * @param {string} path
+ * @param {string} specifier
+ * @param {string | URL | undefined} base
+ * @returns {PackageConfig}
+ */
function getPackageConfig(path, specifier, base) {
const existing = packageJSONCache.get(path);
if (existing !== undefined) {
@@ -173,6 +213,10 @@ function getPackageConfig(path, specifier, base) {
return packageConfig;
}
+/**
+ * @param {URL | string} resolved
+ * @returns {PackageConfig}
+ */
function getPackageScopeConfig(resolved) {
let packageJSONUrl = new URL('./package.json', resolved);
while (true) {
@@ -205,12 +249,6 @@ function getPackageScopeConfig(resolved) {
}
/**
- * Legacy CommonJS main resolution:
- * 1. let M = pkg_url + (json main field)
- * 2. TRY(M, M.js, M.json, M.node)
- * 3. TRY(M/index.js, M/index.json, M/index.node)
- * 4. TRY(pkg_url/index.js, pkg_url/index.json, pkg_url/index.node)
- * 5. NOT_FOUND
* @param {string | URL} url
* @returns {boolean}
*/
@@ -218,6 +256,18 @@ function fileExists(url) {
return statSync(url, { throwIfNoEntry: false })?.isFile() ?? false;
}
+/**
+ * Legacy CommonJS main resolution:
+ * 1. let M = pkg_url + (json main field)
+ * 2. TRY(M, M.js, M.json, M.node)
+ * 3. TRY(M/index.js, M/index.json, M/index.node)
+ * 4. TRY(pkg_url/index.js, pkg_url/index.json, pkg_url/index.node)
+ * 5. NOT_FOUND
+ * @param {URL} packageJSONUrl
+ * @param {PackageConfig} packageConfig
+ * @param {string | URL | undefined} base
+ * @returns {URL}
+ */
function legacyMainResolve(packageJSONUrl, packageConfig, base) {
let guess;
if (packageConfig.main !== undefined) {
@@ -259,12 +309,21 @@ function legacyMainResolve(packageJSONUrl, packageConfig, base) {
fileURLToPath(new URL('.', packageJSONUrl)), fileURLToPath(base));
}
+/**
+ * @param {URL} search
+ * @returns {URL | undefined}
+ */
function resolveExtensionsWithTryExactName(search) {
if (fileExists(search)) return search;
return resolveExtensions(search);
}
const extensions = ['.js', '.json', '.node', '.mjs'];
+
+/**
+ * @param {URL} search
+ * @returns {URL | undefined}
+ */
function resolveExtensions(search) {
for (let i = 0; i < extensions.length; i++) {
const extension = extensions[i];
@@ -274,6 +333,10 @@ function resolveExtensions(search) {
return undefined;
}
+/**
+ * @param {URL} search
+ * @returns {URL | undefined}
+ */
function resolveDirectoryEntry(search) {
const dirPath = fileURLToPath(search);
const pkgJsonPath = resolve(dirPath, 'package.json');
@@ -291,6 +354,11 @@ function resolveDirectoryEntry(search) {
}
const encodedSepRegEx = /%2F|%2C/i;
+/**
+ * @param {URL} resolved
+ * @param {string | URL | undefined} base
+ * @returns {URL | undefined}
+ */
function finalizeResolution(resolved, base) {
if (RegExpPrototypeTest(encodedSepRegEx, resolved.pathname))
throw new ERR_INVALID_MODULE_SPECIFIER(
@@ -325,18 +393,35 @@ function finalizeResolution(resolved, base) {
return resolved;
}
+/**
+ * @param {string} specifier
+ * @param {URL} packageJSONUrl
+ * @param {string | URL | undefined} base
+ */
function throwImportNotDefined(specifier, packageJSONUrl, base) {
throw new ERR_PACKAGE_IMPORT_NOT_DEFINED(
specifier, packageJSONUrl && fileURLToPath(new URL('.', packageJSONUrl)),
fileURLToPath(base));
}
+/**
+ * @param {string} specifier
+ * @param {URL} packageJSONUrl
+ * @param {string | URL | undefined} base
+ */
function throwExportsNotFound(subpath, packageJSONUrl, base) {
throw new ERR_PACKAGE_PATH_NOT_EXPORTED(
fileURLToPath(new URL('.', packageJSONUrl)), subpath,
base && fileURLToPath(base));
}
+/**
+ *
+ * @param {string | URL} subpath
+ * @param {URL} packageJSONUrl
+ * @param {boolean} internal
+ * @param {string | URL | undefined} base
+ */
function throwInvalidSubpath(subpath, packageJSONUrl, internal, base) {
const reason = `request is not a valid subpath for the "${internal ?
'imports' : 'exports'}" resolution of ${fileURLToPath(packageJSONUrl)}`;
@@ -478,6 +563,13 @@ function resolvePackageTarget(packageJSONUrl, target, subpath, packageSubpath,
base);
}
+/**
+ *
+ * @param {Exports} exports
+ * @param {URL} packageJSONUrl
+ * @param {string | URL | undefined} base
+ * @returns
+ */
function isConditionalExportsMainSugar(exports, packageJSONUrl, base) {
if (typeof exports === 'string' || ArrayIsArray(exports)) return true;
if (typeof exports !== 'object' || exports === null) return false;
@@ -504,8 +596,8 @@ function isConditionalExportsMainSugar(exports, packageJSONUrl, base) {
/**
* @param {URL} packageJSONUrl
* @param {string} packageSubpath
- * @param {object} packageConfig
- * @param {string} base
+ * @param {PackageConfig} packageConfig
+ * @param {string | URL | undefined} base
* @param {Set} conditions
* @returns {URL}
*/
@@ -560,6 +652,12 @@ function packageExportsResolve(
throwExportsNotFound(packageSubpath, packageJSONUrl, base);
}
+/**
+ * @param {string} name
+ * @param {string | URL | undefined} base
+ * @param {Set} conditions
+ * @returns
+ */
function packageImportsResolve(name, base, conditions) {
if (name === '#' || StringPrototypeStartsWith(name, '#/')) {
const reason = 'is not a valid internal imports specifier name';
@@ -615,11 +713,20 @@ function packageImportsResolve(name, base, conditions) {
throwImportNotDefined(name, packageJSONUrl, base);
}
+/**
+ * @param {URL} url
+ * @returns {PackageType}
+ */
function getPackageType(url) {
const packageConfig = getPackageScopeConfig(url);
return packageConfig.type;
}
+/**
+ * @param {string} specifier
+ * @param {string | URL | undefined} base
+ * @returns {{ packageName: string, packageSubpath: string, isScoped: boolean }}
+ */
function parsePackageName(specifier, base) {
let separatorIndex = StringPrototypeIndexOf(specifier, '/');
let validPackageName = true;
@@ -659,7 +766,7 @@ function parsePackageName(specifier, base) {
/**
* @param {string} specifier
- * @param {URL} base
+ * @param {string | URL | undefined} base
* @param {Set} conditions
* @returns {URL}
*/
@@ -712,6 +819,10 @@ function packageResolve(specifier, base, conditions) {
throw new ERR_MODULE_NOT_FOUND(packageName, fileURLToPath(base));
}
+/**
+ * @param {string} specifier
+ * @returns {boolean}
+ */
function isBareSpecifier(specifier) {
return specifier[0] && specifier[0] !== '/' && specifier[0] !== '.';
}
@@ -734,7 +845,7 @@ function shouldBeTreatedAsRelativeOrAbsolutePath(specifier) {
/**
* @param {string} specifier
- * @param {URL} base
+ * @param {string | URL | undefined} base
* @param {Set} conditions
* @returns {URL}
*/
From 9e38fc6757332b088ee5e3ccd704f13c424e25e1 Mon Sep 17 00:00:00 2001
From: Robert Nagy
Date: Fri, 30 Jul 2021 14:18:38 +0200
Subject: [PATCH 037/119] stream: add readableDidRead if has been read from
Adds did read accessor used to determine whether a readable has been
read from.
PR-URL: https://github.com/nodejs/node/pull/39589
Refs: https://github.com/nodejs/undici/pull/907
Reviewed-By: Matteo Collina
Reviewed-By: Benjamin Gruenbaum
---
doc/api/stream.md | 11 ++
lib/internal/streams/readable.js | 19 +++-
test/parallel/test-stream-readable-didRead.js | 104 ++++++++++++++++++
3 files changed, 133 insertions(+), 1 deletion(-)
create mode 100644 test/parallel/test-stream-readable-didRead.js
diff --git a/doc/api/stream.md b/doc/api/stream.md
index c86f051c55fffa..89d86f0f38c638 100644
--- a/doc/api/stream.md
+++ b/doc/api/stream.md
@@ -1259,6 +1259,17 @@ added: v11.4.0
Is `true` if it is safe to call [`readable.read()`][stream-read], which means
the stream has not been destroyed or emitted `'error'` or `'end'`.
+##### `readable.readableDidRead`
+
+
+* {boolean}
+
+Allows determining if the stream has been or is about to be read.
+Returns true if `'data'`, `'end'`, `'error'` or `'close'` has been
+emitted.
+
##### `readable.readableEncoding`
+
+* Returns: {string}
+
+Generates a random [RFC 4122][] Version 4 UUID. The UUID is generated using a
+cryptographic pseudorandom number generator.
+
## Class: `CryptoKey`
+#### `new CompressionStream(format)`
+
+
+* `format` {string} One of either `'deflate'` or `'gzip'`.
+
+#### `compressionStream.readable`
+
+
+* Type: {ReadableStream}
+
+#### `compressionStream.writable`
+
+
+* Type: {WritableStream}
+
+### Class: `DecompressionStream`
+
+
+#### `new DecompressionStream(format)`
+
+
+* `format` {string} One of either `'deflate'` or `'gzip'`.
+
+#### `decompressionStream.readable`
+
+
+* Type: {ReadableStream}
+
+#### `deccompressionStream.writable`
+
+
+* Type: {WritableStream}
+
+### Utility Consumers
+
+
+The utility consumer functions provide common options for consuming
+streams.
+
+They are accessed using:
+
+```mjs
+import {
+ arrayBuffer,
+ blob,
+ json,
+ text,
+} from 'node:stream/consumers';
+```
+
+```cjs
+const {
+ arrayBuffer,
+ blob,
+ json,
+ text,
+} = require('stream/consumers');
+```
+
+#### `streamConsumers.arrayBuffer(stream)`
+
+
+* `stream` {ReadableStream|stream.Readable|AsyncIterator}
+* Returns: {Promise} Fulfills with an `ArrayBuffer` containing the full
+ contents of the stream.
+
+#### `streamConsumers.blob(stream)`
+
+
+* `stream` {ReadableStream|stream.Readable|AsyncIterator}
+* Returns: {Promise} Fulfills with a {Blob} containing the full contents
+ of the stream.
+
+#### `streamConsumers.buffer(stream)`
+
+
+* `stream` {ReadableStream|stream.Readable|AsyncIterator}
+* Returns: {Promise} Fulfills with a {Buffer} containing the full
+ contents of the stream.
+
+#### `streamConsumers.json(stream)`
+
+
+* `stream` {ReadableStream|stream.Readable|AsyncIterator}
+* Returns: {Promise} Fulfills with the contents of the stream parsed as a
+ UTF-8 encoded string that is then passed through `JSON.parse()`.
+
+#### `streamConsumers.text(stream)`
+
+
+* `stream` {ReadableStream|stream.Readable|AsyncIterator}
+* Returns: {Promise} Fulfills with the contents of the stream parsed as a
+ UTF-8 encoded string.
+
[Streams]: stream.md
[WHATWG Streams Standard]: https://streams.spec.whatwg.org/
diff --git a/lib/stream/consumers.js b/lib/stream/consumers.js
new file mode 100644
index 00000000000000..ffe6e531205e7f
--- /dev/null
+++ b/lib/stream/consumers.js
@@ -0,0 +1,84 @@
+'use strict';
+
+const {
+ JSONParse,
+} = primordials;
+
+const {
+ TextDecoder,
+} = require('internal/encoding');
+
+const {
+ Blob,
+} = require('internal/blob');
+
+const {
+ Buffer,
+} = require('buffer');
+
+/**
+ * @typedef {import('../internal/webstreams/readablestream').ReadableStream
+ * } ReadableStream
+ * @typedef {import('../internal/streams/readable')} Readable
+ */
+
+/**
+ * @param {AsyncIterable|ReadableStream|Readable} stream
+ * @returns {Promise}
+ */
+async function blob(stream) {
+ const chunks = [];
+ for await (const chunk of stream)
+ chunks.push(chunk);
+ return new Blob(chunks);
+}
+
+/**
+ * @param {AsyncIterable|ReadableStream|Readable} stream
+ * @returns {Promise}
+ */
+async function arrayBuffer(stream) {
+ const ret = await blob(stream);
+ return ret.arrayBuffer();
+}
+
+/**
+ * @param {AsyncIterable|ReadableStream|Readable} stream
+ * @returns {Promise}
+ */
+async function buffer(stream) {
+ return Buffer.from(await arrayBuffer(stream));
+}
+
+/**
+ * @param {AsyncIterable|ReadableStream|Readable} stream
+ * @returns {Promise}
+ */
+async function text(stream) {
+ const dec = new TextDecoder();
+ let str = '';
+ for await (const chunk of stream) {
+ if (typeof chunk === 'string')
+ str += chunk;
+ else
+ str += dec.decode(chunk, { stream: true });
+ }
+ return str;
+}
+
+/**
+ * @param {AsyncIterable|ReadableStream|Readable} stream
+ * @returns {Promise}
+ */
+async function json(stream) {
+ const str = await text(stream);
+ return JSONParse(str);
+}
+
+module.exports = {
+ arrayBuffer,
+ blob,
+ buffer,
+ text,
+ json,
+};
diff --git a/test/parallel/test-stream-consumers.js b/test/parallel/test-stream-consumers.js
new file mode 100644
index 00000000000000..8f6a9deb1c27dc
--- /dev/null
+++ b/test/parallel/test-stream-consumers.js
@@ -0,0 +1,234 @@
+// Flags: --no-warnings
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+
+const {
+ arrayBuffer,
+ blob,
+ buffer,
+ text,
+ json,
+} = require('stream/consumers');
+
+const {
+ PassThrough
+} = require('stream');
+
+const {
+ TransformStream,
+} = require('stream/web');
+
+const buf = Buffer.from('hellothere');
+const kArrayBuffer =
+ buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+
+{
+ const passthrough = new PassThrough();
+
+ blob(passthrough).then(common.mustCall(async (blob) => {
+ assert.strictEqual(blob.size, 10);
+ assert.deepStrictEqual(await blob.arrayBuffer(), kArrayBuffer);
+ }));
+
+ passthrough.write('hello');
+ setTimeout(() => passthrough.end('there'), 10);
+}
+
+{
+ const passthrough = new PassThrough();
+
+ arrayBuffer(passthrough).then(common.mustCall(async (ab) => {
+ assert.strictEqual(ab.byteLength, 10);
+ assert.deepStrictEqual(ab, kArrayBuffer);
+ }));
+
+ passthrough.write('hello');
+ setTimeout(() => passthrough.end('there'), 10);
+}
+
+{
+ const passthrough = new PassThrough();
+
+ buffer(passthrough).then(common.mustCall(async (buf) => {
+ assert.strictEqual(buf.byteLength, 10);
+ assert.deepStrictEqual(buf.buffer, kArrayBuffer);
+ }));
+
+ passthrough.write('hello');
+ setTimeout(() => passthrough.end('there'), 10);
+}
+
+
+{
+ const passthrough = new PassThrough();
+
+ text(passthrough).then(common.mustCall(async (str) => {
+ assert.strictEqual(str.length, 10);
+ assert.deepStrictEqual(str, 'hellothere');
+ }));
+
+ passthrough.write('hello');
+ setTimeout(() => passthrough.end('there'), 10);
+}
+
+{
+ const passthrough = new PassThrough();
+
+ json(passthrough).then(common.mustCall(async (str) => {
+ assert.strictEqual(str.length, 10);
+ assert.deepStrictEqual(str, 'hellothere');
+ }));
+
+ passthrough.write('"hello');
+ setTimeout(() => passthrough.end('there"'), 10);
+}
+
+{
+ const { writable, readable } = new TransformStream();
+
+ blob(readable).then(common.mustCall(async (blob) => {
+ assert.strictEqual(blob.size, 10);
+ assert.deepStrictEqual(await blob.arrayBuffer(), kArrayBuffer);
+ }));
+
+ const writer = writable.getWriter();
+ writer.write('hello');
+ setTimeout(() => {
+ writer.write('there');
+ writer.close();
+ }, 10);
+
+ assert.rejects(blob(readable), { code: 'ERR_INVALID_STATE' });
+}
+
+{
+ const { writable, readable } = new TransformStream();
+
+ arrayBuffer(readable).then(common.mustCall(async (ab) => {
+ assert.strictEqual(ab.byteLength, 10);
+ assert.deepStrictEqual(ab, kArrayBuffer);
+ }));
+
+ const writer = writable.getWriter();
+ writer.write('hello');
+ setTimeout(() => {
+ writer.write('there');
+ writer.close();
+ }, 10);
+
+ assert.rejects(arrayBuffer(readable), { code: 'ERR_INVALID_STATE' });
+}
+
+{
+ const { writable, readable } = new TransformStream();
+
+ text(readable).then(common.mustCall(async (str) => {
+ assert.strictEqual(str.length, 10);
+ assert.deepStrictEqual(str, 'hellothere');
+ }));
+
+ const writer = writable.getWriter();
+ writer.write('hello');
+ setTimeout(() => {
+ writer.write('there');
+ writer.close();
+ }, 10);
+
+ assert.rejects(text(readable), { code: 'ERR_INVALID_STATE' });
+}
+
+{
+ const { writable, readable } = new TransformStream();
+
+ json(readable).then(common.mustCall(async (str) => {
+ assert.strictEqual(str.length, 10);
+ assert.deepStrictEqual(str, 'hellothere');
+ }));
+
+ const writer = writable.getWriter();
+ writer.write('"hello');
+ setTimeout(() => {
+ writer.write('there"');
+ writer.close();
+ }, 10);
+
+ assert.rejects(json(readable), { code: 'ERR_INVALID_STATE' });
+}
+
+{
+ const stream = new PassThrough({
+ readableObjectMode: true,
+ writableObjectMode: true,
+ });
+
+ blob(stream).then(common.mustCall((blob) => {
+ assert.strictEqual(blob.size, 30);
+ }));
+
+ stream.write({});
+ stream.end({});
+}
+
+{
+ const stream = new PassThrough({
+ readableObjectMode: true,
+ writableObjectMode: true,
+ });
+
+ arrayBuffer(stream).then(common.mustCall((ab) => {
+ assert.strictEqual(ab.byteLength, 30);
+ assert.strictEqual(
+ Buffer.from(ab).toString(),
+ '[object Object][object Object]');
+ }));
+
+ stream.write({});
+ stream.end({});
+}
+
+{
+ const stream = new PassThrough({
+ readableObjectMode: true,
+ writableObjectMode: true,
+ });
+
+ buffer(stream).then(common.mustCall((buf) => {
+ assert.strictEqual(buf.byteLength, 30);
+ assert.strictEqual(
+ buf.toString(),
+ '[object Object][object Object]');
+ }));
+
+ stream.write({});
+ stream.end({});
+}
+
+{
+ const stream = new PassThrough({
+ readableObjectMode: true,
+ writableObjectMode: true,
+ });
+
+ assert.rejects(text(stream), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ });
+
+ stream.write({});
+ stream.end({});
+}
+
+{
+ const stream = new PassThrough({
+ readableObjectMode: true,
+ writableObjectMode: true,
+ });
+
+ assert.rejects(json(stream), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ });
+
+ stream.write({});
+ stream.end({});
+}
From f5200f97855bcf95ccb478e74e3fa692261935f3 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 4 Aug 2021 17:27:40 -0700
Subject: [PATCH 067/119] doc: update debugger.md description and examples
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
The console captures in debugger.md are out of date, showing the
wrapper:
function (exports, require, module, __filename, __dirname) {
That wrapper is no longer shown in debug sessions.
This also shortens and simplifies the description of the debugger in the
lede.
PR-URL: https://github.com/nodejs/node/pull/39661
Reviewed-By: Antoine du Hamel
Reviewed-By: Tobias Nießen
Reviewed-By: James M Snell
---
doc/api/debugger.md | 81 +++++++++++++++++++++++++--------------------
1 file changed, 46 insertions(+), 35 deletions(-)
diff --git a/doc/api/debugger.md b/doc/api/debugger.md
index 66a97f36ec5ee9..b155738b073165 100644
--- a/doc/api/debugger.md
+++ b/doc/api/debugger.md
@@ -6,20 +6,22 @@
-Node.js includes an out-of-process debugging utility accessible via a
-[V8 Inspector][] and built-in debugging client. To use it, start Node.js
-with the `inspect` argument followed by the path to the script to debug; a
-prompt will be displayed indicating successful launch of the debugger:
+Node.js includes a command-line debugging utility. To use it, start Node.js
+with the `inspect` argument followed by the path to the script to debug.
```console
$ node inspect myscript.js
-< Debugger listening on ws://127.0.0.1:9229/80e7a814-7cd3-49fb-921a-2e02228cd5ba
+< Debugger listening on ws://127.0.0.1:9229/621111f9-ffcb-4e82-b718-48a145fa5db8
< For help, see: https://nodejs.org/en/docs/inspector
+<
< Debugger attached.
-Break on start in myscript.js:1
-> 1 (function (exports, require, module, __filename, __dirname) { global.x = 5;
- 2 setTimeout(() => {
- 3 console.log('world');
+<
+ ok
+Break on start in myscript.js:2
+ 1 // myscript.js
+> 2 global.x = 5;
+ 3 setTimeout(() => {
+ 4 debugger;
debug>
```
@@ -44,28 +46,33 @@ Once the debugger is run, a breakpoint will occur at line 3:
```console
$ node inspect myscript.js
-< Debugger listening on ws://127.0.0.1:9229/80e7a814-7cd3-49fb-921a-2e02228cd5ba
+< Debugger listening on ws://127.0.0.1:9229/621111f9-ffcb-4e82-b718-48a145fa5db8
< For help, see: https://nodejs.org/en/docs/inspector
+<
< Debugger attached.
-Break on start in myscript.js:1
-> 1 (function (exports, require, module, __filename, __dirname) { global.x = 5;
- 2 setTimeout(() => {
- 3 debugger;
+<
+ ok
+Break on start in myscript.js:2
+ 1 // myscript.js
+> 2 global.x = 5;
+ 3 setTimeout(() => {
+ 4 debugger;
debug> cont
< hello
-break in myscript.js:3
- 1 (function (exports, require, module, __filename, __dirname) { global.x = 5;
- 2 setTimeout(() => {
-> 3 debugger;
- 4 console.log('world');
- 5 }, 1000);
-debug> next
+<
break in myscript.js:4
- 2 setTimeout(() => {
- 3 debugger;
-> 4 console.log('world');
- 5 }, 1000);
- 6 console.log('hello');
+ 2 global.x = 5;
+ 3 setTimeout(() => {
+> 4 debugger;
+ 5 console.log('world');
+ 6 }, 1000);
+debug> next
+break in myscript.js:5
+ 3 setTimeout(() => {
+ 4 debugger;
+> 5 console.log('world');
+ 6 }, 1000);
+ 7 console.log('hello');
debug> repl
Press Ctrl+C to leave debug repl
> x
@@ -74,13 +81,15 @@ Press Ctrl+C to leave debug repl
4
debug> next
< world
-break in myscript.js:5
- 3 debugger;
- 4 console.log('world');
-> 5 }, 1000);
- 6 console.log('hello');
- 7
+<
+break in myscript.js:6
+ 4 debugger;
+ 5 console.log('world');
+> 6 }, 1000);
+ 7 console.log('hello');
+ 8
debug> .exit
+$
```
The `repl` command allows code to be evaluated remotely. The `next` command
@@ -129,11 +138,14 @@ is not loaded yet:
```console
$ node inspect main.js
-< Debugger listening on ws://127.0.0.1:9229/4e3db158-9791-4274-8909-914f7facf3bd
+< Debugger listening on ws://127.0.0.1:9229/48a5b28a-550c-471b-b5e1-d13dd7165df9
< For help, see: https://nodejs.org/en/docs/inspector
+<
< Debugger attached.
+<
+ ok
Break on start in main.js:1
-> 1 (function (exports, require, module, __filename, __dirname) { const mod = require('./mod.js');
+> 1 const mod = require('./mod.js');
2 mod.hello();
3 mod.hello();
debug> setBreakpoint('mod.js', 22)
@@ -239,6 +251,5 @@ Chrome DevTools doesn't support debugging [worker threads][] yet.
[ndb][] can be used to debug them.
[Chrome DevTools Protocol]: https://chromedevtools.github.io/devtools-protocol/
-[V8 Inspector]: #debugger_v8_inspector_integration_for_node_js
[ndb]: https://github.com/GoogleChromeLabs/ndb/
[worker threads]: worker_threads.md
From 9f5acfa90e554eb367ddc3cb57597c666869839d Mon Sep 17 00:00:00 2001
From: bcoe
Date: Fri, 6 Aug 2021 14:45:58 -0700
Subject: [PATCH 068/119] test: increase memory for coverage action
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
PR-URL: https://github.com/nodejs/node/pull/39690
Reviewed-By: Richard Lau
Reviewed-By: Rich Trott
Reviewed-By: Michaël Zasso
Reviewed-By: James M Snell
---
.github/workflows/coverage-linux.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml
index 9cc17aa892d285..c8b740db801169 100644
--- a/.github/workflows/coverage-linux.yml
+++ b/.github/workflows/coverage-linux.yml
@@ -46,6 +46,8 @@ jobs:
run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots" || exit 0
- name: Report JS
run: npx c8 report --check-coverage
+ env:
+ NODE_OPTIONS: --max-old-space-size=8192
- name: Report C++
run: cd out && gcovr --gcov-exclude='.*\b(deps|usr|out|obj|cctest|embedding)\b' -v -r Release/obj.target --xml -o ../coverage/coverage-cxx.xml --root=$(cd ../ && pwd)
# Clean temporary output from gcov and c8, so that it's not uploaded:
From cf028df0ed7ceeee221e8b235bec3a158b7942dc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C3=ABl=20Zasso?=
Date: Thu, 5 Aug 2021 16:30:32 +0200
Subject: [PATCH 069/119] build: fix V8 build with pointer compression
Refs: https://github.com/nodejs/TSC/issues/790#issuecomment-893457655
PR-URL: https://github.com/nodejs/node/pull/39664
Reviewed-By: Richard Lau
Reviewed-By: James M Snell
Reviewed-By: Colin Ihrig
Reviewed-By: Mary Marchini
---
common.gypi | 5 ++++-
tools/v8_gypfiles/features.gypi | 5 ++++-
2 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/common.gypi b/common.gypi
index d4f1d425f83958..fcd1cebeb56007 100644
--- a/common.gypi
+++ b/common.gypi
@@ -356,7 +356,10 @@
],
}],
['v8_enable_pointer_compression == 1', {
- 'defines': ['V8_COMPRESS_POINTERS'],
+ 'defines': [
+ 'V8_COMPRESS_POINTERS',
+ 'V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE',
+ ],
}],
['v8_enable_pointer_compression == 1 or v8_enable_31bit_smis_on_64bit_arch == 1', {
'defines': ['V8_31BIT_SMIS_ON_64BIT_ARCH'],
diff --git a/tools/v8_gypfiles/features.gypi b/tools/v8_gypfiles/features.gypi
index 68da6ef3378904..6bb08bcebeca91 100644
--- a/tools/v8_gypfiles/features.gypi
+++ b/tools/v8_gypfiles/features.gypi
@@ -261,7 +261,10 @@
'defines': ['ENABLE_MINOR_MC',],
}],
['v8_enable_pointer_compression==1', {
- 'defines': ['V8_COMPRESS_POINTERS',],
+ 'defines': [
+ 'V8_COMPRESS_POINTERS',
+ 'V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE',
+ ],
}],
['v8_enable_pointer_compression==1 or v8_enable_31bit_smis_on_64bit_arch==1', {
'defines': ['V8_31BIT_SMIS_ON_64BIT_ARCH',],
From 08b31f12f8204f8687d25cc0722061d30683de05 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Fri, 6 Aug 2021 02:00:58 +0200
Subject: [PATCH 070/119] doc: change "Version 4 UUID" to "version 4 UUID"
Refs: https://www.rfc-editor.org/rfc/rfc4122.txt
PR-URL: https://github.com/nodejs/node/pull/39682
Reviewed-By: Colin Ihrig
Reviewed-By: James M Snell
Reviewed-By: Beth Griggs
Reviewed-By: Darshan Sen
---
doc/api/crypto.md | 2 +-
doc/api/webcrypto.md | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/api/crypto.md b/doc/api/crypto.md
index 0cc623dbfac291..8318b20ca3003f 100644
--- a/doc/api/crypto.md
+++ b/doc/api/crypto.md
@@ -4715,7 +4715,7 @@ added: v15.6.0
**Default:** `false`.
* Returns: {string}
-Generates a random [RFC 4122][] Version 4 UUID. The UUID is generated using a
+Generates a random [RFC 4122][] version 4 UUID. The UUID is generated using a
cryptographic pseudorandom number generator.
### `crypto.scrypt(password, salt, keylen[, options], callback)`
diff --git a/doc/api/webcrypto.md b/doc/api/webcrypto.md
index 799cfd56431e5e..a345655b77e703 100644
--- a/doc/api/webcrypto.md
+++ b/doc/api/webcrypto.md
@@ -368,7 +368,7 @@ added: REPLACEME
* Returns: {string}
-Generates a random [RFC 4122][] Version 4 UUID. The UUID is generated using a
+Generates a random [RFC 4122][] version 4 UUID. The UUID is generated using a
cryptographic pseudorandom number generator.
## Class: `CryptoKey`
From 34a041a846db1d2d343f5d897a560ddc4f70fe3e Mon Sep 17 00:00:00 2001
From: Richard Lau
Date: Fri, 6 Aug 2021 12:59:28 -0400
Subject: [PATCH 071/119] test: fix test-debugger-heap-profiler for workers
Fix `sequential/test-debugger-heap-profiler` so that it can be run
in a worker thread. `process.chdir()` is not allowed in worker threads
but passing a current working directory into a spawned child process
is allowed.
PR-URL: https://github.com/nodejs/node/pull/39687
Reviewed-By: Rich Trott
Reviewed-By: Colin Ihrig
Reviewed-By: James M Snell
---
test/sequential/test-debugger-heap-profiler.js | 11 ++++-------
1 file changed, 4 insertions(+), 7 deletions(-)
diff --git a/test/sequential/test-debugger-heap-profiler.js b/test/sequential/test-debugger-heap-profiler.js
index 0c8327a64f3c85..86eb9d9d0d232f 100644
--- a/test/sequential/test-debugger-heap-profiler.js
+++ b/test/sequential/test-debugger-heap-profiler.js
@@ -3,24 +3,21 @@ const common = require('../common');
common.skipIfInspectorDisabled();
-if (!common.isMainThread) {
- common.skip('process.chdir() is not available in workers');
-}
-
const fixtures = require('../common/fixtures');
const startCLI = require('../common/debugger');
const tmpdir = require('../common/tmpdir');
+const path = require('path');
tmpdir.refresh();
-process.chdir(tmpdir.path);
const { readFileSync } = require('fs');
-const filename = 'node.heapsnapshot';
+const filename = path.join(tmpdir.path, 'node.heapsnapshot');
// Heap profiler take snapshot.
{
- const cli = startCLI([fixtures.path('debugger/empty.js')]);
+ const opts = { cwd: tmpdir.path };
+ const cli = startCLI([fixtures.path('debugger/empty.js')], [], opts);
function onFatal(error) {
cli.quit();
From 61c53f39d2e5c2ee5e41b0b06a1592c8d65aef17 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Fri, 6 Aug 2021 20:59:35 -0700
Subject: [PATCH 072/119] tools: update inspector_protocol to fe0467fd105a
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Refs: https://chromium.googlesource.com/deps/inspector_protocol/+log
PR-URL: https://github.com/nodejs/node/pull/39694
Reviewed-By: Michaël Zasso
Reviewed-By: Tobias Nießen
Reviewed-By: James M Snell
---
.../lib/encoding_cpp.template | 21 ++++++++++---------
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/tools/inspector_protocol/lib/encoding_cpp.template b/tools/inspector_protocol/lib/encoding_cpp.template
index 2fc7dd623fdcc3..c5b6489652a9f2 100644
--- a/tools/inspector_protocol/lib/encoding_cpp.template
+++ b/tools/inspector_protocol/lib/encoding_cpp.template
@@ -850,14 +850,15 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
return;
case MajorType::NEGATIVE: { // INT32.
// INT32 is a signed int32 (int32 makes sense for the
- // inspector_protocol, it's not a CBOR limitation); in CBOR,
- // the negative values for INT32 are represented as NEGATIVE,
- // that is, -1 INT32 is represented as 1 << 5 | 0 (major type 1,
- // additional info value 0). So here, we compute the INT32 value
- // and then check it against the INT32 min.
- int64_t actual_value =
- -static_cast(token_start_internal_value_) - 1;
- if (!success || actual_value < std::numeric_limits::min()) {
+ // inspector_protocol, it's not a CBOR limitation); in CBOR, the
+ // negative values for INT32 are represented as NEGATIVE, that is, -1
+ // INT32 is represented as 1 << 5 | 0 (major type 1, additional info
+ // value 0). The minimal allowed INT32 value in our protocol is
+ // std::numeric_limits::min(). We check for it by directly
+ // checking the payload against the maximal allowed signed (!) int32
+ // value.
+ if (!success || token_start_internal_value_ >
+ std::numeric_limits::max()) {
SetError(Error::CBOR_INVALID_INT32);
return;
}
@@ -1864,7 +1865,7 @@ class JsonParser {
// If the |Char| we're dealing with is really a byte, then
// we have utf8 here, and we need to check for multibyte characters
// and transcode them to utf16 (either one or two utf16 chars).
- if (sizeof(Char) == sizeof(uint8_t) && c >= 0x7f) {
+ if (sizeof(Char) == sizeof(uint8_t) && c > 0x7f) {
// Inspect the leading byte to figure out how long the utf8
// byte sequence is; while doing this initialize |codepoint|
// with the first few bits.
@@ -1903,7 +1904,7 @@ class JsonParser {
// Disallow overlong encodings for ascii characters, as these
// would include " and other characters significant to JSON
// string termination / control.
- if (codepoint < 0x7f)
+ if (codepoint <= 0x7f)
return false;
// Invalid in UTF8, and can't be represented in UTF16 anyway.
if (codepoint > 0x10ffff)
From 6e19c166e40af48af56492d25358465c7f33d395 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Fri, 6 Aug 2021 21:03:45 -0700
Subject: [PATCH 073/119] tools: update inspector_protocol to
a53e96d31a2755eb16ca37
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Refs: https://chromium.googlesource.com/deps/inspector_protocol/+log
PR-URL: https://github.com/nodejs/node/pull/39694
Reviewed-By: Michaël Zasso
Reviewed-By: Tobias Nießen
Reviewed-By: James M Snell
---
tools/inspector_protocol/encoding/encoding.cc | 10 ++++++----
tools/inspector_protocol/encoding/encoding_test.cc | 7 ++++++-
2 files changed, 12 insertions(+), 5 deletions(-)
diff --git a/tools/inspector_protocol/encoding/encoding.cc b/tools/inspector_protocol/encoding/encoding.cc
index 353316a555373d..7eb499f9712a25 100644
--- a/tools/inspector_protocol/encoding/encoding.cc
+++ b/tools/inspector_protocol/encoding/encoding.cc
@@ -846,10 +846,12 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
// inspector_protocol, it's not a CBOR limitation); in CBOR, the
// negative values for INT32 are represented as NEGATIVE, that is, -1
// INT32 is represented as 1 << 5 | 0 (major type 1, additional info
- // value 0). The minimal allowed INT32 value in our protocol is
- // std::numeric_limits::min(). We check for it by directly
- // checking the payload against the maximal allowed signed (!) int32
- // value.
+ // value 0).
+ // The represented allowed values range is -1 to -2^31.
+ // They are mapped into the encoded range of 0 to 2^31-1.
+ // We check the the payload in token_start_internal_value_ against
+ // that range (2^31-1 is also known as
+ // std::numeric_limits::max()).
if (!success || token_start_internal_value_ >
std::numeric_limits::max()) {
SetError(Error::CBOR_INVALID_INT32);
diff --git a/tools/inspector_protocol/encoding/encoding_test.cc b/tools/inspector_protocol/encoding/encoding_test.cc
index 338d1ece10b87f..067ede2748685a 100644
--- a/tools/inspector_protocol/encoding/encoding_test.cc
+++ b/tools/inspector_protocol/encoding/encoding_test.cc
@@ -235,7 +235,9 @@ TEST(EncodeDecodeInt32Test, RoundtripsInt32Max) {
}
TEST(EncodeDecodeInt32Test, RoundtripsInt32Min) {
- // std::numeric_limits is encoded as a uint32 after the initial byte.
+ // std::numeric_limits is encoded as a uint32 (4 unsigned bytes)
+ // after the initial byte, which effectively carries the sign by
+ // designating the token as NEGATIVE.
std::vector encoded;
EncodeInt32(std::numeric_limits::min(), &encoded);
// 1 for initial byte, 4 for the uint32.
@@ -248,6 +250,9 @@ TEST(EncodeDecodeInt32Test, RoundtripsInt32Min) {
CBORTokenizer tokenizer(SpanFrom(encoded));
EXPECT_EQ(CBORTokenTag::INT32, tokenizer.TokenTag());
EXPECT_EQ(std::numeric_limits::min(), tokenizer.GetInt32());
+ // It's nice to see how the min int32 value reads in hex:
+ // That is, -1 minus the unsigned payload (0x7fffffff, see above).
+ EXPECT_EQ(-0x80000000l, tokenizer.GetInt32());
tokenizer.Next();
EXPECT_EQ(CBORTokenTag::DONE, tokenizer.TokenTag());
}
From c6323d847d79c5e214603fec593deb33bf441011 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Fri, 6 Aug 2021 21:25:59 -0700
Subject: [PATCH 074/119] Revert "tools: fix compiler warning in
inspector_protocol"
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This reverts commit ffb34b6d5dbf002f182f08e45c32883d7174be8b.
PR-URL: https://github.com/nodejs/node/pull/39694
Refs: https://chromium.googlesource.com/deps/inspector_protocol/+log
Reviewed-By: Michaël Zasso
Reviewed-By: Tobias Nießen
Reviewed-By: James M Snell
---
tools/inspector_protocol/encoding/encoding.cc | 5 ++---
tools/inspector_protocol/lib/encoding_cpp.template | 5 ++---
2 files changed, 4 insertions(+), 6 deletions(-)
diff --git a/tools/inspector_protocol/encoding/encoding.cc b/tools/inspector_protocol/encoding/encoding.cc
index 7eb499f9712a25..9a869bbbee29b4 100644
--- a/tools/inspector_protocol/encoding/encoding.cc
+++ b/tools/inspector_protocol/encoding/encoding.cc
@@ -833,9 +833,8 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
// inspector_protocol, it's not a CBOR limitation), so we check
// against the signed max, so that the allowable values are
// 0, 1, 2, ... 2^31 - 1.
- if (!success ||
- static_cast(std::numeric_limits::max()) <
- static_cast(token_start_internal_value_)) {
+ if (!success || std::numeric_limits::max() <
+ token_start_internal_value_) {
SetError(Error::CBOR_INVALID_INT32);
return;
}
diff --git a/tools/inspector_protocol/lib/encoding_cpp.template b/tools/inspector_protocol/lib/encoding_cpp.template
index c5b6489652a9f2..d24e9286a12f79 100644
--- a/tools/inspector_protocol/lib/encoding_cpp.template
+++ b/tools/inspector_protocol/lib/encoding_cpp.template
@@ -840,9 +840,8 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
// inspector_protocol, it's not a CBOR limitation), so we check
// against the signed max, so that the allowable values are
// 0, 1, 2, ... 2^31 - 1.
- if (!success ||
- static_cast(std::numeric_limits::max()) <
- static_cast(token_start_internal_value_)) {
+ if (!success || std::numeric_limits::max() <
+ token_start_internal_value_) {
SetError(Error::CBOR_INVALID_INT32);
return;
}
From ee7142fa37719d316e45a32c513a19d211fcefc7 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Fri, 6 Aug 2021 21:28:08 -0700
Subject: [PATCH 075/119] tools: update inspector_protocol to 97d3146
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
To apply the diff cleanly, ffb34b6 had to be reverted.
Refs: https://chromium.googlesource.com/deps/inspector_protocol/+log
PR-URL: https://github.com/nodejs/node/pull/39694
Reviewed-By: Michaël Zasso
Reviewed-By: Tobias Nießen
Reviewed-By: James M Snell
---
tools/inspector_protocol/encoding/encoding.cc | 46 +++++++--------
tools/inspector_protocol/encoding/encoding.h | 2 +-
.../lib/encoding_cpp.template | 56 +++++++++----------
.../lib/encoding_h.template | 2 +-
4 files changed, 50 insertions(+), 56 deletions(-)
diff --git a/tools/inspector_protocol/encoding/encoding.cc b/tools/inspector_protocol/encoding/encoding.cc
index 9a869bbbee29b4..1513767a85592b 100644
--- a/tools/inspector_protocol/encoding/encoding.cc
+++ b/tools/inspector_protocol/encoding/encoding.cc
@@ -185,11 +185,10 @@ namespace internals {
// |type| is the major type as specified in RFC 7049 Section 2.1.
// |value| is the payload (e.g. for MajorType::UNSIGNED) or is the size
// (e.g. for BYTE_STRING).
-// If successful, returns the number of bytes read. Otherwise returns -1.
-// TODO(johannes): change return type to size_t and use 0 for error.
-int8_t ReadTokenStart(span bytes, MajorType* type, uint64_t* value) {
+// If successful, returns the number of bytes read. Otherwise returns 0.
+size_t ReadTokenStart(span bytes, MajorType* type, uint64_t* value) {
if (bytes.empty())
- return -1;
+ return 0;
uint8_t initial_byte = bytes[0];
*type = MajorType((initial_byte & kMajorTypeMask) >> kMajorTypeBitShift);
@@ -203,32 +202,32 @@ int8_t ReadTokenStart(span bytes, MajorType* type, uint64_t* value) {
if (additional_information == kAdditionalInformation1Byte) {
// Values 24-255 are encoded with one initial byte, followed by the value.
if (bytes.size() < 2)
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 2;
}
if (additional_information == kAdditionalInformation2Bytes) {
// Values 256-65535: 1 initial byte + 2 bytes payload.
if (bytes.size() < 1 + sizeof(uint16_t))
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 3;
}
if (additional_information == kAdditionalInformation4Bytes) {
// 32 bit uint: 1 initial byte + 4 bytes payload.
if (bytes.size() < 1 + sizeof(uint32_t))
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 5;
}
if (additional_information == kAdditionalInformation8Bytes) {
// 64 bit uint: 1 initial byte + 8 bytes payload.
if (bytes.size() < 1 + sizeof(uint64_t))
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 9;
}
- return -1;
+ return 0;
}
// Writes the start of a token with |type|. The |value| may indicate the size,
@@ -770,10 +769,10 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
SetToken(CBORTokenTag::NULL_VALUE, 1);
return;
case kExpectedConversionToBase64Tag: { // BINARY
- const int8_t bytes_read = internals::ReadTokenStart(
+ const size_t bytes_read = internals::ReadTokenStart(
bytes_.subspan(status_.pos + 1), &token_start_type_,
&token_start_internal_value_);
- if (bytes_read < 0 || token_start_type_ != MajorType::BYTE_STRING ||
+ if (!bytes_read || token_start_type_ != MajorType::BYTE_STRING ||
token_start_internal_value_ > kMaxValidLength) {
SetError(Error::CBOR_INVALID_BINARY);
return;
@@ -823,22 +822,21 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
return;
}
default: {
- const int8_t token_start_length = internals::ReadTokenStart(
+ const size_t bytes_read = internals::ReadTokenStart(
bytes_.subspan(status_.pos), &token_start_type_,
&token_start_internal_value_);
- const bool success = token_start_length >= 0;
switch (token_start_type_) {
case MajorType::UNSIGNED: // INT32.
// INT32 is a signed int32 (int32 makes sense for the
// inspector_protocol, it's not a CBOR limitation), so we check
// against the signed max, so that the allowable values are
// 0, 1, 2, ... 2^31 - 1.
- if (!success || std::numeric_limits::max() <
- token_start_internal_value_) {
+ if (!bytes_read || std::numeric_limits::max() <
+ token_start_internal_value_) {
SetError(Error::CBOR_INVALID_INT32);
return;
}
- SetToken(CBORTokenTag::INT32, token_start_length);
+ SetToken(CBORTokenTag::INT32, bytes_read);
return;
case MajorType::NEGATIVE: { // INT32.
// INT32 is a signed int32 (int32 makes sense for the
@@ -851,21 +849,20 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
// We check the the payload in token_start_internal_value_ against
// that range (2^31-1 is also known as
// std::numeric_limits::max()).
- if (!success || token_start_internal_value_ >
- std::numeric_limits::max()) {
+ if (!bytes_read || token_start_internal_value_ >
+ std::numeric_limits::max()) {
SetError(Error::CBOR_INVALID_INT32);
return;
}
- SetToken(CBORTokenTag::INT32, token_start_length);
+ SetToken(CBORTokenTag::INT32, bytes_read);
return;
}
case MajorType::STRING: { // STRING8.
- if (!success || token_start_internal_value_ > kMaxValidLength) {
+ if (!bytes_read || token_start_internal_value_ > kMaxValidLength) {
SetError(Error::CBOR_INVALID_STRING8);
return;
}
- uint64_t token_byte_length =
- token_start_internal_value_ + token_start_length;
+ uint64_t token_byte_length = token_start_internal_value_ + bytes_read;
if (token_byte_length > remaining_bytes) {
SetError(Error::CBOR_INVALID_STRING8);
return;
@@ -877,13 +874,12 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
case MajorType::BYTE_STRING: { // STRING16.
// Length must be divisible by 2 since UTF16 is 2 bytes per
// character, hence the &1 check.
- if (!success || token_start_internal_value_ > kMaxValidLength ||
+ if (!bytes_read || token_start_internal_value_ > kMaxValidLength ||
token_start_internal_value_ & 1) {
SetError(Error::CBOR_INVALID_STRING16);
return;
}
- uint64_t token_byte_length =
- token_start_internal_value_ + token_start_length;
+ uint64_t token_byte_length = token_start_internal_value_ + bytes_read;
if (token_byte_length > remaining_bytes) {
SetError(Error::CBOR_INVALID_STRING16);
return;
diff --git a/tools/inspector_protocol/encoding/encoding.h b/tools/inspector_protocol/encoding/encoding.h
index 90916d42b36dae..08596e9e1e43f0 100644
--- a/tools/inspector_protocol/encoding/encoding.h
+++ b/tools/inspector_protocol/encoding/encoding.h
@@ -427,7 +427,7 @@ Status AppendString8EntryToCBORMap(span string8_key,
std::string* cbor);
namespace internals { // Exposed only for writing tests.
-int8_t ReadTokenStart(span bytes,
+size_t ReadTokenStart(span bytes,
cbor::MajorType* type,
uint64_t* value);
diff --git a/tools/inspector_protocol/lib/encoding_cpp.template b/tools/inspector_protocol/lib/encoding_cpp.template
index d24e9286a12f79..70bf9091a7dd6a 100644
--- a/tools/inspector_protocol/lib/encoding_cpp.template
+++ b/tools/inspector_protocol/lib/encoding_cpp.template
@@ -192,11 +192,10 @@ namespace internals {
// |type| is the major type as specified in RFC 7049 Section 2.1.
// |value| is the payload (e.g. for MajorType::UNSIGNED) or is the size
// (e.g. for BYTE_STRING).
-// If successful, returns the number of bytes read. Otherwise returns -1.
-// TODO(johannes): change return type to size_t and use 0 for error.
-int8_t ReadTokenStart(span bytes, MajorType* type, uint64_t* value) {
+// If successful, returns the number of bytes read. Otherwise returns 0.
+size_t ReadTokenStart(span bytes, MajorType* type, uint64_t* value) {
if (bytes.empty())
- return -1;
+ return 0;
uint8_t initial_byte = bytes[0];
*type = MajorType((initial_byte & kMajorTypeMask) >> kMajorTypeBitShift);
@@ -210,32 +209,32 @@ int8_t ReadTokenStart(span bytes, MajorType* type, uint64_t* value) {
if (additional_information == kAdditionalInformation1Byte) {
// Values 24-255 are encoded with one initial byte, followed by the value.
if (bytes.size() < 2)
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 2;
}
if (additional_information == kAdditionalInformation2Bytes) {
// Values 256-65535: 1 initial byte + 2 bytes payload.
if (bytes.size() < 1 + sizeof(uint16_t))
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 3;
}
if (additional_information == kAdditionalInformation4Bytes) {
// 32 bit uint: 1 initial byte + 4 bytes payload.
if (bytes.size() < 1 + sizeof(uint32_t))
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 5;
}
if (additional_information == kAdditionalInformation8Bytes) {
// 64 bit uint: 1 initial byte + 8 bytes payload.
if (bytes.size() < 1 + sizeof(uint64_t))
- return -1;
+ return 0;
*value = ReadBytesMostSignificantByteFirst(bytes.subspan(1));
return 9;
}
- return -1;
+ return 0;
}
// Writes the start of a token with |type|. The |value| may indicate the size,
@@ -777,10 +776,10 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
SetToken(CBORTokenTag::NULL_VALUE, 1);
return;
case kExpectedConversionToBase64Tag: { // BINARY
- const int8_t bytes_read = internals::ReadTokenStart(
+ const size_t bytes_read = internals::ReadTokenStart(
bytes_.subspan(status_.pos + 1), &token_start_type_,
&token_start_internal_value_);
- if (bytes_read < 0 || token_start_type_ != MajorType::BYTE_STRING ||
+ if (!bytes_read || token_start_type_ != MajorType::BYTE_STRING ||
token_start_internal_value_ > kMaxValidLength) {
SetError(Error::CBOR_INVALID_BINARY);
return;
@@ -830,47 +829,47 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
return;
}
default: {
- const int8_t token_start_length = internals::ReadTokenStart(
+ const size_t bytes_read = internals::ReadTokenStart(
bytes_.subspan(status_.pos), &token_start_type_,
&token_start_internal_value_);
- const bool success = token_start_length >= 0;
switch (token_start_type_) {
case MajorType::UNSIGNED: // INT32.
// INT32 is a signed int32 (int32 makes sense for the
// inspector_protocol, it's not a CBOR limitation), so we check
// against the signed max, so that the allowable values are
// 0, 1, 2, ... 2^31 - 1.
- if (!success || std::numeric_limits::max() <
- token_start_internal_value_) {
+ if (!bytes_read || std::numeric_limits::max() <
+ token_start_internal_value_) {
SetError(Error::CBOR_INVALID_INT32);
return;
}
- SetToken(CBORTokenTag::INT32, token_start_length);
+ SetToken(CBORTokenTag::INT32, bytes_read);
return;
case MajorType::NEGATIVE: { // INT32.
// INT32 is a signed int32 (int32 makes sense for the
// inspector_protocol, it's not a CBOR limitation); in CBOR, the
// negative values for INT32 are represented as NEGATIVE, that is, -1
// INT32 is represented as 1 << 5 | 0 (major type 1, additional info
- // value 0). The minimal allowed INT32 value in our protocol is
- // std::numeric_limits::min(). We check for it by directly
- // checking the payload against the maximal allowed signed (!) int32
- // value.
- if (!success || token_start_internal_value_ >
- std::numeric_limits::max()) {
+ // value 0).
+ // The represented allowed values range is -1 to -2^31.
+ // They are mapped into the encoded range of 0 to 2^31-1.
+ // We check the the payload in token_start_internal_value_ against
+ // that range (2^31-1 is also known as
+ // std::numeric_limits::max()).
+ if (!bytes_read || token_start_internal_value_ >
+ std::numeric_limits::max()) {
SetError(Error::CBOR_INVALID_INT32);
return;
}
- SetToken(CBORTokenTag::INT32, token_start_length);
+ SetToken(CBORTokenTag::INT32, bytes_read);
return;
}
case MajorType::STRING: { // STRING8.
- if (!success || token_start_internal_value_ > kMaxValidLength) {
+ if (!bytes_read || token_start_internal_value_ > kMaxValidLength) {
SetError(Error::CBOR_INVALID_STRING8);
return;
}
- uint64_t token_byte_length =
- token_start_internal_value_ + token_start_length;
+ uint64_t token_byte_length = token_start_internal_value_ + bytes_read;
if (token_byte_length > remaining_bytes) {
SetError(Error::CBOR_INVALID_STRING8);
return;
@@ -882,13 +881,12 @@ void CBORTokenizer::ReadNextToken(bool enter_envelope) {
case MajorType::BYTE_STRING: { // STRING16.
// Length must be divisible by 2 since UTF16 is 2 bytes per
// character, hence the &1 check.
- if (!success || token_start_internal_value_ > kMaxValidLength ||
+ if (!bytes_read || token_start_internal_value_ > kMaxValidLength ||
token_start_internal_value_ & 1) {
SetError(Error::CBOR_INVALID_STRING16);
return;
}
- uint64_t token_byte_length =
- token_start_internal_value_ + token_start_length;
+ uint64_t token_byte_length = token_start_internal_value_ + bytes_read;
if (token_byte_length > remaining_bytes) {
SetError(Error::CBOR_INVALID_STRING16);
return;
diff --git a/tools/inspector_protocol/lib/encoding_h.template b/tools/inspector_protocol/lib/encoding_h.template
index f1a52a1958a14d..406c4b87ff8aa5 100644
--- a/tools/inspector_protocol/lib/encoding_h.template
+++ b/tools/inspector_protocol/lib/encoding_h.template
@@ -435,7 +435,7 @@ Status AppendString8EntryToCBORMap(span string8_key,
std::string* cbor);
namespace internals { // Exposed only for writing tests.
-int8_t ReadTokenStart(span bytes,
+size_t ReadTokenStart(span bytes,
cbor::MajorType* type,
uint64_t* value);
From 8d509d8773f4f23df42087005a64e7c13b921f1f Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Fri, 6 Aug 2021 21:36:34 -0700
Subject: [PATCH 076/119] tools: update inspector_protocol to 39ca567
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Refs: https://chromium.googlesource.com/deps/inspector_protocol/+log
PR-URL: https://github.com/nodejs/node/pull/39694
Reviewed-By: Michaël Zasso
Reviewed-By: Tobias Nießen
Reviewed-By: James M Snell
---
tools/inspector_protocol/encoding/encoding_test.cc | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/tools/inspector_protocol/encoding/encoding_test.cc b/tools/inspector_protocol/encoding/encoding_test.cc
index 067ede2748685a..6893fe2581683c 100644
--- a/tools/inspector_protocol/encoding/encoding_test.cc
+++ b/tools/inspector_protocol/encoding/encoding_test.cc
@@ -252,7 +252,8 @@ TEST(EncodeDecodeInt32Test, RoundtripsInt32Min) {
EXPECT_EQ(std::numeric_limits::min(), tokenizer.GetInt32());
// It's nice to see how the min int32 value reads in hex:
// That is, -1 minus the unsigned payload (0x7fffffff, see above).
- EXPECT_EQ(-0x80000000l, tokenizer.GetInt32());
+ int32_t expected = -1 - 0x7fffffff;
+ EXPECT_EQ(expected, tokenizer.GetInt32());
tokenizer.Next();
EXPECT_EQ(CBORTokenTag::DONE, tokenizer.TokenTag());
}
From b9510d21c9f3975f349ca22974f6cd46983e4f30 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Fri, 6 Aug 2021 22:05:41 -0700
Subject: [PATCH 077/119] tools: update inspector_protocol to e8ba1a7
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Refs: https://chromium.googlesource.com/deps/inspector_protocol/+log
PR-URL: https://github.com/nodejs/node/pull/39694
Reviewed-By: Michaël Zasso
Reviewed-By: Tobias Nießen
Reviewed-By: James M Snell
---
tools/inspector_protocol/code_generator.py | 11 ++++++++
.../lib/Values_cpp.template | 28 +++++++++++++++++++
.../lib/encoding_cpp.template | 3 ++
.../lib/encoding_h.template | 2 ++
4 files changed, 44 insertions(+)
diff --git a/tools/inspector_protocol/code_generator.py b/tools/inspector_protocol/code_generator.py
index c1f78dc7492d78..0b8baea0ae710e 100755
--- a/tools/inspector_protocol/code_generator.py
+++ b/tools/inspector_protocol/code_generator.py
@@ -103,6 +103,17 @@ def init_defaults(config_tuple, path, defaults):
".lib": False,
".lib.export_macro": "",
".lib.export_header": False,
+ # The encoding lib consists of encoding/encoding.h and
+ # encoding/encoding.cc in its subdirectory, which binaries
+ # may link / depend on, instead of relying on the
+ # JINJA2 templates lib/encoding_{h,cc}.template.
+ # In that case, |header| identifies the include file
+ # and |namespace| is the namespace it's using. Usually
+ # inspector_protocol_encoding but for v8's copy it's
+ # v8_inspector_protocol_encoding.
+ # TODO(johannes): Migrate away from lib/encoding_{h,cc}.template
+ # in favor of this.
+ ".encoding_lib": { "header": "", "namespace": []},
}
for key_value in config_values:
parts = key_value.split("=")
diff --git a/tools/inspector_protocol/lib/Values_cpp.template b/tools/inspector_protocol/lib/Values_cpp.template
index 17c69255851ee7..8b4dfc91e3b9c9 100644
--- a/tools/inspector_protocol/lib/Values_cpp.template
+++ b/tools/inspector_protocol/lib/Values_cpp.template
@@ -6,6 +6,10 @@
//#include "Values.h"
+{% if config.encoding_lib.header %}
+#include "{{config.encoding_lib.header}}"
+{% endif %}
+
{% for namespace in config.protocol.namespace %}
namespace {{namespace}} {
{% endfor %}
@@ -64,6 +68,30 @@ void escapeStringForJSONInternal(const Char* str, unsigned len,
// to this constant.
static constexpr int kStackLimitValues = 1000;
+{% if config.encoding_lib.namespace %}
+using {{"::".join(config.encoding_lib.namespace)}}::Error;
+using {{"::".join(config.encoding_lib.namespace)}}::Status;
+using {{"::".join(config.encoding_lib.namespace)}}::span;
+namespace cbor {
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::CBORTokenTag;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::CBORTokenizer;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeBinary;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeDouble;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeFalse;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeFromLatin1;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeFromUTF16;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeIndefiniteLengthArrayStart;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeIndefiniteLengthMapStart;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeInt32;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeNull;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeStop;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeString8;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EncodeTrue;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::EnvelopeEncoder;
+using {{"::".join(config.encoding_lib.namespace + ['cbor'])}}::InitialByteForEnvelope;
+} // namespace cbor
+{% endif %}
+
// Below are three parsing routines for CBOR, which cover enough
// to roundtrip JSON messages.
std::unique_ptr parseMap(int32_t stack_depth, cbor::CBORTokenizer* tokenizer);
diff --git a/tools/inspector_protocol/lib/encoding_cpp.template b/tools/inspector_protocol/lib/encoding_cpp.template
index 70bf9091a7dd6a..e950acd6a6f34d 100644
--- a/tools/inspector_protocol/lib/encoding_cpp.template
+++ b/tools/inspector_protocol/lib/encoding_cpp.template
@@ -5,6 +5,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+{% if config.encoding_lib.header == "" %}
#include
#include
@@ -2196,3 +2197,5 @@ Status ConvertJSONToCBOR(const Platform& platform,
{% for namespace in config.protocol.namespace %}
} // namespace {{namespace}}
{% endfor %}
+
+{% endif %}
diff --git a/tools/inspector_protocol/lib/encoding_h.template b/tools/inspector_protocol/lib/encoding_h.template
index 406c4b87ff8aa5..2c6cfc10d594c2 100644
--- a/tools/inspector_protocol/lib/encoding_h.template
+++ b/tools/inspector_protocol/lib/encoding_h.template
@@ -5,6 +5,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+{% if config.encoding_lib.header == "" %}
#ifndef {{"_".join(config.protocol.namespace)}}_encoding_h
#define {{"_".join(config.protocol.namespace)}}_encoding_h
@@ -518,3 +519,4 @@ Status ConvertJSONToCBOR(const Platform& platform,
} // namespace {{namespace}}
{% endfor %}
#endif // !defined({{"_".join(config.protocol.namespace)}}_encoding_h)
+{% endif %}
From aaab2095db3ab65b7f61a859eee8f0c8e22c0f5b Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Tue, 3 Aug 2021 06:49:12 -0700
Subject: [PATCH 078/119] meta: consolidate email addresses for szmarczak
PR-URL: https://github.com/nodejs/node/pull/39651
Reviewed-By: James M Snell
---
.mailmap | 1 +
AUTHORS | 1 -
2 files changed, 1 insertion(+), 1 deletion(-)
diff --git a/.mailmap b/.mailmap
index f1c1a287ad2ffb..8590b47bb6b940 100644
--- a/.mailmap
+++ b/.mailmap
@@ -386,6 +386,7 @@ Stewart X Addison
Suraiya Hameed
Suramya shah
Surya Panikkal
+Szymon Marczak <36894700+szmarczak@users.noreply.github.com>
Tadashi SAWADA
Tadhg Creedon
Taehee Kang
diff --git a/AUTHORS b/AUTHORS
index 78d946454bf5a9..58545169748397 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -3165,7 +3165,6 @@ krank2me
masx200 <34191203+masx200@users.noreply.github.com>
Baruch Odem (Rothkoff)
Mattias Runge-Broberg
-Szymon Marczak <36894700+szmarczak@users.noreply.github.com>
Dmitry Semigradsky
Ole André Vadla Ravnås
Aleksandr Krutko
From a76b63536ac87e54a603e99740101c895803a09d Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Tue, 3 Aug 2021 06:57:51 -0700
Subject: [PATCH 079/119] meta: consolidate email addresses for tadjik1
PR-URL: https://github.com/nodejs/node/pull/39651
Reviewed-By: James M Snell
---
.mailmap | 1 +
AUTHORS | 1 -
2 files changed, 1 insertion(+), 1 deletion(-)
diff --git a/.mailmap b/.mailmap
index 8590b47bb6b940..82c0db7c908156 100644
--- a/.mailmap
+++ b/.mailmap
@@ -360,6 +360,7 @@ Saúl Ibarra Corretgé
Scott Blomquist
Segu Riluvan
Sergey Kryzhanovsky
+Sergey Zelenov
Shannen Saez
Shaopeng Zhang
Shelley Vohr
diff --git a/AUTHORS b/AUTHORS
index 58545169748397..2c85305c4d25f4 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -3008,7 +3008,6 @@ Andrew Neitsch
RamanandPatil
forfun414
David Gilbertson
-Sergey Zelenov
Eric Bickle