perf_hooks: web performance timeline compliance
All APIs introduced in this PR are compliant with the web [performance-timeline](https://w3c.github.io/performance-timeline) spec. "performance-timeline" is listed as a supported web spec in the docs: https://nodejs.org/docs/latest/api/perf_hooks.html#perf_hooks_performance_measurement_apis.

Summary of changes:

1. Add newly supported WPT test subsets: user-timing and performance-timeline.
2. Add support for `Performance.getEntries`, `Performance.getEntriesByName`, and `Performance.getEntriesByType` to synchronously fetch buffered performance entries. Users should invoke `Performance.clearMarks` and `Performance.clearMeasures` to clear the buffered entries and prevent them from being kept alive forever.
3. Add support (again, after https://github.com/nodejs/node/pull/37136) for the `buffered` flag of `PerformanceObserver`.
4. Fix `PerformanceMark` and `PerformanceMeasure` WPT compliance issues.
5. Buffer only user-created performance entries globally. This behavior is intended to be compliant with https://w3c.github.io/timing-entrytypes-registry/#registry.

With the new ability to fetch user-created performance entries synchronously, the issues raised in https://github.com/nodejs/diagnostics/issues/464#issuecomment-861920116 can also be fixed.

PR-URL: https://github.com/nodejs/node/pull/39297
Reviewed-By: James M Snell <jasnell@gmail.com>
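As a rough usage sketch (not part of the diff below; the mark and measure names are made up for illustration), the buffered timeline and the `buffered` observer flag described above behave roughly like this:

```js
'use strict';
const { performance, PerformanceObserver } = require('perf_hooks');

// User-created entries (marks and measures) are buffered globally.
performance.mark('start');
performance.mark('end');
performance.measure('start to end', 'start', 'end');

// Buffered entries can now be read back synchronously.
console.log(performance.getEntries().length);          // 3 (two marks + one measure)
console.log(performance.getEntriesByType('measure'));  // [ PerformanceMeasure ]
console.log(performance.getEntriesByName('start'));    // [ PerformanceMark ]

// With `buffered: true`, an observer also receives entries that were
// recorded before observe() was called.
const obs = new PerformanceObserver((list) => {
  console.log(list.getEntries().map((entry) => entry.name));
  obs.disconnect();
});
obs.observe({ type: 'mark', buffered: true });

// Clearing the buffers keeps the entries from being retained forever.
performance.clearMarks();
performance.clearMeasures();
```

Without `performance.clearMarks()`/`performance.clearMeasures()`, user-created entries accumulate in the global buffer, which is why the code in this commit emits a `MaxPerformanceEntryBufferExceededWarning` once a buffer grows past its limit.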
This commit is contained in:
parent 5c4e673a96
commit 062f8e3730
@@ -8,24 +8,27 @@ const {
} = require('perf_hooks');

const bench = common.createBenchmark(main, {
n: [1e5]
n: [1e5],
observe: ['all', 'measure'],
});

function test() {
performance.mark('a');
setImmediate(() => {
performance.mark('b');
performance.measure('a to b', 'a', 'b');
});
performance.mark('b');
performance.measure('a to b', 'a', 'b');
}

function main({ n }) {
function main({ n, observe }) {
const entryTypes = observe === 'all' ?
[ 'mark', 'measure' ] :
[ observe ];
const obs = new PerformanceObserver(() => {
bench.end(n);
});
obs.observe({ entryTypes: ['measure'], buffered: true });
obs.observe({ entryTypes, buffered: true });

bench.start();
for (let i = 0; i < n; i++)
performance.mark('start');
for (let i = 0; i < 1e5; i++)
test();
}
@@ -1266,8 +1266,6 @@ E('ERR_INVALID_PACKAGE_TARGET',
pkgPath}package.json${base ? ` imported from ${base}` : ''}${relError ?
'; targets must start with "./"' : ''}`;
}, Error);
E('ERR_INVALID_PERFORMANCE_MARK',
'The "%s" performance mark has not been set', Error);
E('ERR_INVALID_PROTOCOL',
'Protocol "%s" not supported. Expected "%s"',
TypeError);
@@ -4,10 +4,13 @@ const {
ArrayFrom,
ArrayIsArray,
ArrayPrototypeFilter,
ArrayPrototypeFlatMap,
ArrayPrototypeIncludes,
ArrayPrototypePush,
ArrayPrototypePushApply,
ArrayPrototypeSlice,
ArrayPrototypeSort,
Error,
ObjectDefineProperties,
ObjectFreeze,
ObjectKeys,
@@ -31,6 +34,7 @@ const {
const {
InternalPerformanceEntry,
isPerformanceEntry,
kBufferNext,
} = require('internal/perf/performance_entry');

const {
@@ -83,6 +87,16 @@ const kSupportedEntryTypes = ObjectFreeze([
'measure',
]);

// Performance timeline entry Buffers
const markEntryBuffer = createBuffer();
const measureEntryBuffer = createBuffer();
const kMaxPerformanceEntryBuffers = 1e6;
const kClearPerformanceEntryBuffers = ObjectFreeze({
'mark': 'performance.clearMarks',
'measure': 'performance.clearMeasures',
});
const kWarnedEntryTypes = new SafeMap();

const kObservers = new SafeSet();
const kPending = new SafeSet();
let isPending = false;
@@ -190,6 +204,7 @@ class PerformanceObserver {
const {
entryTypes,
type,
buffered,
} = { ...options };
if (entryTypes === undefined && type === undefined)
throw new ERR_MISSING_ARGS('options.entryTypes', 'options.type');
@@ -229,6 +244,13 @@ class PerformanceObserver {
return;
this[kEntryTypes].add(type);
maybeIncrementObserverCount(type);
if (buffered) {
const entries = filterBufferMapByNameAndType(undefined, type);
ArrayPrototypePushApply(this[kBuffer], entries);
kPending.add(this);
if (kPending.size)
queuePending();
}
}

if (this[kEntryTypes].size)
@@ -291,6 +313,99 @@ function enqueue(entry) {
for (const obs of kObservers) {
obs[kMaybeBuffer](entry);
}

const entryType = entry.entryType;
let buffer;
if (entryType === 'mark') {
buffer = markEntryBuffer;
} else if (entryType === 'measure') {
buffer = measureEntryBuffer;
} else {
return;
}

const count = buffer.count + 1;
buffer.count = count;
if (count === 1) {
buffer.head = entry;
buffer.tail = entry;
return;
}
buffer.tail[kBufferNext] = entry;
buffer.tail = entry;

if (count > kMaxPerformanceEntryBuffers &&
!kWarnedEntryTypes.has(entryType)) {
kWarnedEntryTypes.set(entryType, true);
// No error code for this since it is a Warning
// eslint-disable-next-line no-restricted-syntax
const w = new Error('Possible perf_hooks memory leak detected. ' +
`${count} ${entryType} entries added to the global ` +
'performance entry buffer. Use ' +
`${kClearPerformanceEntryBuffers[entryType]} to ` +
'clear the buffer.');
w.name = 'MaxPerformanceEntryBufferExceededWarning';
w.entryType = entryType;
w.count = count;
process.emitWarning(w);
}
}

function clearEntriesFromBuffer(type, name) {
let buffer;
if (type === 'mark') {
buffer = markEntryBuffer;
} else if (type === 'measure') {
buffer = measureEntryBuffer;
} else {
return;
}
if (name === undefined) {
resetBuffer(buffer);
return;
}

let head = null;
let tail = null;
for (let entry = buffer.head; entry !== null; entry = entry[kBufferNext]) {
if (entry.name !== name) {
head = head ?? entry;
tail = entry;
continue;
}
if (tail === null) {
continue;
}
tail[kBufferNext] = entry[kBufferNext];
}
buffer.head = head;
buffer.tail = tail;
}

function filterBufferMapByNameAndType(name, type) {
let bufferList;
if (type === 'mark') {
bufferList = [markEntryBuffer];
} else if (type === 'measure') {
bufferList = [measureEntryBuffer];
} else if (type !== undefined) {
// Unrecognized type;
return [];
} else {
bufferList = [markEntryBuffer, measureEntryBuffer];
}
return ArrayPrototypeFlatMap(bufferList,
(buffer) => filterBufferByName(buffer, name));
}

function filterBufferByName(buffer, name) {
const arr = [];
for (let entry = buffer.head; entry !== null; entry = entry[kBufferNext]) {
if (name === undefined || entry.name === name) {
ArrayPrototypePush(arr, entry);
}
}
return arr;
}

function observerCallback(name, type, startTime, duration, details) {
@@ -338,8 +453,24 @@ function hasObserver(type) {
return observerCounts[observerType] > 0;
}

function createBuffer() {
return {
head: null,
tail: null,
count: 0,
};
}

function resetBuffer(buffer) {
buffer.head = null;
buffer.tail = null;
buffer.count = 0;
}

module.exports = {
PerformanceObserver,
enqueue,
hasObserver,
clearEntriesFromBuffer,
filterBufferMapByNameAndType,
};
@@ -16,8 +16,12 @@ const { now } = require('internal/perf/utils');
const {
mark,
measure,
clearMarks,
clearMarkTimings,
} = require('internal/perf/usertiming');
const {
clearEntriesFromBuffer,
filterBufferMapByNameAndType,
} = require('internal/perf/observe');

const eventLoopUtilization = require('internal/perf/event_loop_utilization');
const nodeTiming = require('internal/perf/nodetiming');
@@ -48,7 +52,6 @@ class Performance extends EventTarget {
timeOrigin: this.timeOrigin,
}, opts)}`;
}

}

function toJSON() {
@@ -59,6 +62,39 @@ function toJSON() {
};
}

function clearMarks(name) {
if (name !== undefined) {
name = `${name}`;
}
clearMarkTimings(name);
clearEntriesFromBuffer('mark', name);
}

function clearMeasures(name) {
if (name !== undefined) {
name = `${name}`;
}
clearEntriesFromBuffer('measure', name);
}

function getEntries() {
return filterBufferMapByNameAndType();
}

function getEntriesByName(name) {
if (name !== undefined) {
name = `${name}`;
}
return filterBufferMapByNameAndType(name, undefined);
}

function getEntriesByType(type) {
if (type !== undefined) {
type = `${type}`;
}
return filterBufferMapByNameAndType(undefined, type);
}

class InternalPerformance extends EventTarget {}
InternalPerformance.prototype.constructor = Performance.prototype.constructor;
ObjectSetPrototypeOf(InternalPerformance.prototype, Performance.prototype);
@@ -69,11 +105,31 @@ ObjectDefineProperties(Performance.prototype, {
enumerable: false,
value: clearMarks,
},
clearMeasures: {
configurable: true,
enumerable: false,
value: clearMeasures,
},
eventLoopUtilization: {
configurable: true,
enumerable: false,
value: eventLoopUtilization,
},
getEntries: {
configurable: true,
enumerable: false,
value: getEntries,
},
getEntriesByName: {
configurable: true,
enumerable: false,
value: getEntriesByName,
},
getEntriesByType: {
configurable: true,
enumerable: false,
value: getEntriesByType,
},
mark: {
configurable: true,
enumerable: false,
@@ -17,6 +17,7 @@ const kType = Symbol('kType');
const kStart = Symbol('kStart');
const kDuration = Symbol('kDuration');
const kDetail = Symbol('kDetail');
const kBufferNext = Symbol('kBufferNext');

function isPerformanceEntry(obj) {
return obj?.[kName] !== undefined;
@@ -67,6 +68,7 @@ class InternalPerformanceEntry {
this[kStart] = start;
this[kDuration] = duration;
this[kDetail] = detail;
this[kBufferNext] = null;
}
}

@@ -79,4 +81,5 @@ module.exports = {
InternalPerformanceEntry,
PerformanceEntry,
isPerformanceEntry,
kBufferNext,
};
@@ -1,10 +1,10 @@
'use strict';

const {
ObjectKeys,
SafeMap,
SafeSet,
SafeArrayIterator,
SymbolToStringTag,
} = primordials;

const { InternalPerformanceEntry } = require('internal/perf/performance_entry');
@@ -21,13 +21,14 @@ const {
const {
codes: {
ERR_INVALID_ARG_VALUE,
ERR_INVALID_PERFORMANCE_MARK,
ERR_PERFORMANCE_INVALID_TIMESTAMP,
ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS,
},
} = require('internal/errors');

const marks = new SafeMap();
const { structuredClone, lazyDOMException } = require('internal/util');

const markTimings = new SafeMap();

const nodeTimingReadOnlyAttributes = new SafeSet(new SafeArrayIterator([
'nodeStart',
@@ -48,60 +49,69 @@ function getMark(name) {
name = `${name}`;
if (nodeTimingReadOnlyAttributes.has(name))
return nodeTiming[name];
const ts = marks.get(name);
const ts = markTimings.get(name);
if (ts === undefined)
throw new ERR_INVALID_PERFORMANCE_MARK(name);
throw lazyDOMException(`The "${name}" performance mark has not been set`, 'SyntaxError');
return ts;
}

class PerformanceMark extends InternalPerformanceEntry {
constructor(name, options = {}) {
constructor(name, options) {
name = `${name}`;
if (nodeTimingReadOnlyAttributes.has(name))
throw new ERR_INVALID_ARG_VALUE('name', name);
options ??= {};
validateObject(options, 'options');
const {
detail,
startTime = now(),
} = options;
const startTime = options.startTime ?? now();
validateNumber(startTime, 'startTime');
if (startTime < 0)
throw new ERR_PERFORMANCE_INVALID_TIMESTAMP(startTime);
marks.set(name, startTime);
markTimings.set(name, startTime);

let detail = options.detail;
detail = detail != null ?
structuredClone(detail) :
null;
super(name, 'mark', startTime, 0, detail);
enqueue(this);
}

get [SymbolToStringTag]() {
return 'PerformanceMark';
}
}

class PerformanceMeasure extends InternalPerformanceEntry {
constructor(name, start, duration, detail) {
super(name, 'measure', start, duration, detail);
enqueue(this);
}

get [SymbolToStringTag]() {
return 'PerformanceMeasure';
}
}

function mark(name, options = {}) {
return new PerformanceMark(name, options);
const mark = new PerformanceMark(name, options);
enqueue(mark);
return mark;
}

function calculateStartDuration(startOrMeasureOptions, endMark) {
startOrMeasureOptions ??= 0;
let detail;
let start;
let end;
let duration;
if (typeof startOrMeasureOptions === 'object' &&
ObjectKeys(startOrMeasureOptions).length) {
({
start,
end,
duration,
detail,
} = startOrMeasureOptions);
let optionsValid = false;
if (typeof startOrMeasureOptions === 'object') {
({ start, end, duration } = startOrMeasureOptions);
optionsValid = start !== undefined || end !== undefined;
}
if (optionsValid) {
if (endMark !== undefined) {
throw new ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS(
'endMark must not be specified');
}

if (start === undefined && end === undefined) {
throw new ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS(
'One of options.start or options.end is required');
@@ -111,21 +121,30 @@ function calculateStartDuration(startOrMeasureOptions, endMark) {
'Must not have options.start, options.end, and ' +
'options.duration specified');
}
start = getMark(start);
duration = getMark(duration);
} else {
start = getMark(startOrMeasureOptions);
}

end = getMark(endMark || end) ??
((start !== undefined && duration !== undefined) ?
start + duration : now());
if (endMark !== undefined) {
end = getMark(endMark);
} else if (optionsValid && end !== undefined) {
end = getMark(end);
} else if (optionsValid && start !== undefined && duration !== undefined) {
end = getMark(start) + getMark(duration);
} else {
end = now();
}

start ??= (duration !== undefined) ? end - duration : 0;
if (typeof startOrMeasureOptions === 'string') {
start = getMark(startOrMeasureOptions);
} else if (optionsValid && start !== undefined) {
start = getMark(start);
} else if (optionsValid && duration !== undefined && end !== undefined) {
start = end - getMark(duration);
} else {
start = 0;
}

duration ??= end - start;

return { start, duration, detail };
duration = end - start;
return { start, duration };
}

function measure(name, startOrMeasureOptions, endMark) {
@@ -133,25 +152,29 @@ function measure(name, startOrMeasureOptions, endMark) {
const {
start,
duration,
detail
} = calculateStartDuration(startOrMeasureOptions, endMark);
return new PerformanceMeasure(name, start, duration, detail);
let detail = startOrMeasureOptions?.detail;
detail = detail != null ? structuredClone(detail) : null;
const measure = new PerformanceMeasure(name, start, duration, detail);
enqueue(measure);
return measure;
}

function clearMarks(name) {
function clearMarkTimings(name) {
if (name !== undefined) {
name = `${name}`;
if (nodeTimingReadOnlyAttributes.has(name))
throw new ERR_INVALID_ARG_VALUE('name', name);
marks.delete(name);
markTimings.delete(name);
return;
}
marks.clear();
markTimings.clear();
}

module.exports = {
PerformanceMark,
clearMarks,
PerformanceMeasure,
clearMarkTimings,
mark,
measure,
};
@@ -449,6 +449,21 @@ const lazyDOMException = hideStackFrames((message, name) => {
return new DOMException(message, name);
});

function structuredClone(value) {
const {
DefaultSerializer,
DefaultDeserializer,
} = require('v8');
const ser = new DefaultSerializer();
ser._getDataCloneError = hideStackFrames((message) =>
lazyDOMException(message, 'DataCloneError'));
ser.writeValue(value);
const serialized = ser.releaseBuffer();

const des = new DefaultDeserializer(serialized);
return des.readValue();
}

module.exports = {
assertCrypto,
cachedResult,
@@ -471,6 +486,7 @@ module.exports = {
promisify,
sleep,
spliceOne,
structuredClone,
removeColors,

// Symbol used to customize promisify conversion
@@ -10,7 +10,10 @@ const {

const { PerformanceEntry } = require('internal/perf/performance_entry');
const { PerformanceObserver } = require('internal/perf/observe');
const { PerformanceMark } = require('internal/perf/usertiming');
const {
PerformanceMark,
PerformanceMeasure,
} = require('internal/perf/usertiming');
const { InternalPerformance } = require('internal/perf/performance');

const {
@@ -22,6 +25,7 @@ const monitorEventLoopDelay = require('internal/perf/event_loop_delay');
module.exports = {
PerformanceEntry,
PerformanceMark,
PerformanceMeasure,
PerformanceObserver,
monitorEventLoopDelay,
createHistogram,
@@ -286,6 +286,12 @@ if (global.gc) {
if (global.performance) {
knownGlobals.push(global.performance);
}
if (global.PerformanceMark) {
knownGlobals.push(global.PerformanceMark);
}
if (global.PerformanceMeasure) {
knownGlobals.push(global.PerformanceMeasure);
}

function allowGlobals(...allowlist) {
knownGlobals = knownGlobals.concat(allowlist);
test/fixtures/wpt/LICENSE.md (vendored, 2 changes)
@@ -1,6 +1,6 @@
# The 3-Clause BSD License

Copyright © web-platform-tests contributors
Copyright 2019 web-platform-tests contributors

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
test/fixtures/wpt/README.md (vendored, 4 changes)
@@ -19,10 +19,12 @@ Last update:
- html/webappapis/atob: https://github.com/web-platform-tests/wpt/tree/f267e1dca6/html/webappapis/atob
- html/webappapis/microtask-queuing: https://github.com/web-platform-tests/wpt/tree/2c5c3c4c27/html/webappapis/microtask-queuing
- html/webappapis/timers: https://github.com/web-platform-tests/wpt/tree/5873f2d8f1/html/webappapis/timers
- interfaces: https://github.com/web-platform-tests/wpt/tree/fcb671ed8b/interfaces
- interfaces: https://github.com/web-platform-tests/wpt/tree/80a4176623/interfaces
- performance-timeline: https://github.com/web-platform-tests/wpt/tree/17ebc3aea0/performance-timeline
- resources: https://github.com/web-platform-tests/wpt/tree/972ca5b669/resources
- streams: https://github.com/web-platform-tests/wpt/tree/8f60d94439/streams
- url: https://github.com/web-platform-tests/wpt/tree/77d54aa9e0/url
- user-timing: https://github.com/web-platform-tests/wpt/tree/df24fb604e/user-timing

[Web Platform Tests]: https://github.com/web-platform-tests/wpt
[`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-wpt
test/fixtures/wpt/interfaces/performance-timeline.idl (vendored, new file, 49 lines)
@ -0,0 +1,49 @@
|
||||
// GENERATED CONTENT - DO NOT EDIT
|
||||
// Content was automatically extracted by Reffy into webref
|
||||
// (https://github.com/w3c/webref)
|
||||
// Source: Performance Timeline Level 2 (https://w3c.github.io/performance-timeline/)
|
||||
|
||||
partial interface Performance {
|
||||
PerformanceEntryList getEntries ();
|
||||
PerformanceEntryList getEntriesByType (DOMString type);
|
||||
PerformanceEntryList getEntriesByName (DOMString name, optional DOMString type);
|
||||
};
|
||||
typedef sequence<PerformanceEntry> PerformanceEntryList;
|
||||
|
||||
[Exposed=(Window,Worker)]
|
||||
interface PerformanceEntry {
|
||||
readonly attribute DOMString name;
|
||||
readonly attribute DOMString entryType;
|
||||
readonly attribute DOMHighResTimeStamp startTime;
|
||||
readonly attribute DOMHighResTimeStamp duration;
|
||||
[Default] object toJSON();
|
||||
};
|
||||
|
||||
callback PerformanceObserverCallback = undefined (PerformanceObserverEntryList entries,
|
||||
PerformanceObserver observer,
|
||||
optional PerformanceObserverCallbackOptions options = {});
|
||||
[Exposed=(Window,Worker)]
|
||||
interface PerformanceObserver {
|
||||
constructor(PerformanceObserverCallback callback);
|
||||
undefined observe (optional PerformanceObserverInit options = {});
|
||||
undefined disconnect ();
|
||||
PerformanceEntryList takeRecords();
|
||||
[SameObject] static readonly attribute FrozenArray<DOMString> supportedEntryTypes;
|
||||
};
|
||||
|
||||
dictionary PerformanceObserverCallbackOptions {
|
||||
unsigned long long droppedEntriesCount;
|
||||
};
|
||||
|
||||
dictionary PerformanceObserverInit {
|
||||
sequence<DOMString> entryTypes;
|
||||
DOMString type;
|
||||
boolean buffered;
|
||||
};
|
||||
|
||||
[Exposed=(Window,Worker)]
|
||||
interface PerformanceObserverEntryList {
|
||||
PerformanceEntryList getEntries();
|
||||
PerformanceEntryList getEntriesByType (DOMString type);
|
||||
PerformanceEntryList getEntriesByName (DOMString name, optional DOMString type);
|
||||
};
|
test/fixtures/wpt/interfaces/user-timing.idl (vendored, new file, 34 lines)
@ -0,0 +1,34 @@
|
||||
// GENERATED CONTENT - DO NOT EDIT
|
||||
// Content was automatically extracted by Reffy into webref
|
||||
// (https://github.com/w3c/webref)
|
||||
// Source: User Timing Level 3 (https://w3c.github.io/user-timing/)
|
||||
|
||||
dictionary PerformanceMarkOptions {
|
||||
any detail;
|
||||
DOMHighResTimeStamp startTime;
|
||||
};
|
||||
|
||||
dictionary PerformanceMeasureOptions {
|
||||
any detail;
|
||||
(DOMString or DOMHighResTimeStamp) start;
|
||||
DOMHighResTimeStamp duration;
|
||||
(DOMString or DOMHighResTimeStamp) end;
|
||||
};
|
||||
|
||||
partial interface Performance {
|
||||
PerformanceMark mark(DOMString markName, optional PerformanceMarkOptions markOptions = {});
|
||||
undefined clearMarks(optional DOMString markName);
|
||||
PerformanceMeasure measure(DOMString measureName, optional (DOMString or PerformanceMeasureOptions) startOrMeasureOptions = {}, optional DOMString endMark);
|
||||
undefined clearMeasures(optional DOMString measureName);
|
||||
};
|
||||
|
||||
[Exposed=(Window,Worker)]
|
||||
interface PerformanceMark : PerformanceEntry {
|
||||
constructor(DOMString markName, optional PerformanceMarkOptions markOptions = {});
|
||||
readonly attribute any detail;
|
||||
};
|
||||
|
||||
[Exposed=(Window,Worker)]
|
||||
interface PerformanceMeasure : PerformanceEntry {
|
||||
readonly attribute any detail;
|
||||
};
|
test/fixtures/wpt/performance-timeline/META.yml (vendored, new file, 4 lines)
@ -0,0 +1,4 @@
|
||||
spec: https://w3c.github.io/performance-timeline/
|
||||
suggested_reviewers:
|
||||
- plehegar
|
||||
- igrigorik
|
test/fixtures/wpt/performance-timeline/buffered-flag-after-timeout.any.js (vendored, new file, 11 lines)
@ -0,0 +1,11 @@
|
||||
async_test(t => {
|
||||
performance.mark('foo');
|
||||
t.step_timeout(() => {
|
||||
// After a timeout, PerformanceObserver should still receive entry if using the buffered flag.
|
||||
new PerformanceObserver(t.step_func_done(list => {
|
||||
const entries = list.getEntries();
|
||||
assert_equals(entries.length, 1, 'There should be 1 mark entry.');
|
||||
assert_equals(entries[0].entryType, 'mark');
|
||||
})).observe({type: 'mark', buffered: true});
|
||||
}, 100);
|
||||
}, 'PerformanceObserver with buffered flag sees entry after timeout');
|
test/fixtures/wpt/performance-timeline/buffered-flag-observer.any.js (vendored, new file, 15 lines)
@ -0,0 +1,15 @@
|
||||
async_test( t=> {
|
||||
for (let i = 0; i < 50; i++)
|
||||
performance.mark('foo' + i);
|
||||
let marksCreated = 50;
|
||||
let marksReceived = 0;
|
||||
new PerformanceObserver(list => {
|
||||
marksReceived += list.getEntries().length;
|
||||
if (marksCreated < 100) {
|
||||
performance.mark('bar' + marksCreated);
|
||||
marksCreated++;
|
||||
}
|
||||
if (marksReceived == 100)
|
||||
t.done();
|
||||
}).observe({type: 'mark', buffered: true});
|
||||
}, 'PerformanceObserver with buffered flag should see past and future entries.');
|
test/fixtures/wpt/performance-timeline/case-sensitivity.any.js (vendored, new file, 64 lines)
@ -0,0 +1,64 @@
|
||||
test(function () {
|
||||
assert_equals(typeof self.performance, "object");
|
||||
assert_equals(typeof self.performance.getEntriesByType, "function");
|
||||
var lowerList = self.performance.getEntriesByType("resource");
|
||||
var upperList = self.performance.getEntriesByType("RESOURCE");
|
||||
var mixedList = self.performance.getEntriesByType("ReSoUrCe");
|
||||
|
||||
assert_not_equals(lowerList.length, 0, "Resource entries exist");
|
||||
assert_equals(upperList.length, 0, "getEntriesByType('RESOURCE').length");
|
||||
assert_equals(mixedList.length, 0, "getEntriesByType('ReSoUrCe').length");
|
||||
|
||||
}, "getEntriesByType values are case sensitive");
|
||||
|
||||
test(function () {
|
||||
assert_equals(typeof self.performance, "object");
|
||||
assert_equals(typeof self.performance.getEntriesByName, "function");
|
||||
var origin = self.location.protocol + "//" + self.location.host;
|
||||
var location1 = origin.toUpperCase() + "/resources/testharness.js";
|
||||
var location2 = self.location.protocol + "//"
|
||||
+ self.location.host.toUpperCase() + "/resources/testharness.js";
|
||||
var lowerList = self.performance.getEntriesByName(origin + "/resources/testharness.js");
|
||||
var upperList = self.performance.getEntriesByName(location1);
|
||||
var mixedList = self.performance.getEntriesByName(location2);
|
||||
|
||||
assert_equals(lowerList.length, 1, "Resource entry exist");
|
||||
assert_equals(upperList.length, 0, "getEntriesByName('" + location1 + "').length");
|
||||
assert_equals(mixedList.length, 0, "getEntriesByName('" + location2 + "').length");
|
||||
|
||||
}, "getEntriesByName values are case sensitive");
|
||||
|
||||
async_test(function (t) {
|
||||
// Test type/buffered case sensitivity.
|
||||
observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
assert_unreached("Observer(type) should not be called.");
|
||||
})
|
||||
);
|
||||
observer.observe({type: "Mark"});
|
||||
observer.observe({type: "Measure"});
|
||||
observer.observe({type: "MARK"});
|
||||
observer.observe({type: "MEASURE"});
|
||||
observer.observe({type: "Mark", buffered: true});
|
||||
observer.observe({type: "Measure", buffered: true});
|
||||
observer.observe({type: "MARK", buffered: true});
|
||||
observer.observe({type: "MEASURE", buffered: true});
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
|
||||
// Test entryTypes case sensitivity.
|
||||
observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
assert_unreached("Observer(entryTypes) should not be called.");
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["Mark", "Measure"]});
|
||||
observer.observe({entryTypes: ["MARK", "MEASURE"]});
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
|
||||
t.step_timeout(function() {
|
||||
t.done();
|
||||
}, 1000);
|
||||
|
||||
}, "observe() and case sensitivity for types/entryTypes and buffered.");
|
test/fixtures/wpt/performance-timeline/get-invalid-entries.html (vendored, new file, 27 lines)
@ -0,0 +1,27 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
async_test(function(t) {
|
||||
performance.mark('windowMark');
|
||||
const worker = new Worker("resources/worker-invalid-entries.js");
|
||||
worker.onmessage = function(event) {
|
||||
assert_equals(event.data['invalid'], 0, 'The worker must have 0 invalid entries.');
|
||||
assert_equals(event.data['mark'], 1, 'The worker must have 1 mark entry.');
|
||||
assert_equals(event.data['measure'], 0, 'The worker must have 0 measure entries.');
|
||||
assert_equals(performance.getEntriesByType('invalid').length, 0,
|
||||
'The window must have 0 invalid entries.');
|
||||
assert_equals(performance.getEntriesByType('mark').length, 1,
|
||||
'The window must have 1 mark entry.');
|
||||
assert_equals(performance.getEntriesByType('measure').length, 0,
|
||||
'The window must have 0 measure entries.')
|
||||
t.done();
|
||||
}
|
||||
}, 'Get invalid entries from worker and window.');
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
test/fixtures/wpt/performance-timeline/idlharness.any.js (vendored, new file, 25 lines)
@ -0,0 +1,25 @@
|
||||
// META: global=window,worker
|
||||
// META: script=/resources/WebIDLParser.js
|
||||
// META: script=/resources/idlharness.js
|
||||
|
||||
// https://w3c.github.io/performance-timeline/
|
||||
|
||||
'use strict';
|
||||
|
||||
idl_test(
|
||||
['performance-timeline'],
|
||||
['hr-time', 'dom'],
|
||||
async idl_array => {
|
||||
idl_array.add_objects({
|
||||
Performance: ['performance'],
|
||||
PerformanceObserver: ['observer'],
|
||||
PerformanceObserverEntryList: ['entryList'],
|
||||
});
|
||||
|
||||
self.entryList = await new Promise((resolve, reject) => {
|
||||
self.observer = new PerformanceObserver(resolve);
|
||||
observer.observe({ entryTypes: ['mark'] });
|
||||
performance.mark('test');
|
||||
});
|
||||
}
|
||||
);
|
test/fixtures/wpt/performance-timeline/multiple-buffered-flag-observers.any.js (vendored, new file, 32 lines)
@ -0,0 +1,32 @@
|
||||
promise_test(() => {
|
||||
// The first promise waits for one buffered flag observer to receive 3 entries.
|
||||
const promise1 = new Promise(resolve1 => {
|
||||
let numObserved1 = 0;
|
||||
new PerformanceObserver((entryList, obs) => {
|
||||
// This buffered flag observer is constructed after a regular observer detects a mark.
|
||||
new PerformanceObserver(list => {
|
||||
numObserved1 += list.getEntries().length;
|
||||
if (numObserved1 == 3)
|
||||
resolve1();
|
||||
}).observe({type: 'mark', buffered: true});
|
||||
obs.disconnect();
|
||||
}).observe({entryTypes: ['mark']});
|
||||
performance.mark('foo');
|
||||
});
|
||||
// The second promise waits for another buffered flag observer to receive 3 entries.
|
||||
const promise2 = new Promise(resolve2 => {
|
||||
step_timeout(() => {
|
||||
let numObserved2 = 0;
|
||||
// This buffered flag observer is constructed after a delay of 100ms.
|
||||
new PerformanceObserver(list => {
|
||||
numObserved2 += list.getEntries().length;
|
||||
if (numObserved2 == 3)
|
||||
resolve2();
|
||||
}).observe({type: 'mark', buffered: true});
|
||||
}, 100);
|
||||
performance.mark('bar');
|
||||
});
|
||||
performance.mark('meow');
|
||||
// Pass if and only if both buffered observers received all 3 mark entries.
|
||||
return Promise.all([promise1, promise2]);
|
||||
}, 'Multiple PerformanceObservers with buffered flag see all entries');
|
test/fixtures/wpt/performance-timeline/not-clonable.html (vendored, new file, 10 lines)
@ -0,0 +1,10 @@
|
||||
<!doctype html>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script>
|
||||
const t = async_test("Test that a postMessage of a performance entry fails");
|
||||
addEventListener("message", t.step_func_done(e => {
|
||||
assert_equals(e.data, "PASS");
|
||||
}));
|
||||
</script>
|
||||
<iframe src="resources/postmessage-entry.html"></iframe>
|
test/fixtures/wpt/performance-timeline/observer-buffered-false.any.js (vendored, new file, 12 lines)
@ -0,0 +1,12 @@
|
||||
async_test(t => {
|
||||
performance.mark('foo');
|
||||
// Use a timeout to ensure the remainder of the test runs after the entry is created.
|
||||
t.step_timeout(() => {
|
||||
// Observer with buffered flag set to false should not see entry.
|
||||
new PerformanceObserver(() => {
|
||||
assert_unreached('Should not have observed any entry!');
|
||||
}).observe({type: 'mark', buffered: false});
|
||||
// Use a timeout to give time to the observer.
|
||||
t.step_timeout(t.step_func_done(() => {}), 100);
|
||||
}, 0);
|
||||
}, 'PerformanceObserver without buffered flag set to false cannot see past entries.');
|
test/fixtures/wpt/performance-timeline/performanceentry-tojson.any.js (vendored, new file, 21 lines)
@ -0,0 +1,21 @@
|
||||
test(() => {
|
||||
performance.mark('markName');
|
||||
performance.measure('measureName');
|
||||
|
||||
const entries = performance.getEntries();
|
||||
const performanceEntryKeys = [
|
||||
'name',
|
||||
'entryType',
|
||||
'startTime',
|
||||
'duration'
|
||||
];
|
||||
for (let i = 0; i < entries.length; ++i) {
|
||||
assert_equals(typeof(entries[i].toJSON), 'function');
|
||||
const json = entries[i].toJSON();
|
||||
assert_equals(typeof(json), 'object');
|
||||
for (const key of performanceEntryKeys) {
|
||||
assert_equals(json[key], entries[i][key],
|
||||
`entries[${i}].toJSON().${key} should match entries[${i}].${key}`);
|
||||
}
|
||||
}
|
||||
}, 'Test toJSON() in PerformanceEntry');
|
test/fixtures/wpt/performance-timeline/performanceobservers.js (vendored, new file, 44 lines)
@ -0,0 +1,44 @@
|
||||
// Compares a performance entry to a predefined one
|
||||
// perfEntriesToCheck is an array of performance entries from the user agent
|
||||
// expectedEntries is an array of performance entries minted by the test
|
||||
function checkEntries(perfEntriesToCheck, expectedEntries) {
|
||||
function findMatch(pe) {
|
||||
// we match based on entryType and name
|
||||
for (var i = expectedEntries.length - 1; i >= 0; i--) {
|
||||
var ex = expectedEntries[i];
|
||||
if (ex.entryType === pe.entryType && ex.name === pe.name) {
|
||||
return ex;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
assert_equals(perfEntriesToCheck.length, expectedEntries.length, "performance entries must match");
|
||||
|
||||
perfEntriesToCheck.forEach(function (pe1) {
|
||||
assert_not_equals(findMatch(pe1), null, "Entry matches");
|
||||
});
|
||||
}
|
||||
|
||||
// Waits for performance.now to advance. Since precision reduction might
|
||||
// cause it to return the same value across multiple calls.
|
||||
function wait() {
|
||||
var now = performance.now();
|
||||
while (now === performance.now())
|
||||
continue;
|
||||
}
|
||||
|
||||
// Ensure the entries list is sorted by startTime.
|
||||
function checkSorted(entries) {
|
||||
assert_not_equals(entries.length, 0, "entries list must not be empty");
|
||||
if (!entries.length)
|
||||
return;
|
||||
|
||||
var sorted = false;
|
||||
var lastStartTime = entries[0].startTime;
|
||||
for (var i = 1; i < entries.length; ++i) {
|
||||
var currStartTime = entries[i].startTime;
|
||||
assert_less_than_equal(lastStartTime, currStartTime, "entry list must be sorted by startTime");
|
||||
lastStartTime = currStartTime;
|
||||
}
|
||||
}
|
test/fixtures/wpt/performance-timeline/po-callback-mutate.any.js (vendored, new file, 66 lines)
@ -0,0 +1,66 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
var callbackCount = 0;
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
callbackCount++;
|
||||
|
||||
if (callbackCount === 1) {
|
||||
checkEntries(entryList.getEntries(), [
|
||||
{entryType: "measure", name: "measure1"},
|
||||
]);
|
||||
observer.observe({entryTypes: ["mark"]});
|
||||
self.performance.mark("mark2");
|
||||
self.performance.measure("measure2");
|
||||
return;
|
||||
}
|
||||
|
||||
if (callbackCount === 2) {
|
||||
checkEntries(entryList.getEntries(), [
|
||||
{entryType: "mark", name: "mark2"},
|
||||
]);
|
||||
self.performance.mark("mark-before-change-observe-state-to-measure");
|
||||
self.performance.measure("measure-before-change-observe-state-to-measure");
|
||||
observer.observe({entryTypes: ["measure"]});
|
||||
self.performance.mark("mark3");
|
||||
self.performance.measure("measure3");
|
||||
return;
|
||||
}
|
||||
|
||||
if (callbackCount === 3) {
|
||||
checkEntries(entryList.getEntries(), [
|
||||
{entryType: "measure", name: "measure3"},
|
||||
{entryType: "mark", name: "mark-before-change-observe-state-to-measure"},
|
||||
]);
|
||||
self.performance.mark("mark-before-change-observe-state-to-both");
|
||||
self.performance.measure("measure-before-change-observe-state-to-both");
|
||||
observer.observe({entryTypes: ["mark", "measure"]});
|
||||
self.performance.mark("mark4");
|
||||
self.performance.measure("measure4");
|
||||
return;
|
||||
}
|
||||
|
||||
if (callbackCount === 4) {
|
||||
checkEntries(entryList.getEntries(), [
|
||||
{entryType: "measure", name: "measure-before-change-observe-state-to-both"},
|
||||
{entryType: "measure", name: "measure4"},
|
||||
{entryType: "mark", name: "mark4"},
|
||||
]);
|
||||
self.performance.mark("mark-before-disconnect");
|
||||
self.performance.measure("measure-before-disconnect");
|
||||
observer.disconnect();
|
||||
self.performance.mark("mark-after-disconnect");
|
||||
self.performance.measure("measure-after-disconnect");
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
|
||||
assert_unreached("The callback must not be invoked after disconnecting");
|
||||
})
|
||||
);
|
||||
|
||||
observer.observe({entryTypes: ["measure"]});
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
}, "PerformanceObserver modifications inside callback should update filtering and not clear buffer");
|
test/fixtures/wpt/performance-timeline/po-disconnect-removes-observed-types.any.js (vendored, new file, 19 lines)
@ -0,0 +1,19 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
const observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList) {
|
||||
// There should be no mark entry.
|
||||
checkEntries(entryList.getEntries(),
|
||||
[{ entryType: "measure", name: "b"}]);
|
||||
t.done();
|
||||
})
|
||||
);
|
||||
observer.observe({type: "mark"});
|
||||
// Disconnect the observer.
|
||||
observer.disconnect();
|
||||
// Now, only observe measure.
|
||||
observer.observe({type: "measure"});
|
||||
performance.mark("a");
|
||||
performance.measure("b");
|
||||
}, "Types observed are forgotten when disconnect() is called.");
|
test/fixtures/wpt/performance-timeline/po-disconnect.any.js (vendored, new file, 37 lines)
@ -0,0 +1,37 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
assert_unreached("This callback must not be invoked");
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["mark", "measure", "navigation"]});
|
||||
observer.disconnect();
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
t.step_timeout(function () {
|
||||
t.done();
|
||||
}, 2000);
|
||||
}, "disconnected callbacks must not be invoked");
|
||||
|
||||
test(function () {
|
||||
var obs = new PerformanceObserver(function () { return true; });
|
||||
obs.disconnect();
|
||||
obs.disconnect();
|
||||
}, "disconnecting an unconnected observer is a no-op");
|
||||
|
||||
async_test(function (t) {
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
assert_unreached("This callback must not be invoked");
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["mark"]});
|
||||
self.performance.mark("mark1");
|
||||
observer.disconnect();
|
||||
self.performance.mark("mark2");
|
||||
t.step_timeout(function () {
|
||||
t.done();
|
||||
}, 2000);
|
||||
}, "An observer disconnected after a mark must not have its callback invoked");
|
test/fixtures/wpt/performance-timeline/po-entries-sort.any.js (vendored, new file, 64 lines)
@ -0,0 +1,64 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
var stored_entries = [];
|
||||
var stored_entries_by_type = [];
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
|
||||
stored_entries = entryList.getEntries();
|
||||
stored_entries_by_type = entryList.getEntriesByType("mark");
|
||||
stored_entries_by_name = entryList.getEntriesByName("name-repeat");
|
||||
var startTimeOfMark2 = entryList.getEntriesByName("mark2")[0].startTime;
|
||||
|
||||
checkSorted(stored_entries);
|
||||
checkEntries(stored_entries, [
|
||||
{entryType: "measure", name: "measure1"},
|
||||
{entryType: "measure", name: "measure2"},
|
||||
{entryType: "measure", name: "measure3"},
|
||||
{entryType: "measure", name: "name-repeat"},
|
||||
{entryType: "mark", name: "mark1"},
|
||||
{entryType: "mark", name: "mark2"},
|
||||
{entryType: "measure", name: "measure-matching-mark2-1"},
|
||||
{entryType: "measure", name: "measure-matching-mark2-2"},
|
||||
{entryType: "mark", name: "name-repeat"},
|
||||
{entryType: "mark", name: "name-repeat"},
|
||||
]);
|
||||
|
||||
checkSorted(stored_entries_by_type);
|
||||
checkEntries(stored_entries_by_type, [
|
||||
{entryType: "mark", name: "mark1"},
|
||||
{entryType: "mark", name: "mark2"},
|
||||
{entryType: "mark", name: "name-repeat"},
|
||||
{entryType: "mark", name: "name-repeat"},
|
||||
]);
|
||||
|
||||
checkSorted(stored_entries_by_name);
|
||||
checkEntries(stored_entries_by_name, [
|
||||
{entryType: "measure", name: "name-repeat"},
|
||||
{entryType: "mark", name: "name-repeat"},
|
||||
{entryType: "mark", name: "name-repeat"},
|
||||
]);
|
||||
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
})
|
||||
);
|
||||
|
||||
observer.observe({entryTypes: ["mark", "measure"]});
|
||||
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
wait(); // Ensure mark1 !== mark2 startTime by making sure performance.now advances.
|
||||
self.performance.mark("mark2");
|
||||
self.performance.measure("measure2");
|
||||
self.performance.measure("measure-matching-mark2-1", "mark2");
|
||||
wait(); // Ensure mark2 !== mark3 startTime by making sure performance.now advances.
|
||||
self.performance.mark("name-repeat");
|
||||
self.performance.measure("measure3");
|
||||
self.performance.measure("measure-matching-mark2-2", "mark2");
|
||||
wait(); // Ensure name-repeat startTime will differ.
|
||||
self.performance.mark("name-repeat");
|
||||
wait(); // Ensure name-repeat startTime will differ.
|
||||
self.performance.measure("name-repeat");
|
||||
}, "getEntries, getEntriesByType, getEntriesByName sort order");
|
test/fixtures/wpt/performance-timeline/po-getentries.any.js (vendored, new file, 38 lines)
@ -0,0 +1,38 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
checkEntries(entryList.getEntries(),
|
||||
[{ entryType: "mark", name: "mark1"}], "getEntries");
|
||||
|
||||
checkEntries(entryList.getEntriesByType("mark"),
|
||||
[{ entryType: "mark", name: "mark1"}], "getEntriesByType");
|
||||
assert_equals(entryList.getEntriesByType("measure").length, 0,
|
||||
"getEntriesByType with no expected entry");
|
||||
assert_equals(entryList.getEntriesByType("234567").length, 0,
|
||||
"getEntriesByType with no expected entry");
|
||||
|
||||
checkEntries(entryList.getEntriesByName("mark1"),
|
||||
[{ entryType: "mark", name: "mark1"}], "getEntriesByName");
|
||||
assert_equals(entryList.getEntriesByName("mark2").length, 0,
|
||||
"getEntriesByName with no expected entry");
|
||||
assert_equals(entryList.getEntriesByName("234567").length, 0,
|
||||
"getEntriesByName with no expected entry");
|
||||
|
||||
checkEntries(entryList.getEntriesByName("mark1", "mark"),
|
||||
[{ entryType: "mark", name: "mark1"}], "getEntriesByName with a type");
|
||||
assert_equals(entryList.getEntriesByName("mark1", "measure").length, 0,
|
||||
"getEntriesByName with a type with no expected entry");
|
||||
assert_equals(entryList.getEntriesByName("mark2", "measure").length, 0,
|
||||
"getEntriesByName with a type with no expected entry");
|
||||
assert_equals(entryList.getEntriesByName("mark1", "234567").length, 0,
|
||||
"getEntriesByName with a type with no expected entry");
|
||||
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["mark"]});
|
||||
self.performance.mark("mark1");
|
||||
}, "getEntries, getEntriesByType and getEntriesByName work");
|
test/fixtures/wpt/performance-timeline/po-mark-measure.any.js (vendored, new file, 61 lines)
@ -0,0 +1,61 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
var stored_entries = [];
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
stored_entries =
|
||||
stored_entries.concat(entryList.getEntries());
|
||||
if (stored_entries.length >= 4) {
|
||||
checkEntries(stored_entries,
|
||||
[{ entryType: "mark", name: "mark1"},
|
||||
{ entryType: "mark", name: "mark2"},
|
||||
{ entryType: "measure", name: "measure1"},
|
||||
{ entryType: "measure", name: "measure2"}]);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["mark", "measure"]});
|
||||
}, "entries are observable");
|
||||
|
||||
async_test(function (t) {
|
||||
var mark_entries = [];
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
mark_entries =
|
||||
mark_entries.concat(entryList.getEntries());
|
||||
if (mark_entries.length >= 2) {
|
||||
checkEntries(mark_entries,
|
||||
[{ entryType: "mark", name: "mark1"},
|
||||
{ entryType: "mark", name: "mark2"}]);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["mark"]});
|
||||
self.performance.mark("mark1");
|
||||
self.performance.mark("mark2");
|
||||
}, "mark entries are observable");
|
||||
|
||||
async_test(function (t) {
|
||||
var measure_entries = [];
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
measure_entries =
|
||||
measure_entries.concat(entryList.getEntries());
|
||||
if (measure_entries.length >= 2) {
|
||||
checkEntries(measure_entries,
|
||||
[{ entryType: "measure", name: "measure1"},
|
||||
{ entryType: "measure", name: "measure2"}]);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["measure"]});
|
||||
self.performance.measure("measure1");
|
||||
self.performance.measure("measure2");
|
||||
}, "measure entries are observable");
|
test/fixtures/wpt/performance-timeline/po-observe-repeated-type.any.js (vendored, new file, 17 lines)
@ -0,0 +1,17 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
const observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList) {
|
||||
checkEntries(entryList.getEntries(),
|
||||
[{ entryType: "mark", name: "early"}]);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
})
|
||||
);
|
||||
performance.mark("early");
|
||||
// This call will not trigger anything.
|
||||
observer.observe({type: "mark"});
|
||||
// This call should override the previous call and detect the early mark.
|
||||
observer.observe({type: "mark", buffered: true});
|
||||
}, "Two calls of observe() with the same 'type' cause override.");
|
test/fixtures/wpt/performance-timeline/po-observe-type.any.js (vendored, new file, 64 lines)
@ -0,0 +1,64 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
test(function () {
|
||||
const obs = new PerformanceObserver(() => {});
|
||||
assert_throws_js(TypeError, function () {
|
||||
obs.observe({});
|
||||
});
|
||||
assert_throws_js(TypeError, function () {
|
||||
obs.observe({entryType: ['mark', 'measure']});
|
||||
});
|
||||
}, "Calling observe() without 'type' or 'entryTypes' throws a TypeError");
|
||||
|
||||
test(() => {
|
||||
const obs = new PerformanceObserver(() =>{});
|
||||
obs.observe({entryTypes: ["mark"]});
|
||||
assert_throws_dom('InvalidModificationError', function () {
|
||||
obs.observe({type: "measure"});
|
||||
});
|
||||
}, "Calling observe() with entryTypes and then type should throw an InvalidModificationError");
|
||||
|
||||
test(() => {
|
||||
const obs = new PerformanceObserver(() =>{});
|
||||
obs.observe({type: "mark"});
|
||||
assert_throws_dom('InvalidModificationError', function () {
|
||||
obs.observe({entryTypes: ["measure"]});
|
||||
});
|
||||
}, "Calling observe() with type and then entryTypes should throw an InvalidModificationError");
|
||||
|
||||
test(() => {
|
||||
const obs = new PerformanceObserver(() =>{});
|
||||
assert_throws_js(TypeError, function () {
|
||||
obs.observe({type: "mark", entryTypes: ["measure"]});
|
||||
});
|
||||
}, "Calling observe() with type and entryTypes should throw a TypeError");
|
||||
|
||||
test(function () {
|
||||
const obs = new PerformanceObserver(() =>{});
|
||||
// Definitely not an entry type.
|
||||
obs.observe({type: "this-cannot-match-an-entryType"});
|
||||
// Close to an entry type, but not quite.
|
||||
obs.observe({type: "marks"});
|
||||
}, "Passing in unknown values to type does throw an exception.");
|
||||
|
||||
async_test(function (t) {
|
||||
let observedMark = false;
|
||||
let observedMeasure = false;
|
||||
const observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
observedMark |= entryList.getEntries().filter(
|
||||
entry => entry.entryType === 'mark').length;
|
||||
observedMeasure |= entryList.getEntries().filter(
|
||||
entry => entry.entryType === 'measure').length
|
||||
// Only conclude the test once we receive both entries!
|
||||
if (observedMark && observedMeasure) {
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}
|
||||
})
|
||||
);
|
||||
observer.observe({type: "mark"});
|
||||
observer.observe({type: "measure"});
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
}, "observe() with different type values stacks.");
|
test/fixtures/wpt/performance-timeline/po-observe.any.js (vendored, new file, 63 lines)
@ -0,0 +1,63 @@
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
test(function () {
|
||||
const obs = new PerformanceObserver(() => {});
|
||||
assert_throws_js(TypeError, function () {
|
||||
obs.observe({entryTypes: "mark"});
|
||||
});
|
||||
}, "entryTypes must be a sequence or throw a TypeError");
|
||||
|
||||
test(function () {
|
||||
const obs = new PerformanceObserver(() => {});
|
||||
obs.observe({entryTypes: []});
|
||||
}, "Empty sequence entryTypes does not throw an exception.");
|
||||
|
||||
test(function () {
|
||||
const obs = new PerformanceObserver(() => {});
|
||||
obs.observe({entryTypes: ["this-cannot-match-an-entryType"]});
|
||||
obs.observe({entryTypes: ["marks","navigate", "resources"]});
|
||||
}, "Unknown entryTypes do not throw an exception.");
|
||||
|
||||
test(function () {
|
||||
const obs = new PerformanceObserver(() => {});
|
||||
obs.observe({entryTypes: ["mark","this-cannot-match-an-entryType"]});
|
||||
obs.observe({entryTypes: ["this-cannot-match-an-entryType","mark"]});
|
||||
obs.observe({entryTypes: ["mark"], others: true});
|
||||
}, "Filter unsupported entryType entryType names within the entryTypes sequence");
|
||||
|
||||
async_test(function (t) {
|
||||
var finish = t.step_func(function () { t.done(); });
|
||||
var observer = new PerformanceObserver(
|
||||
function (entryList, obs) {
|
||||
var self = this;
|
||||
t.step(function () {
|
||||
assert_true(entryList instanceof PerformanceObserverEntryList, "first callback parameter must be a PerformanceObserverEntryList instance");
|
||||
assert_true(obs instanceof PerformanceObserver, "second callback parameter must be a PerformanceObserver instance");
|
||||
assert_equals(observer, self, "observer is the this value");
|
||||
assert_equals(observer, obs, "observer is second parameter");
|
||||
assert_equals(self, obs, "this and second parameter are the same");
|
||||
observer.disconnect();
|
||||
finish();
|
||||
});
|
||||
}
|
||||
);
|
||||
self.performance.clearMarks();
|
||||
observer.observe({entryTypes: ["mark"]});
|
||||
self.performance.mark("mark1");
|
||||
}, "Check observer callback parameter and this values");
|
||||
|
||||
async_test(function (t) {
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
checkEntries(entryList.getEntries(),
|
||||
[{ entryType: "measure", name: "measure1"}]);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
})
|
||||
);
|
||||
self.performance.clearMarks();
|
||||
observer.observe({entryTypes: ["mark"]});
|
||||
observer.observe({entryTypes: ["measure"]});
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
}, "replace observer if already present");
|
86 test/fixtures/wpt/performance-timeline/po-observe.html vendored Normal file
@ -0,0 +1,86 @@
|
||||
<!DOCTYPE HTML>
|
||||
<meta charset=utf-8>
|
||||
<title>PerformanceObservers: PerformanceObserverInit.buffered</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="performanceobservers.js"></script>
|
||||
<h1>PerformanceObservers: PerformanceObserverInit.buffered</h1>
|
||||
<p>
|
||||
PerformanceObserverInit.buffered should retrieve previously buffered entries
|
||||
</p>
|
||||
<div id="log"></div>
|
||||
<script>
|
||||
async_test(function (t) {
|
||||
function initTest() {
|
||||
new PerformanceObserver(function (entryList, observer) {
|
||||
entryList.getEntries().forEach(function(entry) {
|
||||
if (shouldExclude(entry)) {
|
||||
return;
|
||||
}
|
||||
|
||||
observedEntries.push(entry);
|
||||
if (observedEntries.length === entryTypes.length) {
|
||||
observer.disconnect();
|
||||
runTest();
|
||||
}
|
||||
});
|
||||
}).observe({entryTypes});
|
||||
|
||||
// creates a `resource` entry
|
||||
var img = document.createElement("img");
|
||||
img.src = "./resources/square.png";
|
||||
document.body.appendChild(img);
|
||||
|
||||
performance.mark("markName"); // creates a `mark` entry
|
||||
performance.measure("measureName"); // creates a `measure` entry
|
||||
}
|
||||
function shouldExclude(entry) {
|
||||
// exclude all `resource` entries that aren't for "square.png"
|
||||
return entry.entryType === "resource" &&
|
||||
entry.name.indexOf("square.png") === -1;
|
||||
}
|
||||
function runTest() {
|
||||
// this PerformanceObserver is a nop because we've already been notified about all of our `entryTypes`
|
||||
var po_nop = new PerformanceObserver(function (entryList, observer) {
|
||||
if (entryList.getEntries().find(function(entry) {
|
||||
return !shouldExclude(entry);
|
||||
})) {
|
||||
assert_unreached("this PerformanceObserver callback should never be called");
|
||||
}
|
||||
});
|
||||
po_nop.observe({
|
||||
entryTypes,
|
||||
});
|
||||
|
||||
// this PerformanceObserver should be notified about the previously
|
||||
// buffered mark entry only
|
||||
const bufferedEntries = [];
|
||||
new PerformanceObserver(function (entryList, observer) {
|
||||
entryList.getEntries().forEach(function(entry) {
|
||||
if (shouldExclude(entry)) {
|
||||
return;
|
||||
}
|
||||
|
||||
bufferedEntries.push(entry);
|
||||
if (bufferedEntries.length === 1) {
|
||||
observer.disconnect();
|
||||
po_nop.disconnect();
|
||||
for (let i = 0; i < bufferedEntries.length; i++) {
|
||||
assert_equals(bufferedEntries[i].entryType, "mark")
|
||||
}
|
||||
t.done();
|
||||
}
|
||||
});
|
||||
}).observe({
|
||||
type: "mark",
|
||||
buffered: true
|
||||
});
|
||||
}
|
||||
|
||||
const entryTypes = ["navigation", "resource", "mark", "measure"];
|
||||
const observedEntries = [];
|
||||
initTest();
|
||||
}, "PerformanceObserverInit.buffered should retrieve previously buffered entries");
|
||||
|
||||
</script>
|
48 test/fixtures/wpt/performance-timeline/po-resource.html vendored Normal file
@ -0,0 +1,48 @@
|
||||
<!DOCTYPE HTML>
|
||||
<meta charset=utf-8>
|
||||
<title>PerformanceObservers: resource</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="performanceobservers.js"></script>
|
||||
<h1>PerformanceObservers: resource</h1>
|
||||
<p>
|
||||
New resources will <a href="https://w3c.github.io/performance-timeline/#dfn-queue-a-performanceentry">queue a PerformanceEntry</a>.
|
||||
</p>
|
||||
<div id="log"></div>
|
||||
<script>
|
||||
async_test(function (t) {
|
||||
function path(pathname) {
|
||||
var filename = pathname.substring(pathname.lastIndexOf('/')+1);
|
||||
return pathname.substring(0, pathname.length - filename.length);
|
||||
}
|
||||
var gUniqueCounter = 0;
|
||||
function generateUniqueValues() {
|
||||
return Date.now() + "-" + (++gUniqueCounter);
|
||||
}
|
||||
var stored_entries = [];
|
||||
var img_location = document.location.origin + path(document.location.pathname)
|
||||
+ "resources/square.png?random=";
|
||||
var img1 = img_location + generateUniqueValues();
|
||||
var img2 = img_location + generateUniqueValues();
|
||||
var observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
stored_entries =
|
||||
stored_entries.concat(entryList.getEntriesByType("resource"));
|
||||
if (stored_entries.length >= 2) {
|
||||
checkEntries(stored_entries,
|
||||
[{ entryType: "resource", name: img1},
|
||||
{ entryType: "resource", name: img2}]);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}
|
||||
})
|
||||
);
|
||||
observer.observe({entryTypes: ["resource"]});
|
||||
var img = document.createElement("img");
|
||||
img.src = img1;
|
||||
document.body.appendChild(img);
|
||||
img = document.createElement("img");
|
||||
img.src = img2;
|
||||
document.body.appendChild(img);
|
||||
}, "resource entries are observable");
|
||||
</script>
|
34 test/fixtures/wpt/performance-timeline/po-takeRecords.any.js vendored Normal file
@ -0,0 +1,34 @@
|
||||
// META: title=PerformanceObserver: takeRecords
|
||||
// META: script=performanceobservers.js
|
||||
|
||||
async_test(function (t) {
|
||||
const observer = new PerformanceObserver(function (entryList, observer) {
|
||||
assert_unreached('This callback should not have been called.')
|
||||
});
|
||||
let entries = observer.takeRecords();
|
||||
checkEntries(entries, [], 'No records before observe');
|
||||
observer.observe({entryTypes: ['mark']});
|
||||
assert_equals(typeof(observer.takeRecords), 'function');
|
||||
entries = observer.takeRecords();
|
||||
checkEntries(entries, [], 'No records just from observe');
|
||||
performance.mark('a');
|
||||
performance.mark('b');
|
||||
entries = observer.takeRecords();
|
||||
checkEntries(entries, [
|
||||
{entryType: 'mark', name: 'a'},
|
||||
{entryType: 'mark', name: 'b'}
|
||||
]);
|
||||
performance.mark('c');
|
||||
performance.mark('d');
|
||||
performance.mark('e');
|
||||
entries = observer.takeRecords();
|
||||
checkEntries(entries, [
|
||||
{entryType: 'mark', name: 'c'},
|
||||
{entryType: 'mark', name: 'd'},
|
||||
{entryType: 'mark', name: 'e'}
|
||||
]);
|
||||
entries = observer.takeRecords();
|
||||
checkEntries(entries, [], 'No entries right after takeRecords');
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}, "Test PerformanceObserver's takeRecords()");
|
17 test/fixtures/wpt/performance-timeline/resources/postmessage-entry.html vendored Normal file
@ -0,0 +1,17 @@
|
||||
<!doctype html>
|
||||
<script>
|
||||
addEventListener("load", () => {
|
||||
const entry = performance.getEntriesByType("navigation")[0];
|
||||
try {
  // Posting a PerformanceEntry must throw a DataCloneError; only report
  // failure when the postMessage call does not throw.
  window.top.postMessage(entry, "*");
  window.top.postMessage("FAIL - No exception thrown", "*");
} catch(error) {
  if (error.name == "DataCloneError") {
    window.top.postMessage("PASS", "*");
  } else {
    window.top.postMessage("FAIL - Wrong exception name: " + error.name, "*");
  }
}
});
|
||||
|
||||
</script>
|
BIN test/fixtures/wpt/performance-timeline/resources/square.png vendored Normal file
Binary file not shown. Size: 249 B
6 test/fixtures/wpt/performance-timeline/resources/worker-invalid-entries.js vendored Normal file
@ -0,0 +1,6 @@
|
||||
performance.mark('workerMark');
|
||||
postMessage({
|
||||
'invalid' : performance.getEntriesByType('invalid').length,
|
||||
'mark' : performance.getEntriesByType('mark').length,
|
||||
'measure' : performance.getEntriesByType('measure').length
|
||||
});
|
6 test/fixtures/wpt/performance-timeline/resources/worker-with-performance-observer.js vendored Normal file
@ -0,0 +1,6 @@
|
||||
try {
|
||||
new PerformanceObserver(() => true);
|
||||
postMessage("SUCCESS");
|
||||
} catch (ex) {
|
||||
postMessage("FAILURE");
|
||||
}
|
19 test/fixtures/wpt/performance-timeline/supportedEntryTypes.any.js vendored Normal file
@ -0,0 +1,19 @@
|
||||
test(() => {
|
||||
if (typeof PerformanceObserver.supportedEntryTypes === "undefined")
|
||||
assert_unreached("supportedEntryTypes is not supported.");
|
||||
const types = PerformanceObserver.supportedEntryTypes;
|
||||
assert_greater_than(types.length, 0,
|
||||
"There should be at least one entry in supportedEntryTypes.");
|
||||
for (let i = 1; i < types.length; i++) {
|
||||
assert_true(types[i-1] < types[i],
|
||||
"The strings '" + types[i-1] + "' and '" + types[i] +
|
||||
"' are repeated or they are not in alphabetical order.")
|
||||
}
|
||||
}, "supportedEntryTypes exists and returns entries in alphabetical order");
|
||||
|
||||
test(() => {
|
||||
if (typeof PerformanceObserver.supportedEntryTypes === "undefined")
|
||||
assert_unreached("supportedEntryTypes is not supported.");
|
||||
assert_true(PerformanceObserver.supportedEntryTypes ===
|
||||
PerformanceObserver.supportedEntryTypes);
|
||||
}, "supportedEntryTypes caches result");
|
25 test/fixtures/wpt/performance-timeline/webtiming-resolution.any.js vendored Normal file
@ -0,0 +1,25 @@
|
||||
function testTimeResolution(highResTimeFunc, funcString) {
|
||||
test(() => {
|
||||
const t0 = highResTimeFunc();
|
||||
let t1 = highResTimeFunc();
|
||||
while (t0 == t1) {
|
||||
t1 = highResTimeFunc();
|
||||
}
|
||||
const epsilon = 1e-5;
|
||||
assert_greater_than_equal(t1 - t0, 0.005 - epsilon, 'The second ' + funcString + ' should be much greater than the first');
|
||||
}, 'Verifies the resolution of ' + funcString + ' is at least 5 microseconds.');
|
||||
}
|
||||
|
||||
function timeByPerformanceNow() {
|
||||
return performance.now();
|
||||
}
|
||||
|
||||
function timeByUserTiming() {
|
||||
performance.mark('timer');
|
||||
const time = performance.getEntriesByName('timer')[0].startTime;
|
||||
performance.clearMarks('timer');
|
||||
return time;
|
||||
}
|
||||
|
||||
testTimeResolution(timeByPerformanceNow, 'performance.now()');
|
||||
testTimeResolution(timeByUserTiming, 'entry.startTime');
|
18 test/fixtures/wpt/performance-timeline/worker-with-performance-observer.html vendored Normal file
@ -0,0 +1,18 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
async_test(function(t) {
|
||||
const worker = new Worker("resources/worker-with-performance-observer.js");
|
||||
worker.onmessage = function(event) {
|
||||
t.step(() => assert_equals(event.data, 'SUCCESS'));
|
||||
t.done();
|
||||
}
|
||||
}, 'Worker: Test Performance Observer inside a worker.');
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
4 test/fixtures/wpt/user-timing/META.yml vendored Normal file
@ -0,0 +1,4 @@
|
||||
spec: https://w3c.github.io/user-timing/
|
||||
suggested_reviewers:
|
||||
- plehegar
|
||||
- igrigorik
|
27 test/fixtures/wpt/user-timing/buffered-flag.any.js vendored Normal file
@ -0,0 +1,27 @@
|
||||
async_test(t => {
|
||||
// First observer creates second in callback to ensure the entry has been dispatched by the time
|
||||
// the second observer begins observing.
|
||||
new PerformanceObserver(() => {
|
||||
// Second observer requires 'buffered: true' to see an entry.
|
||||
new PerformanceObserver(t.step_func_done(list => {
|
||||
const entries = list.getEntries();
|
||||
assert_equals(entries.length, 1, 'There should be 1 mark entry.');
|
||||
assert_equals(entries[0].entryType, 'mark');
|
||||
})).observe({type: 'mark', buffered: true});
|
||||
}).observe({entryTypes: ['mark']});
|
||||
performance.mark('foo');
|
||||
}, 'PerformanceObserver with buffered flag sees previous marks');
|
||||
|
||||
async_test(t => {
|
||||
// First observer creates second in callback to ensure the entry has been dispatched by the time
|
||||
// the second observer begins observing.
|
||||
new PerformanceObserver(() => {
|
||||
// Second observer requires 'buffered: true' to see an entry.
|
||||
new PerformanceObserver(t.step_func_done(list => {
|
||||
const entries = list.getEntries();
|
||||
assert_equals(entries.length, 1, 'There should be 1 measure entry.');
|
||||
assert_equals(entries[0].entryType, 'measure');
|
||||
})).observe({type: 'measure', buffered: true});
|
||||
}).observe({entryTypes: ['measure']});
|
||||
performance.measure('bar');
|
||||
}, 'PerformanceObserver with buffered flag sees previous measures');
|
25 test/fixtures/wpt/user-timing/case-sensitivity.any.js vendored Normal file
@ -0,0 +1,25 @@
|
||||
test(function () {
|
||||
assert_equals(typeof self.performance, "object");
|
||||
assert_equals(typeof self.performance.getEntriesByType, "function");
|
||||
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1");
|
||||
|
||||
const type = [
|
||||
'mark',
|
||||
'measure',
|
||||
];
|
||||
type.forEach(function(entryType) {
|
||||
if (PerformanceObserver.supportedEntryTypes.includes(entryType)) {
|
||||
const entryTypeUpperCased = entryType.toUpperCase();
|
||||
const entryTypeCapitalized = entryType[0].toUpperCase() + entryType.substring(1);
|
||||
const lowerList = self.performance.getEntriesByType(entryType);
|
||||
const upperList = self.performance.getEntriesByType(entryTypeUpperCased);
|
||||
const mixedList = self.performance.getEntriesByType(entryTypeCapitalized);
|
||||
|
||||
assert_greater_than(lowerList.length, 0, "Entries exist");
|
||||
assert_equals(upperList.length, 0, "getEntriesByType('" + entryTypeUpperCased + "').length");
|
||||
assert_equals(mixedList.length, 0, "getEntriesByType('" + entryTypeCapitalized + "').length");
|
||||
}
|
||||
});
|
||||
}, "getEntriesByType values are case sensitive");
|
84 test/fixtures/wpt/user-timing/clearMarks.html vendored Normal file
@ -0,0 +1,84 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>functionality test of window.performance.clearMarks</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
<script src="resources/webperftestharnessextension.js"></script>
|
||||
<script>
|
||||
setup({ explicit_done: true });
|
||||
|
||||
function onload_test()
|
||||
{
|
||||
const entrylist_checker = new performance_entrylist_checker('mark');
|
||||
const string_mark_names = mark_names.map(function (x) { return String(x)});
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
|
||||
for (let i = 0; i < mark_names.length; ++i)
|
||||
{
|
||||
performance.clearMarks(mark_names[i]);
|
||||
const retained_entries = performance.getEntriesByType('mark');
|
||||
const non_retained_entries = performance.getEntriesByName(mark_names[i], 'mark');
|
||||
entrylist_checker.entrylist_check(retained_entries, mark_names.length - i - 1, string_mark_names,
|
||||
'First loop: checking entries after removing "' + mark_names[i] + '". ');
|
||||
test_equals(non_retained_entries.length, 0,
|
||||
'First loop: marks that we cleared for "' + mark_names[i] + '" should not exist anymore.');
|
||||
}
|
||||
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
performance.clearMarks();
|
||||
test_equals(performance.getEntriesByType('mark').length, 0, 'No marks should exist after we clear all.');
|
||||
|
||||
// The following cases test clearing a mark name that has been set twice.
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
|
||||
for (let i = 0; i < mark_names.length; ++i)
|
||||
{
|
||||
performance.clearMarks(mark_names[i]);
|
||||
const retained_entries = performance.getEntriesByType('mark');
|
||||
const non_retained_entries = performance.getEntriesByName(mark_names[i], 'mark');
|
||||
entrylist_checker.entrylist_check(retained_entries, (mark_names.length - i - 1) * 2, string_mark_names,
|
||||
'Second loop: checking entries after removing "' + mark_names[i] + '". ');
|
||||
test_equals(non_retained_entries.length, 0,
|
||||
'Second loop: marks that we cleared for "' + mark_names[i] + '" should not exist anymore.');
|
||||
}
|
||||
|
||||
// The following cases test clear functionality when each mark name has been set twice.
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
var entry_number_before_useless_clear = performance.getEntriesByType('mark').length;
performance.clearMarks('NonExist');
var entry_number_after_useless_clear = performance.getEntriesByType('mark').length;
test_equals(entry_number_before_useless_clear, entry_number_after_useless_clear, 'Nothing should happen if we clear a non-existent mark.');
|
||||
|
||||
performance.clearMarks();
|
||||
test_equals(performance.getEntriesByType('mark').length, 0, 'No marks should exist when we clear all.');
|
||||
|
||||
done();
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload=onload_test()>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates functionality of the interface window.performance.clearMarks.</p>
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
77 test/fixtures/wpt/user-timing/clearMeasures.html vendored Normal file
@ -0,0 +1,77 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>functionality test of window.performance.clearMeasures</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
<script src="resources/webperftestharnessextension.js"></script>
|
||||
<script>
|
||||
setup({ explicit_done: true });
|
||||
|
||||
function onload_test()
|
||||
{
|
||||
const context = new PerformanceContext(window.performance);
|
||||
const entrylist_checker = new performance_entrylist_checker('measure');
|
||||
const measure_names = measures.map(function(x) {return x[0];});
|
||||
|
||||
mark_names.forEach(function(name) {
|
||||
context.mark(name);
|
||||
});
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
for (let i = 0; i < measures.length; ++i)
|
||||
{
|
||||
context.clearMeasures(measures[i][0]);
|
||||
const retained_entries = context.getEntriesByType('measure');
|
||||
const non_retained_entries = context.getEntriesByName(measures[i][0], 'measure');
|
||||
entrylist_checker.entrylist_check(retained_entries, measures.length - i - 1, measure_names,
|
||||
'First loop: checking entries after removing "' + measures[i][0] + '". ');
|
||||
test_equals(non_retained_entries.length, 0,
|
||||
'First loop: measure "' + measures[i][0] + '" should not exist anymore after we cleared it.');
|
||||
}
|
||||
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
context.clearMeasures();
|
||||
test_equals(context.getEntriesByType('measure').length, 0, 'No measures should exist after we clear all (after first loop).');
|
||||
|
||||
// The following cases test clearing a measure name that has been set twice.
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
mark_names.forEach(function(name) {
|
||||
context.mark(name);
|
||||
});
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
for (let i = 0; i < measures.length; ++i)
|
||||
{
|
||||
context.clearMeasures(measures[i][0]);
|
||||
const retained_entries = context.getEntriesByType('measure');
|
||||
const non_retained_entries = context.getEntriesByName(measures[i][0], 'measure');
|
||||
entrylist_checker.entrylist_check(retained_entries, (measures.length - i - 1) * 2, measure_names,
|
||||
'Second loop: checking entries after removing "' + measures[i][0] + '". ');
|
||||
test_equals(non_retained_entries.length, 0,
|
||||
'Second loop: measure "' + measures[i][0] +'" should not exist anymore after we cleared it.');
|
||||
}
|
||||
|
||||
// The following cases test clear functionality when each measure name has been set twice.
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
const entry_number_before_useless_clear = context.getEntriesByType('measure').length;
|
||||
context.clearMeasures('NonExist');
|
||||
const entry_number_after_useless_clear = context.getEntriesByType('measure').length;
|
||||
test_equals(entry_number_before_useless_clear, entry_number_after_useless_clear, 'Nothing should happen if we clear a non-existent measure');
|
||||
context.clearMeasures();
|
||||
test_equals(context.getEntriesByType('measure').length, 0, 'No measures should exist when we clear all (after second loop).');
|
||||
|
||||
done();
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload=onload_test()>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates functionality of the interface window.performance.clearMeasures.</p>
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
17 test/fixtures/wpt/user-timing/clear_all_marks.any.js vendored Normal file
@ -0,0 +1,17 @@
|
||||
test(function() {
|
||||
self.performance.mark("mark1");
|
||||
self.performance.mark("mark2");
|
||||
|
||||
// test that two marks have been created
|
||||
var entries = self.performance.getEntriesByType("mark");
|
||||
assert_equals(entries.length, 2, "Two marks have been created for this test.");
|
||||
|
||||
// clear all marks
|
||||
self.performance.clearMarks();
|
||||
|
||||
// test that all marks were cleared
|
||||
entries = self.performance.getEntriesByType("mark");
|
||||
|
||||
assert_equals(entries.length, 0, "All marks have been cleared.");
|
||||
|
||||
}, "Clearing all marks remove all of them.");
|
21 test/fixtures/wpt/user-timing/clear_all_measures.any.js vendored Normal file
@ -0,0 +1,21 @@
|
||||
test(function()
|
||||
{
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1", "mark1");
|
||||
self.performance.mark("mark2");
|
||||
self.performance.measure("measure2", "mark2");
|
||||
|
||||
// test that two measures have been created
|
||||
var entries = self.performance.getEntriesByType("measure");
|
||||
assert_equals(entries.length, 2, "Two measures have been created for this test.");
|
||||
|
||||
// clear all measures
|
||||
self.performance.clearMeasures();
|
||||
|
||||
// test that all measures were cleared
|
||||
entries = self.performance.getEntriesByType("measure");
|
||||
assert_equals(entries.length, 0,
|
||||
"After a call to self.performance.clearMeasures(), " +
|
||||
"self.performance.getEntriesByType(\"measure\") returns an empty object.");
|
||||
|
||||
}, "Clearing all marks remove all of them.");
|
26 test/fixtures/wpt/user-timing/clear_non_existent_mark.any.js vendored Normal file
@ -0,0 +1,26 @@
|
||||
test(function() {
|
||||
self.performance.mark("mark1");
|
||||
self.performance.mark("mark2");
|
||||
|
||||
// test that two marks have been created
|
||||
var entries = self.performance.getEntriesByType("mark");
|
||||
assert_equals(entries.length, 2, "Two marks have been created for this test.");
|
||||
|
||||
// clear non-existent mark
|
||||
self.performance.clearMarks("mark3");
|
||||
|
||||
// test that "mark1" still exists
|
||||
entries = self.performance.getEntriesByName("mark1");
|
||||
assert_equals(entries[0].name, "mark1",
|
||||
"After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" +
|
||||
"\" is a non-existent mark, self.performance.getEntriesByName(\"mark1\") " +
|
||||
"returns an object containing the \"mark1\" mark.");
|
||||
|
||||
// test that "mark2" still exists
|
||||
entries = self.performance.getEntriesByName("mark2");
|
||||
assert_equals(entries[0].name, "mark2",
|
||||
"After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" +
|
||||
"\" is a non-existent mark, self.performance.getEntriesByName(\"mark2\") " +
|
||||
"returns an object containing the \"mark2\" mark.");
|
||||
|
||||
}, "Clearing a non-existent mark doesn't affect existing marks");
|
29 test/fixtures/wpt/user-timing/clear_non_existent_measure.any.js vendored Normal file
@ -0,0 +1,29 @@
|
||||
test(function()
|
||||
{
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1", "mark1");
|
||||
self.performance.mark("mark2");
|
||||
self.performance.measure("measure2", "mark2");
|
||||
|
||||
// test that two measures have been created
|
||||
var entries = self.performance.getEntriesByType("measure");
|
||||
assert_equals(entries.length, 2, "Two measures have been created for this test.");
|
||||
|
||||
// clear non-existent measure
|
||||
self.performance.clearMeasures("measure3");
|
||||
|
||||
// test that "measure1" still exists
|
||||
entries = self.performance.getEntriesByName("measure1");
|
||||
assert_equals(entries[0].name, "measure1",
|
||||
"After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" +
|
||||
"\" is a non-existent measure, self.performance.getEntriesByName(\"measure1\") " +
|
||||
"returns an object containing the \"measure1\" measure.");
|
||||
|
||||
// test that "measure2" still exists
|
||||
entries = self.performance.getEntriesByName("measure2");
|
||||
assert_equals(entries[0].name, "measure2",
|
||||
"After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" +
|
||||
"\" is a non-existent measure, self.performance.getEntriesByName(\"measure2\") " +
|
||||
"returns an object containing the \"measure2\" measure.");
|
||||
|
||||
}, "Clearing a non-existent measure doesn't affect existing measures");
|
26 test/fixtures/wpt/user-timing/clear_one_mark.any.js vendored Normal file
@ -0,0 +1,26 @@
|
||||
test(function() {
|
||||
self.performance.mark("mark1");
|
||||
self.performance.mark("mark2");
|
||||
|
||||
// test that two marks have been created
|
||||
var entries = self.performance.getEntriesByType("mark");
|
||||
assert_equals(entries.length, 2, "Two marks have been created for this test.");
|
||||
|
||||
// clear existent mark
|
||||
self.performance.clearMarks("mark1");
|
||||
|
||||
// test that "mark1" was cleared
|
||||
entries = self.performance.getEntriesByName("mark1");
|
||||
|
||||
assert_equals(entries.length, 0,
|
||||
"After a call to self.performance.clearMarks(\"mark1\"), " +
|
||||
"window.performance.getEntriesByName(\"mark1\") returns an empty object.");
|
||||
|
||||
// test that "mark2" still exists
|
||||
entries = self.performance.getEntriesByName("mark2");
|
||||
assert_equals(entries[0].name, "mark2",
|
||||
"After a call to self.performance.clearMarks(\"mark1\"), " +
|
||||
"window.performance.getEntriesByName(\"mark2\") returns an object containing the " +
|
||||
"\"mark2\" mark.");
|
||||
|
||||
}, "Clearing an existent mark doesn't affect other existing marks");
|
29 test/fixtures/wpt/user-timing/clear_one_measure.any.js vendored Normal file
@ -0,0 +1,29 @@
|
||||
test(function()
|
||||
{
|
||||
self.performance.mark("mark1");
|
||||
self.performance.measure("measure1", "mark1");
|
||||
self.performance.mark("mark2");
|
||||
self.performance.measure("measure2", "mark2");
|
||||
|
||||
// test that two measures have been created
|
||||
var entries = self.performance.getEntriesByType("measure");
|
||||
assert_equals(entries.length, 2, "Two measures have been created for this test.");
|
||||
|
||||
// clear existent measure
|
||||
self.performance.clearMeasures("measure1");
|
||||
|
||||
// test that "measure1" was cleared
|
||||
entries = self.performance.getEntriesByName("measure1");
|
||||
|
||||
assert_equals(entries.length, 0,
|
||||
"After a call to self.performance.clearMeasures(\"measure1\"), " +
|
||||
"self.performance.getEntriesByName(\"measure1\") returns an empty object.");
|
||||
|
||||
// test that "measure2" still exists
|
||||
entries = self.performance.getEntriesByName("measure2");
|
||||
assert_equals(entries[0].name, "measure2",
|
||||
"After a call to self.performance.clearMeasures(\"measure1\"), " +
|
||||
"self.performance.getEntriesByName(\"measure2\") returns an object containing the " +
|
||||
"\"measure2\" measure.");
|
||||
|
||||
}, "Clearing an existent measure doesn't affect other existing measures");
|
13 test/fixtures/wpt/user-timing/entry_type.any.js vendored Normal file
@ -0,0 +1,13 @@
|
||||
test(function () {
|
||||
self.performance.mark('mark');
|
||||
var mark_entry = self.performance.getEntriesByName('mark')[0];
|
||||
|
||||
assert_equals(Object.prototype.toString.call(mark_entry), '[object PerformanceMark]', 'Class name of mark entry should be PerformanceMark.');
|
||||
}, "Validate the user timing entry type PerformanceMark");
|
||||
|
||||
test(function () {
|
||||
self.performance.measure('measure');
|
||||
var measure_entry = self.performance.getEntriesByName('measure')[0];
|
||||
|
||||
assert_equals(Object.prototype.toString.call(measure_entry), '[object PerformanceMeasure]', 'Class name of measure entry should be PerformanceMeasure.');
|
||||
}, "Validate the user timing entry type PerformanceMeasure");
|
33 test/fixtures/wpt/user-timing/idlharness.any.js vendored Normal file
@ -0,0 +1,33 @@
|
||||
// META: global=window,worker
|
||||
// META: script=/resources/WebIDLParser.js
|
||||
// META: script=/resources/idlharness.js
|
||||
// META: timeout=long
|
||||
|
||||
// https://w3c.github.io/user-timing/
|
||||
|
||||
'use strict';
|
||||
|
||||
idl_test(
|
||||
['user-timing'],
|
||||
['hr-time', 'performance-timeline', 'dom'],
|
||||
idl_array => {
|
||||
try {
|
||||
performance.mark('test');
|
||||
performance.measure('test');
|
||||
for (const m of performance.getEntriesByType('mark')) {
|
||||
self.mark = m;
|
||||
}
|
||||
for (const m of performance.getEntriesByType('measure')) {
|
||||
self.measure = m;
|
||||
}
|
||||
} catch (e) {
|
||||
// Will be surfaced when mark is undefined below.
|
||||
}
|
||||
|
||||
idl_array.add_objects({
|
||||
Performance: ['performance'],
|
||||
PerformanceMark: ['mark'],
|
||||
PerformanceMeasure: ['measure'],
|
||||
});
|
||||
}
|
||||
);
|
35 test/fixtures/wpt/user-timing/invoke_with_timing_attributes.html vendored Normal file
@ -0,0 +1,35 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>exception test of performance.mark and performance.measure</title>
|
||||
<meta rel="help" href="https://w3c.github.io/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates exception scenarios of invoking mark() and measure() with timing attributes as value.</p>
|
||||
<div id="log"></div>
|
||||
<script>
|
||||
function emit_test(attrName) {
|
||||
test(function() {
|
||||
assert_throws_dom("SyntaxError", function() { window.performance.mark(attrName); });
|
||||
}, "performance.mark should throw if used with timing attribute " + attrName);
|
||||
}
|
||||
for (var i in timingAttributes) {
|
||||
emit_test(timingAttributes[i]);
|
||||
}
|
||||
|
||||
function emit_test2(attrName) {
|
||||
test(function() {
|
||||
window.performance.measure(attrName);
|
||||
}, "performance.measure should not throw if used with timing attribute " + attrName);
|
||||
}
|
||||
for (var i in timingAttributes) {
|
||||
emit_test2(timingAttributes[i]);
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
25 test/fixtures/wpt/user-timing/invoke_with_timing_attributes.worker.js vendored Normal file
@ -0,0 +1,25 @@
|
||||
importScripts("/resources/testharness.js");
|
||||
importScripts("resources/webperftestharness.js");
|
||||
|
||||
function emit_test(attrName) {
|
||||
test(function() {
|
||||
performance.mark(attrName);
|
||||
performance.clearMarks(attrName);
|
||||
}, "performance.mark should not throw if used with timing attribute " + attrName
|
||||
+ " in workers");
|
||||
}
|
||||
for (var i in timingAttributes) {
|
||||
emit_test(timingAttributes[i]);
|
||||
}
|
||||
|
||||
function emit_test2(attrName) {
|
||||
test(function() {
|
||||
performance.measure(attrName);
|
||||
performance.clearMeasures(attrName);
|
||||
}, "performance.measure should not throw if used with timing attribute " + attrName
|
||||
+ " in workers");
|
||||
}
|
||||
for (var i in timingAttributes) {
|
||||
emit_test2(timingAttributes[i]);
|
||||
}
|
||||
done();
|
26 test/fixtures/wpt/user-timing/invoke_without_parameter.html vendored Normal file
@ -0,0 +1,26 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>exception test of performance.mark and performance.measure</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="https://w3c.github.io/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates exception scenarios of invoking mark() and measure() without parameter.</p>
|
||||
<div id="log"></div>
|
||||
<script>
|
||||
test(function () {
|
||||
assert_throws_js(TypeError, function () { window.performance.mark() });
|
||||
}, "window.performance.mark() throws a TypeError exception when invoke without a parameter.");
|
||||
|
||||
test(function () {
|
||||
assert_throws_js(TypeError, function () { window.performance.measure(); });
|
||||
}, "window.performance.measure() throws a TypeError exception when invoke without a parameter.");
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
40 test/fixtures/wpt/user-timing/mark-entry-constructor.any.js vendored Normal file
@ -0,0 +1,40 @@
|
||||
// META: script=resources/user-timing-helper.js
|
||||
|
||||
test(()=>{
|
||||
const entry = new PerformanceMark("name");
|
||||
assert_true(entry instanceof PerformanceMark);
|
||||
checkEntry(entry, {name: "name", entryType: "mark"});
|
||||
}, "Mark entry can be created by 'new PerformanceMark(string)'.");
|
||||
|
||||
test(()=>{
|
||||
const entry = new PerformanceMark("name", {});
|
||||
assert_true(entry instanceof PerformanceMark);
|
||||
checkEntry(entry, {name: "name", entryType: "mark"});
|
||||
}, "Mark entry can be created by 'new PerformanceMark(string, {})'.");
|
||||
|
||||
test(()=>{
|
||||
const entry = new PerformanceMark("name", {startTime: 1});
|
||||
assert_true(entry instanceof PerformanceMark);
|
||||
checkEntry(entry, {name: "name", entryType: "mark", startTime: 1});
|
||||
}, "Mark entry can be created by 'new PerformanceMark(string, {startTime})'.");
|
||||
|
||||
test(()=>{
|
||||
const entry = new PerformanceMark("name", {detail: {info: "abc"}});
|
||||
assert_true(entry instanceof PerformanceMark);
|
||||
checkEntry(entry, {name: "name", entryType: "mark", detail: {info: "abc"}});
|
||||
}, "Mark entry can be created by 'new PerformanceMark(string, {detail})'.");
|
||||
|
||||
test(()=>{
|
||||
const entry =
|
||||
new PerformanceMark("name", {startTime: 1, detail: {info: "abc"}});
|
||||
assert_true(entry instanceof PerformanceMark);
|
||||
checkEntry(entry, {name: "name", entryType: "mark", startTime: 1, detail: {info: "abc"}});
|
||||
}, "Mark entry can be created by " +
|
||||
"'new PerformanceMark(string, {startTime, detail})'.");
|
||||
|
||||
test(()=>{
|
||||
const entry = new PerformanceMark("name");
|
||||
assert_true(entry instanceof PerformanceMark);
|
||||
checkEntry(entry, {name: "name", entryType: "mark"});
|
||||
assert_equals(performance.getEntriesByName("name").length, 0);
|
||||
}, "Using new PerformanceMark() shouldn't add the entry to performance timeline.");
|
15 test/fixtures/wpt/user-timing/mark-errors.any.js vendored Normal file
@ -0,0 +1,15 @@
|
||||
test(function() {
|
||||
assert_throws_js(TypeError, function() { self.performance.mark("mark1", 123); }, "Number passed as a dict argument should cause type-error.")
|
||||
}, "Number should be rejected as the mark-options.")
|
||||
|
||||
test(function() {
|
||||
assert_throws_js(TypeError, function() { self.performance.mark("mark1", NaN); }, "NaN passed as a dict argument should cause type-error.")
|
||||
}, "NaN should be rejected as the mark-options.")
|
||||
|
||||
test(function() {
|
||||
assert_throws_js(TypeError, function() { self.performance.mark("mark1", Infinity); }, "Infinity passed as a dict argument should cause type-error.")
|
||||
}, "Infinity should be rejected as the mark-options.")
|
||||
|
||||
test(function() {
|
||||
assert_throws_js(TypeError, function() { self.performance.mark("mark1", "string"); }, "String passed as a dict argument should cause type-error.")
|
||||
}, "String should be rejected as the mark-options.")
|
39 test/fixtures/wpt/user-timing/mark-l3.any.js vendored Normal file
@ -0,0 +1,39 @@
|
||||
// META: script=resources/user-timing-helper.js
|
||||
|
||||
async_test(function (t) {
|
||||
let mark_entries = [];
|
||||
const expected_entries =
|
||||
[{ entryType: "mark", name: "mark1", detail: null},
|
||||
{ entryType: "mark", name: "mark2", detail: null},
|
||||
{ entryType: "mark", name: "mark3", detail: null},
|
||||
{ entryType: "mark", name: "mark4", detail: null},
|
||||
{ entryType: "mark", name: "mark5", detail: null},
|
||||
{ entryType: "mark", name: "mark6", detail: {}},
|
||||
{ entryType: "mark", name: "mark7", detail: {info: 'abc'}},
|
||||
{ entryType: "mark", name: "mark8", detail: null, startTime: 234.56},
|
||||
{ entryType: "mark", name: "mark9", detail: {count: 3}, startTime: 345.67}];
|
||||
const observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
mark_entries =
|
||||
mark_entries.concat(entryList.getEntries());
|
||||
if (mark_entries.length >= expected_entries.length) {
|
||||
checkEntries(mark_entries, expected_entries);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}
|
||||
})
|
||||
);
|
||||
self.performance.clearMarks();
|
||||
observer.observe({entryTypes: ["mark"]});
|
||||
const returned_entries = [];
|
||||
returned_entries.push(self.performance.mark("mark1"));
|
||||
returned_entries.push(self.performance.mark("mark2", undefined));
|
||||
returned_entries.push(self.performance.mark("mark3", null));
|
||||
returned_entries.push(self.performance.mark("mark4", {}));
|
||||
returned_entries.push(self.performance.mark("mark5", {detail: null}));
|
||||
returned_entries.push(self.performance.mark("mark6", {detail: {}}));
|
||||
returned_entries.push(self.performance.mark("mark7", {detail: {info: 'abc'}}));
|
||||
returned_entries.push(self.performance.mark("mark8", {startTime: 234.56}));
|
||||
returned_entries.push(self.performance.mark("mark9", {detail: {count: 3}, startTime: 345.67}));
|
||||
checkEntries(returned_entries, expected_entries);
|
||||
}, "mark entries' detail and startTime are customizable.");
|
36 test/fixtures/wpt/user-timing/mark-measure-feature-detection.html vendored Normal file
@ -0,0 +1,36 @@
|
||||
<!DOCTYPE HTML>
|
||||
<meta charset=utf-8>
|
||||
<title>User Timing: L2 vs L3 feature detection</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script>
|
||||
test(() => {
|
||||
// Feature detection for PerformanceMark.
|
||||
assert_equals(typeof(PerformanceMark.prototype), "object");
|
||||
// Test for UserTiming L3.
|
||||
if (PerformanceMark.prototype.hasOwnProperty('detail')) {
|
||||
assert_equals(typeof(performance.mark("mark")), "object",
|
||||
"performance.mark should return an object in UserTiming L3.");
|
||||
}
|
||||
// Test for UserTiming L2.
|
||||
else {
|
||||
assert_equals(typeof(performance.mark("mark")), "undefined",
|
||||
"performance.mark should be void in UserTiming L2.");
|
||||
}
|
||||
}, "Test PerformanceMark existence and feature detection");
|
||||
|
||||
test(() => {
|
||||
// Feature detection for PerformanceMeasure.
|
||||
assert_equals(typeof(PerformanceMeasure.prototype), "object");
|
||||
// Test for UserTiming L3.
|
||||
if (PerformanceMeasure.prototype.hasOwnProperty('detail')) {
|
||||
assert_equals(typeof(performance.measure("measure")), "object",
|
||||
"performance.measure should return an object in UserTiming L3.");
|
||||
}
|
||||
// Test for UserTiming L2.
|
||||
else {
|
||||
assert_equals(typeof(performance.measure("measure")), "undefined",
|
||||
"performance.measure should be void in UserTiming L2.");
|
||||
}
|
||||
}, "Test PerformanceMeasure existence and feature detection");
|
||||
</script>
|
37 test/fixtures/wpt/user-timing/mark-measure-return-objects.any.js vendored Normal file
@ -0,0 +1,37 @@
|
||||
async_test(function (t) {
|
||||
self.performance.clearMeasures();
|
||||
const measure = self.performance.measure("measure1");
|
||||
assert_true(measure instanceof PerformanceMeasure);
|
||||
t.done();
|
||||
}, "L3: performance.measure(name) should return an entry.");
|
||||
|
||||
async_test(function (t) {
|
||||
self.performance.clearMeasures();
|
||||
const measure = self.performance.measure("measure2",
|
||||
{ start: 12, end: 23 });
|
||||
assert_true(measure instanceof PerformanceMeasure);
|
||||
t.done();
|
||||
}, "L3: performance.measure(name, param1) should return an entry.");
|
||||
|
||||
async_test(function (t) {
|
||||
self.performance.clearMeasures();
|
||||
self.performance.mark("1");
|
||||
self.performance.mark("2");
|
||||
const measure = self.performance.measure("measure3", "1", "2");
|
||||
assert_true(measure instanceof PerformanceMeasure);
|
||||
t.done();
|
||||
}, "L3: performance.measure(name, param1, param2) should return an entry.");
|
||||
|
||||
async_test(function (t) {
|
||||
self.performance.clearMarks();
|
||||
const mark = self.performance.mark("mark1");
|
||||
assert_true(mark instanceof PerformanceMark);
|
||||
t.done();
|
||||
}, "L3: performance.mark(name) should return an entry.");
|
||||
|
||||
async_test(function (t) {
|
||||
self.performance.clearMarks();
|
||||
const mark = self.performance.mark("mark2", { startTime: 34 });
|
||||
assert_true(mark instanceof PerformanceMark);
|
||||
t.done();
|
||||
}, "L3: performance.mark(name, param) should return an entry.");
|
118 test/fixtures/wpt/user-timing/mark.any.js vendored Normal file
@ -0,0 +1,118 @@
|
||||
// test data
|
||||
var testThreshold = 20;
|
||||
|
||||
var expectedTimes = new Array();
|
||||
|
||||
function match_entries(entries, index)
|
||||
{
|
||||
var entry = entries[index];
|
||||
var match = self.performance.getEntriesByName("mark")[index];
|
||||
assert_equals(entry.name, match.name, "entry.name");
|
||||
assert_equals(entry.startTime, match.startTime, "entry.startTime");
|
||||
assert_equals(entry.entryType, match.entryType, "entry.entryType");
|
||||
assert_equals(entry.duration, match.duration, "entry.duration");
|
||||
}
|
||||
|
||||
function filter_entries_by_type(entryList, entryType)
|
||||
{
|
||||
var testEntries = new Array();
|
||||
|
||||
// filter entryList
|
||||
for (var i in entryList)
|
||||
{
|
||||
if (entryList[i].entryType == entryType)
|
||||
{
|
||||
testEntries.push(entryList[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return testEntries;
|
||||
}
|
||||
|
||||
test(function () {
|
||||
// create first mark
|
||||
self.performance.mark("mark");
|
||||
|
||||
expectedTimes[0] = self.performance.now();
|
||||
|
||||
const entries = self.performance.getEntriesByName("mark");
|
||||
assert_equals(entries.length, 1);
|
||||
}, "Entry 0 is properly created");
|
||||
|
||||
test(function () {
|
||||
// create second, duplicate mark
|
||||
self.performance.mark("mark");
|
||||
|
||||
expectedTimes[1] = self.performance.now();
|
||||
|
||||
const entries = self.performance.getEntriesByName("mark");
|
||||
assert_equals(entries.length, 2);
|
||||
|
||||
}, "Entry 1 is properly created");
|
||||
|
||||
function test_mark(index) {
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByName("mark");
|
||||
assert_equals(entries[index].name, "mark", "Entry has the proper name");
|
||||
}, "Entry " + index + " has the proper name");
|
||||
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByName("mark");
|
||||
assert_approx_equals(entries[index].startTime, expectedTimes[index], testThreshold);
|
||||
}, "Entry " + index + " startTime is approximately correct (up to " + testThreshold +
|
||||
"ms difference allowed)");
|
||||
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByName("mark");
|
||||
assert_equals(entries[index].entryType, "mark");
|
||||
}, "Entry " + index + " has the proper entryType");
|
||||
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByName("mark");
|
||||
assert_equals(entries[index].duration, 0);
|
||||
}, "Entry " + index + " duration == 0");
|
||||
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByName("mark", "mark");
|
||||
assert_equals(entries[index].name, "mark");
|
||||
}, "getEntriesByName(\"mark\", \"mark\")[" + index + "] returns an " +
|
||||
"object containing a \"mark\" mark");
|
||||
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByName("mark", "mark");
|
||||
match_entries(entries, index);
|
||||
}, "The mark returned by getEntriesByName(\"mark\", \"mark\")[" + index
|
||||
+ "] matches the mark returned by " +
|
||||
"getEntriesByName(\"mark\")[" + index + "]");
|
||||
|
||||
test(function () {
|
||||
const entries = filter_entries_by_type(self.performance.getEntries(), "mark");
|
||||
assert_equals(entries[index].name, "mark");
|
||||
}, "getEntries()[" + index + "] returns an " +
|
||||
"object containing a \"mark\" mark");
|
||||
|
||||
test(function () {
|
||||
const entries = filter_entries_by_type(self.performance.getEntries(), "mark");
|
||||
match_entries(entries, index);
|
||||
}, "The mark returned by getEntries()[" + index
|
||||
+ "] matches the mark returned by " +
|
||||
"getEntriesByName(\"mark\")[" + index + "]");
|
||||
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByType("mark");
|
||||
assert_equals(entries[index].name, "mark");
|
||||
}, "getEntriesByType(\"mark\")[" + index + "] returns an " +
|
||||
"object containing a \"mark\" mark");
|
||||
|
||||
test(function () {
|
||||
const entries = self.performance.getEntriesByType("mark");
|
||||
match_entries(entries, index);
|
||||
}, "The mark returned by getEntriesByType(\"mark\")[" + index
|
||||
+ "] matches the mark returned by " +
|
||||
"getEntriesByName(\"mark\")[" + index + "]");
|
||||
|
||||
}
|
||||
|
||||
for (var i = 0; i < expectedTimes.length; i++) {
|
||||
test_mark(i);
|
||||
}
|
58 test/fixtures/wpt/user-timing/mark.html vendored Normal file
@ -0,0 +1,58 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>functionality test of window.performance.mark</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
<script src="resources/webperftestharnessextension.js"></script>
|
||||
<script>
|
||||
setup({ explicit_done: true });
|
||||
|
||||
function onload_test()
|
||||
{
|
||||
const entrylist_checker = new performance_entrylist_checker('mark');
|
||||
const string_mark_names = mark_names.map(function (x) { return String(x)});
|
||||
|
||||
test_equals(performance.getEntriesByType("mark").length, 0, 'There should be ' + 0 + ' marks');
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
let mark_entrylist = performance.getEntriesByType('mark');
|
||||
|
||||
entrylist_checker.entrylist_check(mark_entrylist, mark_names.length, string_mark_names, 'Checking all entries.');
|
||||
|
||||
for (let i = 0; i < mark_entrylist.length; ++i)
|
||||
{
|
||||
const mark_entrylist_by_name = performance.getEntriesByName(mark_entrylist[i].name, 'mark');
|
||||
entrylist_checker.entrylist_check(mark_entrylist_by_name, 1, string_mark_names,
|
||||
'First loop: checking entry of name "' + mark_entrylist[i].name + '".');
|
||||
}
|
||||
|
||||
mark_names.forEach(function(name) {
|
||||
performance.mark(name);
|
||||
});
|
||||
mark_entrylist = performance.getEntriesByType('mark');
|
||||
entrylist_checker.entrylist_check(mark_entrylist, mark_names.length * 2, string_mark_names, 'Checking all doubly marked entries.');
|
||||
|
||||
for (let i = 0; i < mark_entrylist.length; ++i)
|
||||
{
|
||||
const mark_entrylist_by_name = performance.getEntriesByName(mark_entrylist[i].name, 'mark');
|
||||
entrylist_checker.entrylist_check(mark_entrylist_by_name, 2, string_mark_names,
|
||||
'Second loop step ' + i + ': checking entries of name "' + mark_entrylist[i].name + '".');
|
||||
}
|
||||
|
||||
done();
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload=onload_test()>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates functionality of the interface window.performance.mark.</p>
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
41 test/fixtures/wpt/user-timing/mark_exceptions.html vendored Normal file
@ -0,0 +1,41 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<title>window.performance User Timing mark() method is throwing the proper exceptions</title>
|
||||
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
|
||||
<link rel="help" href="http://w3c.github.io/user-timing/#dom-performance-mark"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
|
||||
<script>
|
||||
function test_exception(attrName) {
|
||||
test(function () {
|
||||
assert_throws_dom("SyntaxError", function () {
|
||||
window.performance.mark(attrName);
|
||||
})
|
||||
}, "window.performance.mark(\"" + attrName + "\") throws a SyntaxError exception.");
|
||||
}
|
||||
|
||||
test(() => {
|
||||
assert_throws_js(TypeError, function() {
|
||||
window.performance.mark();
|
||||
});
|
||||
}, 'window.performance.mark() throws a TypeError exception.')
|
||||
|
||||
// loop through mark scenarios
|
||||
for (var i in timingAttributes) {
|
||||
test_exception(timingAttributes[i]);
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates that the performance.mark() method throws a SYNTAX_ERR exception whenever a navigation
|
||||
timing attribute is provided for the name parameter.
|
||||
</p>
|
||||
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
49 test/fixtures/wpt/user-timing/measure-exceptions.html vendored Normal file
@ -0,0 +1,49 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
This tests that 'performance.measure' throws exceptions with reasonable messages.
|
||||
</head>
|
||||
<body>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script>
|
||||
window.performance.clearMarks();
|
||||
window.performance.clearMeasures();
|
||||
|
||||
window.performance.mark('mark');
|
||||
|
||||
const eventMarks = [
|
||||
'unloadEventStart',
|
||||
'unloadEventEnd',
|
||||
'redirectStart',
|
||||
'redirectEnd',
|
||||
'secureConnectionStart',
|
||||
'domInteractive',
|
||||
'domContentLoadedEventStart',
|
||||
'domContentLoadedEventEnd',
|
||||
'domComplete',
|
||||
'loadEventStart',
|
||||
'loadEventEnd',
|
||||
];
|
||||
eventMarks.forEach(function(name) {
|
||||
test(()=>{
|
||||
assert_throws_dom("InvalidAccessError", ()=>{
|
||||
window.performance.measure("measuring", name, "mark");
|
||||
}, "Should throw");
|
||||
}, `Passing '${name}' as a mark to measure API should cause error when the mark is empty.`);
|
||||
});
|
||||
|
||||
const args = [
|
||||
51.15, // Verify that number is parsed as string, not number.
|
||||
"DoesNotExist", // Non-existant mark name should cause error.
|
||||
];
|
||||
args.forEach(each => {
|
||||
test(()=>{
|
||||
assert_throws_dom("SyntaxError", ()=>{
|
||||
window.performance.measure("measuring", each, "mark");
|
||||
}, "Should throw");
|
||||
}, `Passing ${each} as a mark to measure API should cause error.`);
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
35 test/fixtures/wpt/user-timing/measure-l3.any.js vendored Normal file
@ -0,0 +1,35 @@
|
||||
// META: script=resources/user-timing-helper.js
|
||||
|
||||
function endTime(entry) {
|
||||
return entry.startTime + entry.duration;
|
||||
}
|
||||
|
||||
test(function() {
|
||||
performance.clearMarks();
|
||||
performance.clearMeasures();
|
||||
const markEntry = performance.mark("mark", {startTime: 123});
|
||||
const measureEntry = performance.measure("A", undefined, "mark");
|
||||
assert_equals(measureEntry.startTime, 0);
|
||||
assert_equals(endTime(measureEntry), markEntry.startTime);
|
||||
}, "When the end mark is given and the start is unprovided, the end time of the measure entry should be the end mark's time, the start time should be 0.");
|
||||
|
||||
test(function() {
|
||||
performance.clearMarks();
|
||||
performance.clearMeasures();
|
||||
const markEntry = performance.mark("mark", {startTime: 123});
|
||||
const endMin = performance.now();
|
||||
const measureEntry = performance.measure("A", "mark", undefined);
|
||||
const endMax = performance.now();
|
||||
assert_equals(measureEntry.startTime, markEntry.startTime);
|
||||
assert_greater_than_equal(endTime(measureEntry), endMin);
|
||||
assert_greater_than_equal(endMax, endTime(measureEntry));
|
||||
}, "When the start mark is given and the end is unprovided, the start time of the measure entry should be the start mark's time, the end should be now.");
|
||||
|
||||
test(function() {
|
||||
performance.clearMarks();
|
||||
performance.clearMeasures();
|
||||
const markEntry = performance.mark("mark", {startTime: 123});
|
||||
const measureEntry = performance.measure("A", "mark", "mark");
|
||||
assert_equals(endTime(measureEntry), markEntry.startTime);
|
||||
assert_equals(measureEntry.startTime, markEntry.startTime);
|
||||
}, "When start and end mark are both given, the start time and end time of the measure entry should be the the marks' time, repectively");
|
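The level-3 defaults asserted above (a missing start is treated as 0, a missing end as "now") can be driven by hand as in the following sketch, which assumes the host resolves marks and defaults exactly as the fixture expects:

    const { performance } = require('perf_hooks');

    performance.mark('m', { startTime: 123 });

    // End mark only: start defaults to 0, so the duration equals the mark's startTime.
    const a = performance.measure('A', undefined, 'm');
    console.log(a.startTime, a.duration);  // 0, 123

    // Start mark only: the end defaults to the current time.
    const b = performance.measure('B', 'm', undefined);
    console.log(b.startTime);              // 123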
test/fixtures/wpt/user-timing/measure-with-dict.any.js (vendored, new file)
@@ -0,0 +1,112 @@
|
||||
// META: script=resources/user-timing-helper.js
|
||||
|
||||
function cleanupPerformanceTimeline() {
|
||||
performance.clearMarks();
|
||||
performance.clearMeasures();
|
||||
}
|
||||
|
||||
async_test(function (t) {
|
||||
this.add_cleanup(cleanupPerformanceTimeline);
|
||||
let measureEntries = [];
|
||||
const timeStamp1 = 784.4;
|
||||
const timeStamp2 = 1234.5;
|
||||
const timeStamp3 = 66.6;
|
||||
const timeStamp4 = 5566;
|
||||
const expectedEntries =
|
||||
[{ entryType: "measure", name: "measure1", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure2", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure3", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure4", detail: null },
|
||||
{ entryType: "measure", name: "measure5", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure6", detail: null, startTime: timeStamp1 },
|
||||
{ entryType: "measure", name: "measure7", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
|
||||
{ entryType: "measure", name: "measure8", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure9", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure10", detail: null, startTime: timeStamp1 },
|
||||
{ entryType: "measure", name: "measure11", detail: null, startTime: timeStamp3 },
|
||||
{ entryType: "measure", name: "measure12", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure13", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure14", detail: null, startTime: timeStamp3, duration: timeStamp1 - timeStamp3 },
|
||||
{ entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
|
||||
{ entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 },
|
||||
{ entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 },
|
||||
{ entryType: "measure", name: "measure18", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
|
||||
{ entryType: "measure", name: "measure19", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
|
||||
{ entryType: "measure", name: "measure20", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure21", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure22", detail: null, startTime: 0 },
|
||||
{ entryType: "measure", name: "measure23", detail: null, startTime: 0 }];
|
||||
const observer = new PerformanceObserver(
|
||||
t.step_func(function (entryList, obs) {
|
||||
measureEntries =
|
||||
measureEntries.concat(entryList.getEntries());
|
||||
if (measureEntries.length >= expectedEntries.length) {
|
||||
checkEntries(measureEntries, expectedEntries);
|
||||
observer.disconnect();
|
||||
t.done();
|
||||
}
|
||||
})
|
||||
);
|
||||
observer.observe({ entryTypes: ["measure"] });
|
||||
self.performance.mark("mark1", { detail: { randomInfo: 3 }, startTime: timeStamp1 });
|
||||
self.performance.mark("mark2", { startTime: timeStamp2 });
|
||||
|
||||
const returnedEntries = [];
|
||||
returnedEntries.push(self.performance.measure("measure1"));
|
||||
returnedEntries.push(self.performance.measure("measure2", undefined));
|
||||
returnedEntries.push(self.performance.measure("measure3", null));
|
||||
returnedEntries.push(self.performance.measure("measure4", 'mark1'));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure5", null, 'mark1'));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure6", 'mark1', undefined));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure7", 'mark1', 'mark2'));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure8", {}));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure9", { start: undefined }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure10", { start: 'mark1' }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure11", { start: timeStamp3 }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure12", { end: undefined }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure13", { end: 'mark1' }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure14", { start: timeStamp3, end: 'mark1' }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure15", { start: timeStamp1, end: timeStamp2, detail: undefined }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }}));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure18", { start: timeStamp1, duration: timeStamp2 - timeStamp1 }));
|
||||
returnedEntries.push(
|
||||
self.performance.measure("measure19", { duration: timeStamp2 - timeStamp1, end: timeStamp2 }));
|
||||
// {}, null, undefined, invalid-dict passed to startOrOptions are interpreted as start time being 0.
|
||||
returnedEntries.push(self.performance.measure("measure20", {}, 'mark1'));
|
||||
returnedEntries.push(self.performance.measure("measure21", null, 'mark1'));
|
||||
returnedEntries.push(self.performance.measure("measure22", undefined, 'mark1'));
|
||||
returnedEntries.push(self.performance.measure("measure23", { invalidDict:1 }, 'mark1'));
|
||||
checkEntries(returnedEntries, expectedEntries);
|
||||
}, "measure entries' detail and start/end are customizable");
|
||||
|
||||
test(function() {
|
||||
this.add_cleanup(cleanupPerformanceTimeline);
|
||||
assert_throws_js(TypeError, function() {
|
||||
self.performance.measure("optionsAndNumberEnd", {'start': 2}, 12);
|
||||
}, "measure should throw a TypeError when passed an options object and an end time");
|
||||
assert_throws_js(TypeError, function() {
|
||||
self.performance.measure("optionsAndMarkEnd", {'start': 2}, 'mark1');
|
||||
}, "measure should throw a TypeError when passed an options object and an end mark");
|
||||
assert_throws_js(TypeError, function() {
|
||||
self.performance.measure("negativeStartInOptions", {'start': -1});
|
||||
}, "measure cannot have a negative time stamp.");
|
||||
assert_throws_js(TypeError, function() {
|
||||
self.performance.measure("negativeEndInOptions", {'end': -1});
|
||||
}, "measure cannot have a negative time stamp for end.");
|
||||
}, "measure should throw a TypeError when passed an invalid argument combination");
|
||||
|
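The options-dictionary form exercised above can be combined with a buffered observer; a sketch under the same assumptions (the names and detail payloads are illustrative):

    const { performance, PerformanceObserver } = require('perf_hooks');

    const obs = new PerformanceObserver((list) => {
      for (const entry of list.getEntries())
        console.log(entry.name, entry.startTime, entry.duration, entry.detail);
      obs.disconnect();
    });
    obs.observe({ entryTypes: ['measure'], buffered: true });

    performance.mark('start', { detail: { step: 1 } });
    performance.measure('step-1', { start: 'start', duration: 50, detail: { ok: true } });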
test/fixtures/wpt/user-timing/measure.html (vendored, new file)
@@ -0,0 +1,362 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<title>window.performance User Timing measure() method is working properly</title>
|
||||
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
|
||||
<link rel="help" href="https://w3c.github.io/user-timing/#dom-performance-measure"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
|
||||
<script>
|
||||
// test data
|
||||
var startMarkName = "mark_start";
|
||||
var startMarkValue;
|
||||
var endMarkName = "mark_end";
|
||||
var endMarkValue;
|
||||
var measures;
|
||||
var testThreshold = 20;
|
||||
|
||||
// test measures
|
||||
var measureTestDelay = 200;
|
||||
var TEST_MEASURES =
|
||||
[
|
||||
{
|
||||
name: "measure_no_start_no_end",
|
||||
startMark: undefined,
|
||||
endMark: undefined,
|
||||
startTime: undefined,
|
||||
duration: undefined,
|
||||
entryType: "measure",
|
||||
entryMatch: undefined,
|
||||
order: undefined,
|
||||
found: false
|
||||
},
|
||||
{
|
||||
name: "measure_start_no_end",
|
||||
startMark: "mark_start",
|
||||
endMark: undefined,
|
||||
startTime: undefined,
|
||||
duration: undefined,
|
||||
entryType: "measure",
|
||||
entryMatch: undefined,
|
||||
order: undefined,
|
||||
found: false
|
||||
},
|
||||
{
|
||||
name: "measure_start_end",
|
||||
startMark: "mark_start",
|
||||
endMark: "mark_end",
|
||||
startTime: undefined,
|
||||
duration: undefined,
|
||||
entryType: "measure",
|
||||
entryMatch: undefined,
|
||||
order: undefined,
|
||||
found: false
|
||||
},
|
||||
{
|
||||
name: "measure_no_start_end",
|
||||
startMark: undefined,
|
||||
endMark: "mark_end",
|
||||
startTime: undefined,
|
||||
duration: undefined,
|
||||
entryType: "measure",
|
||||
entryMatch: undefined,
|
||||
order: undefined,
|
||||
found: false
|
||||
},
|
||||
// intentional duplicate of the first measure, used to confirm names can be re-used
|
||||
{
|
||||
name: "measure_no_start_no_end",
|
||||
startMark: undefined,
|
||||
endMark: undefined,
|
||||
startTime: undefined,
|
||||
duration: undefined,
|
||||
entryType: "measure",
|
||||
entryMatch: undefined,
|
||||
order: undefined,
|
||||
found: false
|
||||
}
|
||||
];
|
||||
// the index of the duplicate "measure_no_start_no_end"
|
||||
const duplicate_index = TEST_MEASURES.map(m=>m.name).lastIndexOf('measure_no_start_no_end');
|
||||
|
||||
setup({explicit_done: true});
|
||||
|
||||
test_namespace();
|
||||
|
||||
function onload_test()
|
||||
{
|
||||
// test for existence of User Timing and Performance Timeline interface
|
||||
if (!has_required_interfaces())
|
||||
{
|
||||
test_true(false,
|
||||
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
|
||||
"are defined.");
|
||||
|
||||
done();
|
||||
}
|
||||
else
|
||||
{
|
||||
// create the start mark for the test measures
|
||||
window.performance.mark(startMarkName);
|
||||
|
||||
// get the start mark's value
|
||||
startMarkValue = window.performance.getEntriesByName(startMarkName)[0].startTime;
|
||||
|
||||
// create the test end mark using the test delay; this will allow for a significant difference between
|
||||
// the mark values that should be represented in the duration of measures using these marks
|
||||
step_timeout(measure_test_cb, measureTestDelay);
|
||||
}
|
||||
}
|
||||
|
||||
function measure_test_cb()
|
||||
{
|
||||
// create the end mark for the test measures
|
||||
window.performance.mark(endMarkName);
|
||||
|
||||
// get the end mark's value
|
||||
endMarkValue = window.performance.getEntriesByName(endMarkName)[0].startTime;
|
||||
|
||||
// loop through all measure scenarios and create the corresponding measures
|
||||
for (var i in TEST_MEASURES)
|
||||
{
|
||||
var scenario = TEST_MEASURES[i];
|
||||
|
||||
if (scenario.startMark == undefined && scenario.endMark == undefined)
|
||||
{
|
||||
// both startMark and endMark are undefined, don't provide either parameters
|
||||
window.performance.measure(scenario.name);
|
||||
|
||||
// when startMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding
|
||||
// to the navigationStart attribute with a timebase of the same attribute is used; this is
|
||||
// equivalent to 0
|
||||
scenario.startTime = 0;
|
||||
|
||||
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
|
||||
// the current time with a timebase of the navigationStart attribute is used
|
||||
scenario.duration = (new Date()) - window.performance.timing.navigationStart;
|
||||
}
|
||||
else if (scenario.startMark != undefined && scenario.endMark == undefined)
|
||||
{
|
||||
// only startMark is defined, provide startMark and don't provide endMark
|
||||
window.performance.measure(scenario.name, scenario.startMark);
|
||||
|
||||
// when startMark is provided to the measure() call, the value of the mark whose name is
|
||||
// provided is used for the startMark
|
||||
scenario.startTime = startMarkValue;
|
||||
|
||||
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
|
||||
// the current time with a timebase of the navigationStart attribute is used
|
||||
scenario.duration = window.performance.now() -
|
||||
startMarkValue;
|
||||
}
|
||||
else if (scenario.startMark != undefined && scenario.endMark != undefined)
|
||||
{
|
||||
// both startMark and endMark are defined, provide both parameters
|
||||
window.performance.measure(scenario.name, scenario.startMark, scenario.endMark);
|
||||
|
||||
// when startMark is provided to the measure() call, the value of the mark whose name is
|
||||
// provided is used for the startMark
|
||||
scenario.startTime = startMarkValue;
|
||||
|
||||
// when endMark is provided to the measure() call, the value of the mark whose name is
|
||||
// provided is used for the endMark
|
||||
scenario.duration = endMarkValue - startMarkValue;
|
||||
}
|
||||
else if (scenario.startMark == undefined && scenario.endMark != undefined)
|
||||
{
|
||||
// endMark is defined but startMark is undefined, provide both parameters
|
||||
window.performance.measure(scenario.name, scenario.startMark, scenario.endMark);
|
||||
|
||||
// when startMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding
|
||||
// to the navigationStart attribute with a timebase of the same attribute is used; this is
|
||||
// equivalent to 0
|
||||
scenario.startTime = 0;
|
||||
|
||||
// when endMark is provided to the measure() call, the value of the mark whose name is
|
||||
// provided is used for the endMark
|
||||
scenario.duration = endMarkValue;
|
||||
} else
|
||||
{
|
||||
test_true(false, 'Test measure scenario unhandled');
|
||||
}
|
||||
}
|
||||
|
||||
// test that expected measures are returned by getEntriesByName
|
||||
for (var i in TEST_MEASURES)
|
||||
{
|
||||
entries = window.performance.getEntriesByName(TEST_MEASURES[i].name);
|
||||
// for all test measures, the test will validate the test measure against the first entry returned
|
||||
// by getEntriesByName(), except for the last measure, where since it is a duplicate measure, the test
|
||||
// will validate it against the second entry returned by getEntriesByName()
|
||||
test_measure(entries[(i == duplicate_index ? 1 : 0)],
|
||||
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\")[" +
|
||||
(i == duplicate_index ? 1 : 0) + "]",
|
||||
TEST_MEASURES[i].name,
|
||||
TEST_MEASURES[i].startTime,
|
||||
TEST_MEASURES[i].duration);
|
||||
TEST_MEASURES[i].entryMatch = entries[(i == duplicate_index ? 1 : 0)];
|
||||
}
|
||||
|
||||
// test that expected measures are returned by getEntriesByName with the entryType parameter provided
|
||||
for (var i in TEST_MEASURES)
|
||||
{
|
||||
entries = window.performance.getEntriesByName(TEST_MEASURES[i].name, "measure");
|
||||
|
||||
test_true(match_entries(entries[(i == duplicate_index ? 1 : 0)], TEST_MEASURES[i].entryMatch),
|
||||
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\", \"measure\")[" +
|
||||
(i == duplicate_index ? 1 : 0) + "] returns an object containing the \"" + TEST_MEASURES[i].name +
|
||||
"\" measure in the correct order, and its value matches the \"" + TEST_MEASURES[i].name +
|
||||
"\" measure returned by window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name +
|
||||
"\")");
|
||||
}
|
||||
|
||||
// test that expected measures are returned by getEntries
|
||||
entries = get_test_entries(window.performance.getEntries(), "measure");
|
||||
|
||||
test_measure_list(entries, "window.performance.getEntries()", TEST_MEASURES);
|
||||
|
||||
// test that expected measures are returned by getEntriesByType
|
||||
entries = window.performance.getEntriesByType("measure");
|
||||
|
||||
test_measure_list(entries, "window.performance.getEntriesByType(\"measure\")", TEST_MEASURES);
|
||||
|
||||
done();
|
||||
}
|
||||
|
||||
function match_entries(entry1, entry2, threshold)
|
||||
{
|
||||
if (threshold == undefined)
|
||||
{
|
||||
threshold = 0;
|
||||
}
|
||||
|
||||
var pass = true;
|
||||
|
||||
// match name
|
||||
pass = pass && (entry1.name == entry2.name);
|
||||
|
||||
// match startTime
|
||||
pass = pass && (Math.abs(entry1.startTime - entry2.startTime) <= testThreshold);
|
||||
|
||||
// match entryType
|
||||
pass = pass && (entry1.entryType == entry2.entryType);
|
||||
|
||||
// match duration
|
||||
pass = pass && (Math.abs(entry1.duration - entry2.duration) <= testThreshold);
|
||||
|
||||
return pass;
|
||||
}
|
||||
|
||||
function test_measure(measureEntry, measureEntryCommand, expectedName, expectedStartTime, expectedDuration)
|
||||
{
|
||||
// test name
|
||||
test_true(measureEntry.name == expectedName, measureEntryCommand + ".name == \"" + expectedName + "\"");
|
||||
|
||||
// test startTime; since for a mark, the startTime is always equal to a mark's value or the value of a
|
||||
// navigation timing attribute, the actual startTime should match the expected value exactly
|
||||
test_true(Math.abs(measureEntry.startTime - expectedStartTime) == 0,
|
||||
measureEntryCommand + ".startTime is correct");
|
||||
|
||||
// test entryType
|
||||
test_true(measureEntry.entryType == "measure", measureEntryCommand + ".entryType == \"measure\"");
|
||||
|
||||
// test duration, allow for an acceptable threshold in the difference between the actual duration and the
|
||||
// expected value for the duration
|
||||
test_true(Math.abs(measureEntry.duration - expectedDuration) <= testThreshold, measureEntryCommand +
|
||||
".duration is approximately correct (up to " + testThreshold + "ms difference allowed)");
|
||||
}
|
||||
|
||||
function test_measure_list(measureEntryList, measureEntryListCommand, measureScenarios)
|
||||
{
|
||||
// give all entries a "found" property that can be set to ensure it isn't tested twice
|
||||
for (var i in measureEntryList)
|
||||
{
|
||||
measureEntryList[i].found = false;
|
||||
}
|
||||
|
||||
for (var i in measureScenarios)
|
||||
{
|
||||
measureScenarios[i].found = false;
|
||||
|
||||
for (var j in measureEntryList)
|
||||
{
|
||||
if (match_entries(measureEntryList[j], measureScenarios[i]) && !measureEntryList[j].found)
|
||||
{
|
||||
test_true(match_entries(measureEntryList[j], measureScenarios[i].entryMatch),
|
||||
measureEntryListCommand + " returns an object containing the \"" +
|
||||
measureScenarios[i].name + "\" measure, and it's value matches the measure " +
|
||||
"returned by window.performance.getEntriesByName(\"" + measureScenarios[i].name +
|
||||
"\")[" + (i == duplicate_index ? 1 : 0) + "].");
|
||||
|
||||
measureEntryList[j].found = true;
|
||||
measureScenarios[i].found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!measureScenarios[i].found)
|
||||
{
|
||||
test_true(false,
|
||||
measureEntryListCommand + " returns an object containing the \"" +
|
||||
measureScenarios[i].name + "\" measure.");
|
||||
}
|
||||
}
|
||||
|
||||
// verify order of output of getEntriesByType
|
||||
var startTimeCurr = 0;
|
||||
var pass = true;
|
||||
for (var i in measureEntryList)
|
||||
{
|
||||
if (measureEntryList[i].startTime < startTimeCurr)
|
||||
{
|
||||
pass = false;
|
||||
}
|
||||
startTimeCurr = measureEntryList[i].startTime;
|
||||
}
|
||||
test_true(pass,
|
||||
measureEntryListCommand + " returns an object containing all test " +
|
||||
"measures in order.");
|
||||
}
|
||||
|
||||
function get_test_entries(entryList, entryType)
|
||||
{
|
||||
var testEntries = new Array();
|
||||
|
||||
// filter entryList
|
||||
for (var i in entryList)
|
||||
{
|
||||
if (entryList[i].entryType == entryType)
|
||||
{
|
||||
testEntries.push(entryList[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return testEntries;
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload="onload_test();">
|
||||
<h1>Description</h1>
|
||||
<p>This test validates that the performance.measure() method is working properly. This test creates the
|
||||
following measures to test this method:
|
||||
<ul>
|
||||
<li>"measure_no_start_no_end": created using a measure() call without a startMark or endMark
|
||||
provided</li>
|
||||
<li>"measure_start_no_end": created using a measure() call with only the startMark provided</li>
|
||||
<li>"measure_start_end": created using a measure() call with both a startMark or endMark provided</li>
|
||||
<li>"measure_no_start_end": created using a measure() call with only the endMark provided</li>
|
||||
<li>"measure_no_start_no_end": duplicate of the first measure, used to confirm names can be re-used</li>
|
||||
</ul>
|
||||
After creating each measure, the existence of these measures is validated by calling
|
||||
performance.getEntriesByName() (both with and without the entryType parameter provided),
|
||||
performance.getEntriesByType(), and performance.getEntries()
|
||||
</p>
|
||||
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
test/fixtures/wpt/user-timing/measure_associated_with_navigation_timing.html (vendored, new file)
@@ -0,0 +1,57 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>functionality test of window.performance.measure</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
<script src="resources/webperftestharnessextension.js"></script>
|
||||
<script>
|
||||
setup({ explicit_done: true });
|
||||
|
||||
function onload_test()
|
||||
{
|
||||
const measures_for_timing_order = [
|
||||
['nav2now', 'navigationStart'],
|
||||
['loadTime', 'navigationStart', 'loadEventEnd'],
|
||||
['loadEventEnd2a', 'loadEventEnd', 'abc'],
|
||||
['nav2a', 'navigationStart', 'abc'],
|
||||
['domComplete2a', 'domComplete', 'abc'],
|
||||
['negativeValue', 1, 'navigationStart'],
|
||||
];
|
||||
const context = new PerformanceContext(window.performance);
|
||||
|
||||
mark_names.forEach(function(name) {
|
||||
context.mark(name);
|
||||
});
|
||||
measures_for_timing_order.forEach(context.initialMeasures, context);
|
||||
test_greater_than(context.getEntriesByName('nav2now', 'measure')[0].duration, 0, 'Measure of navigationStart to now should be positive value.');
|
||||
test_greater_than(context.getEntriesByName('loadTime', 'measure')[0].duration, 0, 'Measure of navigationStart to loadEventEnd should be positive value.');
|
||||
test_greater_than(0, context.getEntriesByName('negativeValue', 'measure')[0].duration, 'Measure of current mark to navigationStart should be negative value.');
|
||||
test_equals(context.getEntriesByName('loadTime', 'measure')[0].duration + context.getEntriesByName('loadEventEnd2a', 'measure')[0].duration, context.getEntriesByName('nav2a', 'measure')[0].duration, 'loadTime plus loadEventEnd to a mark "a" should equal to navigationStart to "a".');
|
||||
|
||||
// The following cases test scenarios where measure names are used twice.
|
||||
mark_names.forEach(function(name) {
|
||||
context.mark(name);
|
||||
});
|
||||
measures_for_timing_order.forEach(context.initialMeasures, context);
|
||||
|
||||
test_greater_than(context.getEntriesByName('nav2now', 'measure')[1].duration, context.getEntriesByName('nav2now', 'measure')[0].duration, 'Second measure of navigationStart to now should have a longer duration than the first.');
|
||||
test_equals(context.getEntriesByName('loadTime', 'measure')[0].duration, context.getEntriesByName('loadTime', 'measure')[1].duration, 'Measures of loadTime should have same duration.');
|
||||
test_greater_than(context.getEntriesByName('domComplete2a', 'measure')[1].duration, context.getEntriesByName('domComplete2a', 'measure')[0].duration, 'Measure from domComplete event to most recent mark "a" should have longer duration.');
|
||||
test_greater_than(context.getEntriesByName('negativeValue', 'measure')[0].duration, context.getEntriesByName('negativeValue', 'measure')[1].duration, 'Measure from most recent mark to navigationStart should have longer duration.');
|
||||
|
||||
done();
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload="setTimeout(onload_test,0)">
|
||||
<h1>Description</h1>
|
||||
<p>This test validates functionality of the interface window.performance.measure using keywords from the Navigation Timing spec.</p>
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
test/fixtures/wpt/user-timing/measure_exception.html (vendored, new file)
@@ -0,0 +1,34 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>exception test of window.performance.measure</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
<script src="resources/webperftestharnessextension.js"></script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates all exception scenarios of method window.performance.measure in User Timing API</p>
|
||||
|
||||
<div id="log"></div>
|
||||
<script>
|
||||
performance.mark('ExistMark');
|
||||
test_method_throw_exception('performance.measure()', TypeError);
|
||||
test_method_throw_exception('performance.measure("Exception1", "NonExistMark1")', 'SYNTAX_ERR');
|
||||
test_method_throw_exception('performance.measure("Exception2", "NonExistMark1", "navigationStart")', 'SYNTAX_ERR');
|
||||
test_method_throw_exception('performance.measure("Exception3", "navigationStart", "NonExistMark1")', 'SYNTAX_ERR');
|
||||
test_method_throw_exception('performance.measure("Exception4", "NonExistMark1", "ExistMark")', 'SYNTAX_ERR');
|
||||
test_method_throw_exception('performance.measure("Exception5", "ExistMark", "NonExistMark1")', 'SYNTAX_ERR');
|
||||
test_method_throw_exception('performance.measure("Exception6", "NonExistMark1", "NonExistMark2")', 'SYNTAX_ERR');
|
||||
test_method_throw_exception('performance.measure("Exception7", "redirectStart")', 'INVALID_ACCESS_ERR');
|
||||
test_method_throw_exception('performance.measure("Exception8", {"detail": "non-empty"})', TypeError);
|
||||
test_method_throw_exception('performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3})', TypeError);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
test/fixtures/wpt/user-timing/measure_exceptions_navigation_timing.html (vendored, new file)
@@ -0,0 +1,70 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<title>window.performance User Timing measure() method is throwing the proper exceptions</title>
|
||||
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
|
||||
<link rel="help" href="https://w3c.github.io/user-timing/#dom-performance-measure"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
|
||||
<script>
|
||||
// test data
|
||||
var zeroedNavTimingAtt = undefined;
|
||||
|
||||
setup(function () {
|
||||
// for testing the INVALID_ACCESS_ERR exception, find a navigation timing attribute with a value of zero
|
||||
for (var i in timingAttributes) {
|
||||
if (window.performance.timing[timingAttributes[i]] == 0) {
|
||||
zeroedNavTimingAtt = timingAttributes[i];
|
||||
}
|
||||
}
|
||||
if (zeroedNavTimingAtt == undefined) {
|
||||
throw new Error("A navigation timing attribute with a value of 0 was not found to test for the " +
|
||||
"INVALID_ACCESS_ERR exception thrown by window.performance.measure().")
|
||||
}
|
||||
});
|
||||
|
||||
test(function () {
|
||||
assert_throws_dom("InvalidAccessError", function () {
|
||||
window.performance.measure("measure", zeroedNavTimingAtt);
|
||||
});
|
||||
}, "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
|
||||
zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, throws a " +
|
||||
"InvalidAccessError exception.");
|
||||
|
||||
test(function () {
|
||||
assert_throws_dom("InvalidAccessError", function () {
|
||||
window.performance.measure("measure", zeroedNavTimingAtt, "responseEnd");
|
||||
});
|
||||
}, "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
|
||||
"\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
|
||||
"attribute with a value of 0, throws a InvalidAccessError exception.");
|
||||
|
||||
test(function () {
|
||||
assert_throws_dom("InvalidAccessError", function () {
|
||||
window.performance.measure("measure", "navigationStart", zeroedNavTimingAtt);
|
||||
});
|
||||
}, "window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
|
||||
"\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
|
||||
"value of 0, throws a InvalidAccessError exception.");
|
||||
|
||||
test(function () {
|
||||
assert_throws_dom("InvalidAccessError", function () {
|
||||
window.performance.measure("measure", zeroedNavTimingAtt, zeroedNavTimingAtt);
|
||||
});
|
||||
}, "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
|
||||
zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
|
||||
"attribute with a value of 0, throws a InvalidAccessError exception.");
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Description</h1>
|
||||
<p>The <code>window.performance.measure()</code> method throws an InvalidAccessError
|
||||
whenever a navigation timing attribute with a value of zero is provided as the startMark or endMark.
|
||||
</p>
|
||||
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
test/fixtures/wpt/user-timing/measure_navigation_timing.html (vendored, new file)
@@ -0,0 +1,205 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<title>window.performance User Timing clearMeasures() method is working properly with navigation timing
|
||||
attributes</title>
|
||||
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
|
||||
<link rel="help" href="https://w3c.github.io/user-timing/#dom-performance-measure"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
|
||||
<script>
|
||||
// test data
|
||||
var startMarkName = "mark_start";
|
||||
var startMarkValue;
|
||||
var endMarkName = "mark_end";
|
||||
var endMarkValue;
|
||||
var measures;
|
||||
var testThreshold = 20;
|
||||
|
||||
// test measures
|
||||
measureTestDelay = 200;
|
||||
var TEST_MEASURES =
|
||||
[
|
||||
{
|
||||
name: "measure_nav_start_no_end",
|
||||
startMark: "navigationStart",
|
||||
endMark: undefined,
|
||||
exceptionTestMessage: "window.performance.measure(\"measure_nav_start_no_end\", " +
|
||||
"\"navigationStart\") ran without throwing any exceptions.",
|
||||
expectedStartTime: undefined,
|
||||
expectedDuration: undefined,
|
||||
entryMatch: undefined
|
||||
},
|
||||
{
|
||||
name: "measure_nav_start_mark_end",
|
||||
startMark: "navigationStart",
|
||||
endMark: "mark_end",
|
||||
exceptionTestMessage: "window.performance.measure(\"measure_nav_start_end\", \"navigationStart\", " +
|
||||
"\"mark_end\") ran without throwing any exceptions.",
|
||||
expectedStartTime: undefined,
|
||||
expectedDuration: undefined,
|
||||
entryMatch: undefined
|
||||
},
|
||||
{
|
||||
name: "measure_mark_start_nav_end",
|
||||
startMark: "mark_start",
|
||||
endMark: "responseEnd",
|
||||
exceptionTestMessage: "window.performance.measure(\"measure_start_nav_end\", \"mark_start\", " +
|
||||
"\"responseEnd\") ran without throwing any exceptions.",
|
||||
expectedStartTime: undefined,
|
||||
expectedDuration: undefined,
|
||||
entryMatch: undefined
|
||||
},
|
||||
{
|
||||
name: "measure_nav_start_nav_end",
|
||||
startMark: "navigationStart",
|
||||
endMark: "responseEnd",
|
||||
exceptionTestMessage: "window.performance.measure(\"measure_nav_start_nav_end\", " +
|
||||
"\"navigationStart\", \"responseEnd\") ran without throwing any exceptions.",
|
||||
expectedStartTime: undefined,
|
||||
expectedDuration: undefined,
|
||||
entryMatch: undefined
|
||||
}
|
||||
];
|
||||
|
||||
setup({explicit_done: true});
|
||||
|
||||
test_namespace();
|
||||
|
||||
function onload_test()
|
||||
{
|
||||
// test for existence of User Timing and Performance Timeline interface
|
||||
if (!has_required_interfaces())
|
||||
{
|
||||
test_true(false,
|
||||
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
|
||||
"are defined.");
|
||||
|
||||
done();
|
||||
}
|
||||
else
|
||||
{
|
||||
// create the start mark for the test measures
|
||||
window.performance.mark(startMarkName);
|
||||
|
||||
// get the start mark's value
|
||||
startMarkValue = window.performance.getEntriesByName(startMarkName)[0].startTime;
|
||||
|
||||
// create the test end mark using the test delay; this will allow for a significant difference between
|
||||
// the mark values that should be represented in the duration of measures using these marks
|
||||
step_timeout(measure_test_cb, measureTestDelay);
|
||||
}
|
||||
}
|
||||
|
||||
function measure_test_cb()
|
||||
{
|
||||
// create the end mark for the test measures
|
||||
window.performance.mark(endMarkName);
|
||||
|
||||
// get the end mark's value
|
||||
endMarkValue = window.performance.getEntriesByName(endMarkName)[0].startTime;
|
||||
|
||||
// loop through measure scenarios
|
||||
for (var i in TEST_MEASURES)
|
||||
{
|
||||
var scenario = TEST_MEASURES[i];
|
||||
|
||||
if (scenario.startMark != undefined && scenario.endMark == undefined)
|
||||
{
|
||||
// only startMark is defined, provide startMark and don't provide endMark
|
||||
window.performance.measure(scenario.name, scenario.startMark);
|
||||
|
||||
// when startMark is provided to the measure() call, the value of the mark or navigation
|
||||
// timing attribute whose name is provided is used for the startMark
|
||||
scenario.expectedStartTime = (timingAttributes.indexOf(scenario.startMark) != -1 ?
|
||||
window.performance.timing[scenario.startMark] -
|
||||
window.performance.timing.navigationStart :
|
||||
startMarkValue);
|
||||
|
||||
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
|
||||
// the current time with a timebase of the navigationStart attribute is used
|
||||
scenario.expectedDuration = ((new Date()) - window.performance.timing.navigationStart) -
|
||||
scenario.expectedStartTime;
|
||||
}
|
||||
else if (scenario.startMark != undefined && scenario.endMark != undefined)
|
||||
{
|
||||
// both startMark and endMark are defined, provide both parameters
|
||||
window.performance.measure(scenario.name, scenario.startMark, scenario.endMark);
|
||||
|
||||
// when startMark is provided to the measure() call, the value of the mark or navigation
|
||||
// timing attribute whose name is provided is used for the startMark
|
||||
scenario.expectedStartTime = (timingAttributes.indexOf(scenario.startMark) != -1 ?
|
||||
window.performance.timing[scenario.startMark] -
|
||||
window.performance.timing.navigationStart :
|
||||
startMarkValue);
|
||||
|
||||
// when endMark is provided to the measure() call, the value of the mark whose name is
|
||||
// provided is used for the endMark
|
||||
scenario.expectedDuration = (timingAttributes.indexOf(scenario.endMark) != -1 ?
|
||||
window.performance.timing[scenario.endMark] -
|
||||
window.performance.timing.navigationStart :
|
||||
endMarkValue) - scenario.expectedStartTime;
|
||||
}
|
||||
}
|
||||
|
||||
// test the test measures are returned by getEntriesByName
|
||||
for (var i in TEST_MEASURES)
|
||||
{
|
||||
entries = window.performance.getEntriesByName(TEST_MEASURES[i].name);
|
||||
test_measure(entries[0],
|
||||
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\")[0]",
|
||||
TEST_MEASURES[i].name,
|
||||
TEST_MEASURES[i].expectedStartTime,
|
||||
TEST_MEASURES[i].expectedDuration);
|
||||
TEST_MEASURES[i].entryMatch = entries[0];
|
||||
}
|
||||
|
||||
done();
|
||||
}
|
||||
|
||||
function test_measure(measureEntry, measureEntryCommand, expectedName, expectedStartTime, expectedDuration)
|
||||
{
|
||||
// test name
|
||||
test_true(measureEntry.name == expectedName, measureEntryCommand + ".name == \"" + expectedName + "\"");
|
||||
|
||||
// test startTime; since for a mark, the startTime is always equal to a mark's value or the value of a
|
||||
// navigation timing attribute, the actual startTime should match the expected value exactly
|
||||
test_true(Math.abs(measureEntry.startTime - expectedStartTime) == 0,
|
||||
measureEntryCommand + ".startTime is correct");
|
||||
|
||||
// test entryType
|
||||
test_true(measureEntry.entryType == "measure", measureEntryCommand + ".entryType == \"measure\"");
|
||||
|
||||
// test duration, allow for an acceptable threshold in the difference between the actual duration and the
|
||||
// expected value for the duration
|
||||
test_true(Math.abs(measureEntry.duration - expectedDuration) <= testThreshold, measureEntryCommand +
|
||||
".duration is approximately correct (up to " + testThreshold + "ms difference allowed)");
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload="onload_test();">
|
||||
<h1>Description</h1>
|
||||
<p>This test validates that the performance.measure() method is working properly when navigation timing
|
||||
attributes are used in place of mark names. This test creates the following measures to test this method:
|
||||
<ul>
|
||||
<li>"measure_nav_start_no_end": created using a measure() call with a navigation timing attribute
|
||||
provided as the startMark and nothing provided as the endMark</li>
|
||||
<li>"measure_nav_start_mark_end": created using a measure() call with a navigation timing attribute
|
||||
provided as the startMark and a mark name provided as the endMark</li>
|
||||
<li>"measure_mark_start_nav_end": created using a measure() call with a mark name provided as the
|
||||
startMark and a navigation timing attribute provided as the endMark</li>
|
||||
<li>"measure_nav_start_nav_end":created using a measure() call with a navigation timing attribute
|
||||
provided as both the startMark and endMark</li>
|
||||
</ul>
|
||||
After creating each measure, the existence of these measures is validated by calling
|
||||
performance.getEntriesByName() with each measure name
|
||||
</p>
|
||||
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
test/fixtures/wpt/user-timing/measure_syntax_err.any.js (vendored, new file)
@@ -0,0 +1,33 @@
|
||||
test(function () {
|
||||
self.performance.mark("existing_mark");
|
||||
var entries = self.performance.getEntriesByName("existing_mark");
|
||||
assert_equals(entries.length, 1);
|
||||
self.performance.measure("measure", "existing_mark");
|
||||
}, "Create a mark \"existing_mark\"");
|
||||
test(function () {
|
||||
assert_throws_dom("SyntaxError", function () {
|
||||
self.performance.measure("measure", "mark");
|
||||
});
|
||||
}, "self.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " +
|
||||
"throws a SyntaxError exception.");
|
||||
|
||||
test(function () {
|
||||
assert_throws_dom("SyntaxError", function () {
|
||||
self.performance.measure("measure", "mark", "existing_mark");
|
||||
});
|
||||
}, "self.performance.measure(\"measure\", \"mark\", \"existing_mark\"), where \"mark\" is a " +
|
||||
"non-existent mark, throws a SyntaxError exception.");
|
||||
|
||||
test(function () {
|
||||
assert_throws_dom("SyntaxError", function () {
|
||||
self.performance.measure("measure", "existing_mark", "mark");
|
||||
});
|
||||
}, "self.performance.measure(\"measure\", \"existing_mark\", \"mark\"), where \"mark\" " +
|
||||
"is a non-existent mark, throws a SyntaxError exception.");
|
||||
|
||||
test(function () {
|
||||
assert_throws_dom("SyntaxError", function () {
|
||||
self.performance.measure("measure", "mark", "mark");
|
||||
});
|
||||
}, "self.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
|
||||
"non-existent mark, throws a SyntaxError exception.");
|
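For a mark name that was never set, the fixture above expects a SyntaxError DOMException; from calling code that looks roughly like the following sketch, assuming the host matches the fixture:

    const { performance } = require('perf_hooks');

    try {
      performance.measure('measure', 'does-not-exist');
    } catch (err) {
      console.log(err.name);  // 'SyntaxError' (a DOMException)
    }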
test/fixtures/wpt/user-timing/measures.html (vendored, new file)
@@ -0,0 +1,66 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>functionality test of window.performance.measure</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/common/performance-timeline-utils.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
<script src="resources/webperftestharnessextension.js"></script>
|
||||
<script>
|
||||
setup({ explicit_done: true });
|
||||
|
||||
function onload_test()
|
||||
{
|
||||
const context = new PerformanceContext(window.performance);
|
||||
const entrylist_checker = new performance_entrylist_checker('measure');
|
||||
const measure_names = measures.map(function(x) {return x[0];});
|
||||
|
||||
test_equals(context.getEntriesByType('measure').length, 0, 'There should be ' + 0 + ' entries returned.');
|
||||
|
||||
mark_names.forEach(function(name) {
|
||||
context.mark(name);
|
||||
});
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
|
||||
let measure_entrylist = context.getEntriesByType('measure');
|
||||
entrylist_checker.entrylist_check(measure_entrylist, measures.length, measure_names,
|
||||
'Checking all entries.');
|
||||
|
||||
for (let i = 0; i < measure_entrylist.length; ++i)
|
||||
{
|
||||
const measure_entrylist_by_name = context.getEntriesByName(measure_entrylist[i].name, 'measure');
|
||||
entrylist_checker.entrylist_check(measure_entrylist_by_name, 1, measure_names,
|
||||
'First loop: checking entry of name "' + measure_entrylist[i].name + '".');
|
||||
}
|
||||
|
||||
// The following cases test scenarios where measure names are used twice
|
||||
mark_names.forEach(function(name) {
|
||||
context.mark(name);
|
||||
});
|
||||
measures.forEach(context.initialMeasures, context);
|
||||
|
||||
measure_entrylist = context.getEntriesByType('measure');
|
||||
entrylist_checker.entrylist_check(measure_entrylist, measures.length * 2, measure_names,
|
||||
'Checking all doubly measured entries.');
|
||||
|
||||
for (let i = 0; i < measure_entrylist.length; ++i)
|
||||
{
|
||||
const measure_entrylist_by_name = context.getEntriesByName(measure_entrylist[i].name, 'measure');
|
||||
entrylist_checker.entrylist_check(measure_entrylist_by_name, 2, measure_names,
|
||||
'Second loop step ' + i + ': checking entry of name "' + measure_entrylist[i].name + '".');
|
||||
}
|
||||
|
||||
done();
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload=onload_test()>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates functionality of the interface window.performance.measure.</p>
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
test/fixtures/wpt/user-timing/performance-measure-invalid.worker.js (vendored, new file)
@@ -0,0 +1,9 @@
importScripts("/resources/testharness.js");

test(() => {
  assert_throws_js(TypeError, () => {
    performance.measure('name', 'navigationStart', 'navigationStart');
  });
}, "When converting 'navigationStart' to a timestamp, the global object has to be a Window object.");

done();
test/fixtures/wpt/user-timing/resources/user-timing-helper.js (vendored, new file)
@@ -0,0 +1,30 @@
|
||||
// Compares a list of performance entries to a predefined one.
|
||||
// actualEntries is an array of performance entries from the user agent,
|
||||
// and expectedEntries is an array of performance entries minted by the test.
|
||||
// The comparison doesn't assert the order of the entries.
|
||||
function checkEntries(actualEntries, expectedEntries) {
|
||||
assert_equals(actualEntries.length, expectedEntries.length,
|
||||
`The length of actual and expected entries should match.
|
||||
actual: ${JSON.stringify(actualEntries)},
|
||||
expected: ${JSON.stringify(expectedEntries)}`);
|
||||
const actualEntrySet = new Set(actualEntries.map(ae=>ae.name));
|
||||
assert_equals(actualEntrySet.size, actualEntries.length, `Actual entry names are not unique: ${JSON.stringify(actualEntries)}`);
|
||||
const expectedEntrySet = new Set(expectedEntries.map(ee=>ee.name));
|
||||
assert_equals(expectedEntrySet.size, expectedEntries.length, `Expected entry names are not unique: ${JSON.stringify(expectedEntries)}`);
|
||||
actualEntries.forEach(ae=>{
|
||||
const expectedEntry = expectedEntries.find(e=>e.name === ae.name);
|
||||
assert_true(!!expectedEntry, `Entry name '${ae.name}' was not found.`);
|
||||
checkEntry(ae, expectedEntry);
|
||||
});
|
||||
}
|
||||
|
||||
function checkEntry(entry, {name, entryType, startTime, detail, duration}) {
|
||||
assert_equals(entry.name, name);
|
||||
assert_equals(entry.entryType, entryType);
|
||||
if (startTime !== undefined)
|
||||
assert_equals(entry.startTime, startTime);
|
||||
if (detail !== undefined)
|
||||
assert_equals(JSON.stringify(entry.detail), JSON.stringify(detail));
|
||||
if (duration !== undefined)
|
||||
assert_equals(entry.duration, duration);
|
||||
}
|
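A hypothetical usage sketch of the helper above: entry names must be unique on both sides, order is ignored, and only the fields present on an expected entry are asserted:

    performance.mark('m1');
    performance.measure('span', 'm1');

    checkEntries(
      [...performance.getEntriesByType('mark'), ...performance.getEntriesByType('measure')],
      [
        { name: 'm1', entryType: 'mark' },
        { name: 'span', entryType: 'measure' },
      ]);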
test/fixtures/wpt/user-timing/resources/webperftestharness.js (vendored, new file)
@@ -0,0 +1,124 @@
|
||||
//
|
||||
// Helper functions for User Timing tests
|
||||
//
|
||||
|
||||
var timingAttributes = [
|
||||
"navigationStart",
|
||||
"unloadEventStart",
|
||||
"unloadEventEnd",
|
||||
"redirectStart",
|
||||
"redirectEnd",
|
||||
"fetchStart",
|
||||
"domainLookupStart",
|
||||
"domainLookupEnd",
|
||||
"connectStart",
|
||||
"connectEnd",
|
||||
"secureConnectionStart",
|
||||
"requestStart",
|
||||
"responseStart",
|
||||
"responseEnd",
|
||||
"domLoading",
|
||||
"domInteractive",
|
||||
"domContentLoadedEventStart",
|
||||
"domContentLoadedEventEnd",
|
||||
"domComplete",
|
||||
"loadEventStart",
|
||||
"loadEventEnd"
|
||||
];
|
||||
|
||||
function has_required_interfaces()
|
||||
{
|
||||
if (window.performance.mark == undefined ||
|
||||
window.performance.clearMarks == undefined ||
|
||||
window.performance.measure == undefined ||
|
||||
window.performance.clearMeasures == undefined ||
|
||||
window.performance.getEntriesByName == undefined ||
|
||||
window.performance.getEntriesByType == undefined ||
|
||||
window.performance.getEntries == undefined) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function test_namespace(child_name, skip_root)
|
||||
{
|
||||
if (skip_root === undefined) {
|
||||
var msg = 'window.performance is defined';
|
||||
wp_test(function () { assert_not_equals(performanceNamespace, undefined, msg); }, msg);
|
||||
}
|
||||
|
||||
if (child_name !== undefined) {
|
||||
var msg2 = 'window.performance.' + child_name + ' is defined';
|
||||
wp_test(function() { assert_not_equals(performanceNamespace[child_name], undefined, msg2); }, msg2);
|
||||
}
|
||||
}
|
||||
|
||||
function test_attribute_exists(parent_name, attribute_name, properties)
|
||||
{
|
||||
var msg = 'window.performance.' + parent_name + '.' + attribute_name + ' is defined.';
|
||||
wp_test(function() { assert_not_equals(performanceNamespace[parent_name][attribute_name], undefined, msg); }, msg, properties);
|
||||
}
|
||||
|
||||
function test_enum(parent_name, enum_name, value, properties)
|
||||
{
|
||||
var msg = 'window.performance.' + parent_name + '.' + enum_name + ' is defined.';
|
||||
wp_test(function() { assert_not_equals(performanceNamespace[parent_name][enum_name], undefined, msg); }, msg, properties);
|
||||
|
||||
msg = 'window.performance.' + parent_name + '.' + enum_name + ' = ' + value;
|
||||
wp_test(function() { assert_equals(performanceNamespace[parent_name][enum_name], value, msg); }, msg, properties);
|
||||
}
|
||||
|
||||
function test_timing_order(attribute_name, greater_than_attribute, properties)
|
||||
{
|
||||
// ensure it's not 0 first
|
||||
var msg = "window.performance.timing." + attribute_name + " > 0";
|
||||
wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] > 0, msg); }, msg, properties);
|
||||
|
||||
// ensure it's in the right order
|
||||
msg = "window.performance.timing." + attribute_name + " >= window.performance.timing." + greater_than_attribute;
|
||||
wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] >= performanceNamespace.timing[greater_than_attribute], msg); }, msg, properties);
|
||||
}
|
||||
|
||||
function test_timing_greater_than(attribute_name, greater_than, properties)
|
||||
{
|
||||
var msg = "window.performance.timing." + attribute_name + " > " + greater_than;
|
||||
test_greater_than(performanceNamespace.timing[attribute_name], greater_than, msg, properties);
|
||||
}
|
||||
|
||||
function test_timing_equals(attribute_name, equals, msg, properties)
|
||||
{
|
||||
var test_msg = msg || "window.performance.timing." + attribute_name + " == " + equals;
|
||||
test_equals(performanceNamespace.timing[attribute_name], equals, test_msg, properties);
|
||||
}
|
||||
|
||||
//
|
||||
// Non-test related helper functions
|
||||
//
|
||||
|
||||
function sleep_milliseconds(n)
|
||||
{
|
||||
var start = new Date().getTime();
|
||||
while (true) {
|
||||
if ((new Date().getTime() - start) >= n) break;
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Common helper functions
|
||||
//
|
||||
|
||||
function test_greater_than(value, greater_than, msg, properties)
|
||||
{
|
||||
wp_test(function () { assert_true(value > greater_than, msg); }, msg, properties);
|
||||
}
|
||||
|
||||
function test_greater_or_equals(value, greater_than, msg, properties)
|
||||
{
|
||||
wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties);
|
||||
}
|
||||
|
||||
function test_not_equals(value, notequals, msg, properties)
|
||||
{
|
||||
wp_test(function() { assert_not_equals(value, notequals, msg); }, msg, properties);
|
||||
}
|
test/fixtures/wpt/user-timing/resources/webperftestharnessextension.js (vendored, new file)
@@ -0,0 +1,202 @@
//
// Helper functions for User Timing tests
//

var mark_names = [
    '',
    '1',
    'abc',
];

var measures = [
    [''],
    ['2', 1],
    ['aaa', 'navigationStart', ''],
];

function test_method_exists(method, method_name, properties)
{
    var msg;
    if (typeof method === 'function')
        msg = 'performance.' + method.name + ' is supported!';
    else
        msg = 'performance.' + method_name + ' is supported!';
    wp_test(function() { assert_equals(typeof method, 'function', msg); }, msg, properties);
}

function test_method_throw_exception(func_str, exception, msg)
{
    let exception_name;
    let test_func;
    if (typeof exception == "function") {
        exception_name = exception.name;
        test_func = assert_throws_js;
    } else {
        exception_name = exception;
        test_func = assert_throws_dom;
    }
    var msg = 'Invocation of ' + func_str + ' should throw ' + exception_name + ' Exception.';
    wp_test(function() { test_func(exception, function() {eval(func_str)}, msg); }, msg);
}

function test_noless_than(value, greater_than, msg, properties)
{
    wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties);
}

function test_fail(msg, properties)
{
    wp_test(function() { assert_unreached(); }, msg, properties);
}

function test_resource_entries(entries, expected_entries)
{
    // This is slightly convoluted so that we can sort the output.
    var actual_entries = {};
    var origin = window.location.protocol + "//" + window.location.host;

    for (var i = 0; i < entries.length; ++i) {
        var entry = entries[i];
        var found = false;
        for (var expected_entry in expected_entries) {
            if (entry.name == origin + expected_entry) {
                found = true;
                if (expected_entry in actual_entries) {
                    test_fail(expected_entry + ' is not expected to have duplicate entries');
                }
                actual_entries[expected_entry] = entry;
                break;
            }
        }
        if (!found) {
            test_fail(entries[i].name + ' is not expected to be in the Resource Timing buffer');
        }
    }

    sorted_urls = [];
    for (var i in actual_entries) {
        sorted_urls.push(i);
    }
    sorted_urls.sort();
    for (var i in sorted_urls) {
        var url = sorted_urls[i];
        test_equals(actual_entries[url].initiatorType,
                    expected_entries[url],
                    origin + url + ' is expected to have initiatorType ' + expected_entries[url]);
    }
    for (var j in expected_entries) {
        if (!(j in actual_entries)) {
            test_fail(origin + j + ' is expected to be in the Resource Timing buffer');
        }
    }
}

function performance_entrylist_checker(type)
{
    const entryType = type;

    function entry_check(entry, expectedNames, testDescription = '')
    {
        const msg = testDescription + 'Entry \"' + entry.name + '\" should be one that we have set.';
        wp_test(function() { assert_in_array(entry.name, expectedNames, msg); }, msg);
        test_equals(entry.entryType, entryType, testDescription + 'entryType should be \"' + entryType + '\".');
        if (type === "measure") {
            test_true(isFinite(entry.startTime), testDescription + 'startTime should be a number.');
            test_true(isFinite(entry.duration), testDescription + 'duration should be a number.');
        } else if (type === "mark") {
            test_greater_than(entry.startTime, 0, testDescription + 'startTime should greater than 0.');
            test_equals(entry.duration, 0, testDescription + 'duration of mark should be 0.');
        }
    }

    function entrylist_order_check(entryList)
    {
        let inOrder = true;
        for (let i = 0; i < entryList.length - 1; ++i)
        {
            if (entryList[i + 1].startTime < entryList[i].startTime) {
                inOrder = false;
                break;
            }
        }
        return inOrder;
    }

    function entrylist_check(entryList, expectedLength, expectedNames, testDescription = '')
    {
        test_equals(entryList.length, expectedLength, testDescription + 'There should be ' + expectedLength + ' entries.');
        test_true(entrylist_order_check(entryList), testDescription + 'Entries in entrylist should be in order.');
        for (let i = 0; i < entryList.length; ++i)
        {
            entry_check(entryList[i], expectedNames, testDescription + 'Entry_list ' + i + '. ');
        }
    }

    return{"entrylist_check":entrylist_check};
}

function PerformanceContext(context)
{
    this.performanceContext = context;
}

PerformanceContext.prototype =
{

    initialMeasures: function(item, index, array)
    {
        this.performanceContext.measure.apply(this.performanceContext, item);
    },

    mark: function()
    {
        this.performanceContext.mark.apply(this.performanceContext, arguments);
    },

    measure: function()
    {
        this.performanceContext.measure.apply(this.performanceContext, arguments);
    },

    clearMarks: function()
    {
        this.performanceContext.clearMarks.apply(this.performanceContext, arguments);
    },

    clearMeasures: function()
    {
        this.performanceContext.clearMeasures.apply(this.performanceContext, arguments);
    },

    getEntries: function()
    {
        return this.performanceContext.getEntries.apply(this.performanceContext, arguments);
    },

    getEntriesByType: function()
    {
        return this.performanceContext.getEntriesByType.apply(this.performanceContext, arguments);
    },

    getEntriesByName: function()
    {
        return this.performanceContext.getEntriesByName.apply(this.performanceContext, arguments);
    },

    setResourceTimingBufferSize: function()
    {
        return this.performanceContext.setResourceTimingBufferSize.apply(this.performanceContext, arguments);
    },

    registerResourceTimingBufferFullCallback: function(func)
    {
        this.performanceContext.onresourcetimingbufferfull = func;
    },

    clearResourceTimings: function()
    {
        this.performanceContext.clearResourceTimings.apply(this.performanceContext, arguments);
    }

};
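As an editorial illustration (not part of the change), the helpers above are typically driven from a user-timing test roughly as follows, assuming the standard WPT harness globals (`wp_test`, `assert_*`) are already loaded; the description strings are made up for the sketch:

// Illustrative only: create the marks listed in mark_names via the wrapper,
// then validate the resulting entry list with the checker factory.
const context = new PerformanceContext(performance);
mark_names.forEach(function(name) { context.mark(name); });

const markChecker = performance_entrylist_checker('mark');
const markEntries = context.getEntriesByType('mark');
markChecker.entrylist_check(markEntries, mark_names.length, mark_names, 'Created marks: ');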
test/fixtures/wpt/user-timing/structured-serialize-detail.any.js (vendored, new file, 64 lines)
@ -0,0 +1,64 @@
test(function() {
  performance.clearMarks();
  const detail = { randomInfo: 123 }
  const markEntry = new PerformanceMark("A", { detail });
  assert_equals(markEntry.detail.randomInfo, detail.randomInfo);
  assert_not_equals(markEntry.detail, detail);
}, "The detail property in the mark constructor should be structured-clone.");

test(function() {
  performance.clearMarks();
  const detail = { randomInfo: 123 }
  const markEntry = performance.mark("A", { detail });
  assert_not_equals(markEntry.detail, detail);
}, "The detail property in the mark method should be structured-clone.");

test(function() {
  performance.clearMarks();
  const markEntry = performance.mark("A");
  assert_equals(markEntry.detail, null);
}, "When accessing detail from a mark entry and the detail is not provided, just return a null value.");

test(function() {
  performance.clearMarks();
  const detail = { unserializable: Symbol() };
  assert_throws_dom("DataCloneError", ()=>{
    new PerformanceMark("A", { detail });
  }, "Trying to structured-serialize a Symbol.");
}, "Mark: Throw an exception when the detail property cannot be structured-serialized.");

test(function() {
  performance.clearMeasures();
  const detail = { randomInfo: 123 }
  const measureEntry = performance.measure("A", { start: 0, detail });
  assert_not_equals(measureEntry.detail, detail);
}, "The detail property in the measure method should be structured-clone.");

test(function() {
  performance.clearMeasures();
  const detail = { randomInfo: 123 }
  const measureEntry = performance.measure("A", { start: 0, detail });
  assert_equals(measureEntry.detail, measureEntry.detail);
}, "The detail property in the measure method should be the same reference.");

test(function() {
  performance.clearMeasures();
  const measureEntry = performance.measure("A");
  assert_equals(measureEntry.detail, null);
}, "When accessing detail from a measure entry and the detail is not provided, just return a null value.");

test(function() {
  performance.clearMeasures();
  const detail = { unserializable: Symbol() };
  assert_throws_dom("DataCloneError", ()=>{
    performance.measure("A", { start: 0, detail });
  }, "Trying to structured-serialize a Symbol.");
}, "Measure: Throw an exception when the detail property cannot be structured-serialized.");

test(function() {
  const bar = { 1: 2 };
  const detail = { foo: 1, bar };
  const mark = performance.mark("m", { detail });
  detail.foo = 2;
  assert_equals(mark.detail.foo, 1);
}, "The detail object is cloned when passed to mark API.");
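The same structured-clone semantics can be exercised directly against the Node.js perf_hooks implementation; a minimal sketch (the entry name and detail payload are illustrative):

// Sketch: detail is structured-cloned at mark() time, so later mutation of the
// input object is not reflected on the entry, and the clone is a distinct object.
const { performance } = require('perf_hooks');
const assert = require('assert');

const detail = { foo: 1 };
const entry = performance.mark('clone-demo', { detail });
detail.foo = 2;
assert.strictEqual(entry.detail.foo, 1);     // snapshot taken when the mark was created
assert.notStrictEqual(entry.detail, detail); // not the same reference
performance.clearMarks('clone-demo');        // release the buffered entry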
test/fixtures/wpt/user-timing/supported-usertiming-types.any.js (vendored, new file, 37 lines)
@ -0,0 +1,37 @@
test(() => {
  if (typeof PerformanceObserver.supportedEntryTypes === "undefined")
    assert_unreached("supportedEntryTypes is not supported.");
  const types = PerformanceObserver.supportedEntryTypes;
  assert_true(types.includes("mark"),
    "There should be 'mark' in PerformanceObserver.supportedEntryTypes");
  assert_true(types.includes("measure"),
    "There should be 'measure' in PerformanceObserver.supportedEntryTypes");
  assert_greater_than(types.indexOf("measure"), types.indexOf('mark'),
    "The 'measure' entry should appear after the 'mark' entry");
}, "supportedEntryTypes contains 'mark' and 'measure'.");

if (typeof PerformanceObserver.supportedEntryTypes !== "undefined") {
  const entryTypes = {
    "mark": () => {
      performance.mark('foo');
    },
    "measure": () => {
      performance.measure('bar');
    }
  }
  for (let entryType in entryTypes) {
    if (PerformanceObserver.supportedEntryTypes.includes(entryType)) {
      promise_test(async() => {
        await new Promise((resolve) => {
          new PerformanceObserver(function (list, observer) {
            observer.disconnect();
            resolve();
          }).observe({entryTypes: [entryType]});

          // Force the PerformanceEntry.
          entryTypes[entryType]();
        })
      }, `'${entryType}' entries should be observable.`)
    }
  }
}
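For reference, the equivalent observation from Node.js (outside the WPT harness) looks roughly like the sketch below; the entry names are illustrative and the observer uses the `entryTypes` form supported by perf_hooks:

// Sketch: subscribe to user-created entries, then emit one mark and one measure.
const { performance, PerformanceObserver } = require('perf_hooks');

if (PerformanceObserver.supportedEntryTypes.includes('mark')) {
  const obs = new PerformanceObserver((list, observer) => {
    for (const entry of list.getEntries())
      console.log(entry.entryType, entry.name, entry.duration);
    observer.disconnect();
  });
  obs.observe({ entryTypes: ['mark', 'measure'] });

  performance.mark('begin');
  performance.measure('begin-to-now', 'begin');
}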
test/fixtures/wpt/user-timing/user-timing-tojson.html (vendored, new file, 44 lines)
@ -0,0 +1,44 @@
<!doctype html>
<html>
<head>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<script>
const keys = [
  'name',
  'entryType',
  'startTime',
  'duration',
];
test(() => {
  performance.mark('a');
  const markEntries = performance.getEntriesByType('mark');
  assert_equals(1, markEntries.length);
  const markEntry = markEntries[0];
  assert_equals(markEntry.entryType, 'mark');
  assert_equals(typeof(markEntry.toJSON), 'function');
  const markJSON = markEntry.toJSON();
  assert_equals(typeof(markJSON), 'object');
  for (const key of keys) {
    assert_equals(markJSON[key], markEntry[key], `PerformanceMark ${key} entry does not match its toJSON value`);
  }
}, 'Test toJSON() in PerformanceMark');

test(() => {
  performance.measure('m');
  const measureEntries = performance.getEntriesByType('measure');
  assert_equals(1, measureEntries.length);
  const measureEntry = measureEntries[0];
  assert_equals(measureEntry.entryType, 'measure');
  assert_equals(typeof(measureEntry.toJSON), 'function');
  const measureJSON = measureEntry.toJSON();
  assert_equals(typeof(measureJSON), 'object');
  for (const key of keys) {
    assert_equals(measureJSON[key], measureEntry[key], `PerformanceMeasure ${key} entry does not match its toJSON value`);
  }
}, 'Test toJSON() in PerformanceMeasure');
</script>
</body>
</html>
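The same toJSON() shape is visible from Node.js once entries can be fetched synchronously via getEntriesByName; a small sketch (names illustrative):

// Sketch: serializing a buffered mark and then clearing it so it is not retained.
const { performance } = require('perf_hooks');

performance.mark('json-demo');
const [mark] = performance.getEntriesByName('json-demo');
console.log(JSON.stringify(mark)); // {"name":"json-demo","entryType":"mark","startTime":...,"duration":0,...}
performance.clearMarks('json-demo');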
test/fixtures/wpt/user-timing/user_timing_exists.any.js (vendored, new file, 12 lines)
@ -0,0 +1,12 @@
test(function() {
  assert_not_equals(self.performance.mark, undefined);
}, "self.performance.mark is defined.");
test(function() {
  assert_not_equals(self.performance.clearMarks, undefined);
}, "self.performance.clearMarks is defined.");
test(function() {
  assert_not_equals(self.performance.measure, undefined);
}, "self.performance.measure is defined.");
test(function() {
  assert_not_equals(self.performance.clearMeasures, undefined);
}, "self.performance.clearMeasures is defined.");
test/fixtures/wpt/versions.json (vendored)
@ -36,9 +36,13 @@
    "path": "html/webappapis/timers"
  },
  "interfaces": {
    "commit": "fcb671ed8b068b25cee87429d803833777f35c2c",
    "commit": "80a417662387b6eda904607d78ad246c5d8bf191",
    "path": "interfaces"
  },
  "performance-timeline": {
    "commit": "17ebc3aea0d6321e69554067c39ab5855e6fb67e",
    "path": "performance-timeline"
  },
  "resources": {
    "commit": "972ca5b6693bffebebc5805e1b9da68a6876e1f6",
    "path": "resources"
@ -50,5 +54,9 @@
  "url": {
    "commit": "77d54aa9e0405f737987b59331f3584e3e1c26f9",
    "path": "url"
  },
  "user-timing": {
    "commit": "df24fb604e2d40528ac1d1b5dd970e32fc5c2978",
    "path": "user-timing"
  }
}
@ -29,7 +29,7 @@ assert(measure);
  assert.strictEqual(m.entryType, 'mark');
  assert.strictEqual(typeof m.startTime, 'number');
  assert.strictEqual(m.duration, 0);
  assert.strictEqual(m.details, undefined);
  assert.strictEqual(m.detail, null);
});

clearMarks();
@ -38,11 +38,18 @@ assert.throws(() => mark(Symbol('a')), {
  message: /Cannot convert a Symbol value to a string/
});

[undefined, null, 1, 'any', {}, []].forEach((detail) => {
[undefined, null].forEach((detail) => {
  const m = mark('a', { detail });
  assert.strictEqual(m.name, 'a');
  assert.strictEqual(m.entryType, 'mark');
  assert.strictEqual(m.detail, detail);
  assert.deepStrictEqual(m.detail, null);
});
[1, 'any', {}, []].forEach((detail) => {
  const m = mark('a', { detail });
  assert.strictEqual(m.name, 'a');
  assert.strictEqual(m.entryType, 'mark');
  // Value of detail is structured cloned.
  assert.deepStrictEqual(m.detail, detail);
});

clearMarks();
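The updated expectations above can be reproduced directly against perf_hooks; a minimal sketch (the mark name is illustrative) of the two cases the test now distinguishes:

// Sketch: an omitted or null detail surfaces as null; any other serializable
// value is structured-cloned, so it is deep-equal but not reference-equal.
const { performance } = require('perf_hooks');
const assert = require('assert');

for (const detail of [undefined, null]) {
  const m = performance.mark('detail-demo', { detail });
  assert.strictEqual(m.detail, null);
}

const payload = { nested: { value: 1 } };
const m = performance.mark('detail-demo', { detail: payload });
assert.deepStrictEqual(m.detail, payload);
assert.notStrictEqual(m.detail, payload);

performance.clearMarks('detail-demo');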
test/wpt/status/performance-timeline.json (new file, 1 line)
@ -0,0 +1 @@
{}
test/wpt/status/user-timing.json (new file, 11 lines)
@ -0,0 +1,11 @@
{
  "invoke_with_timing_attributes.worker.js": {
    "skip": "importScripts not supported"
  },
  "performance-measure-invalid.worker.js": {
    "skip": "importScripts not supported"
  },
  "idlharness.any.js": {
    "skip": "idlharness cannot recognize Node.js environment"
  }
}
test/wpt/test-performance-timeline.js (new file, 27 lines)
@ -0,0 +1,27 @@
'use strict';
require('../common');
const { WPTRunner } = require('../common/wpt');

const runner = new WPTRunner('performance-timeline');

// Needed to access DOMException.
runner.setFlags(['--expose-internals']);

runner.setInitScript(`
  const {
    PerformanceMark,
    PerformanceMeasure,
    PerformanceObserver,
    performance,
  } = require('perf_hooks');
  global.PerformanceMark = PerformanceMark;
  global.PerformanceMeasure = PerformanceMeasure;
  global.PerformanceObserver = PerformanceObserver;
  global.performance = performance;

  const { internalBinding } = require('internal/test/binding');
  const { DOMException } = internalBinding('messaging');
  global.DOMException = DOMException;
`);

runner.runJsTests();
test/wpt/test-user-timing.js (new file, 27 lines)
@ -0,0 +1,27 @@
'use strict';
require('../common');
const { WPTRunner } = require('../common/wpt');

const runner = new WPTRunner('user-timing');

// Needed to access DOMException.
runner.setFlags(['--expose-internals']);

runner.setInitScript(`
  const {
    PerformanceMark,
    PerformanceMeasure,
    PerformanceObserver,
    performance,
  } = require('perf_hooks');
  global.PerformanceMark = PerformanceMark;
  global.PerformanceMeasure = PerformanceMeasure;
  global.PerformanceObserver = PerformanceObserver;
  global.performance = performance;

  const { internalBinding } = require('internal/test/binding');
  const { DOMException } = internalBinding('messaging');
  global.DOMException = DOMException;
`);

runner.runJsTests();