<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing measure() method is working properly</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-measure"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// test data
var startMarkName = "mark_start";
var startMarkValue;
var endMarkName = "mark_end";
var endMarkValue;
var measures;
var testThreshold = 20;
// test measures
var measureTestDelay = 200;
var TEST_MEASURES =
[
{
name: "measure_no_start_no_end",
startMark: undefined,
endMark: undefined,
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
},
{
name: "measure_start_no_end",
startMark: "mark_start",
endMark: undefined,
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
},
{
name: "measure_start_end",
startMark: "mark_start",
endMark: "mark_end",
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
},
{
name: "measure_no_start_no_end",
startMark: undefined,
endMark: undefined,
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
}
];
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
// create the start mark for the test measures
window.performance.mark(startMarkName);
// get the start mark's value
startMarkValue = window.performance.getEntriesByName(startMarkName)[0].startTime;
// create the test end mark using the test delay; this will allow for a significant difference between
// the mark values that should be represented in the duration of measures using these marks
setTimeout(measure_test_cb, measureTestDelay);
}
}
function measure_test_cb()
{
// create the end mark for the test measures
window.performance.mark(endMarkName);
// get the end mark's value
endMarkValue = window.performance.getEntriesByName(endMarkName)[0].startTime;
// loop through all measure scenarios and create the corresponding measures
for (var i in TEST_MEASURES)
{
var scenario = TEST_MEASURES[i];
if (scenario.startMark == undefined && scenario.endMark == undefined)
{
// both startMark and endMark are undefined, don't provide either parameter
window.performance.measure(scenario.name);
// when startMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding
// to the navigationStart attribute with a timebase of the same attribute is used; this is
// equivalent to 0
scenario.startTime = 0;
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
// the current time with a timebase of the navigationStart attribute is used
scenario.duration = (new Date()) - window.performance.timing.navigationStart;
}
else if (scenario.startMark != undefined && scenario.endMark == undefined)
{
// only startMark is defined, provide startMark and don't provide endMark
window.performance.measure(scenario.name, scenario.startMark);
// when startMark is provided to the measure() call, the value of the mark whose name is
// provided is used for the startMark
scenario.startTime = startMarkValue;
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
// the current time with a timebase of the navigationStart attribute is used
scenario.duration = ((new Date()) - window.performance.timing.navigationStart) -
startMarkValue;
}
else if (scenario.startMark != undefined && scenario.endMark != undefined)
{
// both startMark and endMark are defined, provide both parameters
window.performance.measure(scenario.name, scenario.startMark, scenario.endMark);
// when startMark is provided to the measure() call, the value of the mark whose name is
// provided is used for the startMark
scenario.startTime = startMarkValue;
// when endMark is provided to the measure() call, the value of the mark whose name is
// provided is used for the endMark
scenario.duration = endMarkValue - startMarkValue;
}
}
// test that expected measures are returned by getEntriesByName
for (var i in TEST_MEASURES)
{
entries = window.performance.getEntriesByName(TEST_MEASURES[i].name);
// each test measure is validated against the first entry returned by getEntriesByName(), except for
// the last measure, which is a duplicate and is therefore validated against the second entry returned
// by getEntriesByName()
test_measure(entries[(i == 3 ? 1 : 0)],
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\")[" +
(i == 3 ? 1 : 0) + "]",
TEST_MEASURES[i].name,
TEST_MEASURES[i].startTime,
TEST_MEASURES[i].duration);
TEST_MEASURES[i].entryMatch = entries[(i == 3 ? 1 : 0)];
}
// test that expected measures are returned by getEntriesByName with the entryType parameter provided
for (var i in TEST_MEASURES)
{
entries = window.performance.getEntriesByName(TEST_MEASURES[i].name, "measure");
test_true(match_entries(entries[(i == 3 ? 1 : 0)], TEST_MEASURES[i].entryMatch),
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\", \"measure\")[" +
(i == 3 ? 1 : 0) + "] returns an object containing the \"" + TEST_MEASURES[i].name +
"\" measure in the correct order, and its value matches the \"" + TEST_MEASURES[i].name +
"\" measure returned by window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name +
"\")");
}
// test that expected measures are returned by getEntries
entries = get_test_entries(window.performance.getEntries(), "measure");
test_measure_list(entries, "window.performance.getEntries()", TEST_MEASURES);
// test that expected measures are returned by getEntriesByType
entries = window.performance.getEntriesByType("measure");
test_measure_list(entries, "window.performance.getEntriesByType(\"measure\")", TEST_MEASURES);
done();
}
function match_entries(entry1, entry2, threshold)
{
// when no threshold is provided, fall back to the test-wide threshold
if (threshold == undefined)
{
threshold = testThreshold;
}
var pass = true;
// match name
pass = pass && (entry1.name == entry2.name);
// match startTime
pass = pass && (Math.abs(entry1.startTime - entry2.startTime) <= threshold);
// match entryType
pass = pass && (entry1.entryType == entry2.entryType);
// match duration
pass = pass && (Math.abs(entry1.duration - entry2.duration) <= threshold);
return pass;
}
function test_measure(measureEntry, measureEntryCommand, expectedName, expectedStartTime, expectedDuration)
{
// test name
test_true(measureEntry.name == expectedName, measureEntryCommand + ".name == \"" + expectedName + "\"");
// test startTime; since a measure's startTime is always equal to the value of the mark or navigation
// timing attribute it was created from, the actual startTime should match the expected value exactly
test_true(Math.abs(measureEntry.startTime - expectedStartTime) == 0,
measureEntryCommand + ".startTime is correct");
// test entryType
test_true(measureEntry.entryType == "measure", measureEntryCommand + ".entryType == \"measure\"");
// test duration, allow for an acceptable threshold in the difference between the actual duration and the
// expected value for the duration
test_true(Math.abs(measureEntry.duration - expectedDuration) <= testThreshold, measureEntryCommand +
".duration is approximately correct (up to " + testThreshold + "ms difference allowed)");
}
function test_measure_list(measureEntryList, measureEntryListCommand, measureScenarios)
{
// give all entries a "found" property that can be set to ensure no entry is matched twice
for (var i in measureEntryList)
{
measureEntryList[i].found = false;
}
for (var i in measureScenarios)
{
measureScenarios[i].found = false;
for (var j in measureEntryList)
{
if (match_entries(measureEntryList[j], measureScenarios[i]) && !measureEntryList[j].found)
{
test_true(match_entries(measureEntryList[j], measureScenarios[i].entryMatch),
measureEntryListCommand + " returns an object containing the \"" +
measureScenarios[i].name + "\" measure, and its value matches the measure " +
"returned by window.performance.getEntriesByName(\"" + measureScenarios[i].name +
"\")[" + (i == 3 ? 1 : 0) + "].");
measureEntryList[j].found = true;
measureScenarios[i].found = true;
break;
}
}
if (!measureScenarios[i].found)
{
test_true(false,
measureEntryListCommand + " returns an object containing the \"" +
measureScenarios[i].name + "\" measure.");
}
}
// verify order of output of getEntriesByType
var startTimeCurr = 0;
var pass = true;
for (var i in measureEntryList)
{
if (measureEntryList[i].startTime < startTimeCurr)
{
pass = false;
}
startTimeCurr = measureEntryList[i].startTime;
}
test_true(pass,
measureEntryListCommand + " returns an object containing all test " +
"measures in order.");
}
function get_test_entries(entryList, entryType)
{
var testEntries = [];
// filter entryList
for (var i in entryList)
{
if (entryList[i].entryType == entryType)
{
testEntries.push(entryList[i]);
}
}
return testEntries;
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.measure() method works properly. It creates the
following measures to exercise this method:
<ul>
<li>"measure_no_start_no_end": created using a measure() call without a startMark or endMark
provided</li>
<li>"measure_start_no_end": created using a measure() call with only the startMark provided</li>
<li>"measure_start_end": created using a measure() call with both a startMark or endMark provided</li>
<li>"measure_no_start_no_end": duplicate of the first measure, used to confirm names can be re-used</li>
</ul>
After creating these measures, their existence is validated by calling
performance.getEntriesByName() (both with and without the entryType parameter provided),
performance.getEntriesByType(), and performance.getEntries(). A minimal sketch of the measure() call
forms exercised here follows this description.
</p>
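<p>The sketch below is illustrative only (it is not executed by the harness) and summarizes, per the
comments in the test script, what each call form is expected to produce:</p>
<pre>
// name only: startTime defaults to 0 (the navigation start time) and the
// duration runs from that time base to "now"
performance.measure("measure_no_start_no_end");

// name + startMark: startTime comes from the named mark; duration runs to "now"
performance.measure("measure_start_no_end", "mark_start");

// name + startMark + endMark: duration is the difference between the two marks
performance.measure("measure_start_end", "mark_start", "mark_end");

// the resulting entries can then be retrieved by name, by type, or from the full timeline
var byName = performance.getEntriesByName("measure_start_end");
var byType = performance.getEntriesByType("measure");
var all = performance.getEntries();
</pre>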
<div id="log"></div>
</body>
</html>