This repository has been archived by the owner on Feb 11, 2021. It is now read-only.
/
SDL_test_harness.c
607 lines (524 loc) · 17.8 KB
1
/*
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
Simple DirectMedia Layer
Copyright (C) 1997-2012 Sam Lantinga <slouken@libsdl.org>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
20
21
22
23
24
25
*/
#include "SDL_config.h"
#include "SDL_test.h"
26
27
#include <stdio.h>
#include <stdlib.h>
28
#include <string.h>
29
#include <time.h>
30
31
/* Placeholder logged when a test/suite name or description pointer is NULL */
const char *SDLTest_InvalidNameFormat = "(Invalid)";

/* Log summary message format (scope, total, passed, failed, skipped) */
const char *SDLTest_LogSummaryFormat = "%s Summary: Total=%d Passed=%d Failed=%d Skipped=%d";

/* Final result message format (scope, name, verdict) */
const char *SDLTest_FinalResultFormat = ">>> %s '%s': %s\n";

/*! \brief Timeout for single test case execution, in seconds */
static Uint32 SDLTest_TestCaseTimeout = 3600;
42
43
/**
44
45
46
47
48
49
50
51
52
* Generates a random run seed string for the harness. The generated seed
* will contain alphanumeric characters (0-9A-Z).
*
* Note: The returned string needs to be deallocated by the caller.
*
* \param length The length of the seed string to generate
*
* \returns The generated seed string
*/
53
char *
54
SDLTest_GenerateRunSeed(const int length)
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
{
char *seed = NULL;
SDLTest_RandomContext randomContext;
int counter;
// Sanity check input
if (length <= 0) {
SDLTest_LogError("The length of the harness seed must be >0.");
return NULL;
}
// Allocate output buffer
seed = (char *)SDL_malloc((length + 1) * sizeof(char));
if (seed == NULL) {
SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
return NULL;
}
// Generate a random string of alphanumeric characters
SDLTest_RandomInitTime(&randomContext);
for (counter = 0; counter < length - 1; ++counter) {
unsigned int number = SDLTest_Random(&randomContext);
char ch = (char) (number % (91 - 48)) + 48;
if (ch >= 58 && ch <= 64) {
ch = 65;
}
seed[counter] = ch;
}
seed[counter] = '\0';
return seed;
}
/**
89
90
91
92
93
94
95
96
97
98
* Generates an execution key for the fuzzer.
*
* \param runSeed The run seed to use
* \param suiteName The name of the test suite
* \param testName The name of the test
* \param iteration The iteration count
*
* \returns The generated execution key to initialize the fuzzer with.
*
*/
99
Uint64
100
SDLTest_GenerateExecKey(char *runSeed, char *suiteName, char *testName, int iteration)
101
102
103
104
105
106
107
108
109
110
111
{
SDLTest_Md5Context md5Context;
Uint64 *keys;
char iterationString[16];
Uint32 runSeedLength;
Uint32 suiteNameLength;
Uint32 testNameLength;
Uint32 iterationStringLength;
Uint32 entireStringLength;
char *buffer;
112
if (runSeed == NULL || SDL_strlen(runSeed)==0) {
113
114
115
116
SDLTest_LogError("Invalid runSeed string.");
return -1;
}
117
if (suiteName == NULL || SDL_strlen(suiteName)==0) {
118
119
120
121
SDLTest_LogError("Invalid suiteName string.");
return -1;
}
122
if (testName == NULL || SDL_strlen(testName)==0) {
123
124
125
126
127
128
129
130
131
132
SDLTest_LogError("Invalid testName string.");
return -1;
}
if (iteration <= 0) {
SDLTest_LogError("Invalid iteration count.");
return -1;
}
// Convert iteration number into a string
133
SDL_memset(iterationString, 0, sizeof(iterationString));
134
135
136
SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);
// Combine the parameters into single string
137
138
139
140
runSeedLength = SDL_strlen(runSeed);
suiteNameLength = SDL_strlen(suiteName);
testNameLength = SDL_strlen(testName);
iterationStringLength = SDL_strlen(iterationString);
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
buffer = (char *)SDL_malloc(entireStringLength);
if (buffer == NULL) {
SDLTest_LogError("SDL_malloc failed to allocate buffer for execKey generation.");
return 0;
}
SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);
// Hash string and use half of the digest as 64bit exec key
SDLTest_Md5Init(&md5Context);
SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, entireStringLength);
SDLTest_Md5Final(&md5Context);
SDL_free(buffer);
keys = (Uint64 *)md5Context.digest;
return keys[0];
}
158
159
/**
160
161
162
163
164
165
166
167
168
* \brief Set timeout handler for test.
*
* Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't done so before.
*
* \param timeout Timeout interval in seconds.
* \param callback Function that will be called after timeout has elapsed.
*
* \return Timer id or -1 on failure.
*/
169
SDL_TimerID
170
SDLTest_SetTestTimeout(int timeout, void (*callback)())
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
{
Uint32 timeoutInMilliseconds;
SDL_TimerID timerID;
if (callback == NULL) {
SDLTest_LogError("Timeout callback can't be NULL");
return -1;
}
if (timeout < 0) {
SDLTest_LogError("Timeout value must be bigger than zero.");
return -1;
}
/* Init SDL timer if not initialized before */
if (SDL_WasInit(SDL_INIT_TIMER) == 0) {
if (SDL_InitSubSystem(SDL_INIT_TIMER)) {
SDLTest_LogError("Failed to init timer subsystem: %s", SDL_GetError());
return -1;
}
}
/* Set timer */
timeoutInMilliseconds = timeout * 1000;
timerID = SDL_AddTimer(timeoutInMilliseconds, (SDL_TimerCallback)callback, 0x0);
if (timerID == 0) {
SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
return -1;
}
return timerID;
}
203
204
205
206
/**
 * \brief Timeout handler. Aborts test run and exits harness process.
 *
 * Installed by SDLTest_RunTest via SDLTest_SetTestTimeout; exit() terminates
 * the whole harness process, so any tests queued after the hung one never run.
 */
void
SDLTest_BailOut()
{
    SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
    exit(TEST_ABORTED); // bail out from the test
}
/**
215
216
217
218
219
220
221
222
* \brief Execute a test using the given execution key.
*
* \param testSuite Suite containing the test case.
* \param testCase Case to execute.
* \param execKey Execution key for the fuzzer.
*
* \returns Test case result.
*/
223
int
224
SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference *testCase, Uint64 execKey)
225
226
{
SDL_TimerID timer = 0;
227
int testResult = 0;
228
int fuzzerCount;
229
230
231
232
233
234
235
236
237
if (testSuite==NULL || testCase==NULL || testSuite->name==NULL || testCase->name==NULL)
{
SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
return TEST_RESULT_SETUP_FAILURE;
}
if (!testCase->enabled)
{
238
SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", testCase->name, "Skipped (Disabled)");
239
240
241
return TEST_RESULT_SKIPPED;
}
242
243
// Initialize fuzzer
244
245
246
247
248
249
250
251
252
253
254
SDLTest_FuzzerInit(execKey);
// Reset assert tracker
SDLTest_ResetAssertSummary();
// Set timeout timer
timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
// Maybe run suite initalizer function
if (testSuite->testSetUp) {
testSuite->testSetUp(0x0);
255
if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
256
SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Suite Setup", testSuite->name, "Failed");
257
258
259
260
261
262
return TEST_RESULT_SETUP_FAILURE;
}
}
// Run test case function
testCase->testCase(0x0);
263
testResult = SDLTest_AssertSummaryToTestResult();
264
265
// Maybe run suite cleanup function (ignore failed asserts)
266
267
268
269
270
271
272
273
274
275
if (testSuite->testTearDown) {
testSuite->testTearDown(0x0);
}
// Cancel timeout timer
if (timer) {
SDL_RemoveTimer(timer);
}
// Report on asserts and fuzzer usage
276
277
278
279
fuzzerCount = SDLTest_GetFuzzerInvocationCount();
if (fuzzerCount > 0) {
SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
}
280
281
SDLTest_LogAssertSummary();
282
return testResult;
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
}
/* Prints summary of all suites/tests contained in the given reference.
 *
 * \param testSuites Array of suite references, expected to end with a
 *                   zero-filled sentinel entry.
 */
void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
{
    int suiteCounter;
    int testCounter;
    SDLTest_TestSuiteReference *testSuite;
    SDLTest_TestCaseReference *testCase;

    /* Loop over all suites.
       FIX: the old condition was `&testSuites[suiteCounter]` -- the address of
       an array element, which is always non-NULL, so the loop never terminated.
       We now stop at an entry without a test case list (assumes the array is
       terminated by a zero-filled sentinel -- TODO confirm against callers). */
    suiteCounter = 0;
    while (testSuites[suiteCounter].testCases) {
        testSuite=&testSuites[suiteCounter];
        suiteCounter++;
        SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
            (testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);

        // Loop over all test cases
        testCounter = 0;
        while(testSuite->testCases[testCounter])
        {
            testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
            testCounter++;
            SDLTest_Log(" Test Case %i - %s: %s", testCounter,
                (testCase->name) ? testCase->name : SDLTest_InvalidNameFormat,
                (testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
        }
    }
}
314
315
316
317
318
319
/* Gets the process CPU time as a value in seconds (clock() scaled by CLOCKS_PER_SEC) */
float GetClock()
{
    return ((float)clock()) / (float)CLOCKS_PER_SEC;
}
320
321
/**
322
323
324
325
326
327
328
329
330
331
332
333
334
* \brief Execute a test suite using the given run seend and execution key.
*
* The filter string is matched to the suite name (full comparison) to select a single suite,
* or if no suite matches, it is matched to the test names (full comparison) to select a single test.
*
* \param testSuites Suites containing the test case.
* \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
* \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
* \param filter Filter specification. NULL disables. Case sensitive.
* \param testIterations Number of iterations to run each test case.
*
* \returns Test run result; 0 when all tests passed, 1 if any tests failed.
*/
335
int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
336
337
338
339
340
341
{
int suiteCounter;
int testCounter;
int iterationCounter;
SDLTest_TestSuiteReference *testSuite;
SDLTest_TestCaseReference *testCase;
342
const char *runSeed = NULL;
343
344
char *currentSuiteName;
char *currentTestName;
345
Uint64 execKey;
346
347
348
349
350
351
float runStartSeconds;
float suiteStartSeconds;
float testStartSeconds;
float runEndSeconds;
float suiteEndSeconds;
float testEndSeconds;
352
float runtime;
353
354
355
356
int suiteFilter = 0;
char *suiteFilterName = NULL;
int testFilter = 0;
char *testFilterName = NULL;
357
358
359
360
361
362
363
364
365
366
int testResult = 0;
int runResult = 0;
Uint32 totalTestFailedCount = 0;
Uint32 totalTestPassedCount = 0;
Uint32 totalTestSkippedCount = 0;
Uint32 testFailedCount = 0;
Uint32 testPassedCount = 0;
Uint32 testSkippedCount = 0;
Uint32 countSum = 0;
char *logFormat = (char *)SDLTest_LogSummaryFormat;
367
368
369
370
371
372
373
// Sanitize test iterations
if (testIterations < 1) {
testIterations = 1;
}
// Generate run see if we don't have one already
374
if (userRunSeed == NULL || SDL_strlen(userRunSeed) == 0) {
375
376
runSeed = SDLTest_GenerateRunSeed(16);
if (runSeed == NULL) {
377
SDLTest_LogError("Generating a random seed failed");
378
379
return 2;
}
380
381
} else {
runSeed = userRunSeed;
382
383
}
384
385
// Reset per-run counters
386
387
388
totalTestFailedCount = 0;
totalTestPassedCount = 0;
totalTestSkippedCount = 0;
389
390
// Take time - run start
391
runStartSeconds = GetClock();
392
393
// Log run with fuzzer parameters
394
SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
// Initialize filtering
if (filter != NULL && SDL_strlen(filter) > 0) {
/* Loop over all suites to check if we have a filter match */
suiteCounter = 0;
while (testSuites[suiteCounter] && suiteFilter == 0) {
testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
suiteCounter++;
if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
/* Matched a suite name */
suiteFilter = 1;
suiteFilterName = testSuite->name;
SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
break;
}
/* Within each suite, loop over all test cases to check if we have a filter match */
testCounter = 0;
while (testSuite->testCases[testCounter] && testFilter == 0)
{
testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
testCounter++;
if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
/* Matched a test name */
suiteFilter = 1;
suiteFilterName = testSuite->name;
testFilter = 1;
testFilterName = testCase->name;
SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
break;
}
}
}
if (suiteFilter == 0 && testFilter == 0) {
SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
SDLTest_Log("Exit code: 2");
return 2;
}
}
436
437
// Loop over all suites
suiteCounter = 0;
438
439
440
while(testSuites[suiteCounter]) {
testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);
441
suiteCounter++;
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
// Filter suite if flag set and we have a name
if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
// Skip suite
SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
suiteCounter,
currentSuiteName);
} else {
// Reset per-suite counters
testFailedCount = 0;
testPassedCount = 0;
testSkippedCount = 0;
// Take time - suite start
suiteStartSeconds = GetClock();
// Log suite started
SDLTest_Log("===== Test Suite %i: '%s' started\n",
suiteCounter,
currentSuiteName);
// Loop over all test cases
testCounter = 0;
while(testSuite->testCases[testCounter])
468
{
469
470
471
472
473
474
475
476
477
478
479
480
testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
currentTestName = (char *)((testCase->name) ? testCase->name : SDLTest_InvalidNameFormat);
testCounter++;
// Filter tests if flag set and we have a name
if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
SDL_strcmp(testFilterName, testCase->name) != 0) {
// Skip test
SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
suiteCounter,
testCounter,
currentTestName);
481
} else {
482
483
484
485
486
// Override 'disabled' flag if we specified a test filter (i.e. force run for debugging)
if (testFilter == 1 && !testCase->enabled) {
SDLTest_Log("Force run of disabled test since test filter was set");
testCase->enabled = 1;
}
487
488
489
490
491
492
493
494
495
// Take time - test start
testStartSeconds = GetClock();
// Log test started
SDLTest_Log("----- Test Case %i.%i: '%s' started",
suiteCounter,
testCounter,
currentTestName);
496
if (testCase->description != NULL && SDL_strlen(testCase->description)>0) {
497
498
499
500
501
502
503
504
505
506
507
508
509
SDLTest_Log("Test Description: '%s'",
(testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
}
// Loop over all iterations
iterationCounter = 0;
while(iterationCounter < testIterations)
{
iterationCounter++;
if (userExecKey != 0) {
execKey = userExecKey;
} else {
510
execKey = SDLTest_GenerateExecKey((char *)runSeed, testSuite->name, testCase->name, iterationCounter);
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
}
SDLTest_Log("Test Iteration %i: execKey %llu", iterationCounter, execKey);
testResult = SDLTest_RunTest(testSuite, testCase, execKey);
if (testResult == TEST_RESULT_PASSED) {
testPassedCount++;
totalTestPassedCount++;
} else if (testResult == TEST_RESULT_SKIPPED) {
testSkippedCount++;
totalTestSkippedCount++;
} else {
testFailedCount++;
totalTestFailedCount++;
}
}
// Take time - test end
testEndSeconds = GetClock();
runtime = testEndSeconds - testStartSeconds;
if (runtime < 0.0f) runtime = 0.0f;
if (testIterations > 1) {
// Log test runtime
SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
} else {
// Log test runtime
SDLTest_Log("Total Test runtime: %.1f sec", runtime);
}
// Log final test result
switch (testResult) {
case TEST_RESULT_PASSED:
SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Passed");
break;
case TEST_RESULT_FAILED:
SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Failed");
break;
case TEST_RESULT_NO_ASSERT:
SDLTest_LogError((char *)SDLTest_FinalResultFormat,"Test", currentTestName, "No Asserts");
break;
}
554
555
556
557
}
}
558
559
560
// Take time - suite end
suiteEndSeconds = GetClock();
runtime = suiteEndSeconds - suiteStartSeconds;
561
if (runtime < 0.0f) runtime = 0.0f;
562
563
564
// Log suite runtime
SDLTest_Log("Total Suite runtime: %.1f sec", runtime);
565
566
567
568
569
570
571
572
573
574
575
576
577
// Log summary and final Suite result
countSum = testPassedCount + testFailedCount + testSkippedCount;
if (testFailedCount == 0)
{
SDLTest_Log(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
SDLTest_Log((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Passed");
}
else
{
SDLTest_LogError(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Failed");
}
578
579
}
580
581
582
}
// Take time - run end
583
runEndSeconds = GetClock();
584
585
runtime = runEndSeconds - runStartSeconds;
if (runtime < 0.0f) runtime = 0.0f;
586
587
// Log total runtime
588
SDLTest_Log("Total Run runtime: %.1f sec", runtime);
589
590
591
// Log summary and final run result
countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
592
if (totalTestFailedCount == 0)
593
594
595
{
runResult = 0;
SDLTest_Log(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
596
SDLTest_Log((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Passed");
597
598
599
600
601
}
else
{
runResult = 1;
SDLTest_LogError(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
602
SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Failed");
603
}
604
605
SDLTest_Log("Exit code: %d", runResult);
606
return runResult;
607
}