2 Simple DirectMedia Layer
3 Copyright (C) 1997-2016 Sam Lantinga <slouken@libsdl.org>
5 This software is provided 'as-is', without any express or implied
6 warranty. In no event will the authors be held liable for any damages
7 arising from the use of this software.
9 Permission is granted to anyone to use this software for any purpose,
10 including commercial applications, and to alter it and redistribute it
11 freely, subject to the following restrictions:
13 1. The origin of this software must not be misrepresented; you must not
14 claim that you wrote the original software. If you use this software
15 in a product, an acknowledgment in the product documentation would be
16 appreciated but is not required.
17 2. Altered source versions must be plainly marked as such, and must not be
18 misrepresented as being the original software.
19 3. This notice may not be removed or altered from any source distribution.
22 #include "SDL_config.h"
31 /* Invalid test name/description message format */
32 const char *SDLTest_InvalidNameFormat = "(Invalid)";
34 /* Log summary message format */
35 const char *SDLTest_LogSummaryFormat = "%s Summary: Total=%d Passed=%d Failed=%d Skipped=%d";
37 /* Final result message format */
38 const char *SDLTest_FinalResultFormat = ">>> %s '%s': %s\n";
40 /* ! \brief Timeout for single test case execution */
41 static Uint32 SDLTest_TestCaseTimeout = 3600;
44 * Generates a random run seed string for the harness. The generated seed
45 * will contain alphanumeric characters (0-9A-Z).
47 * Note: The returned string needs to be deallocated by the caller.
49 * \param length The length of the seed string to generate
51 * \returns The generated seed string
54 SDLTest_GenerateRunSeed(const int length)
57 SDLTest_RandomContext randomContext;
60 /* Sanity check input */
62 SDLTest_LogError("The length of the harness seed must be >0.");
66 /* Allocate output buffer */
67 seed = (char *)SDL_malloc((length + 1) * sizeof(char));
69 SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
70 SDL_Error(SDL_ENOMEM);
74 /* Generate a random string of alphanumeric characters */
75 SDLTest_RandomInitTime(&randomContext);
76 for (counter = 0; counter < length; counter++) {
77 unsigned int number = SDLTest_Random(&randomContext);
78 char ch = (char) (number % (91 - 48)) + 48;
79 if (ch >= 58 && ch <= 64) {
90 * Generates an execution key for the fuzzer.
92 * \param runSeed The run seed to use
93 * \param suiteName The name of the test suite
94 * \param testName The name of the test
95 * \param iteration The iteration count
97 * \returns The generated execution key to initialize the fuzzer with.
101 SDLTest_GenerateExecKey(char *runSeed, char *suiteName, char *testName, int iteration)
103 SDLTest_Md5Context md5Context;
105 char iterationString[16];
106 Uint32 runSeedLength;
107 Uint32 suiteNameLength;
108 Uint32 testNameLength;
109 Uint32 iterationStringLength;
110 Uint32 entireStringLength;
113 if (runSeed == NULL || runSeed[0] == '\0') {
114 SDLTest_LogError("Invalid runSeed string.");
118 if (suiteName == NULL || suiteName[0] == '\0') {
119 SDLTest_LogError("Invalid suiteName string.");
123 if (testName == NULL || testName[0] == '\0') {
124 SDLTest_LogError("Invalid testName string.");
128 if (iteration <= 0) {
129 SDLTest_LogError("Invalid iteration count.");
133 /* Convert iteration number into a string */
134 SDL_memset(iterationString, 0, sizeof(iterationString));
135 SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);
137 /* Combine the parameters into single string */
138 runSeedLength = SDL_strlen(runSeed);
139 suiteNameLength = SDL_strlen(suiteName);
140 testNameLength = SDL_strlen(testName);
141 iterationStringLength = SDL_strlen(iterationString);
142 entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
143 buffer = (char *)SDL_malloc(entireStringLength);
144 if (buffer == NULL) {
145 SDLTest_LogError("Failed to allocate buffer for execKey generation.");
146 SDL_Error(SDL_ENOMEM);
149 SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);
151 /* Hash string and use half of the digest as 64bit exec key */
152 SDLTest_Md5Init(&md5Context);
153 SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, entireStringLength);
154 SDLTest_Md5Final(&md5Context);
156 keys = (Uint64 *)md5Context.digest;
162 * \brief Set timeout handler for test.
164 * Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't done so before.
166 * \param timeout Timeout interval in seconds.
167 * \param callback Function that will be called after timeout has elapsed.
169 * \return Timer id or -1 on failure.
172 SDLTest_SetTestTimeout(int timeout, void (*callback)())
174 Uint32 timeoutInMilliseconds;
177 if (callback == NULL) {
178 SDLTest_LogError("Timeout callback can't be NULL");
183 SDLTest_LogError("Timeout value must be bigger than zero.");
187 /* Init SDL timer if not initialized before */
188 if (SDL_WasInit(SDL_INIT_TIMER) == 0) {
189 if (SDL_InitSubSystem(SDL_INIT_TIMER)) {
190 SDLTest_LogError("Failed to init timer subsystem: %s", SDL_GetError());
196 timeoutInMilliseconds = timeout * 1000;
197 timerID = SDL_AddTimer(timeoutInMilliseconds, (SDL_TimerCallback)callback, 0x0);
199 SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
207 * \brief Timeout handler. Aborts test run and exits harness process.
212 SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
213 exit(TEST_ABORTED); /* bail out from the test */
217 * \brief Execute a test using the given execution key.
219 * \param testSuite Suite containing the test case.
220 * \param testCase Case to execute.
221 * \param execKey Execution key for the fuzzer.
222 * \param forceTestRun Force test to run even if test was disabled in suite.
224 * \returns Test case result.
227 SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference *testCase, Uint64 execKey, SDL_bool forceTestRun)
229 SDL_TimerID timer = 0;
230 int testCaseResult = 0;
234 if (testSuite==NULL || testCase==NULL || testSuite->name==NULL || testCase->name==NULL)
236 SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
237 return TEST_RESULT_SETUP_FAILURE;
240 if (!testCase->enabled && forceTestRun == SDL_FALSE)
242 SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", testCase->name, "Skipped (Disabled)");
243 return TEST_RESULT_SKIPPED;
246 /* Initialize fuzzer */
247 SDLTest_FuzzerInit(execKey);
249 /* Reset assert tracker */
250 SDLTest_ResetAssertSummary();
252 /* Set timeout timer */
253 timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
255 /* Maybe run suite initalizer function */
256 if (testSuite->testSetUp) {
257 testSuite->testSetUp(0x0);
258 if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
259 SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Suite Setup", testSuite->name, "Failed");
260 return TEST_RESULT_SETUP_FAILURE;
264 /* Run test case function */
265 testCaseResult = testCase->testCase(0x0);
267 /* Convert test execution result into harness result */
268 if (testCaseResult == TEST_SKIPPED) {
269 /* Test was programatically skipped */
270 testResult = TEST_RESULT_SKIPPED;
271 } else if (testCaseResult == TEST_STARTED) {
272 /* Test did not return a TEST_COMPLETED value; assume it failed */
273 testResult = TEST_RESULT_FAILED;
274 } else if (testCaseResult == TEST_ABORTED) {
275 /* Test was aborted early; assume it failed */
276 testResult = TEST_RESULT_FAILED;
278 /* Perform failure analysis based on asserts */
279 testResult = SDLTest_AssertSummaryToTestResult();
282 /* Maybe run suite cleanup function (ignore failed asserts) */
283 if (testSuite->testTearDown) {
284 testSuite->testTearDown(0x0);
287 /* Cancel timeout timer */
289 SDL_RemoveTimer(timer);
292 /* Report on asserts and fuzzer usage */
293 fuzzerCount = SDLTest_GetFuzzerInvocationCount();
294 if (fuzzerCount > 0) {
295 SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
298 /* Final log based on test execution result */
299 if (testCaseResult == TEST_SKIPPED) {
300 /* Test was programatically skipped */
301 SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", testCase->name, "Skipped (Programmatically)");
302 } else if (testCaseResult == TEST_STARTED) {
303 /* Test did not return a TEST_COMPLETED value; assume it failed */
304 SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", testCase->name, "Failed (test started, but did not return TEST_COMPLETED)");
305 } else if (testCaseResult == TEST_ABORTED) {
306 /* Test was aborted early; assume it failed */
307 SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", testCase->name, "Failed (Aborted)");
309 SDLTest_LogAssertSummary();
315 /* Prints summary of all suites/tests contained in the given reference */
316 void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
320 SDLTest_TestSuiteReference *testSuite;
321 SDLTest_TestCaseReference *testCase;
323 /* Loop over all suites */
325 while(&testSuites[suiteCounter]) {
326 testSuite=&testSuites[suiteCounter];
328 SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
329 (testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);
331 /* Loop over all test cases */
333 while(testSuite->testCases[testCounter])
335 testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
337 SDLTest_Log(" Test Case %i - %s: %s", testCounter,
338 (testCase->name) ? testCase->name : SDLTest_InvalidNameFormat,
339 (testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
/* Gets a timer value in seconds (process CPU time, per clock()).
   Returns float to keep the existing interface for callers. */
static float
GetClock()
{
    /* Divide in double precision: clock_t easily exceeds float's 24-bit
       mantissa, so the old float-only division lost precision on long runs
       and made elapsed-time subtractions inaccurate. */
    return (float)((double)clock() / (double)CLOCKS_PER_SEC);
}
352 * \brief Execute a test suite using the given run seed and execution key.
354 * The filter string is matched to the suite name (full comparison) to select a single suite,
355 * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
357 * \param testSuites Suites containing the test case.
358 * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
359 * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
360 * \param filter Filter specification. NULL disables. Case sensitive.
361 * \param testIterations Number of iterations to run each test case.
363 * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
365 int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
367 int totalNumberOfTests = 0;
368 int failedNumberOfTests = 0;
371 int iterationCounter;
372 SDLTest_TestSuiteReference *testSuite;
373 SDLTest_TestCaseReference *testCase;
374 const char *runSeed = NULL;
375 char *currentSuiteName;
376 char *currentTestName;
378 float runStartSeconds;
379 float suiteStartSeconds;
380 float testStartSeconds;
382 float suiteEndSeconds;
383 float testEndSeconds;
386 char *suiteFilterName = NULL;
388 char *testFilterName = NULL;
389 SDL_bool forceTestRun = SDL_FALSE;
392 Uint32 totalTestFailedCount = 0;
393 Uint32 totalTestPassedCount = 0;
394 Uint32 totalTestSkippedCount = 0;
395 Uint32 testFailedCount = 0;
396 Uint32 testPassedCount = 0;
397 Uint32 testSkippedCount = 0;
399 char *logFormat = (char *)SDLTest_LogSummaryFormat;
400 SDLTest_TestCaseReference **failedTests;
402 /* Sanitize test iterations */
403 if (testIterations < 1) {
407 /* Generate run see if we don't have one already */
408 if (userRunSeed == NULL || userRunSeed[0] == '\0') {
409 runSeed = SDLTest_GenerateRunSeed(16);
410 if (runSeed == NULL) {
411 SDLTest_LogError("Generating a random seed failed");
415 runSeed = userRunSeed;
419 /* Reset per-run counters */
420 totalTestFailedCount = 0;
421 totalTestPassedCount = 0;
422 totalTestSkippedCount = 0;
424 /* Take time - run start */
425 runStartSeconds = GetClock();
427 /* Log run with fuzzer parameters */
428 SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);
430 /* Count the total number of tests */
432 while (testSuites[suiteCounter]) {
433 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
436 while (testSuite->testCases[testCounter])
439 totalNumberOfTests++;
443 /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
444 failedTests = (SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
445 if (failedTests == NULL) {
446 SDLTest_LogError("Unable to allocate cache for failed tests");
447 SDL_Error(SDL_ENOMEM);
451 /* Initialize filtering */
452 if (filter != NULL && filter[0] != '\0') {
453 /* Loop over all suites to check if we have a filter match */
455 while (testSuites[suiteCounter] && suiteFilter == 0) {
456 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
458 if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
459 /* Matched a suite name */
461 suiteFilterName = testSuite->name;
462 SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
466 /* Within each suite, loop over all test cases to check if we have a filter match */
468 while (testSuite->testCases[testCounter] && testFilter == 0)
470 testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
472 if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
473 /* Matched a test name */
475 suiteFilterName = testSuite->name;
477 testFilterName = testCase->name;
478 SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
484 if (suiteFilter == 0 && testFilter == 0) {
485 SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
486 SDLTest_Log("Exit code: 2");
487 SDL_free(failedTests);
492 /* Loop over all suites */
494 while(testSuites[suiteCounter]) {
495 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
496 currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);
499 /* Filter suite if flag set and we have a name */
500 if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
501 SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
503 SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
508 /* Reset per-suite counters */
511 testSkippedCount = 0;
513 /* Take time - suite start */
514 suiteStartSeconds = GetClock();
516 /* Log suite started */
517 SDLTest_Log("===== Test Suite %i: '%s' started\n",
521 /* Loop over all test cases */
523 while(testSuite->testCases[testCounter])
525 testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
526 currentTestName = (char *)((testCase->name) ? testCase->name : SDLTest_InvalidNameFormat);
529 /* Filter tests if flag set and we have a name */
530 if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
531 SDL_strcmp(testFilterName, testCase->name) != 0) {
533 SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
538 /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
539 if (testFilter == 1 && !testCase->enabled) {
540 SDLTest_Log("Force run of disabled test since test filter was set");
541 forceTestRun = SDL_TRUE;
544 /* Take time - test start */
545 testStartSeconds = GetClock();
547 /* Log test started */
548 SDLTest_Log("----- Test Case %i.%i: '%s' started",
552 if (testCase->description != NULL && testCase->description[0] != '\0') {
553 SDLTest_Log("Test Description: '%s'",
554 (testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
557 /* Loop over all iterations */
558 iterationCounter = 0;
559 while(iterationCounter < testIterations)
563 if (userExecKey != 0) {
564 execKey = userExecKey;
566 execKey = SDLTest_GenerateExecKey((char *)runSeed, testSuite->name, testCase->name, iterationCounter);
569 SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
570 testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);
572 if (testResult == TEST_RESULT_PASSED) {
574 totalTestPassedCount++;
575 } else if (testResult == TEST_RESULT_SKIPPED) {
577 totalTestSkippedCount++;
580 totalTestFailedCount++;
584 /* Take time - test end */
585 testEndSeconds = GetClock();
586 runtime = testEndSeconds - testStartSeconds;
587 if (runtime < 0.0f) runtime = 0.0f;
589 if (testIterations > 1) {
590 /* Log test runtime */
591 SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
592 SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
594 /* Log test runtime */
595 SDLTest_Log("Total Test runtime: %.1f sec", runtime);
598 /* Log final test result */
599 switch (testResult) {
600 case TEST_RESULT_PASSED:
601 SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Passed");
603 case TEST_RESULT_FAILED:
604 SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Failed");
606 case TEST_RESULT_NO_ASSERT:
607 SDLTest_LogError((char *)SDLTest_FinalResultFormat,"Test", currentTestName, "No Asserts");
611 /* Collect failed test case references for repro-step display */
612 if (testResult == TEST_RESULT_FAILED) {
613 failedTests[failedNumberOfTests] = testCase;
614 failedNumberOfTests++;
619 /* Take time - suite end */
620 suiteEndSeconds = GetClock();
621 runtime = suiteEndSeconds - suiteStartSeconds;
622 if (runtime < 0.0f) runtime = 0.0f;
624 /* Log suite runtime */
625 SDLTest_Log("Total Suite runtime: %.1f sec", runtime);
627 /* Log summary and final Suite result */
628 countSum = testPassedCount + testFailedCount + testSkippedCount;
629 if (testFailedCount == 0)
631 SDLTest_Log(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
632 SDLTest_Log((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Passed");
636 SDLTest_LogError(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
637 SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Failed");
643 /* Take time - run end */
644 runEndSeconds = GetClock();
645 runtime = runEndSeconds - runStartSeconds;
646 if (runtime < 0.0f) runtime = 0.0f;
648 /* Log total runtime */
649 SDLTest_Log("Total Run runtime: %.1f sec", runtime);
651 /* Log summary and final run result */
652 countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
653 if (totalTestFailedCount == 0)
656 SDLTest_Log(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
657 SDLTest_Log((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Passed");
662 SDLTest_LogError(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
663 SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Failed");
666 /* Print repro steps for failed tests */
667 if (failedNumberOfTests > 0) {
668 SDLTest_Log("Harness input to repro failures:");
669 for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
670 SDLTest_Log(" --seed %s --filter %s", runSeed, failedTests[testCounter]->name);
673 SDL_free(failedTests);
675 SDLTest_Log("Exit code: %d", runResult);