Fixed compile warnings in test library about format strings not being literals.
Partially fixes Bugzilla #3375.
/*
  Simple DirectMedia Layer
  Copyright (C) 1997-2016 Sam Lantinga <slouken@libsdl.org>

  This software is provided 'as-is', without any express or implied
  warranty. In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely, subject to the following restrictions:

  1. The origin of this software must not be misrepresented; you must not
     claim that you wrote the original software. If you use this software
     in a product, an acknowledgment in the product documentation would be
     appreciated but is not required.
  2. Altered source versions must be plainly marked as such, and must not be
     misrepresented as being the original software.
  3. This notice may not be removed or altered from any source distribution.
*/
#include "SDL_config.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

#include "SDL.h"
#include "SDL_test.h"
/* Invalid test name/description message format */
#define SDLTEST_INVALID_NAME_FORMAT "(Invalid)"

/* Log summary message format */
#define SDLTEST_LOG_SUMMARY_FORMAT "%s Summary: Total=%d Passed=%d Failed=%d Skipped=%d"

/* Final result message format */
#define SDLTEST_FINAL_RESULT_FORMAT ">>> %s '%s': %s\n"
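/*
 * Usage sketch (illustrative, not part of the original file; the arguments
 * shown are hypothetical). Passing these macros as the format argument keeps
 * the format string a compile-time literal, which is what avoids the
 * "format string is not a string literal" warnings this change targets:
 *
 *     SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", 10, 8, 1, 1);
 *     SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", "testFoo", "Passed");
 */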
/*! \brief Timeout for single test case execution */
static Uint32 SDLTest_TestCaseTimeout = 3600;
/**
 * Generates a random run seed string for the harness. The generated seed
 * will contain alphanumeric characters (0-9A-Z).
 *
 * Note: The returned string needs to be deallocated by the caller.
 *
 * \param length The length of the seed string to generate
 *
 * \returns The generated seed string
 */
char *
SDLTest_GenerateRunSeed(const int length)
{
    char *seed = NULL;
    SDLTest_RandomContext randomContext;
    int counter;
    /* Sanity check input */
    if (length <= 0) {
        SDLTest_LogError("The length of the harness seed must be >0.");
        return NULL;
    }

    /* Allocate output buffer */
    seed = (char *)SDL_malloc((length + 1) * sizeof(char));
    if (seed == NULL) {
        SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
        SDL_Error(SDL_ENOMEM);
        return NULL;
    }
    /* Generate a random string of alphanumeric characters */
    SDLTest_RandomInitTime(&randomContext);
    for (counter = 0; counter < length; counter++) {
        unsigned int number = SDLTest_Random(&randomContext);
        char ch = (char) (number % (91 - 48)) + 48;
        if (ch >= 58 && ch <= 64) {
            /* Skip the ASCII punctuation between '9' (57) and 'A' (65) */
            ch = 65;
        }
        seed[counter] = ch;
    }

    seed[length] = '\0';

    return seed;
}
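/*
 * Usage sketch (illustrative only, not part of the original file). The
 * returned buffer is owned by the caller and must be released with SDL_free:
 *
 *     char *seed = SDLTest_GenerateRunSeed(16);
 *     if (seed != NULL) {
 *         SDLTest_Log("Generated run seed: %s", seed);
 *         SDL_free(seed);
 *     }
 */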
/**
 * Generates an execution key for the fuzzer.
 *
 * \param runSeed The run seed to use
 * \param suiteName The name of the test suite
 * \param testName The name of the test
 * \param iteration The iteration count
 *
 * \returns The generated execution key to initialize the fuzzer with.
 */
Uint64
SDLTest_GenerateExecKey(char *runSeed, char *suiteName, char *testName, int iteration)
{
    SDLTest_Md5Context md5Context;
    Uint64 *keys;
    char iterationString[16];
    Uint32 runSeedLength;
    Uint32 suiteNameLength;
    Uint32 testNameLength;
    Uint32 iterationStringLength;
    Uint32 entireStringLength;
    char *buffer = NULL;
    if (runSeed == NULL || runSeed[0] == '\0') {
        SDLTest_LogError("Invalid runSeed string.");
        return -1;
    }

    if (suiteName == NULL || suiteName[0] == '\0') {
        SDLTest_LogError("Invalid suiteName string.");
        return -1;
    }

    if (testName == NULL || testName[0] == '\0') {
        SDLTest_LogError("Invalid testName string.");
        return -1;
    }

    if (iteration <= 0) {
        SDLTest_LogError("Invalid iteration count.");
        return -1;
    }
    /* Convert iteration number into a string */
    SDL_memset(iterationString, 0, sizeof(iterationString));
    SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);
    /* Combine the parameters into single string */
    runSeedLength = SDL_strlen(runSeed);
    suiteNameLength = SDL_strlen(suiteName);
    testNameLength = SDL_strlen(testName);
    iterationStringLength = SDL_strlen(iterationString);
    entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
    buffer = (char *)SDL_malloc(entireStringLength);
    if (buffer == NULL) {
        SDLTest_LogError("Failed to allocate buffer for execKey generation.");
        SDL_Error(SDL_ENOMEM);
        return 0;
    }
    SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);
    /* Hash string and use half of the digest as 64bit exec key */
    SDLTest_Md5Init(&md5Context);
    SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, entireStringLength);
    SDLTest_Md5Final(&md5Context);
    SDL_free(buffer);
    keys = (Uint64 *)md5Context.digest;

    return keys[0];
}
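/*
 * Usage sketch (illustrative only; the seed and names below are
 * hypothetical). The key is a pure function of its inputs, so the same seed,
 * suite/test names, and iteration always reproduce the same fuzzer stream:
 *
 *     Uint64 key = SDLTest_GenerateExecKey("GFKSB26K6FE36HF8", "Rect", "testFoo", 1);
 *     SDLTest_FuzzerInit(key);
 */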
/**
 * \brief Set timeout handler for test.
 *
 * Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't done so before.
 *
 * \param timeout Timeout interval in seconds.
 * \param callback Function that will be called after timeout has elapsed.
 *
 * \return Timer id or -1 on failure.
 */
static SDL_TimerID
SDLTest_SetTestTimeout(int timeout, void (*callback)())
{
    Uint32 timeoutInMilliseconds;
    SDL_TimerID timerID;

    if (callback == NULL) {
        SDLTest_LogError("Timeout callback can't be NULL");
        return -1;
    }

    if (timeout < 0) {
        SDLTest_LogError("Timeout value must be bigger than zero.");
        return -1;
    }
    /* Init SDL timer if not initialized before */
    if (SDL_WasInit(SDL_INIT_TIMER) == 0) {
        if (SDL_InitSubSystem(SDL_INIT_TIMER)) {
            SDLTest_LogError("Failed to init timer subsystem: %s", SDL_GetError());
            return -1;
        }
    }
    /* Set timer */
    timeoutInMilliseconds = timeout * 1000;
    timerID = SDL_AddTimer(timeoutInMilliseconds, (SDL_TimerCallback)callback, 0x0);
    if (timerID == 0) {
        SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
        return -1;
    }

    return timerID;
}
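/*
 * Usage sketch (illustrative only): arm a 60 second watchdog that calls a
 * bail-out handler, then cancel it once the guarded work completes:
 *
 *     SDL_TimerID t = SDLTest_SetTestTimeout(60, SDLTest_BailOut);
 *     if (t != -1) {
 *         ... run the guarded work ...
 *         SDL_RemoveTimer(t);
 *     }
 */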
/**
 * \brief Timeout handler. Aborts test run and exits harness process.
 */
static void
SDLTest_BailOut()
{
    SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
    exit(TEST_ABORTED); /* bail out from the test */
}
/**
 * \brief Execute a test using the given execution key.
 *
 * \param testSuite Suite containing the test case.
 * \param testCase Case to execute.
 * \param execKey Execution key for the fuzzer.
 * \param forceTestRun Force test to run even if test was disabled in suite.
 *
 * \returns Test case result.
 */
static int
SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference *testCase, Uint64 execKey, SDL_bool forceTestRun)
{
    SDL_TimerID timer = 0;
    int testCaseResult = 0;
    int testResult = 0;
    int fuzzerCount;
    if (testSuite == NULL || testCase == NULL || testSuite->name == NULL || testCase->name == NULL)
    {
        SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
        return TEST_RESULT_SETUP_FAILURE;
    }

    if (!testCase->enabled && forceTestRun == SDL_FALSE)
    {
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
        return TEST_RESULT_SKIPPED;
    }
    /* Initialize fuzzer */
    SDLTest_FuzzerInit(execKey);

    /* Reset assert tracker */
    SDLTest_ResetAssertSummary();

    /* Set timeout timer */
    timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
    /* Maybe run suite initializer function */
    if (testSuite->testSetUp) {
        testSuite->testSetUp(0x0);
        if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
            SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite Setup", testSuite->name, "Failed");
            return TEST_RESULT_SETUP_FAILURE;
        }
    }
    /* Run test case function */
    testCaseResult = testCase->testCase(0x0);
    /* Convert test execution result into harness result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        testResult = TEST_RESULT_SKIPPED;
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else {
        /* Perform failure analysis based on asserts */
        testResult = SDLTest_AssertSummaryToTestResult();
    }
    /* Maybe run suite cleanup function (ignore failed asserts) */
    if (testSuite->testTearDown) {
        testSuite->testTearDown(0x0);
    }

    /* Cancel timeout timer */
    if (timer) {
        SDL_RemoveTimer(timer);
    }
    /* Report on asserts and fuzzer usage */
    fuzzerCount = SDLTest_GetFuzzerInvocationCount();
    if (fuzzerCount > 0) {
        SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
    }
    /* Final log based on test execution result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Programmatically)");
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (test started, but did not return TEST_COMPLETED)");
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (Aborted)");
    } else {
        SDLTest_LogAssertSummary();
    }

    return testResult;
}
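/*
 * Usage sketch (illustrative only; 'suite' and 'execKey' are assumed to be
 * set up by the caller). Runs the first case of a suite once:
 *
 *     SDLTest_TestCaseReference *tc = (SDLTest_TestCaseReference *)suite->testCases[0];
 *     int result = SDLTest_RunTest(suite, tc, execKey, SDL_FALSE);
 *     if (result == TEST_RESULT_PASSED) { ... }
 */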
/* Prints summary of all suites/tests contained in the given reference */
void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
{
    int suiteCounter;
    int testCounter;
    SDLTest_TestSuiteReference *testSuite;
    SDLTest_TestCaseReference *testCase;

    /* Loop over all suites. Note: '&testSuites[suiteCounter]' can never be
       NULL, so this condition is always true; termination relies on the
       caller passing a properly bounded suite array. */
    suiteCounter = 0;
    while (&testSuites[suiteCounter]) {
        testSuite = &testSuites[suiteCounter];
        suiteCounter++;
        SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
            (testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);

        /* Loop over all test cases */
        testCounter = 0;
        while (testSuite->testCases[testCounter])
        {
            testCase = (SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
            testCounter++;
            SDLTest_Log("  Test Case %i - %s: %s", testCounter,
                (testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT,
                (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
        }
    }
}
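/*
 * Usage sketch (illustrative only; 'fooTest' is a hypothetical test case
 * reference). The summary walks 'testCases' until it hits a NULL entry, so
 * suites must keep that array NULL-terminated:
 *
 *     static const SDLTest_TestCaseReference *suiteTests[] = { &fooTest, NULL };
 */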
/* Gets a timer value in seconds */
float GetClock()
{
    float currentClock = (float)clock();
    return currentClock / (float)CLOCKS_PER_SEC;
}
/**
 * \brief Execute a test suite using the given run seed and execution key.
 *
 * The filter string is matched to the suite name (full comparison) to select a single suite,
 * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
 *
 * \param testSuites Suites containing the test case.
 * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
 * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
 * \param filter Filter specification. NULL disables. Case sensitive.
 * \param testIterations Number of iterations to run each test case.
 *
 * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
 */
int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
{
    int totalNumberOfTests = 0;
    int failedNumberOfTests = 0;
    int suiteCounter;
    int testCounter;
    int iterationCounter;
    SDLTest_TestSuiteReference *testSuite;
    SDLTest_TestCaseReference *testCase;
    const char *runSeed = NULL;
    char *currentSuiteName;
    char *currentTestName;
    Uint64 execKey;
    float runStartSeconds;
    float suiteStartSeconds;
    float testStartSeconds;
    float runEndSeconds;
    float suiteEndSeconds;
    float testEndSeconds;
    float runtime;
    int suiteFilter = 0;
    char *suiteFilterName = NULL;
    int testFilter = 0;
    char *testFilterName = NULL;
    SDL_bool forceTestRun = SDL_FALSE;
    int testResult = 0;
    int runResult = 0;
    Uint32 totalTestFailedCount = 0;
    Uint32 totalTestPassedCount = 0;
    Uint32 totalTestSkippedCount = 0;
    Uint32 testFailedCount = 0;
    Uint32 testPassedCount = 0;
    Uint32 testSkippedCount = 0;
    Uint32 countSum = 0;
    SDLTest_TestCaseReference **failedTests;
    /* Sanitize test iterations */
    if (testIterations < 1) {
        testIterations = 1;
    }

    /* Generate run seed if we don't have one already */
    if (userRunSeed == NULL || userRunSeed[0] == '\0') {
        runSeed = SDLTest_GenerateRunSeed(16);
        if (runSeed == NULL) {
            SDLTest_LogError("Generating a random seed failed");
            return 2;
        }
    } else {
        runSeed = userRunSeed;
    }
    /* Reset per-run counters */
    totalTestFailedCount = 0;
    totalTestPassedCount = 0;
    totalTestSkippedCount = 0;

    /* Take time - run start */
    runStartSeconds = GetClock();

    /* Log run with fuzzer parameters */
    SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);
    /* Count the total number of tests */
    suiteCounter = 0;
    while (testSuites[suiteCounter]) {
        testSuite = (SDLTest_TestSuiteReference *)testSuites[suiteCounter];
        suiteCounter++;
        testCounter = 0;
        while (testSuite->testCases[testCounter])
        {
            testCounter++;
            totalNumberOfTests++;
        }
    }
    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
    failedTests = (SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
    if (failedTests == NULL) {
        SDLTest_LogError("Unable to allocate cache for failed tests");
        SDL_Error(SDL_ENOMEM);
        return -1;
    }
    /* Initialize filtering */
    if (filter != NULL && filter[0] != '\0') {
        /* Loop over all suites to check if we have a filter match */
        suiteCounter = 0;
        while (testSuites[suiteCounter] && suiteFilter == 0) {
            testSuite = (SDLTest_TestSuiteReference *)testSuites[suiteCounter];
            suiteCounter++;
            if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
                /* Matched a suite name */
                suiteFilter = 1;
                suiteFilterName = testSuite->name;
                SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
                break;
            }

            /* Within each suite, loop over all test cases to check if we have a filter match */
            testCounter = 0;
            while (testSuite->testCases[testCounter] && testFilter == 0)
            {
                testCase = (SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
                testCounter++;
                if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
                    /* Matched a test name */
                    suiteFilter = 1;
                    suiteFilterName = testSuite->name;
                    testFilter = 1;
                    testFilterName = testCase->name;
                    SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
                    break;
                }
            }
        }

        if (suiteFilter == 0 && testFilter == 0) {
            SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
            SDLTest_Log("Exit code: 2");
            SDL_free(failedTests);
            return 2;
        }
    }
    /* Loop over all suites */
    suiteCounter = 0;
    while (testSuites[suiteCounter]) {
        testSuite = (SDLTest_TestSuiteReference *)testSuites[suiteCounter];
        currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
        suiteCounter++;

        /* Filter suite if flag set and we have a name */
        if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
            SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
            /* Skip suite */
            SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
                suiteCounter,
                currentSuiteName);
        } else {

            /* Reset per-suite counters */
            testFailedCount = 0;
            testPassedCount = 0;
            testSkippedCount = 0;

            /* Take time - suite start */
            suiteStartSeconds = GetClock();

            /* Log suite started */
            SDLTest_Log("===== Test Suite %i: '%s' started\n",
                suiteCounter,
                currentSuiteName);

            /* Loop over all test cases */
            testCounter = 0;
            while (testSuite->testCases[testCounter])
            {
                testCase = (SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
                currentTestName = (char *)((testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
                testCounter++;

                /* Filter tests if flag set and we have a name */
                if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
                    SDL_strcmp(testFilterName, testCase->name) != 0) {
                    /* Skip test */
                    SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
                        suiteCounter,
                        testCounter,
                        currentTestName);
                } else {

                    /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                    if (testFilter == 1 && !testCase->enabled) {
                        SDLTest_Log("Force run of disabled test since test filter was set");
                        forceTestRun = SDL_TRUE;
                    }
                    /* Take time - test start */
                    testStartSeconds = GetClock();

                    /* Log test started */
                    SDLTest_Log("----- Test Case %i.%i: '%s' started",
                        suiteCounter,
                        testCounter,
                        currentTestName);
                    if (testCase->description != NULL && testCase->description[0] != '\0') {
                        SDLTest_Log("Test Description: '%s'",
                            (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
                    }
                    /* Loop over all iterations */
                    iterationCounter = 0;
                    while (iterationCounter < testIterations)
                    {
                        /* Generate unique test case execution key */
                        if (userExecKey != 0) {
                            execKey = userExecKey;
                        } else {
                            execKey = SDLTest_GenerateExecKey((char *)runSeed, testSuite->name, testCase->name, iterationCounter);
                        }

                        /* Execute test */
                        SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
                        testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);
                        iterationCounter++;

                        if (testResult == TEST_RESULT_PASSED) {
                            testPassedCount++;
                            totalTestPassedCount++;
                        } else if (testResult == TEST_RESULT_SKIPPED) {
                            testSkippedCount++;
                            totalTestSkippedCount++;
                        } else {
                            testFailedCount++;
                            totalTestFailedCount++;
                        }
                    }
                    /* Take time - test end */
                    testEndSeconds = GetClock();
                    runtime = testEndSeconds - testStartSeconds;
                    if (runtime < 0.0f) runtime = 0.0f;

                    if (testIterations > 1) {
                        /* Log test runtime */
                        SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
                        SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
                    } else {
                        /* Log test runtime */
                        SDLTest_Log("Total Test runtime: %.1f sec", runtime);
                    }
                    /* Log final test result */
                    switch (testResult) {
                    case TEST_RESULT_PASSED:
                        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Passed");
                        break;
                    case TEST_RESULT_FAILED:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Failed");
                        break;
                    case TEST_RESULT_NO_ASSERT:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "No Asserts");
                        break;
                    }
                    /* Collect failed test case references for repro-step display */
                    if (testResult == TEST_RESULT_FAILED) {
                        failedTests[failedNumberOfTests] = testCase;
                        failedNumberOfTests++;
                    }
                }
            }
            /* Take time - suite end */
            suiteEndSeconds = GetClock();
            runtime = suiteEndSeconds - suiteStartSeconds;
            if (runtime < 0.0f) runtime = 0.0f;

            /* Log suite runtime */
            SDLTest_Log("Total Suite runtime: %.1f sec", runtime);

            /* Log summary and final Suite result */
            countSum = testPassedCount + testFailedCount + testSkippedCount;
            if (testFailedCount == 0)
            {
                SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Passed");
            }
            else
            {
                SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Failed");
            }
        }
    }
    /* Take time - run end */
    runEndSeconds = GetClock();
    runtime = runEndSeconds - runStartSeconds;
    if (runtime < 0.0f) runtime = 0.0f;

    /* Log total runtime */
    SDLTest_Log("Total Run runtime: %.1f sec", runtime);

    /* Log summary and final run result */
    countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
    if (totalTestFailedCount == 0)
    {
        runResult = 0;
        SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Passed");
    }
    else
    {
        runResult = 1;
        SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Failed");
    }
    /* Print repro steps for failed tests */
    if (failedNumberOfTests > 0) {
        SDLTest_Log("Harness input to repro failures:");
        for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
            SDLTest_Log(" --seed %s --filter %s", runSeed, failedTests[testCounter]->name);
        }
    }
    SDL_free(failedTests);

    SDLTest_Log("Exit code: %d", runResult);
    return runResult;
}
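/*
 * Usage sketch (illustrative only; every identifier below is hypothetical
 * and would normally live in the calling test program, not in this file):
 *
 *     static int testFoo(void *arg)
 *     {
 *         SDLTest_AssertCheck(1 + 1 == 2, "Check basic arithmetic");
 *         return TEST_COMPLETED;
 *     }
 *
 *     static const SDLTest_TestCaseReference fooTest =
 *         { (SDLTest_TestCaseFp)testFoo, "testFoo", "Checks basic arithmetic", TEST_ENABLED };
 *     static const SDLTest_TestCaseReference *fooTests[] = { &fooTest, NULL };
 *     static SDLTest_TestSuiteReference fooSuite = { "Foo", NULL, fooTests, NULL };
 *     static SDLTest_TestSuiteReference *suites[] = { &fooSuite, NULL };
 *
 *     Autogenerated seed and exec keys, no filter, one iteration per test:
 *
 *     int result = SDLTest_RunSuites(suites, NULL, 0, NULL, 1);
 */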