1.1 --- a/src/test/SDL_test_harness.c Thu Dec 20 08:50:36 2012 -0800
1.2 +++ b/src/test/SDL_test_harness.c Sat Dec 22 16:06:55 2012 -0800
1.3 @@ -1,22 +1,22 @@
1.4 /*
1.5 - Simple DirectMedia Layer
1.6 - Copyright (C) 1997-2012 Sam Lantinga <slouken@libsdl.org>
1.7 +Simple DirectMedia Layer
1.8 +Copyright (C) 1997-2012 Sam Lantinga <slouken@libsdl.org>
1.9
1.10 - This software is provided 'as-is', without any express or implied
1.11 - warranty. In no event will the authors be held liable for any damages
1.12 - arising from the use of this software.
1.13 +This software is provided 'as-is', without any express or implied
1.14 +warranty. In no event will the authors be held liable for any damages
1.15 +arising from the use of this software.
1.16
1.17 - Permission is granted to anyone to use this software for any purpose,
1.18 - including commercial applications, and to alter it and redistribute it
1.19 - freely, subject to the following restrictions:
1.20 +Permission is granted to anyone to use this software for any purpose,
1.21 +including commercial applications, and to alter it and redistribute it
1.22 +freely, subject to the following restrictions:
1.23
1.24 - 1. The origin of this software must not be misrepresented; you must not
1.25 - claim that you wrote the original software. If you use this software
1.26 - in a product, an acknowledgment in the product documentation would be
1.27 - appreciated but is not required.
1.28 - 2. Altered source versions must be plainly marked as such, and must not be
1.29 - misrepresented as being the original software.
1.30 - 3. This notice may not be removed or altered from any source distribution.
1.31 +1. The origin of this software must not be misrepresented; you must not
1.32 +claim that you wrote the original software. If you use this software
1.33 +in a product, an acknowledgment in the product documentation would be
1.34 +appreciated but is not required.
1.35 +2. Altered source versions must be plainly marked as such, and must not be
1.36 +misrepresented as being the original software.
1.37 +3. This notice may not be removed or altered from any source distribution.
1.38 */
1.39
1.40 #include "SDL_config.h"
1.41 @@ -41,17 +41,17 @@
1.42 static Uint32 SDLTest_TestCaseTimeout = 3600;
1.43
1.44 /**
1.45 - * Generates a random run seed string for the harness. The generated seed
1.46 - * will contain alphanumeric characters (0-9A-Z).
1.47 - *
1.48 - * Note: The returned string needs to be deallocated by the caller.
1.49 - *
1.50 - * \param length The length of the seed string to generate
1.51 - *
1.52 - * \returns The generated seed string
1.53 - */
1.54 +* Generates a random run seed string for the harness. The generated seed
1.55 +* will contain alphanumeric characters (0-9A-Z).
1.56 +*
1.57 +* Note: The returned string needs to be deallocated by the caller.
1.58 +*
1.59 +* \param length The length of the seed string to generate
1.60 +*
1.61 +* \returns The generated seed string
1.62 +*/
1.63 char *
1.64 -SDLTest_GenerateRunSeed(const int length)
1.65 + SDLTest_GenerateRunSeed(const int length)
1.66 {
1.67 char *seed = NULL;
1.68 SDLTest_RandomContext randomContext;
1.69 @@ -86,18 +86,18 @@
1.70 }
1.71
1.72 /**
1.73 - * Generates an execution key for the fuzzer.
1.74 - *
1.75 - * \param runSeed The run seed to use
1.76 - * \param suiteName The name of the test suite
1.77 - * \param testName The name of the test
1.78 - * \param iteration The iteration count
1.79 - *
1.80 - * \returns The generated execution key to initialize the fuzzer with.
1.81 - *
1.82 - */
1.83 +* Generates an execution key for the fuzzer.
1.84 +*
1.85 +* \param runSeed The run seed to use
1.86 +* \param suiteName The name of the test suite
1.87 +* \param testName The name of the test
1.88 +* \param iteration The iteration count
1.89 +*
1.90 +* \returns The generated execution key to initialize the fuzzer with.
1.91 +*
1.92 +*/
1.93 Uint64
1.94 -SDLTest_GenerateExecKey(char *runSeed, char *suiteName, char *testName, int iteration)
1.95 + SDLTest_GenerateExecKey(char *runSeed, char *suiteName, char *testName, int iteration)
1.96 {
1.97 SDLTest_Md5Context md5Context;
1.98 Uint64 *keys;
1.99 @@ -157,17 +157,17 @@
1.100 }
1.101
1.102 /**
1.103 - * \brief Set timeout handler for test.
1.104 - *
1.105 - * Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't done so before.
1.106 - *
1.107 - * \param timeout Timeout interval in seconds.
1.108 - * \param callback Function that will be called after timeout has elapsed.
1.109 - *
1.110 - * \return Timer id or -1 on failure.
1.111 - */
1.112 +* \brief Set timeout handler for test.
1.113 +*
1.114 +* Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't already done.
1.115 +*
1.116 +* \param timeout Timeout interval in seconds.
1.117 +* \param callback Function that will be called after timeout has elapsed.
1.118 +*
1.119 +* \return Timer id or -1 on failure.
1.120 +*/
1.121 SDL_TimerID
1.122 -SDLTest_SetTestTimeout(int timeout, void (*callback)())
1.123 + SDLTest_SetTestTimeout(int timeout, void (*callback)())
1.124 {
1.125 Uint32 timeoutInMilliseconds;
1.126 SDL_TimerID timerID;
1.127 @@ -201,24 +201,27 @@
1.128 return timerID;
1.129 }
1.130
1.131 +/**
1.132 +* \brief Timeout handler. Aborts test run and exits harness process.
1.133 +*/
1.134 void
1.135 -SDLTest_BailOut()
1.136 + SDLTest_BailOut()
1.137 {
1.138 SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
1.139 exit(TEST_ABORTED); // bail out from the test
1.140 }
1.141
1.142 /**
1.143 - * \brief Execute a test using the given execution key.
1.144 - *
1.145 - * \param testSuite Suite containing the test case.
1.146 - * \param testCase Case to execute.
1.147 - * \param execKey Execution key for the fuzzer.
1.148 - *
1.149 - * \returns Test case result.
1.150 - */
1.151 +* \brief Execute a test using the given execution key.
1.152 +*
1.153 +* \param testSuite Suite containing the test case.
1.154 +* \param testCase Case to execute.
1.155 +* \param execKey Execution key for the fuzzer.
1.156 +*
1.157 +* \returns Test case result.
1.158 +*/
1.159 int
1.160 -SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference *testCase, Uint64 execKey)
1.161 + SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference *testCase, Uint64 execKey)
1.162 {
1.163 SDL_TimerID timer = 0;
1.164 int testResult = 0;
1.165 @@ -232,11 +235,12 @@
1.166
1.167 if (!testCase->enabled)
1.168 {
1.169 - SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", testCase->name, "Skipped");
1.170 + SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", testCase->name, "Skipped (Disabled)");
1.171 return TEST_RESULT_SKIPPED;
1.172 }
1.173
1.174 - // Initialize fuzzer
1.175 +
1.176 + // Initialize fuzzer
1.177 SDLTest_FuzzerInit(execKey);
1.178
1.179 // Reset assert tracker
1.180 @@ -315,17 +319,21 @@
1.181 }
1.182
1.183 /**
1.184 - * \brief Execute a test suite using the given run seend and execution key.
1.185 - *
1.186 - * \param testSuites Suites containing the test case.
1.187 - * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
1.188 - * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
1.189 - * \param testIterations Number of iterations to run each test case.
1.190 - *
1.191 - * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
1.192 - */
1.193 +* \brief Execute a test suite using the given run seed and execution key.
1.194 +*
1.195 +* The filter string is matched to the suite name (full comparison) to select a single suite,
1.196 +* or if no suite matches, it is matched to the test names (full comparison) to select a single test.
1.197 +*
1.198 +* \param testSuites Suites containing the test case.
1.199 +* \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
1.200 +* \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
1.201 +* \param filter Filter specification. NULL disables. Case sensitive.
1.202 +* \param testIterations Number of iterations to run each test case.
1.203 +*
1.204 +* \returns Test run result; 0 when all tests passed, 1 if any tests failed.
1.205 +*/
1.206 int
1.207 -SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, Uint64 userExecKey, int testIterations)
1.208 + SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, Uint64 userExecKey, char *filter, int testIterations)
1.209 {
1.210 int suiteCounter;
1.211 int testCounter;
1.212 @@ -343,6 +351,10 @@
1.213 float suiteEndSeconds;
1.214 float testEndSeconds;
1.215 float runtime;
1.216 + int suiteFilter = 0;
1.217 + char *suiteFilterName = NULL;
1.218 + int testFilter = 0;
1.219 + char *testFilterName = NULL;
1.220 int testResult = 0;
1.221 int runResult = 0;
1.222 Uint32 totalTestFailedCount = 0;
1.223 @@ -370,6 +382,7 @@
1.224 runSeed = userRunSeed;
1.225 }
1.226
1.227 +
1.228 // Reset per-run counters
1.229 totalTestFailedCount = 0;
1.230 totalTestPassedCount = 0;
1.231 @@ -381,121 +394,184 @@
1.232 // Log run with fuzzer parameters
1.233 SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);
1.234
1.235 + // Initialize filtering
1.236 + if (filter != NULL && SDL_strlen(filter) > 0) {
1.237 + /* Loop over all suites to check if we have a filter match */
1.238 + suiteCounter = 0;
1.239 + while (testSuites[suiteCounter] && suiteFilter == 0) {
1.240 + testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
1.241 + suiteCounter++;
1.242 + if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
1.243 + /* Matched a suite name */
1.244 + suiteFilter = 1;
1.245 + suiteFilterName = testSuite->name;
1.246 + SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
1.247 + break;
1.248 + }
1.249 +
1.250 + /* Within each suite, loop over all test cases to check if we have a filter match */
1.251 + testCounter = 0;
1.252 + while (testSuite->testCases[testCounter] && testFilter == 0)
1.253 + {
1.254 + testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
1.255 + testCounter++;
1.256 + if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
1.257 + /* Matched a test name */
1.258 + suiteFilter = 1;
1.259 + suiteFilterName = testSuite->name;
1.260 + testFilter = 1;
1.261 + testFilterName = testCase->name;
1.262 + SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
1.263 + break;
1.264 + }
1.265 + }
1.266 + }
1.267 +
1.268 + if (suiteFilter == 0 && testFilter == 0) {
1.269 + SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
1.270 + SDLTest_Log("Exit code: 2");
1.271 + return 2;
1.272 + }
1.273 + }
1.274 +
1.275 // Loop over all suites
1.276 suiteCounter = 0;
1.277 while(testSuites[suiteCounter]) {
1.278 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
1.279 + currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);
1.280 suiteCounter++;
1.281
1.282 - // Reset per-suite counters
1.283 - testFailedCount = 0;
1.284 - testPassedCount = 0;
1.285 - testSkippedCount = 0;
1.286 + // Filter suite if flag set and we have a name
1.287 + if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
1.288 + SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
1.289 + // Skip suite
1.290 + SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
1.291 + suiteCounter,
1.292 + currentSuiteName);
1.293 + } else {
1.294
1.295 - // Take time - suite start
1.296 - suiteStartSeconds = GetClock();
1.297 + // Reset per-suite counters
1.298 + testFailedCount = 0;
1.299 + testPassedCount = 0;
1.300 + testSkippedCount = 0;
1.301 +
1.302 + // Take time - suite start
1.303 + suiteStartSeconds = GetClock();
1.304 +
1.305 + // Log suite started
1.306 + SDLTest_Log("===== Test Suite %i: '%s' started\n",
1.307 + suiteCounter,
1.308 + currentSuiteName);
1.309
1.310 - // Log suite started
1.311 - currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);
1.312 - SDLTest_Log("===== Test Suite %i: '%s' started\n",
1.313 - suiteCounter,
1.314 - currentSuiteName);
1.315 + // Loop over all test cases
1.316 + testCounter = 0;
1.317 + while(testSuite->testCases[testCounter])
1.318 + {
1.319 + testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
1.320 + currentTestName = (char *)((testCase->name) ? testCase->name : SDLTest_InvalidNameFormat);
1.321 + testCounter++;
1.322
1.323 - // Loop over all test cases
1.324 - testCounter = 0;
1.325 - while(testSuite->testCases[testCounter])
1.326 - {
1.327 - testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
1.328 - testCounter++;
1.329 -
1.330 - // Take time - test start
1.331 - testStartSeconds = GetClock();
1.332 + // Filter tests if flag set and we have a name
1.333 + if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
1.334 + SDL_strcmp(testFilterName, testCase->name) != 0) {
1.335 + // Skip test
1.336 + SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
1.337 + suiteCounter,
1.338 + testCounter,
1.339 + currentTestName);
1.340 + } else {
1.341 +
1.342 + // Take time - test start
1.343 + testStartSeconds = GetClock();
1.344 +
1.345 + // Log test started
1.346 + SDLTest_Log("----- Test Case %i.%i: '%s' started",
1.347 + suiteCounter,
1.348 + testCounter,
1.349 + currentTestName);
1.350 + if (testCase->description != NULL && strlen(testCase->description)>0) {
1.351 + SDLTest_Log("Test Description: '%s'",
1.352 + (testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
1.353 + }
1.354
1.355 - // Log test started
1.356 - currentTestName = (char *)((testCase->name) ? testCase->name : SDLTest_InvalidNameFormat);
1.357 - SDLTest_Log("----- Test Case %i.%i: '%s' started",
1.358 - suiteCounter,
1.359 - testCounter,
1.360 - currentTestName);
1.361 - if (testCase->description != NULL && strlen(testCase->description)>0) {
1.362 - SDLTest_Log("Test Description: '%s'",
1.363 - (testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
1.364 - }
1.365 -
1.366 - // Loop over all iterations
1.367 - iterationCounter = 0;
1.368 - while(iterationCounter < testIterations)
1.369 - {
1.370 - iterationCounter++;
1.371 + // Loop over all iterations
1.372 + iterationCounter = 0;
1.373 + while(iterationCounter < testIterations)
1.374 + {
1.375 + iterationCounter++;
1.376 +
1.377 + if (userExecKey != 0) {
1.378 + execKey = userExecKey;
1.379 + } else {
1.380 + execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
1.381 + }
1.382 +
1.383 + SDLTest_Log("Test Iteration %i: execKey %llu", iterationCounter, execKey);
1.384 + testResult = SDLTest_RunTest(testSuite, testCase, execKey);
1.385 +
1.386 + if (testResult == TEST_RESULT_PASSED) {
1.387 + testPassedCount++;
1.388 + totalTestPassedCount++;
1.389 + } else if (testResult == TEST_RESULT_SKIPPED) {
1.390 + testSkippedCount++;
1.391 + totalTestSkippedCount++;
1.392 + } else {
1.393 + testFailedCount++;
1.394 + totalTestFailedCount++;
1.395 + }
1.396 + }
1.397
1.398 - if (userExecKey != 0) {
1.399 - execKey = userExecKey;
1.400 - } else {
1.401 - execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
1.402 - }
1.403 + // Take time - test end
1.404 + testEndSeconds = GetClock();
1.405 + runtime = testEndSeconds - testStartSeconds;
1.406 + if (runtime < 0.0f) runtime = 0.0f;
1.407
1.408 - SDLTest_Log("Test Iteration %i: execKey %llu", iterationCounter, execKey);
1.409 - testResult = SDLTest_RunTest(testSuite, testCase, execKey);
1.410 + if (testIterations > 1) {
1.411 + // Log test runtime
1.412 + SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
1.413 + SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
1.414 + } else {
1.415 + // Log test runtime
1.416 + SDLTest_Log("Total Test runtime: %.1f sec", runtime);
1.417 + }
1.418
1.419 - if (testResult == TEST_RESULT_PASSED) {
1.420 - testPassedCount++;
1.421 - totalTestPassedCount++;
1.422 - } else if (testResult == TEST_RESULT_SKIPPED) {
1.423 - testSkippedCount++;
1.424 - totalTestSkippedCount++;
1.425 - } else {
1.426 - testFailedCount++;
1.427 - totalTestFailedCount++;
1.428 + // Log final test result
1.429 + switch (testResult) {
1.430 + case TEST_RESULT_PASSED:
1.431 + SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Passed");
1.432 + break;
1.433 + case TEST_RESULT_FAILED:
1.434 + SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Failed");
1.435 + break;
1.436 + case TEST_RESULT_NO_ASSERT:
1.437 + SDLTest_LogError((char *)SDLTest_FinalResultFormat,"Test", currentTestName, "No Asserts");
1.438 + break;
1.439 + }
1.440 +
1.441 }
1.442 }
1.443
1.444 - // Take time - test end
1.445 - testEndSeconds = GetClock();
1.446 - runtime = testEndSeconds - testStartSeconds;
1.447 + // Take time - suite end
1.448 + suiteEndSeconds = GetClock();
1.449 + runtime = suiteEndSeconds - suiteStartSeconds;
1.450 if (runtime < 0.0f) runtime = 0.0f;
1.451
1.452 - if (testIterations > 1) {
1.453 - // Log test runtime
1.454 - SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
1.455 - SDLTest_Log("Test runtime: %.5f sec", runtime / (float)testIterations);
1.456 - } else {
1.457 - // Log test runtime
1.458 - SDLTest_Log("Test runtime: %.1f sec", runtime);
1.459 - }
1.460 + // Log suite runtime
1.461 + SDLTest_Log("Total Suite runtime: %.1f sec", runtime);
1.462
1.463 - // Log final test result
1.464 - switch (testResult) {
1.465 - case TEST_RESULT_PASSED:
1.466 - SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Passed");
1.467 - break;
1.468 - case TEST_RESULT_FAILED:
1.469 - SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Failed");
1.470 - break;
1.471 - case TEST_RESULT_NO_ASSERT:
1.472 - SDLTest_LogError((char *)SDLTest_FinalResultFormat,"Test", currentTestName, "No Asserts");
1.473 - break;
1.474 + // Log summary and final Suite result
1.475 + countSum = testPassedCount + testFailedCount + testSkippedCount;
1.476 + if (testFailedCount == 0)
1.477 + {
1.478 + SDLTest_Log(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
1.479 + SDLTest_Log((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Passed");
1.480 + }
1.481 + else
1.482 + {
1.483 + SDLTest_LogError(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
1.484 + SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Failed");
1.485 }
1.486 - }
1.487
1.488 - // Take time - suite end
1.489 - suiteEndSeconds = GetClock();
1.490 - runtime = suiteEndSeconds - suiteStartSeconds;
1.491 - if (runtime < 0.0f) runtime = 0.0f;
1.492 -
1.493 - // Log suite runtime
1.494 - SDLTest_Log("Suite runtime: %.1f sec", runtime);
1.495 -
1.496 - // Log summary and final Suite result
1.497 - countSum = testPassedCount + testFailedCount + testSkippedCount;
1.498 - if (testFailedCount == 0)
1.499 - {
1.500 - SDLTest_Log(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
1.501 - SDLTest_Log((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Passed");
1.502 - }
1.503 - else
1.504 - {
1.505 - SDLTest_LogError(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
1.506 - SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Failed");
1.507 }
1.508 }
1.509
1.510 @@ -505,11 +581,11 @@
1.511 if (runtime < 0.0f) runtime = 0.0f;
1.512
1.513 // Log total runtime
1.514 - SDLTest_Log("Total runtime: %.1f sec", runtime);
1.515 + SDLTest_Log("Total Run runtime: %.1f sec", runtime);
1.516
1.517 // Log summary and final run result
1.518 countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
1.519 - if (testFailedCount == 0)
1.520 + if (totalTestFailedCount == 0)
1.521 {
1.522 runResult = 0;
1.523 SDLTest_Log(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);