Added to harness in test lib (work in progress)
authorAndreas Schiffler <aschiffler@ferzkopp.net>
Tue, 04 Dec 2012 19:21:10 -0800
changeset 672153b71f45a53a
parent 6720 5d20f1296bca
child 6722 37283d4416e0
Added to harness in test lib (work in progress)
include/SDL_test_assert.h
include/SDL_test_harness.h
src/test/SDL_test_assert.c
src/test/SDL_test_harness.c
     1.1 --- a/include/SDL_test_assert.h	Sat Dec 01 23:42:17 2012 -0800
     1.2 +++ b/include/SDL_test_assert.h	Tue Dec 04 19:21:10 2012 -0800
     1.3 @@ -54,6 +54,12 @@
     1.4   */
     1.5  #define ASSERT_PASS		1
     1.6  
     1.7 +/*! \brief counts the failed asserts */
     1.8 +static Uint32 SDLTest_AssertsFailed = 0;
     1.9 +
    1.10 +/*! \brief counts the passed asserts */
    1.11 +static Uint32 SDLTest_AssertsPassed = 0;
    1.12 +
    1.13  /**
    1.14   * \brief Assert that logs and break execution flow on failures.
    1.15   *
     2.1 --- a/include/SDL_test_harness.h	Sat Dec 01 23:42:17 2012 -0800
     2.2 +++ b/include/SDL_test_harness.h	Tue Dec 04 19:21:10 2012 -0800
     2.3 @@ -49,22 +49,17 @@
     2.4  #define TEST_ENABLED  1
     2.5  #define TEST_DISABLED 0
     2.6  
     2.7 -//! Definitions of assert results
     2.8 -#define ASSERT_PASS		1
     2.9 -#define ASSERT_FAIL		0
    2.10 -
    2.11  //! Definition of all the possible test return values of the test case method
    2.12  #define TEST_ABORTED		-1
    2.13  #define TEST_COMPLETED		 0
    2.14  #define TEST_SKIPPED		 1
    2.15  
    2.16  //! Definition of all the possible test results for the harness
    2.17 -#define TEST_RESULT_PASSED			0
    2.18 -#define TEST_RESULT_FAILED			1
    2.19 +#define TEST_RESULT_PASSED				0
    2.20 +#define TEST_RESULT_FAILED				1
    2.21  #define TEST_RESULT_NO_ASSERT			2
    2.22 -#define TEST_RESULT_SKIPPED			3
    2.23 -#define TEST_RESULT_KILLED			4
    2.24 -#define TEST_RESULT_SETUP_FAILURE		5
    2.25 +#define TEST_RESULT_SKIPPED				3
    2.26 +#define TEST_RESULT_SETUP_FAILURE		4
    2.27  
    2.28  //!< Function pointer to a test case setup function (run before every test)
    2.29  typedef void (*SDLTest_TestCaseSetUpFp)(void *arg);
    2.30 @@ -92,7 +87,7 @@
    2.31  /**
    2.32   * Holds information about a test suite (multiple test cases).
    2.33   */
    2.34 -typedef struct TestSuiteReference {
    2.35 +typedef struct SDLTest_TestSuiteReference {
    2.36  	/*!< "PlatformSuite" */
    2.37  	char *name;
    2.38  	/*!< The function that is run before each test. NULL skips. */
    2.39 @@ -101,7 +96,7 @@
    2.40  	const SDLTest_TestCaseReference **testCases;
    2.41  	/*!< The function that is run after each test. NULL skips. */
    2.42  	SDLTest_TestCaseTearDownFp testTearDown;
    2.43 -} TestSuiteReference;
    2.44 +} SDLTest_TestSuiteReference;
    2.45  
    2.46  /* Ends C function definitions when using C++ */
    2.47  #ifdef __cplusplus
     3.1 --- a/src/test/SDL_test_assert.c	Sat Dec 01 23:42:17 2012 -0800
     3.2 +++ b/src/test/SDL_test_assert.c	Tue Dec 04 19:21:10 2012 -0800
     3.3 @@ -29,14 +29,8 @@
     3.4  
     3.5  #include "SDL_test.h"
     3.6  
     3.7 -/*! \brief counts the failed asserts */
     3.8 -static Uint32 SDLTest_AssertsFailed = 0;
     3.9 -
    3.10 -/*! \brief counts the passed asserts */
    3.11 -static Uint32 SDLTest_AssertsPassed = 0;
    3.12 -
    3.13  /* Assert check message format */
    3.14 -const char *SDLTest_AssertCheckFmt = "Assert %s: %s";
    3.15 +const char *SDLTest_AssertCheckFmt = "Assert '%s': %s";
    3.16  
    3.17  /* Assert summary message format */
    3.18  const char *SDLTest_AssertSummaryFmt = "Assert Summary: Total=%d Passed=%d Failed=%d";
    3.19 @@ -58,12 +52,12 @@
    3.20  	if (assertCondition == ASSERT_FAIL)
    3.21  	{
    3.22  		SDLTest_AssertsFailed++;
    3.23 -		SDLTest_LogError(fmt, "Failed", assertDescription);
    3.24 +		SDLTest_LogError(fmt, assertDescription, "Failed");
    3.25  	} 
    3.26  	else 
    3.27  	{
    3.28  		SDLTest_AssertsPassed++;
    3.29 -		SDLTest_Log(fmt, "Passed", assertDescription);
    3.30 +		SDLTest_Log(fmt, assertDescription, "Passed");
    3.31  	}
    3.32  
    3.33  	return assertCondition;
     4.1 --- a/src/test/SDL_test_harness.c	Sat Dec 01 23:42:17 2012 -0800
     4.2 +++ b/src/test/SDL_test_harness.c	Tue Dec 04 19:21:10 2012 -0800
     4.3 @@ -23,10 +23,19 @@
     4.4  
     4.5  #include "SDL_test.h"
     4.6  
     4.7 -#include <stdio.h>
     4.8 +#include <stdio.h>
     4.9 +#include <stdlib.h>
    4.10  #include <string.h>
    4.11 +#include <time.h>
    4.12  
    4.13 -// TODO: port over remaining harness
     4.14 +/* Test check message format */
    4.15 +const char *SDLTest_TestCheckFmt = "Test '%s': %s";
    4.16 +
    4.17 +/* Invalid test name/description message format */
    4.18 +const char *SDLTest_InvalidNameFmt = "(Invalid)";
    4.19 +
    4.20 +/*! \brief Timeout for single test case execution */
    4.21 +static Uint32 SDLTest_TestCaseTimeout = 3600;
    4.22  
    4.23  /**
    4.24   * Generates a random run seed string for the harness. The generated seed
    4.25 @@ -155,7 +164,7 @@
    4.26   * \return Timer id or -1 on failure.
    4.27   */
    4.28  SDL_TimerID
    4.29 -SetTestTimeout(int timeout, void (*callback)())
    4.30 +SDLTest_SetTestTimeout(int timeout, void (*callback)())
    4.31  {
    4.32  	Uint32 timeoutInMilliseconds;
    4.33  	SDL_TimerID timerID;
    4.34 @@ -188,3 +197,258 @@
    4.35  
    4.36  	return timerID;
    4.37  }
    4.38 +
    4.39 +void
    4.40 +SDLTest_BailOut()
    4.41 +{
    4.42 +	SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
    4.43 +	exit(TEST_ABORTED); // bail out from the test
    4.44 +}
    4.45 +
    4.46 +/**
    4.47 + * \brief Execute a test using the given execution key.
    4.48 + *
    4.49 + * \param testSuite Suite containing the test case.
    4.50 + * \param testCase Case to execute.
    4.51 + * \param execKey Execution key for the fuzzer.
    4.52 + *
    4.53 + * \returns Test case result.
    4.54 + */
    4.55 +int
    4.56 +SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference *testCase, Uint64 execKey)
    4.57 +{
    4.58 +	SDL_TimerID timer = 0;
    4.59 +
    4.60 +	if (testSuite==NULL || testCase==NULL || testSuite->name==NULL || testCase->name==NULL)
    4.61 +	{
    4.62 +		SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
    4.63 +		return TEST_RESULT_SETUP_FAILURE;
    4.64 +	}
    4.65 +
    4.66 +	if (!testCase->enabled)
    4.67 +	{
    4.68 +		SDLTest_Log((char *)SDLTest_TestCheckFmt, testCase->name, "Skipped");
    4.69 +		return TEST_RESULT_SKIPPED;
    4.70 +	}
    4.71 +
    4.72 +    // Initialize fuzzer
    4.73 +	SDLTest_FuzzerInit(execKey);
    4.74 +
    4.75 +	// Reset assert tracker
    4.76 +	SDLTest_ResetAssertSummary();
    4.77 +
    4.78 +	// Set timeout timer
    4.79 +	timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
    4.80 +
     4.81 +	// Maybe run suite initializer function
    4.82 +	if (testSuite->testSetUp) {
    4.83 +		testSuite->testSetUp(0x0);
    4.84 +		if (SDLTest_AssertsFailed > 0) {
    4.85 +			SDLTest_LogError((char *)SDLTest_TestCheckFmt, testSuite->name, "Failed");
    4.86 +			return TEST_RESULT_SETUP_FAILURE;
    4.87 +		}
    4.88 +	}
    4.89 +
    4.90 +	// Run test case function
    4.91 +	testCase->testCase(0x0);
    4.92 +
    4.93 +	// Maybe run suite cleanup function
    4.94 +	if (testSuite->testTearDown) {
    4.95 +		testSuite->testTearDown(0x0);
    4.96 +	}
    4.97 +
    4.98 +	// Cancel timeout timer
    4.99 +	if (timer) {
   4.100 +		SDL_RemoveTimer(timer);
   4.101 +	}
   4.102 +
   4.103 +	// Report on asserts and fuzzer usage
   4.104 +	SDLTest_Log("Fuzzer invocations: %d", SDLTest_GetFuzzerInvocationCount());
   4.105 +	SDLTest_LogAssertSummary();
   4.106 +
   4.107 +	// Analyze assert count to determine test case result
   4.108 +	if (SDLTest_AssertsFailed > 0) {
   4.109 +		SDLTest_LogError((char *)SDLTest_TestCheckFmt, testCase->name, "Failed");
   4.110 +		return TEST_RESULT_FAILED;
   4.111 +	} else {
   4.112 +		if (SDLTest_AssertsPassed > 0) {
   4.113 +			SDLTest_Log((char *)SDLTest_TestCheckFmt, testCase->name, "Passed");
   4.114 +			return TEST_RESULT_PASSED;
   4.115 +		} else {
   4.116 +			SDLTest_LogError((char *)SDLTest_TestCheckFmt, testCase->name, "No Asserts");
   4.117 +			return TEST_RESULT_NO_ASSERT;
   4.118 +		}
   4.119 +	}
   4.120 +}
   4.121 +
   4.122 +/* Prints summary of all suites/tests contained in the given reference */
   4.123 +void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
   4.124 +{
   4.125 +	int suiteCounter;
   4.126 +	int testCounter;
   4.127 +	SDLTest_TestSuiteReference *testSuite;
   4.128 +	SDLTest_TestCaseReference *testCase;
   4.129 +
   4.130 +	// Loop over all suites
   4.131 +	suiteCounter = 0;
   4.132 +	while(&testSuites[suiteCounter]) {
   4.133 +		testSuite=&testSuites[suiteCounter];
   4.134 +		suiteCounter++;
   4.135 +		SDLTest_Log("Test Suite %i - %s\n", suiteCounter, 
   4.136 +			(testSuite->name) ? testSuite->name : SDLTest_InvalidNameFmt);
   4.137 +
   4.138 +		// Loop over all test cases
   4.139 +		testCounter = 0;
   4.140 +		while(testSuite->testCases[testCounter])
   4.141 +		{
   4.142 +			testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
   4.143 +			testCounter++;
   4.144 +			SDLTest_Log("  Test Case %i - %s: %s", testCounter, 
   4.145 +				(testCase->name) ? testCase->name : SDLTest_InvalidNameFmt, 
   4.146 +				(testCase->description) ? testCase->description : SDLTest_InvalidNameFmt);
   4.147 +		}
   4.148 +	}
   4.149 +}
   4.150 +
   4.151 +
   4.152 +/**
    4.153 + * \brief Execute all test suites and their test cases contained in the given references.
   4.154 + *
   4.155 + * \param testSuites Suites containing the test case.
   4.156 + * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
   4.157 + * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
   4.158 + * \param testIterations Number of iterations to run each test case.
   4.159 + *
   4.160 + * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
   4.161 + */
   4.162 +int
   4.163 +SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites, char *userRunSeed, Uint64 userExecKey, int testIterations)
   4.164 +{
   4.165 +	int suiteCounter;
   4.166 +	int testCounter;
   4.167 +	int iterationCounter;
   4.168 +	SDLTest_TestSuiteReference *testSuite;
   4.169 +	SDLTest_TestCaseReference *testCase;
   4.170 +	char *runSeed;
   4.171 +	Uint64 execKey;
   4.172 +	Uint32 runStartTicks;
   4.173 +	time_t runStartTimestamp;
   4.174 +	Uint32 suiteStartTicks;
   4.175 +	time_t suiteStartTimestamp;
   4.176 +	Uint32 testStartTicks;
   4.177 +	time_t testStartTimestamp;
   4.178 +	Uint32 runEndTicks;
   4.179 +	time_t runEndTimestamp;
   4.180 +	Uint32 suiteEndTicks;
   4.181 +	time_t suiteEndTimestamp;
   4.182 +	Uint32 testEndTicks;
   4.183 +	time_t testEndTimestamp;
   4.184 +	int testResult;
   4.185 +	int totalTestFailedCount, totalTestPassedCount, totalTestSkippedCount;
   4.186 +	int testFailedCount, testPassedCount, testSkippedCount;
   4.187 +
   4.188 +	// Sanitize test iterations
   4.189 +	if (testIterations < 1) {
   4.190 +		testIterations = 1;
   4.191 +	}
   4.192 +
    4.193 +	// Generate run seed if we don't have one already
   4.194 +	if (userRunSeed == NULL || strlen(userRunSeed) == 0) {
   4.195 +		runSeed = SDLTest_GenerateRunSeed(16);
   4.196 +		if (runSeed == NULL) {
   4.197 +			SDLTest_LogError("Generating a random run seed failed");
   4.198 +			return 2;
   4.199 +		}
   4.200 +	}
   4.201 +
   4.202 +	// Reset per-run counters
   4.203 +	totalTestFailedCount = totalTestPassedCount = totalTestSkippedCount = 0;
   4.204 +
   4.205 +	// Take time - run start
   4.206 +	runStartTicks = SDL_GetTicks();
   4.207 +	runStartTimestamp = time(0);
   4.208 +
   4.209 +	// TODO log run started
   4.210 +
   4.211 +	// Loop over all suites
   4.212 +	suiteCounter = 0;
   4.213 +	while(&testSuites[suiteCounter]) {
   4.214 +		testSuite=&testSuites[suiteCounter];
   4.215 +		suiteCounter++;
   4.216 +
   4.217 +		// Reset per-suite counters
   4.218 +		testFailedCount = testPassedCount = testSkippedCount = 0;
   4.219 +
   4.220 +		// Take time - suite start
   4.221 +		suiteStartTicks = SDL_GetTicks();
   4.222 +		suiteStartTimestamp = time(0);
   4.223 +
   4.224 +		// TODO log suite started
   4.225 +		SDLTest_Log("Test Suite %i - %s\n", suiteCounter, 
   4.226 +			(testSuite->name) ? testSuite->name : SDLTest_InvalidNameFmt);
   4.227 +
   4.228 +		// Loop over all test cases
   4.229 +		testCounter = 0;
   4.230 +		while(testSuite->testCases[testCounter])
   4.231 +		{
   4.232 +			testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
   4.233 +			testCounter++;
   4.234 +			
   4.235 +			// Take time - test start
   4.236 +			testStartTicks = SDL_GetTicks();
   4.237 +			testStartTimestamp = time(0);
   4.238 +
   4.239 +			// TODO log test started
   4.240 +			SDLTest_Log("Test Case %i - %s: %s", testCounter, 
   4.241 +				(testCase->name) ? testCase->name : SDLTest_InvalidNameFmt, 
   4.242 +				(testCase->description) ? testCase->description : SDLTest_InvalidNameFmt);
   4.243 +
   4.244 +			// Loop over all iterations
   4.245 +			iterationCounter = 0;
   4.246 +			while(iterationCounter < testIterations)
   4.247 +			{
   4.248 +				iterationCounter++;
   4.249 +
   4.250 +				if(userExecKey != 0) {
   4.251 +					execKey = userExecKey;
   4.252 +				} else {
   4.253 +					execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
   4.254 +				}
   4.255 +
   4.256 +				SDLTest_Log("Test Iteration %i: execKey %d", iterationCounter, execKey);
   4.257 +				testResult = SDLTest_RunTest(testSuite, testCase, execKey);
   4.258 +
   4.259 +				if (testResult == TEST_RESULT_PASSED) {
   4.260 +					testPassedCount++;
   4.261 +					totalTestPassedCount++;
   4.262 +				} else if (testResult == TEST_RESULT_SKIPPED) {
   4.263 +					testSkippedCount++;
   4.264 +					totalTestSkippedCount++;
   4.265 +				} else {
   4.266 +					testFailedCount++;
   4.267 +					totalTestFailedCount++;
   4.268 +				}
   4.269 +			}
   4.270 +
   4.271 +			// Take time - test end
   4.272 +			testEndTicks = SDL_GetTicks();
   4.273 +			testEndTimestamp = time(0);
   4.274 +
   4.275 +			// TODO log test ended
   4.276 +		}
   4.277 +
   4.278 +		// Take time - suite end
   4.279 +		suiteEndTicks = SDL_GetTicks();
   4.280 +		suiteEndTimestamp = time(0);
   4.281 +
   4.282 +		// TODO log suite ended
   4.283 +	}
   4.284 +
   4.285 +	// Take time - run end
   4.286 +	runEndTicks = SDL_GetTicks();
   4.287 +	runEndTimestamp = time(0);
   4.288 +
   4.289 +	// TODO log run ended
   4.290 +
   4.291 +	return (totalTestFailedCount ? 1 : 0);
   4.292 +}