If any assert in the SetUp function fails, that test will be skipped.
author	Markus Kauppila
date	Mon, 11 Jul 2011 21:09:28 +0300
changeset 5755	35d4ae5dea90
parent 5754 2eead798346a
child 5756 3581346510f3
If any assert in the SetUp function fails, that test will be skipped.
test/test-automation/SDL_test.c
test/test-automation/SDL_test.h
test/test-automation/logger.h
test/test-automation/plain_logger.c
test/test-automation/plain_logger.h
test/test-automation/runner.c
test/test-automation/style.xsl
test/test-automation/testdummy/testdummy.c
test/test-automation/testsurface/testsurface.c
test/test-automation/xml_logger.c
test/test-automation/xml_logger.h
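
The mechanism works as follows: the test library now exports _CountFailedAsserts(), the runner calls it right after the suite's SetUp function, and a non-zero count makes the test return 3, which the plain and XML loggers report as "skipped". The sketch below is condensed from the RunTest() function added in runner.c further down; it is a simplified view only — the out-of-process path, function loading, and error handling are omitted. Because SetUp is shared by every test in a suite, one failed SetUp assert effectively skips the whole suite, as the testdummy.c comment notes.

	/* Condensed sketch of RunTest() from runner.c below (simplified;
	 * out-of-process execution and error handling are left out).
	 * The function pointers come from the dynamically loaded suite. */
	int
	RunTest(TestCase *testItem) {
		testItem->initTestEnvironment();

		if(testItem->testSetUp) {
			testItem->testSetUp(0x0);        /* suite's SetUp may call asserts */
		}

		if(testItem->countFailedAsserts() != 0) {
			return 3;                        /* 3 == skipped; loggers print "skipped" */
		}

		testItem->testCase(0x0);             /* test body runs only if SetUp was clean */

		if(testItem->testTearDown) {
			testItem->testTearDown(0x0);
		}

		return testItem->quitTestEnvironment();  /* 0 = pass, non-zero = failure */
	}
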
     1.1 --- a/test/test-automation/SDL_test.c	Mon Jul 11 17:55:35 2011 +0300
     1.2 +++ b/test/test-automation/SDL_test.c	Mon Jul 11 21:09:28 2011 +0300
     1.3 @@ -36,7 +36,7 @@
     1.4  int _testAssertsPassed;
     1.5  
     1.6  void
     1.7 -_InitTestEnvironment() // InitTestEnvironment
     1.8 +_InitTestEnvironment()
     1.9  {
    1.10  	_testReturnValue = 0;
    1.11  	_testAssertsFailed = 0;
    1.12 @@ -56,8 +56,13 @@
    1.13  	return _testReturnValue;
    1.14  }
    1.15  
    1.16 +int
    1.17 +_CountFailedAsserts() {
    1.18 +	return _testAssertsFailed;
    1.19 +}
    1.20 +
    1.21  void
    1.22 -AssertEquals(const int expected, const int actual, char *message, ...)
    1.23 +AssertEquals(int expected, int actual, char *message, ...)
    1.24  {
    1.25     va_list args;
    1.26     char buf[256];
     2.1 --- a/test/test-automation/SDL_test.h	Mon Jul 11 17:55:35 2011 +0300
     2.2 +++ b/test/test-automation/SDL_test.h	Mon Jul 11 21:09:28 2011 +0300
     2.3 @@ -70,12 +70,18 @@
     2.4  int _QuitTestEnvironment();
     2.5  
     2.6  /*!
     2.7 + * Can be used to query the number of failed asserts
     2.8 + * \return Returns the failed assert count.
     2.9 + */
    2.10 +int _CountFailedAsserts();
    2.11 +
    2.12 +/*!
    2.13   *  Assert function. Tests if the expected value equals the actual value, then
    2.14   *  the test assert succeeds, otherwise it fails and warns about it.
    2.15   *
    2.16   * \param expected Value user expects to have
    2.17   * \param actual The actual value of tested variable
    2.18 - * \param message Message that will be printed if assert fails
    2.19 + * \param message Message that will be printed
    2.20   */
    2.21  void AssertEquals(const int expected, const int actual, char *message, ...);
    2.22  
    2.23 @@ -85,18 +91,22 @@
    2.24   *  assert passes, otherwise it fails.
    2.25   *
    2.26   * \param condition Condition which will be evaluated
    2.27 - * \param message Message that will be printed if assert fails
    2.28 + * \param message Message that will be printed
    2.29   */
    2.30  void AssertTrue(int condition, char *message, ...);
    2.31  
    2.32  /*!
    2.33 -\todo add markup
    2.34 -*/
    2.35 + *  Assert function which will always fail
    2.36 + *
    2.37 + * \param message Message that will be printed
    2.38 + */
    2.39  void AssertFail(char *message, ...);
    2.40  
    2.41  /*!
    2.42 -\todo add markup
    2.43 -*/
    2.44 + *  Assert function which will always pass
    2.45 + *
    2.46 + * \param message Message that will be printed
    2.47 + */
    2.48  void AssertPass(char *message, ...);
    2.49  
    2.50  #endif
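
For orientation, the following shows how the assert API documented above is used from a test case. This is a hypothetical example written for illustration only — the test name and values are made up and are not part of this changeset.

	/* Hypothetical test case illustrating the documented assert API;
	 * the name and values are invented for the example. */
	void
	dummy_testAsserts(void *arg)
	{
		int actual = 2 + 2;

		AssertEquals(4, actual, "2 + 2 should be %d", 4);     /* passes */
		AssertTrue(actual > 0, "result should be positive");  /* passes */

		if(actual != 4) {
			AssertFail("arithmetic is broken");   /* always recorded as a failure */
		} else {
			AssertPass("sanity check done");      /* always recorded as a pass */
		}
	}
	/* The runner only queries _CountFailedAsserts() right after SetUp;
	 * inside the test body the asserts themselves decide pass/fail via
	 * _QuitTestEnvironment(). */
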
     3.1 --- a/test/test-automation/logger.h	Mon Jul 11 17:55:35 2011 +0300
     3.2 +++ b/test/test-automation/logger.h	Mon Jul 11 21:09:28 2011 +0300
     3.3 @@ -30,7 +30,7 @@
     3.4   */
     3.5  typedef	void (*RunStartedFp)(int parameterCount, char *runnerParameters[], time_t eventTime, void *data);
     3.6  typedef	void (*RunEndedFp)(int testCount, int suiteCount, int testPassCount, int testFailCount,
     3.7 -                           time_t endTime, double totalRuntime);
     3.8 +                           int testSkippedCount, time_t endTime, double totalRuntime);
     3.9  
    3.10  typedef	void (*SuiteStartedFp)(const char *suiteName, time_t eventTime);
    3.11  typedef	void (*SuiteEndedFp)(int testsPassed, int testsFailed, int testsSkipped,
     4.1 --- a/test/test-automation/plain_logger.c	Mon Jul 11 17:55:35 2011 +0300
     4.2 +++ b/test/test-automation/plain_logger.c	Mon Jul 11 21:09:28 2011 +0300
     4.3 @@ -54,13 +54,14 @@
     4.4  
     4.5  void
     4.6  PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
     4.7 -              time_t endTime, double totalRuntime)
     4.8 +			  int testSkippedCount, time_t endTime, double totalRuntime)
     4.9  {
    4.10  	Output(indentLevel, "Ran %d tests in %0.5f seconds from %d suites.",
    4.11  			testCount, totalRuntime, suiteCount);
    4.12  
    4.13  	Output(indentLevel, "%d tests passed", testPassCount);
    4.14  	Output(indentLevel, "%d tests failed", testFailCount);
    4.15 +	Output(indentLevel, "%d tests skipped", testSkippedCount);
    4.16  }
    4.17  
    4.18  void
    4.19 @@ -91,6 +92,9 @@
    4.20  	if(testResult) {
    4.21  		if(testResult == 2) {
    4.22  			Output(--indentLevel, "%s: failed -> no assert", testName);
    4.23 +		}
    4.24 +		else if(testResult == 3) {
    4.25 +			Output(--indentLevel, "%s: skipped", testName);
    4.26  		} else {
    4.27  			Output(--indentLevel, "%s: failed", testName);
    4.28  		}
    4.29 @@ -104,7 +108,7 @@
    4.30  		time_t eventTime)
    4.31  {
    4.32  	const char *result = (assertResult) ? "passed" : "failed";
    4.33 -	Output(indentLevel, "%s: %s; %s", assertName, result, assertMessage);
    4.34 +	Output(indentLevel, "%s: %s - %s", assertName, result, assertMessage);
    4.35  }
    4.36  
    4.37  void
    4.38 @@ -112,7 +116,7 @@
    4.39  		int actualValue, int expected, time_t eventTime)
    4.40  {
    4.41  	const char *result = (assertResult) ? "passed" : "failed";
    4.42 -	Output(indentLevel, "%s %s (expected %d, actualValue &d): %s",
    4.43 +	Output(indentLevel, "%s: %s (expected %d, actualValue &d) - %s",
    4.44  			assertName, result, expected, actualValue, assertMessage);
    4.45  }
    4.46  
     5.1 --- a/test/test-automation/plain_logger.h	Mon Jul 11 17:55:35 2011 +0300
     5.2 +++ b/test/test-automation/plain_logger.h	Mon Jul 11 21:09:28 2011 +0300
     5.3 @@ -26,7 +26,7 @@
     5.4   * \param totalRuntime How long the execution took
     5.5   */
     5.6  void PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
     5.7 -                   time_t endTime, double totalRuntime);
     5.8 +				   int testSkippedCount, time_t endTime, double totalRuntime);
     5.9  
    5.10  /*!
    5.11   * Prints the data about the test suite that'll be executed next
     6.1 --- a/test/test-automation/runner.c	Mon Jul 11 17:55:35 2011 +0300
     6.2 +++ b/test/test-automation/runner.c	Mon Jul 11 21:09:28 2011 +0300
     6.3 @@ -46,6 +46,8 @@
     6.4  typedef void (*TestCaseSetUpFp)(void *arg);
     6.5  //!< Function pointer to a test case tear down function
     6.6  typedef void  (*TestCaseTearDownFp)(void *arg);
     6.7 +//!< Function pointer to a function which returns the failed assert count
     6.8 +typedef int (*CountFailedAssertsFp)(void);
     6.9  
    6.10  
    6.11  //!< Flag for executing tests in-process
    6.12 @@ -115,6 +117,8 @@
    6.13  	TestCaseTearDownFp testTearDown;
    6.14   	QuitTestInvironmentFp quitTestEnvironment;
    6.15  
    6.16 + 	CountFailedAssertsFp countFailedAsserts;
    6.17 +
    6.18  	struct TestCaseItem *next;
    6.19  } TestCase;
    6.20  
    6.21 @@ -126,6 +130,7 @@
    6.22  TestCaseReference **QueryTestCaseReferences(void *library);
    6.23  TestCaseSetUpFp LoadTestSetUpFunction(void *suite);
    6.24  TestCaseTearDownFp LoadTestTearDownFunction(void *suite);
    6.25 +CountFailedAssertsFp LoadCountFailedAssertsFunction(void *suite);
    6.26  
    6.27  
    6.28  /*! Pointers to selected logger implementation */
    6.29 @@ -142,143 +147,6 @@
    6.30  
    6.31  
    6.32  /*!
    6.33 - * Goes through the previously loaded test suites and
    6.34 - * loads test cases from them. Test cases are filtered
    6.35 - * during the process. Function will only return the
    6.36 - * test cases which aren't filtered out.
    6.37 - *
    6.38 - * \param suites previously loaded test suites
    6.39 - *
    6.40 - * \return Test cases that survived filtering process.
    6.41 - */
    6.42 -TestCase *
    6.43 -LoadTestCases(TestSuiteReference *suites)
    6.44 -{
    6.45 -	TestCase *testCases = NULL;
    6.46 -
    6.47 -	TestSuiteReference *suiteReference = NULL;
    6.48 -	for(suiteReference = suites; suiteReference; suiteReference = suiteReference->next) {
    6.49 -		TestCaseReference **tests = QueryTestCaseReferences(suiteReference->library);
    6.50 -
    6.51 -		TestCaseReference *testReference = NULL;
    6.52 -		int counter = 0;
    6.53 -		for(testReference = tests[counter]; testReference; testReference = tests[++counter]) {
    6.54 -
    6.55 -			void *suite = suiteReference->library;
    6.56 -
    6.57 -			// Load test case functions
    6.58 -			InitTestInvironmentFp initTestEnvironment = LoadInitTestInvironmentFunction(suiteReference->library);
    6.59 -			QuitTestInvironmentFp quitTestEnvironment = LoadQuitTestInvironmentFunction(suiteReference->library);
    6.60 -
    6.61 -			TestCaseSetUpFp testSetUp = LoadTestSetUpFunction(suiteReference->library);
    6.62 -			TestCaseTearDownFp testTearDown = LoadTestTearDownFunction(suiteReference->library);
    6.63 -
    6.64 -			TestCaseFp testCase = LoadTestCaseFunction(suiteReference->library, testReference->name);
    6.65 -
    6.66 -			// Do the filtering
    6.67 -			if(FilterTestCase(testReference)) {
    6.68 -				TestCase *item = SDL_malloc(sizeof(TestCase));
    6.69 -				memset(item, 0, sizeof(TestCase));
    6.70 -
    6.71 -				item->initTestEnvironment = initTestEnvironment;
    6.72 -				item->quitTestEnvironment = quitTestEnvironment;
    6.73 -
    6.74 -				item->testSetUp = testSetUp;
    6.75 -				item->testTearDown = testTearDown;
    6.76 -
    6.77 -				item->testCase = testCase;
    6.78 -
    6.79 -				// copy suite name
    6.80 -				int length = SDL_strlen(suiteReference->name) + 1;
    6.81 -				item->suiteName = SDL_malloc(length);
    6.82 -				strncpy(item->suiteName, suiteReference->name, length);
    6.83 -
    6.84 -				// copy test name
    6.85 -				length = SDL_strlen(testReference->name) + 1;
    6.86 -				item->testName = SDL_malloc(length);
    6.87 -				strncpy(item->testName, testReference->name, length);
    6.88 -
    6.89 -				// copy test description
    6.90 -				length = SDL_strlen(testReference->description) + 1;
    6.91 -				item->description = SDL_malloc(length);
    6.92 -				strncpy(item->description, testReference->description, length);
    6.93 -
    6.94 -				item->requirements = testReference->requirements;
    6.95 -				item->timeout = testReference->timeout;
    6.96 -
    6.97 -				// prepend the list
    6.98 -				item->next = testCases;
    6.99 -				testCases = item;
   6.100 -
   6.101 -				//printf("Added test: %s\n", testReference->name);
   6.102 -			}
   6.103 -		}
   6.104 -	}
   6.105 -
   6.106 -	return testCases;
   6.107 -}
   6.108 -
   6.109 -
   6.110 -/*!
   6.111 - * Unloads the given TestCases. Frees all the resources
   6.112 - * allocated for test cases.
   6.113 - *
   6.114 - * \param testCases Test cases to be deallocated
   6.115 - */
   6.116 -void
   6.117 -UnloadTestCases(TestCase *testCases)
   6.118 -{
   6.119 -	TestCase *ref = testCases;
   6.120 -	while(ref) {
   6.121 -		SDL_free(ref->testName);
   6.122 -		SDL_free(ref->suiteName);
   6.123 -		SDL_free(ref->description);
   6.124 -
   6.125 -		TestCase *temp = ref->next;
   6.126 -		SDL_free(ref);
   6.127 -		ref = temp;
   6.128 -	}
   6.129 -
   6.130 -	testCases = NULL;
   6.131 -}
   6.132 -
   6.133 -
   6.134 -/*!
   6.135 - * Filters a test case based on its properties in TestCaseReference and user
   6.136 - * preference.
   6.137 - *
   6.138 - * \return Non-zero means test will be added to execution list, zero means opposite
   6.139 - */
   6.140 -int
   6.141 -FilterTestCase(TestCaseReference *testReference)
   6.142 -{
   6.143 -	int retVal = 1;
   6.144 -
   6.145 -	if(testReference->enabled == TEST_DISABLED) {
   6.146 -		retVal = 0;
   6.147 -	}
   6.148 -
   6.149 -	if(only_selected_test) {
   6.150 -		if(SDL_strncmp(testReference->name, selected_test_name, NAME_BUFFER_SIZE) == 0) {
   6.151 -			retVal = 1;
   6.152 -		} else {
   6.153 -			retVal = 0;
   6.154 -		}
   6.155 -	}
   6.156 -
   6.157 -	if(only_tests_with_string) {
   6.158 -		if(strstr(testReference->name, testcase_name_substring) != NULL) {
   6.159 -			retVal = 1;
   6.160 -		} else {
   6.161 -			retVal = 0;
   6.162 -		}
   6.163 -	}
   6.164 -
   6.165 -	return retVal;
   6.166 -}
   6.167 -
   6.168 -
   6.169 -/*!
   6.170   * Scans the tests/ directory and returns the names
   6.171   * of the dynamic libraries implementing the test suites.
   6.172   *
   6.173 @@ -298,10 +166,9 @@
   6.174  {
   6.175  	typedef struct dirent Entry;
   6.176  	DIR *directory = opendir(directoryName);
   6.177 +	TestSuiteReference *suites = NULL;
   6.178 +	Entry *entry = NULL;
   6.179  
   6.180 -	TestSuiteReference *suites = NULL;
   6.181 -
   6.182 -	Entry *entry = NULL;
   6.183  	if(!directory) {
   6.184  		fprintf(stderr, "Failed to open test suite directory: %s\n", directoryName);
   6.185  		perror("Error message");
   6.186 @@ -323,9 +190,12 @@
   6.187  			if(ok && SDL_strcmp(ext, extension)  == 0) {
   6.188  				// create test suite reference
   6.189  				TestSuiteReference *reference = (TestSuiteReference *) SDL_malloc(sizeof(TestSuiteReference));
   6.190 +				if(reference == NULL) {
   6.191 +					fprintf(stderr, "Allocating TestSuiteReference failed\n");
   6.192 +				}
   6.193 +
   6.194  				memset(reference, 0, sizeof(TestSuiteReference));
   6.195  
   6.196 -
   6.197  				const int dirSize = SDL_strlen(directoryName);
   6.198  				const int extSize = SDL_strlen(ext);
   6.199  				const int nameSize = SDL_strlen(name) + 1;
   6.200 @@ -428,6 +298,147 @@
   6.201  
   6.202  
   6.203  /*!
   6.204 + * Goes through the previously loaded test suites and
   6.205 + * loads test cases from them. Test cases are filtered
   6.206 + * during the process. Function will only return the
   6.207 + * test cases which aren't filtered out.
   6.208 + *
   6.209 + * \param suites previously loaded test suites
   6.210 + *
   6.211 + * \return Test cases that survived filtering process.
   6.212 + */
   6.213 +TestCase *
   6.214 +LoadTestCases(TestSuiteReference *suites)
   6.215 +{
   6.216 +	TestCase *testCases = NULL;
   6.217 +
   6.218 +	TestSuiteReference *suiteReference = NULL;
   6.219 +	for(suiteReference = suites; suiteReference; suiteReference = suiteReference->next) {
   6.220 +		TestCaseReference **tests = QueryTestCaseReferences(suiteReference->library);
   6.221 +
   6.222 +		TestCaseReference *testReference = NULL;
   6.223 +		int counter = 0;
   6.224 +		for(testReference = tests[counter]; testReference; testReference = tests[++counter]) {
   6.225 +
   6.226 +			void *suite = suiteReference->library;
   6.227 +
   6.228 +			// Load test case functions
   6.229 +			InitTestInvironmentFp initTestEnvironment = LoadInitTestInvironmentFunction(suiteReference->library);
   6.230 +			QuitTestInvironmentFp quitTestEnvironment = LoadQuitTestInvironmentFunction(suiteReference->library);
   6.231 +
   6.232 +			TestCaseSetUpFp testSetUp = LoadTestSetUpFunction(suiteReference->library);
   6.233 +			TestCaseTearDownFp testTearDown = LoadTestTearDownFunction(suiteReference->library);
   6.234 +
   6.235 +			TestCaseFp testCase = LoadTestCaseFunction(suiteReference->library, testReference->name);
   6.236 +
   6.237 +			CountFailedAssertsFp countFailedAsserts = LoadCountFailedAssertsFunction(suiteReference->library);
   6.238 +
   6.239 +			// Do the filtering
   6.240 +			if(FilterTestCase(testReference)) {
   6.241 +				TestCase *item = SDL_malloc(sizeof(TestCase));
   6.242 +				memset(item, 0, sizeof(TestCase));
   6.243 +
   6.244 +				item->initTestEnvironment = initTestEnvironment;
   6.245 +				item->quitTestEnvironment = quitTestEnvironment;
   6.246 +
   6.247 +				item->testSetUp = testSetUp;
   6.248 +				item->testTearDown = testTearDown;
   6.249 +
   6.250 +				item->testCase = testCase;
   6.251 +
   6.252 +				item->countFailedAsserts = countFailedAsserts;
   6.253 +
   6.254 +				// copy suite name
   6.255 +				int length = SDL_strlen(suiteReference->name) + 1;
   6.256 +				item->suiteName = SDL_malloc(length);
   6.257 +				strncpy(item->suiteName, suiteReference->name, length);
   6.258 +
   6.259 +				// copy test name
   6.260 +				length = SDL_strlen(testReference->name) + 1;
   6.261 +				item->testName = SDL_malloc(length);
   6.262 +				strncpy(item->testName, testReference->name, length);
   6.263 +
   6.264 +				// copy test description
   6.265 +				length = SDL_strlen(testReference->description) + 1;
   6.266 +				item->description = SDL_malloc(length);
   6.267 +				strncpy(item->description, testReference->description, length);
   6.268 +
   6.269 +				item->requirements = testReference->requirements;
   6.270 +				item->timeout = testReference->timeout;
   6.271 +
   6.272 +				// prepend the list
   6.273 +				item->next = testCases;
   6.274 +				testCases = item;
   6.275 +
   6.276 +				//printf("Added test: %s\n", testReference->name);
   6.277 +			}
   6.278 +		}
   6.279 +	}
   6.280 +
   6.281 +	return testCases;
   6.282 +}
   6.283 +
   6.284 +
   6.285 +/*!
   6.286 + * Unloads the given TestCases. Frees all the resources
   6.287 + * allocated for test cases.
   6.288 + *
   6.289 + * \param testCases Test cases to be deallocated
   6.290 + */
   6.291 +void
   6.292 +UnloadTestCases(TestCase *testCases)
   6.293 +{
   6.294 +	TestCase *ref = testCases;
   6.295 +	while(ref) {
   6.296 +		SDL_free(ref->testName);
   6.297 +		SDL_free(ref->suiteName);
   6.298 +		SDL_free(ref->description);
   6.299 +
   6.300 +		TestCase *temp = ref->next;
   6.301 +		SDL_free(ref);
   6.302 +		ref = temp;
   6.303 +	}
   6.304 +
   6.305 +	testCases = NULL;
   6.306 +}
   6.307 +
   6.308 +
   6.309 +/*!
   6.310 + * Filters a test case based on its properties in TestCaseReference and user
   6.311 + * preference.
   6.312 + *
   6.313 + * \return Non-zero means test will be added to execution list, zero means opposite
   6.314 + */
   6.315 +int
   6.316 +FilterTestCase(TestCaseReference *testReference)
   6.317 +{
   6.318 +	int retVal = 1;
   6.319 +
   6.320 +	if(testReference->enabled == TEST_DISABLED) {
   6.321 +		retVal = 0;
   6.322 +	}
   6.323 +
   6.324 +	if(only_selected_test) {
   6.325 +		if(SDL_strncmp(testReference->name, selected_test_name, NAME_BUFFER_SIZE) == 0) {
   6.326 +			retVal = 1;
   6.327 +		} else {
   6.328 +			retVal = 0;
   6.329 +		}
   6.330 +	}
   6.331 +
   6.332 +	if(only_tests_with_string) {
   6.333 +		if(strstr(testReference->name, testcase_name_substring) != NULL) {
   6.334 +			retVal = 1;
   6.335 +		} else {
   6.336 +			retVal = 0;
   6.337 +		}
   6.338 +	}
   6.339 +
   6.340 +	return retVal;
   6.341 +}
   6.342 +
   6.343 +
   6.344 +/*!
   6.345   * Loads the test case references from the given test suite.
   6.346  
   6.347   * \param library Previously loaded dynamic library AKA test suite
   6.348 @@ -436,21 +447,21 @@
   6.349  TestCaseReference **
   6.350  QueryTestCaseReferences(void *library)
   6.351  {
   6.352 -        TestCaseReference **(*suite)(void);
   6.353 +	TestCaseReference **(*suite)(void);
   6.354  
   6.355 -        suite = (TestCaseReference **(*)(void)) SDL_LoadFunction(library, "QueryTestSuite");
   6.356 -        if(suite == NULL) {
   6.357 -                fprintf(stderr, "Loading QueryTestCaseReferences() failed.\n");
   6.358 -                fprintf(stderr, "%s\n", SDL_GetError());
   6.359 -        }
   6.360 +	suite = (TestCaseReference **(*)(void)) SDL_LoadFunction(library, "QueryTestSuite");
   6.361 +	if(suite == NULL) {
   6.362 +		fprintf(stderr, "Loading QueryTestCaseReferences() failed.\n");
   6.363 +		fprintf(stderr, "%s\n", SDL_GetError());
   6.364 +	}
   6.365  
   6.366 -        TestCaseReference **tests = suite();
   6.367 -        if(tests == NULL) {
   6.368 -                fprintf(stderr, "Failed to load test references.\n");
   6.369 -                fprintf(stderr, "%s\n", SDL_GetError());
   6.370 -        }
   6.371 +	TestCaseReference **tests = suite();
   6.372 +	if(tests == NULL) {
   6.373 +		fprintf(stderr, "Failed to load test references.\n");
   6.374 +		fprintf(stderr, "%s\n", SDL_GetError());
   6.375 +	}
   6.376  
   6.377 -        return tests;
   6.378 +	return tests;
   6.379  }
   6.380  
   6.381  
   6.382 @@ -554,6 +565,81 @@
   6.383  	return testEnvQuit;
   6.384  }
   6.385  
   6.386 +/*!
   6.387 + * Loads function that returns failed assert count in the current
   6.388 + * test environment
   6.389 + *
   6.390 + * \param suite Used test suite
   6.391 + *
   6.392 + * \return Function pointer to _CountFailedAsserts function
   6.393 + */
   6.394 +CountFailedAssertsFp
   6.395 +LoadCountFailedAssertsFunction(void *suite) {
   6.396 +	CountFailedAssertsFp countFailedAssert = (CountFailedAssertsFp) SDL_LoadFunction(suite, "_CountFailedAsserts");
   6.397 +	if(countFailedAssert == NULL) {
   6.398 +		fprintf(stderr, "Loading _CountFailedAsserts function failed, countFailedAssert == NULL\n");
   6.399 +		fprintf(stderr, "%s\n", SDL_GetError());
   6.400 +	}
   6.401 +
   6.402 +	return countFailedAssert;
   6.403 +}
   6.404 +
   6.405 +
   6.406 +/*
   6.407 + * Execute the test
   6.408 + *
   6.409 + * \param testItem Test to be executed
   6.410 + */
   6.411 +int
   6.412 +RunTest(TestCase *testItem) {
   6.413 +	testItem->initTestEnvironment();
   6.414 +
   6.415 +	if(testItem->testSetUp) {
   6.416 +		testItem->testSetUp(0x0);
   6.417 +	}
   6.418 +
   6.419 +	int cntFailedAsserts = testItem->countFailedAsserts();
   6.420 +	if(cntFailedAsserts != 0) {
   6.421 +		return 3;
   6.422 +	}
   6.423 +
   6.424 +	testItem->testCase(0x0);
   6.425 +
   6.426 +	if(testItem->testTearDown) {
   6.427 +		testItem->testTearDown(0x0);
   6.428 +	}
   6.429 +
   6.430 +	return testItem->quitTestEnvironment();
   6.431 +}
   6.432 +
   6.433 +/*!
   6.434 + * Executes a test case. Loads the test, executes it and
   6.435 + * returns the tests return value to the caller.
   6.436 + *
   6.437 + * \param testItem The test case that will be executed
   6.438 + * \return The return value of the test. Zero means success, non-zero failure.
   6.439 + */
   6.440 +int
   6.441 +ExecuteTest(TestCase *testItem) {
   6.442 +	int retVal = 1;
   6.443 +
   6.444 +	if(execute_inproc) {
   6.445 +		retVal = RunTest(testItem);
   6.446 +	} else {
   6.447 +		int childpid = fork();
   6.448 +		if(childpid == 0) {
   6.449 +			exit(RunTest(testItem));
   6.450 +		} else {
   6.451 +			int stat_lock = -1;
   6.452 +			int child = wait(&stat_lock);
   6.453 +
   6.454 +			retVal = HandleChildProcessReturnValue(stat_lock);
   6.455 +		}
   6.456 +	}
   6.457 +
   6.458 +	return retVal;
   6.459 +}
   6.460 +
   6.461  
   6.462  /*!
   6.463   * If using out-of-proc execution of tests. This function
   6.464 @@ -584,56 +670,58 @@
   6.465  
   6.466  
   6.467  /*!
   6.468 - * Executes a test case. Loads the test, executes it and
   6.469 - * returns the tests return value to the caller.
   6.470 + * Sets up the logger.
   6.471   *
   6.472 - * \param testItem The test case that will be executed
   6.473 - * \return The return value of the test. Zero means success, non-zero failure.
   6.474 + * \return Some special data that will be passed to StartRun() logger call
   6.475   */
   6.476 -int
   6.477 -ExecuteTest(TestCase *testItem) {
   6.478 -	int retVal = 1;
   6.479 -	if(execute_inproc) {
   6.480 -		testItem->initTestEnvironment();
   6.481 +void *
   6.482 +SetUpLogger()
   6.483 +{
   6.484 +	void *loggerData = NULL;
   6.485 +	if(xml_enabled) {
   6.486 +		RunStarted = XMLRunStarted;
   6.487 +		RunEnded = XMLRunEnded;
   6.488 +
   6.489 +		SuiteStarted = XMLSuiteStarted;
   6.490 +		SuiteEnded = XMLSuiteEnded;
   6.491  
   6.492 -		if(testItem->testSetUp) {
   6.493 -			testItem->testSetUp(0x0);
   6.494 -		}
   6.495 +		TestStarted = XMLTestStarted;
   6.496 +		TestEnded = XMLTestEnded;
   6.497  
   6.498 -		testItem->testCase(0x0);
   6.499 +		Assert = XMLAssert;
   6.500 +		AssertWithValues = XMLAssertWithValues;
   6.501 +		AssertSummary = XMLAssertSummary;
   6.502  
   6.503 -		if(testItem->testTearDown) {
   6.504 -			testItem->testTearDown(0x0);
   6.505 +		Log = XMLLog;
   6.506 +
   6.507 +		char *sheet = NULL;
   6.508 +		if(xsl_enabled) {
   6.509 +			sheet = "style.xsl"; // default style sheet;
   6.510  		}
   6.511  
   6.512 -		retVal = testItem->quitTestEnvironment();
   6.513 -	} else {
   6.514 -		int childpid = fork();
   6.515 -		if(childpid == 0) {
   6.516 -			testItem->initTestEnvironment();
   6.517 +		if(custom_xsl_enabled) {
   6.518 +			sheet = xsl_stylesheet_name;
   6.519 +		}
   6.520  
   6.521 -			if(testItem->testSetUp) {
   6.522 -				testItem->testSetUp(0x0);
   6.523 -			}
   6.524 -
   6.525 -			testItem->testCase(0x0);
   6.526 +		loggerData = sheet;
   6.527 +	} else {
   6.528 +		RunStarted = PlainRunStarted;
   6.529 +		RunEnded = PlainRunEnded;
   6.530  
   6.531 -			// note: if test case is is aborted by some signal
   6.532 -			// then TearDown function won't be called
   6.533 -			if(testItem->testTearDown) {
   6.534 -				testItem->testTearDown(0x0);
   6.535 -			}
   6.536 +		SuiteStarted = PlainSuiteStarted;
   6.537 +		SuiteEnded = PlainSuiteEnded;
   6.538 +
   6.539 +		TestStarted = PlainTestStarted;
   6.540 +		TestEnded = PlainTestEnded;
   6.541  
   6.542 -			exit(testItem->quitTestEnvironment());
   6.543 -		} else {
   6.544 -			int stat_lock = -1;
   6.545 -			int child = wait(&stat_lock);
   6.546 +		Assert = PlainAssert;
   6.547 +		AssertWithValues = PlainAssertWithValues;
   6.548 +		AssertSummary = PlainAssertSummary;
   6.549  
   6.550 -			retVal = HandleChildProcessReturnValue(stat_lock);
   6.551 -		}
   6.552 +		Log = PlainLog;
   6.553  	}
   6.554  
   6.555 -	return retVal;
   6.556 +	return loggerData;
   6.557  }
   6.558  
   6.559  
   6.560 @@ -771,7 +859,7 @@
   6.561  
   6.562  	// print: Testing against SDL version fuu (rev: bar) if verbose == true
   6.563  
   6.564 -	int totalTestfailureCount = 0, totalTestPassCount = 0;
   6.565 +	int totalTestFailureCount = 0, totalTestPassCount = 0, totalTestSkipCount = 0;
   6.566  	int testFailureCount = 0, testPassCount = 0, testSkipCount = 0;
   6.567  	char *testSuiteName = NULL;
   6.568  	int suiteCounter = 0;
   6.569 @@ -782,49 +870,7 @@
   6.570  	char *extension = "dylib";
   6.571  #endif
   6.572  
   6.573 -	void *loggerData = NULL;
   6.574 -	if(xml_enabled) {
   6.575 -		RunStarted = XMLRunStarted;
   6.576 -		RunEnded = XMLRunEnded;
   6.577 -
   6.578 -		SuiteStarted = XMLSuiteStarted;
   6.579 -		SuiteEnded = XMLSuiteEnded;
   6.580 -
   6.581 -		TestStarted = XMLTestStarted;
   6.582 -		TestEnded = XMLTestEnded;
   6.583 -
   6.584 -		Assert = XMLAssert;
   6.585 -		AssertWithValues = XMLAssertWithValues;
   6.586 -		AssertSummary = XMLAssertSummary;
   6.587 -
   6.588 -		Log = XMLLog;
   6.589 -
   6.590 -		char *sheet = NULL;
   6.591 -		if(xsl_enabled) {
   6.592 -			sheet = "style.xsl"; // default style sheet;
   6.593 -		}
   6.594 -
   6.595 -		if(custom_xsl_enabled) {
   6.596 -			sheet = xsl_stylesheet_name;
   6.597 -		}
   6.598 -
   6.599 -		loggerData = sheet;
   6.600 -	} else {
   6.601 -		RunStarted = PlainRunStarted;
   6.602 -		RunEnded = PlainRunEnded;
   6.603 -
   6.604 -		SuiteStarted = PlainSuiteStarted;
   6.605 -		SuiteEnded = PlainSuiteEnded;
   6.606 -
   6.607 -		TestStarted = PlainTestStarted;
   6.608 -		TestEnded = PlainTestEnded;
   6.609 -
   6.610 -		Assert = PlainAssert;
   6.611 -		AssertWithValues = PlainAssertWithValues;
   6.612 -		AssertSummary = PlainAssertSummary;
   6.613 -
   6.614 -		Log = PlainLog;
   6.615 -	}
   6.616 +	void *loggerData = SetUpLogger();
   6.617  
   6.618  	const Uint32 startTicks = SDL_GetTicks();
   6.619  
   6.620 @@ -845,9 +891,7 @@
   6.621  
   6.622  	RunStarted(argc, argv, time(0), loggerData);
   6.623  
   6.624 -
   6.625  	char *currentSuiteName = NULL;
   6.626 -
   6.627  	int suiteStartTime = SDL_GetTicks();
   6.628  
   6.629  	TestCase *testItem = NULL;
   6.630 @@ -856,7 +900,7 @@
   6.631  			currentSuiteName = testItem->suiteName;
   6.632  			SuiteStarted(currentSuiteName, time(0));
   6.633  
   6.634 -			testFailureCount = testPassCount = 0;
   6.635 +			testFailureCount = testPassCount = testSkipCount = 0;
   6.636  
   6.637  			suiteCounter++;
   6.638  		}
   6.639 @@ -871,7 +915,7 @@
   6.640  			currentSuiteName = testItem->suiteName;
   6.641  			SuiteStarted(currentSuiteName, time(0));
   6.642  
   6.643 -			testFailureCount = testPassCount = 0;
   6.644 +			testFailureCount = testPassCount = testSkipCount = 0;
   6.645  
   6.646  			suiteCounter++;
   6.647  		}
   6.648 @@ -882,8 +926,12 @@
   6.649  		const Uint32 testTimeStart = SDL_GetTicks();
   6.650  
   6.651  		int retVal = ExecuteTest(testItem);
   6.652 -		if(retVal) {
   6.653 -			totalTestfailureCount++;
   6.654 +		if(retVal == 3) {
   6.655 +			testSkipCount++;
   6.656 +			totalTestSkipCount++;
   6.657 +		}
   6.658 +		else if(retVal) {
   6.659 +			totalTestFailureCount++;
   6.660  			testFailureCount++;
   6.661  		} else {
   6.662  			totalTestPassCount++;
   6.663 @@ -906,8 +954,8 @@
   6.664  	const Uint32 endTicks = SDL_GetTicks();
   6.665  	const double totalRunTime = (endTicks - startTicks) / 1000.0f;
   6.666  
   6.667 -	RunEnded(totalTestPassCount + totalTestfailureCount, suiteCounter,
   6.668 -			 totalTestPassCount, totalTestfailureCount, time(0), totalRunTime);
   6.669 +	RunEnded(totalTestPassCount + totalTestFailureCount, suiteCounter,
   6.670 +			 totalTestPassCount, totalTestFailureCount, totalTestSkipCount, time(0), totalRunTime);
   6.671  
   6.672 -	return (totalTestfailureCount ? 1 : 0);
   6.673 +	return (totalTestFailureCount ? 1 : 0);
   6.674  }
     7.1 --- a/test/test-automation/style.xsl	Mon Jul 11 17:55:35 2011 +0300
     7.2 +++ b/test/test-automation/style.xsl	Mon Jul 11 21:09:28 2011 +0300
     7.3 @@ -104,6 +104,7 @@
     7.4  	/* Color the tests based on the result */
     7.5  	$("span.testResult[result='passed']").addClass('passed');
     7.6  	$("span.testResult[result='failed']").addClass('failed');
     7.7 +	$("span.testResult[result='skipped']").addClass('skipped');
     7.8  	
     7.9  	/* Color the asserts based on the result */
    7.10  	$("span.assertResult[result='pass']").addClass('passed');
    7.11 @@ -157,6 +158,10 @@
    7.12   color: red;
    7.13  }
    7.14  
    7.15 +.skipped {
    7.16 + color: gray;
    7.17 +}
    7.18 +
    7.19  </style>
    7.20  
    7.21  </head>
     8.1 --- a/test/test-automation/testdummy/testdummy.c	Mon Jul 11 17:55:35 2011 +0300
     8.2 +++ b/test/test-automation/testdummy/testdummy.c	Mon Jul 11 21:09:28 2011 +0300
     8.3 @@ -56,6 +56,9 @@
     8.4   * SetUp function can be used to create a test fixture for test cases.
     8.5   * The function will be called right before executing the test case.
     8.6   *
     8.7 + * Note: If any assert in the function fails then the test will be skipped.
     8.8 + * In practice, the entire suite will be skipped if assert failure happens.
     8.9 + *
    8.10   * Note: this function is optional.
    8.11   *
    8.12   * \param arg parameters given to test. Usually NULL
     9.1 --- a/test/test-automation/testsurface/testsurface.c	Mon Jul 11 17:55:35 2011 +0300
     9.2 +++ b/test/test-automation/testsurface/testsurface.c	Mon Jul 11 21:09:28 2011 +0300
     9.3 @@ -32,11 +32,39 @@
     9.4  	return (TestCaseReference **)testSuite;
     9.5  }
     9.6  
     9.7 +/* Function prototypes */
     9.8 +SDL_Surface *_CreateTestSurface();
     9.9 +
    9.10 +
    9.11 +/* Create test fixture */
    9.12 +
    9.13 +static SDL_Surface *testsur = NULL;
    9.14 +
    9.15 +
    9.16 +void
    9.17 +SetUp(void *arg)
    9.18 +{
    9.19 +	int ret = SDL_Init(SDL_INIT_VIDEO);
    9.20 +	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
    9.21 +
    9.22 +	testsur = _CreateTestSurface();
    9.23 +	AssertTrue(testsur != NULL, "SDL_Init(SDL_INIT_VIDEO)");
    9.24 +}
    9.25 +
    9.26 +void
    9.27 +TearDown(void *arg)
    9.28 +{
    9.29 +	SDL_FreeSurface( testsur );
    9.30 +
    9.31 +	SDL_Quit();
    9.32 +}
    9.33 +
    9.34  /* Helper functions for the test cases */
    9.35  
    9.36  #define TEST_SURFACE_WIDTH 80
    9.37  #define TEST_SURFACE_HEIGHT 60
    9.38  
    9.39 +
    9.40  /*!
    9.41   * Creates test surface
    9.42   */
    9.43 @@ -66,7 +94,7 @@
    9.44  /**
    9.45   * @brief Tests a blend mode.
    9.46   */
    9.47 -int _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
    9.48 +void _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
    9.49  {
    9.50  	int ret;
    9.51  	int i, j, ni, nj;
    9.52 @@ -102,8 +130,6 @@
    9.53  		 ret = SDL_BlitSurface( face, NULL, testsur, &rect );
    9.54  		 AssertTrue(ret != 0, "SDL_BlitSurface");	  }
    9.55  	}
    9.56 -
    9.57 -	return 0;
    9.58  }
    9.59  
    9.60  /* Test case functions */
    9.61 @@ -115,13 +141,8 @@
    9.62  	int ret;
    9.63      SDL_Surface *face, *rface;
    9.64  
    9.65 -	ret = SDL_Init(SDL_INIT_VIDEO);
    9.66 -	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
    9.67 -
    9.68 -	SDL_Surface *testsur = _CreateTestSurface();
    9.69 -
    9.70 -   /* Clear surface. */
    9.71 -   ret = SDL_FillRect( testsur, NULL,
    9.72 +    /* Clear surface. */
    9.73 +    ret = SDL_FillRect( testsur, NULL,
    9.74           SDL_MapRGB( testsur->format, 0, 0, 0 ) );
    9.75  	AssertTrue(ret == 0,  "SDL_FillRect");
    9.76  
    9.77 @@ -151,10 +172,6 @@
    9.78     /* Clean up. */
    9.79     SDL_FreeSurface( rface );
    9.80     SDL_FreeSurface( face );
    9.81 -
    9.82 -   SDL_FreeSurface( testsur );
    9.83 -
    9.84 -   SDL_Quit();
    9.85  }
    9.86  
    9.87  
    9.88 @@ -163,14 +180,8 @@
    9.89   */
    9.90  void surface_testLoadFailure(void *arg)
    9.91  {
    9.92 -	int ret = SDL_Init(SDL_INIT_VIDEO);
    9.93 -	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
    9.94 -
    9.95  	SDL_Surface *face = SDL_LoadBMP("nonexistant.bmp");
    9.96 -
    9.97  	AssertTrue(face == NULL, "SDL_CreateLoadBmp");
    9.98 -
    9.99 -	SDL_Quit();
   9.100  }
   9.101  
   9.102  
   9.103 @@ -184,11 +195,6 @@
   9.104     SDL_Surface *face;
   9.105     int i, j, ni, nj;
   9.106  
   9.107 -	ret = SDL_Init(SDL_INIT_VIDEO);
   9.108 -	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
   9.109 -
   9.110 -   SDL_Surface *testsur = _CreateTestSurface();
   9.111 -
   9.112     /* Clear surface. */
   9.113     ret = SDL_FillRect( testsur, NULL,
   9.114           SDL_MapRGB( testsur->format, 0, 0, 0 ) );
   9.115 @@ -292,9 +298,6 @@
   9.116  
   9.117     /* Clean up. */
   9.118     SDL_FreeSurface( face );
   9.119 -   SDL_FreeSurface( testsur );
   9.120 -
   9.121 -   SDL_Quit();
   9.122  }
   9.123  
   9.124  /**
   9.125 @@ -308,11 +311,6 @@
   9.126     int i, j, ni, nj;
   9.127     int mode;
   9.128  
   9.129 -	ret = SDL_Init(SDL_INIT_VIDEO);
   9.130 -	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
   9.131 -
   9.132 -   SDL_Surface *testsur = _CreateTestSurface();
   9.133 -
   9.134     /* Clear surface. */
   9.135     ret = SDL_FillRect( testsur, NULL,
   9.136           SDL_MapRGB( testsur->format, 0, 0, 0 ) );
   9.137 @@ -415,7 +413,4 @@
   9.138  
   9.139     /* Clean up. */
   9.140     SDL_FreeSurface( face );
   9.141 -   SDL_FreeSurface( testsur );
   9.142 -
   9.143 -   SDL_Quit();
   9.144  }
    10.1 --- a/test/test-automation/xml_logger.c	Mon Jul 11 17:55:35 2011 +0300
    10.2 +++ b/test/test-automation/xml_logger.c	Mon Jul 11 21:09:28 2011 +0300
    10.3 @@ -38,6 +38,7 @@
    10.4  const char *numTestElementName = "numTests";
    10.5  const char *numPassedTestsElementName = "numPassedTests";
    10.6  const char *numFailedTestsElementName = "numFailedTests";
    10.7 +const char *numSkippedTestsElementName = "numSkippedTests";
    10.8  const char *endTimeElementName = "endTime";
    10.9  const char *totalRuntimeElementName = "totalRuntime";
   10.10  const char *suiteElementName = "suite";
   10.11 @@ -145,7 +146,7 @@
   10.12  
   10.13  void
   10.14  XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
   10.15 -            time_t endTime, double totalRuntime)
   10.16 +			int testSkippedCount, time_t endTime, double totalRuntime)
   10.17  {
   10.18  	// log suite count
   10.19  	char *output = XMLOpenElement(numSuitesElementName);
   10.20 @@ -187,7 +188,17 @@
   10.21  	output = XMLCloseElement(numFailedTestsElementName);
   10.22  	XMLOutputter(--indentLevel, YES, output);
   10.23  
   10.24 -	// log end timte
   10.25 +	// log skipped test count
   10.26 +	output = XMLOpenElement(numSkippedTestsElementName);
   10.27 +	XMLOutputter(indentLevel++, NO, output);
   10.28 +
   10.29 +	output = XMLAddContent(IntToString(testSkippedCount));
   10.30 +	XMLOutputter(indentLevel, NO, output);
   10.31 +
   10.32 +	output = XMLCloseElement(numSkippedTestsElementName);
   10.33 +	XMLOutputter(--indentLevel, YES, output);
   10.34 +
   10.35 +	// log end tite
   10.36  	output = XMLOpenElement(endTimeElementName);
   10.37  	XMLOutputter(indentLevel++, NO, output);
   10.38  
   10.39 @@ -342,6 +353,9 @@
   10.40  	if(testResult) {
   10.41  		if(testResult == 2) {
   10.42  			output = XMLAddContent("failed. No assert");
   10.43 +		}
   10.44 +		else if(testResult == 3) {
   10.45 +			output = XMLAddContent("skipped");
   10.46  		} else {
   10.47  			output = XMLAddContent("failed");
   10.48  		}
    11.1 --- a/test/test-automation/xml_logger.h	Mon Jul 11 17:55:35 2011 +0300
    11.2 +++ b/test/test-automation/xml_logger.h	Mon Jul 11 21:09:28 2011 +0300
    11.3 @@ -24,7 +24,7 @@
    11.4   * \param totalRuntime How long the execution took
    11.5   */
    11.6  void XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
    11.7 -                 time_t endTime, double totalRuntime);
    11.8 +				 int testSkippedCount, time_t endTime, double totalRuntime);
    11.9  
   11.10  /*!
   11.11   * Prints the data about the test suite that'll be executed next in XML