If any assert in the SetUp function fails, that test will be skipped.
author Markus Kauppila <markus.kauppila@gmail.com>
Mon, 11 Jul 2011 21:09:28 +0300
changeset 5755 35d4ae5dea90
parent 5754 2eead798346a
child 5756 3581346510f3
If any assert in the SetUp function fails, that test will be skipped.
test/test-automation/SDL_test.c
test/test-automation/SDL_test.h
test/test-automation/logger.h
test/test-automation/plain_logger.c
test/test-automation/plain_logger.h
test/test-automation/runner.c
test/test-automation/style.xsl
test/test-automation/testdummy/testdummy.c
test/test-automation/testsurface/testsurface.c
test/test-automation/xml_logger.c
test/test-automation/xml_logger.h
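
In short: each test suite now exports _CountFailedAsserts(), the runner loads it next to the other suite entry points, and RunTest() consults it right after SetUp. A non-zero count makes the test return the new skip code 3, which both loggers report as "skipped". The core of the change, condensed from RunTest() in runner.c below:

    testItem->initTestEnvironment();

    if(testItem->testSetUp) {
        testItem->testSetUp(0x0);
    }

    /* any assert failure during SetUp marks the test as skipped */
    if(testItem->countFailedAsserts() != 0) {
        return 3;    /* new return code for "skipped" */
    }
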
--- a/test/test-automation/SDL_test.c	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/SDL_test.c	Mon Jul 11 21:09:28 2011 +0300
@@ -36,7 +36,7 @@
 int _testAssertsPassed;
 
 void
-_InitTestEnvironment() // InitTestEnvironment
+_InitTestEnvironment()
 {
 	_testReturnValue = 0;
 	_testAssertsFailed = 0;
@@ -56,8 +56,13 @@
 	return _testReturnValue;
 }
 
+int
+_CountFailedAsserts()
+{
+	return _testAssertsFailed;
+}
+
 void
-AssertEquals(const int expected, const int actual, char *message, ...)
+AssertEquals(int expected, int actual, char *message, ...)
 {
    va_list args;
    char buf[256];
--- a/test/test-automation/SDL_test.h	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/SDL_test.h	Mon Jul 11 21:09:28 2011 +0300
@@ -70,12 +70,18 @@
 int _QuitTestEnvironment();
 
 /*!
+ * Queries the number of failed asserts.
+ * \return The failed assert count.
+ */
+int _CountFailedAsserts();
+
+/*!
  *  Assert function. Tests if the expected value equals the actual value, then
  *  the test assert succeeds, otherwise it fails and warns about it.
  *
  * \param expected Value user expects to have
  * \param actual The actual value of tested variable
- * \param message Message that will be printed if assert fails
+ * \param message Message that will be printed
  */
 void AssertEquals(const int expected, const int actual, char *message, ...);
 
@@ -85,18 +91,22 @@
  *  assert passes, otherwise it fails.
  *
  * \param condition Condition which will be evaluated
- * \param message Message that will be printed if assert fails
+ * \param message Message that will be printed
  */
 void AssertTrue(int condition, char *message, ...);
 
 /*!
-\todo add markup
-*/
+ *  Assert function which will always fail
+ *
+ * \param message Message that will be printed
+ */
 void AssertFail(char *message, ...);
 
 /*!
-\todo add markup
-*/
+ *  Assert function which will always pass
+ *
+ * \param message Message that will be printed
+ */
 void AssertPass(char *message, ...);
 
 #endif
--- a/test/test-automation/logger.h	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/logger.h	Mon Jul 11 21:09:28 2011 +0300
@@ -30,7 +30,7 @@
  */
 typedef	void (*RunStartedFp)(int parameterCount, char *runnerParameters[], time_t eventTime, void *data);
 typedef	void (*RunEndedFp)(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                           time_t endTime, double totalRuntime);
+                           int testSkippedCount, time_t endTime, double totalRuntime);
 
 typedef	void (*SuiteStartedFp)(const char *suiteName, time_t eventTime);
 typedef	void (*SuiteEndedFp)(int testsPassed, int testsFailed, int testsSkipped,
--- a/test/test-automation/plain_logger.c	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/plain_logger.c	Mon Jul 11 21:09:28 2011 +0300
@@ -54,13 +54,14 @@
 
 void
 PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-              time_t endTime, double totalRuntime)
+			  int testSkippedCount, time_t endTime, double totalRuntime)
 {
 	Output(indentLevel, "Ran %d tests in %0.5f seconds from %d suites.",
 			testCount, totalRuntime, suiteCount);
 
 	Output(indentLevel, "%d tests passed", testPassCount);
 	Output(indentLevel, "%d tests failed", testFailCount);
+	Output(indentLevel, "%d tests skipped", testSkippedCount);
 }
 
 void
@@ -91,6 +92,9 @@
 	if(testResult) {
 		if(testResult == 2) {
 			Output(--indentLevel, "%s: failed -> no assert", testName);
+		}
+		else if(testResult == 3) {
+			Output(--indentLevel, "%s: skipped", testName);
 		} else {
 			Output(--indentLevel, "%s: failed", testName);
 		}
@@ -104,7 +108,7 @@
 		time_t eventTime)
 {
 	const char *result = (assertResult) ? "passed" : "failed";
-	Output(indentLevel, "%s: %s; %s", assertName, result, assertMessage);
+	Output(indentLevel, "%s: %s - %s", assertName, result, assertMessage);
 }
 
 void
@@ -112,7 +116,7 @@
 		int actualValue, int expected, time_t eventTime)
 {
 	const char *result = (assertResult) ? "passed" : "failed";
-	Output(indentLevel, "%s %s (expected %d, actualValue &d): %s",
+	Output(indentLevel, "%s: %s (expected %d, actualValue %d) - %s",
 			assertName, result, expected, actualValue, assertMessage);
 }
 
--- a/test/test-automation/plain_logger.h	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/plain_logger.h	Mon Jul 11 21:09:28 2011 +0300
@@ -26,7 +26,7 @@
  * \param totalRuntime How long the execution took
  */
 void PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                   time_t endTime, double totalRuntime);
+				   int testSkippedCount, time_t endTime, double totalRuntime);
 
 /*!
  * Prints the data about the test suite that'll be executed next
--- a/test/test-automation/runner.c	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/runner.c	Mon Jul 11 21:09:28 2011 +0300
@@ -46,6 +46,8 @@
 typedef void (*TestCaseSetUpFp)(void *arg);
 //!< Function pointer to a test case tear down function
 typedef void  (*TestCaseTearDownFp)(void *arg);
+//!< Function pointer to a function which returns the failed assert count
+typedef int (*CountFailedAssertsFp)(void);
 
 
 //!< Flag for executing tests in-process
@@ -115,6 +117,8 @@
 	TestCaseTearDownFp testTearDown;
  	QuitTestInvironmentFp quitTestEnvironment;
 
+ 	CountFailedAssertsFp countFailedAsserts;
+
 	struct TestCaseItem *next;
 } TestCase;
 
@@ -126,6 +130,7 @@
 TestCaseReference **QueryTestCaseReferences(void *library);
 TestCaseSetUpFp LoadTestSetUpFunction(void *suite);
 TestCaseTearDownFp LoadTestTearDownFunction(void *suite);
+CountFailedAssertsFp LoadCountFailedAssertsFunction(void *suite);
 
 
 /*! Pointers to selected logger implementation */
@@ -142,143 +147,6 @@
 
 
 /*!
- * Goes through the previously loaded test suites and
- * loads test cases from them. Test cases are filtered
- * during the process. Function will only return the
- * test cases which aren't filtered out.
- *
- * \param suites previously loaded test suites
- *
- * \return Test cases that survived filtering process.
- */
-TestCase *
-LoadTestCases(TestSuiteReference *suites)
-{
-	TestCase *testCases = NULL;
-
-	TestSuiteReference *suiteReference = NULL;
-	for(suiteReference = suites; suiteReference; suiteReference = suiteReference->next) {
-		TestCaseReference **tests = QueryTestCaseReferences(suiteReference->library);
-
-		TestCaseReference *testReference = NULL;
-		int counter = 0;
-		for(testReference = tests[counter]; testReference; testReference = tests[++counter]) {
-
-			void *suite = suiteReference->library;
-
-			// Load test case functions
-			InitTestInvironmentFp initTestEnvironment = LoadInitTestInvironmentFunction(suiteReference->library);
-			QuitTestInvironmentFp quitTestEnvironment = LoadQuitTestInvironmentFunction(suiteReference->library);
-
-			TestCaseSetUpFp testSetUp = LoadTestSetUpFunction(suiteReference->library);
-			TestCaseTearDownFp testTearDown = LoadTestTearDownFunction(suiteReference->library);
-
-			TestCaseFp testCase = LoadTestCaseFunction(suiteReference->library, testReference->name);
-
-			// Do the filtering
-			if(FilterTestCase(testReference)) {
-				TestCase *item = SDL_malloc(sizeof(TestCase));
-				memset(item, 0, sizeof(TestCase));
-
-				item->initTestEnvironment = initTestEnvironment;
-				item->quitTestEnvironment = quitTestEnvironment;
-
-				item->testSetUp = testSetUp;
-				item->testTearDown = testTearDown;
-
-				item->testCase = testCase;
-
-				// copy suite name
-				int length = SDL_strlen(suiteReference->name) + 1;
-				item->suiteName = SDL_malloc(length);
-				strncpy(item->suiteName, suiteReference->name, length);
-
-				// copy test name
-				length = SDL_strlen(testReference->name) + 1;
-				item->testName = SDL_malloc(length);
-				strncpy(item->testName, testReference->name, length);
-
-				// copy test description
-				length = SDL_strlen(testReference->description) + 1;
-				item->description = SDL_malloc(length);
-				strncpy(item->description, testReference->description, length);
-
-				item->requirements = testReference->requirements;
-				item->timeout = testReference->timeout;
-
-				// prepend the list
-				item->next = testCases;
-				testCases = item;
-
-				//printf("Added test: %s\n", testReference->name);
-			}
-		}
-	}
-
-	return testCases;
-}
-
-
-/*!
- * Unloads the given TestCases. Frees all the resources
- * allocated for test cases.
- *
- * \param testCases Test cases to be deallocated
- */
-void
-UnloadTestCases(TestCase *testCases)
-{
-	TestCase *ref = testCases;
-	while(ref) {
-		SDL_free(ref->testName);
-		SDL_free(ref->suiteName);
-		SDL_free(ref->description);
-
-		TestCase *temp = ref->next;
-		SDL_free(ref);
-		ref = temp;
-	}
-
-	testCases = NULL;
-}
-
-
-/*!
- * Filters a test case based on its properties in TestCaseReference and user
- * preference.
- *
- * \return Non-zero means test will be added to execution list, zero means opposite
- */
-int
-FilterTestCase(TestCaseReference *testReference)
-{
-	int retVal = 1;
-
-	if(testReference->enabled == TEST_DISABLED) {
-		retVal = 0;
-	}
-
-	if(only_selected_test) {
-		if(SDL_strncmp(testReference->name, selected_test_name, NAME_BUFFER_SIZE) == 0) {
-			retVal = 1;
-		} else {
-			retVal = 0;
-		}
-	}
-
-	if(only_tests_with_string) {
-		if(strstr(testReference->name, testcase_name_substring) != NULL) {
-			retVal = 1;
-		} else {
-			retVal = 0;
-		}
-	}
-
-	return retVal;
-}
-
-
-/*!
  * Scans the tests/ directory and returns the names
  * of the dynamic libraries implementing the test suites.
  *
@@ -298,10 +166,9 @@
 {
 	typedef struct dirent Entry;
 	DIR *directory = opendir(directoryName);
+	TestSuiteReference *suites = NULL;
+	Entry *entry = NULL;
 
-	TestSuiteReference *suites = NULL;
-
-	Entry *entry = NULL;
 	if(!directory) {
 		fprintf(stderr, "Failed to open test suite directory: %s\n", directoryName);
 		perror("Error message");
@@ -323,9 +190,12 @@
 			if(ok && SDL_strcmp(ext, extension)  == 0) {
 				// create test suite reference
 				TestSuiteReference *reference = (TestSuiteReference *) SDL_malloc(sizeof(TestSuiteReference));
+				if(reference == NULL) {
+					fprintf(stderr, "Allocating TestSuiteReference failed\n");
+				}
+
 				memset(reference, 0, sizeof(TestSuiteReference));
 
-
 				const int dirSize = SDL_strlen(directoryName);
 				const int extSize = SDL_strlen(ext);
 				const int nameSize = SDL_strlen(name) + 1;
@@ -428,6 +298,147 @@
 
 
 /*!
+ * Goes through the previously loaded test suites and
+ * loads test cases from them. Test cases are filtered
+ * during the process. Function will only return the
+ * test cases which aren't filtered out.
+ *
+ * \param suites previously loaded test suites
+ *
+ * \return Test cases that survived filtering process.
+ */
+TestCase *
+LoadTestCases(TestSuiteReference *suites)
+{
+	TestCase *testCases = NULL;
+
+	TestSuiteReference *suiteReference = NULL;
+	for(suiteReference = suites; suiteReference; suiteReference = suiteReference->next) {
+		TestCaseReference **tests = QueryTestCaseReferences(suiteReference->library);
+
+		TestCaseReference *testReference = NULL;
+		int counter = 0;
+		for(testReference = tests[counter]; testReference; testReference = tests[++counter]) {
+
+			void *suite = suiteReference->library;
+
+			// Load test case functions
+			InitTestInvironmentFp initTestEnvironment = LoadInitTestInvironmentFunction(suiteReference->library);
+			QuitTestInvironmentFp quitTestEnvironment = LoadQuitTestInvironmentFunction(suiteReference->library);
+
+			TestCaseSetUpFp testSetUp = LoadTestSetUpFunction(suiteReference->library);
+			TestCaseTearDownFp testTearDown = LoadTestTearDownFunction(suiteReference->library);
+
+			TestCaseFp testCase = LoadTestCaseFunction(suiteReference->library, testReference->name);
+
+			CountFailedAssertsFp countFailedAsserts = LoadCountFailedAssertsFunction(suiteReference->library);
+
+			// Do the filtering
+			if(FilterTestCase(testReference)) {
+				TestCase *item = SDL_malloc(sizeof(TestCase));
+				memset(item, 0, sizeof(TestCase));
+
+				item->initTestEnvironment = initTestEnvironment;
+				item->quitTestEnvironment = quitTestEnvironment;
+
+				item->testSetUp = testSetUp;
+				item->testTearDown = testTearDown;
+
+				item->testCase = testCase;
+
+				item->countFailedAsserts = countFailedAsserts;
+
+				// copy suite name
+				int length = SDL_strlen(suiteReference->name) + 1;
+				item->suiteName = SDL_malloc(length);
+				strncpy(item->suiteName, suiteReference->name, length);
+
+				// copy test name
+				length = SDL_strlen(testReference->name) + 1;
+				item->testName = SDL_malloc(length);
+				strncpy(item->testName, testReference->name, length);
+
+				// copy test description
+				length = SDL_strlen(testReference->description) + 1;
+				item->description = SDL_malloc(length);
+				strncpy(item->description, testReference->description, length);
+
+				item->requirements = testReference->requirements;
+				item->timeout = testReference->timeout;
+
+				// prepend the list
+				item->next = testCases;
+				testCases = item;
+
+				//printf("Added test: %s\n", testReference->name);
+			}
+		}
+	}
+
+	return testCases;
+}
+
+
+/*!
+ * Unloads the given TestCases. Frees all the resources
+ * allocated for test cases.
+ *
+ * \param testCases Test cases to be deallocated
+ */
+void
+UnloadTestCases(TestCase *testCases)
+{
+	TestCase *ref = testCases;
+	while(ref) {
+		SDL_free(ref->testName);
+		SDL_free(ref->suiteName);
+		SDL_free(ref->description);
+
+		TestCase *temp = ref->next;
+		SDL_free(ref);
+		ref = temp;
+	}
+
+	testCases = NULL;
+}
+
+
+/*!
+ * Filters a test case based on its properties in TestCaseReference and user
+ * preference.
+ *
+ * \return Non-zero means test will be added to execution list, zero means opposite
+ */
+int
+FilterTestCase(TestCaseReference *testReference)
+{
+	int retVal = 1;
+
+	if(testReference->enabled == TEST_DISABLED) {
+		retVal = 0;
+	}
+
+	if(only_selected_test) {
+		if(SDL_strncmp(testReference->name, selected_test_name, NAME_BUFFER_SIZE) == 0) {
+			retVal = 1;
+		} else {
+			retVal = 0;
+		}
+	}
+
+	if(only_tests_with_string) {
+		if(strstr(testReference->name, testcase_name_substring) != NULL) {
+			retVal = 1;
+		} else {
+			retVal = 0;
+		}
+	}
+
+	return retVal;
+}
+
+
+/*!
  * Loads the test case references from the given test suite.
 
  * \param library Previously loaded dynamic library AKA test suite
@@ -436,21 +447,21 @@
 TestCaseReference **
 QueryTestCaseReferences(void *library)
 {
-        TestCaseReference **(*suite)(void);
+	TestCaseReference **(*suite)(void);
 
-        suite = (TestCaseReference **(*)(void)) SDL_LoadFunction(library, "QueryTestSuite");
-        if(suite == NULL) {
-                fprintf(stderr, "Loading QueryTestCaseReferences() failed.\n");
-                fprintf(stderr, "%s\n", SDL_GetError());
-        }
+	suite = (TestCaseReference **(*)(void)) SDL_LoadFunction(library, "QueryTestSuite");
+	if(suite == NULL) {
+		fprintf(stderr, "Loading QueryTestCaseReferences() failed.\n");
+		fprintf(stderr, "%s\n", SDL_GetError());
+	}
 
-        TestCaseReference **tests = suite();
-        if(tests == NULL) {
-                fprintf(stderr, "Failed to load test references.\n");
-                fprintf(stderr, "%s\n", SDL_GetError());
-        }
+	TestCaseReference **tests = suite();
+	if(tests == NULL) {
+		fprintf(stderr, "Failed to load test references.\n");
+		fprintf(stderr, "%s\n", SDL_GetError());
+	}
 
-        return tests;
+	return tests;
 }
 
 
@@ -554,6 +565,81 @@
 	return testEnvQuit;
 }
 
+/*!
+ * Loads the function that returns the failed assert count in the
+ * current test environment.
+ *
+ * \param suite Used test suite
+ *
+ * \return Function pointer to _CountFailedAsserts function
+ */
+CountFailedAssertsFp
+LoadCountFailedAssertsFunction(void *suite) {
+	CountFailedAssertsFp countFailedAssert = (CountFailedAssertsFp) SDL_LoadFunction(suite, "_CountFailedAsserts");
+	if(countFailedAssert == NULL) {
+		fprintf(stderr, "Loading _CountFailedAsserts function failed, countFailedAssert == NULL\n");
+		fprintf(stderr, "%s\n", SDL_GetError());
+	}
+
+	return countFailedAssert;
+}
+
+
+/*!
+ * Executes the test: init environment, SetUp, the test case itself, TearDown.
+ *
+ * \param testItem Test to be executed
+ * \return Zero on success, 3 if the test was skipped, other non-zero on failure
+ */
+int
+RunTest(TestCase *testItem) {
+	testItem->initTestEnvironment();
+
+	if(testItem->testSetUp) {
+		testItem->testSetUp(0x0);
+	}
+
+	int cntFailedAsserts = testItem->countFailedAsserts();
+	if(cntFailedAsserts != 0) {
+		return 3;
+	}
+
+	testItem->testCase(0x0);
+
+	if(testItem->testTearDown) {
+		testItem->testTearDown(0x0);
+	}
+
+	return testItem->quitTestEnvironment();
+}
+
+/*!
+ * Executes a test case, either in this process or in a forked child
+ * depending on execute_inproc, and returns the test's return value.
+ *
+ * \param testItem The test case that will be executed
+ * \return The test's return value: zero means success, 3 skipped, other non-zero failure.
+ */
+int
+ExecuteTest(TestCase *testItem) {
+	int retVal = 1;
+
+	if(execute_inproc) {
+		retVal = RunTest(testItem);
+	} else {
+		int childpid = fork();
+		if(childpid == 0) {
+			exit(RunTest(testItem));
+		} else {
+			int stat_lock = -1;
+			int child = wait(&stat_lock);
+
+			retVal = HandleChildProcessReturnValue(stat_lock);
+		}
+	}
+
+	return retVal;
+}
+
 
 /*!
  * If using out-of-proc execution of tests. This function
@@ -584,56 +670,58 @@
 
 
 /*!
- * Executes a test case. Loads the test, executes it and
- * returns the tests return value to the caller.
+ * Sets up the logger.
  *
- * \param testItem The test case that will be executed
- * \return The return value of the test. Zero means success, non-zero failure.
+ * \return Logger-specific data (the XSL style sheet name, or NULL) that gets passed to the RunStarted() call
  */
-int
-ExecuteTest(TestCase *testItem) {
-	int retVal = 1;
-	if(execute_inproc) {
-		testItem->initTestEnvironment();
+void *
+SetUpLogger()
+{
+	void *loggerData = NULL;
+	if(xml_enabled) {
+		RunStarted = XMLRunStarted;
+		RunEnded = XMLRunEnded;
+
+		SuiteStarted = XMLSuiteStarted;
+		SuiteEnded = XMLSuiteEnded;
 
-		if(testItem->testSetUp) {
-			testItem->testSetUp(0x0);
-		}
+		TestStarted = XMLTestStarted;
+		TestEnded = XMLTestEnded;
 
-		testItem->testCase(0x0);
+		Assert = XMLAssert;
+		AssertWithValues = XMLAssertWithValues;
+		AssertSummary = XMLAssertSummary;
 
-		if(testItem->testTearDown) {
-			testItem->testTearDown(0x0);
+		Log = XMLLog;
+
+		char *sheet = NULL;
+		if(xsl_enabled) {
+			sheet = "style.xsl"; // default style sheet;
 		}
 
-		retVal = testItem->quitTestEnvironment();
-	} else {
-		int childpid = fork();
-		if(childpid == 0) {
-			testItem->initTestEnvironment();
+		if(custom_xsl_enabled) {
+			sheet = xsl_stylesheet_name;
+		}
 
-			if(testItem->testSetUp) {
-				testItem->testSetUp(0x0);
-			}
-
-			testItem->testCase(0x0);
+		loggerData = sheet;
+	} else {
+		RunStarted = PlainRunStarted;
+		RunEnded = PlainRunEnded;
 
-			// note: if test case is is aborted by some signal
-			// then TearDown function won't be called
-			if(testItem->testTearDown) {
-				testItem->testTearDown(0x0);
-			}
+		SuiteStarted = PlainSuiteStarted;
+		SuiteEnded = PlainSuiteEnded;
+
+		TestStarted = PlainTestStarted;
+		TestEnded = PlainTestEnded;
 
-			exit(testItem->quitTestEnvironment());
-		} else {
-			int stat_lock = -1;
-			int child = wait(&stat_lock);
+		Assert = PlainAssert;
+		AssertWithValues = PlainAssertWithValues;
+		AssertSummary = PlainAssertSummary;
 
-			retVal = HandleChildProcessReturnValue(stat_lock);
-		}
+		Log = PlainLog;
 	}
 
-	return retVal;
+	return loggerData;
 }
 
 
@@ -771,7 +859,7 @@
 
 	// print: Testing against SDL version fuu (rev: bar) if verbose == true
 
-	int totalTestfailureCount = 0, totalTestPassCount = 0;
+	int totalTestFailureCount = 0, totalTestPassCount = 0, totalTestSkipCount = 0;
 	int testFailureCount = 0, testPassCount = 0, testSkipCount = 0;
 	char *testSuiteName = NULL;
 	int suiteCounter = 0;
@@ -782,49 +870,7 @@
 	char *extension = "dylib";
 #endif
 
-	void *loggerData = NULL;
-	if(xml_enabled) {
-		RunStarted = XMLRunStarted;
-		RunEnded = XMLRunEnded;
-
-		SuiteStarted = XMLSuiteStarted;
-		SuiteEnded = XMLSuiteEnded;
-
-		TestStarted = XMLTestStarted;
-		TestEnded = XMLTestEnded;
-
-		Assert = XMLAssert;
-		AssertWithValues = XMLAssertWithValues;
-		AssertSummary = XMLAssertSummary;
-
-		Log = XMLLog;
-
-		char *sheet = NULL;
-		if(xsl_enabled) {
-			sheet = "style.xsl"; // default style sheet;
-		}
-
-		if(custom_xsl_enabled) {
-			sheet = xsl_stylesheet_name;
-		}
-
-		loggerData = sheet;
-	} else {
-		RunStarted = PlainRunStarted;
-		RunEnded = PlainRunEnded;
-
-		SuiteStarted = PlainSuiteStarted;
-		SuiteEnded = PlainSuiteEnded;
-
-		TestStarted = PlainTestStarted;
-		TestEnded = PlainTestEnded;
-
-		Assert = PlainAssert;
-		AssertWithValues = PlainAssertWithValues;
-		AssertSummary = PlainAssertSummary;
-
-		Log = PlainLog;
-	}
+	void *loggerData = SetUpLogger();
 
 	const Uint32 startTicks = SDL_GetTicks();
 
@@ -845,9 +891,7 @@
 
 	RunStarted(argc, argv, time(0), loggerData);
 
-
 	char *currentSuiteName = NULL;
-
 	int suiteStartTime = SDL_GetTicks();
 
 	TestCase *testItem = NULL;
@@ -856,7 +900,7 @@
 			currentSuiteName = testItem->suiteName;
 			SuiteStarted(currentSuiteName, time(0));
 
-			testFailureCount = testPassCount = 0;
+			testFailureCount = testPassCount = testSkipCount = 0;
 
 			suiteCounter++;
 		}
@@ -871,7 +915,7 @@
 			currentSuiteName = testItem->suiteName;
 			SuiteStarted(currentSuiteName, time(0));
 
-			testFailureCount = testPassCount = 0;
+			testFailureCount = testPassCount = testSkipCount = 0;
 
 			suiteCounter++;
 		}
@@ -882,8 +926,12 @@
 		const Uint32 testTimeStart = SDL_GetTicks();
 
 		int retVal = ExecuteTest(testItem);
-		if(retVal) {
-			totalTestfailureCount++;
+		if(retVal == 3) {
+			testSkipCount++;
+			totalTestSkipCount++;
+		}
+		else if(retVal) {
+			totalTestFailureCount++;
 			testFailureCount++;
 		} else {
 			totalTestPassCount++;
@@ -906,8 +954,8 @@
 	const Uint32 endTicks = SDL_GetTicks();
 	const double totalRunTime = (endTicks - startTicks) / 1000.0f;
 
-	RunEnded(totalTestPassCount + totalTestfailureCount, suiteCounter,
-			 totalTestPassCount, totalTestfailureCount, time(0), totalRunTime);
+	RunEnded(totalTestPassCount + totalTestFailureCount, suiteCounter,
+			 totalTestPassCount, totalTestFailureCount, totalTestSkipCount, time(0), totalRunTime);
 
-	return (totalTestfailureCount ? 1 : 0);
+	return (totalTestFailureCount ? 1 : 0);
 }
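
Note on out-of-process runs: the skip code travels as the child's exit status. The child calls exit(RunTest(testItem)) and the parent recovers the value through wait(). A minimal standalone sketch of that round trip (RunInChild() is a made-up helper; the runner's HandleChildProcessReturnValue() presumably does the same WIFEXITED/WEXITSTATUS unpacking):

    #include <stdlib.h>
    #include <sys/wait.h>
    #include <unistd.h>

    /* Sketch: how RunTest()'s return value (0, 1 or 3) crosses fork()/wait() */
    int
    RunInChild(void)
    {
        pid_t childpid = fork();
        if(childpid == 0) {
            exit(3);    /* child: pretend SetUp asserts failed */
        }

        int stat_lock = -1;
        wait(&stat_lock);
        if(WIFEXITED(stat_lock)) {
            return WEXITSTATUS(stat_lock);    /* parent receives 3 again */
        }
        return 1;    /* abnormal termination counts as a failure */
    }
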
--- a/test/test-automation/style.xsl	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/style.xsl	Mon Jul 11 21:09:28 2011 +0300
@@ -104,6 +104,7 @@
 	/* Color the tests based on the result */
 	$("span.testResult[result='passed']").addClass('passed');
 	$("span.testResult[result='failed']").addClass('failed');
+	$("span.testResult[result='skipped']").addClass('skipped');
 	
 	/* Color the asserts based on the result */
 	$("span.assertResult[result='pass']").addClass('passed');
@@ -157,6 +158,10 @@
  color: red;
 }
 
+.skipped {
+ color: gray;
+}
+
 </style>
 
 </head>
--- a/test/test-automation/testdummy/testdummy.c	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/testdummy/testdummy.c	Mon Jul 11 21:09:28 2011 +0300
@@ -56,6 +56,9 @@
  * SetUp function can be used to create a test fixture for test cases.
  * The function will be called right before executing the test case.
  *
+ * Note: If any assert in this function fails, the test will be skipped.
+ * In practice the entire suite gets skipped, since SetUp runs before
+ * every test case in the suite.
+ *
  * Note: this function is optional.
  *
  * \param arg parameters given to test. Usually NULL
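
To make the note above concrete: a fixture like this hypothetical one makes every case in its suite come out as skipped (assuming AssertFail() bumps the failed-assert counter like the other asserts do):

    void
    SetUp(void *arg)
    {
        /* hypothetical: force a SetUp failure for illustration */
        AssertFail("fixture resource unavailable");
    }

The runner then sees a non-zero _CountFailedAsserts() right after SetUp, RunTest() returns 3, and the plain logger prints "<testname>: skipped" for each case.
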
--- a/test/test-automation/testsurface/testsurface.c	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/testsurface/testsurface.c	Mon Jul 11 21:09:28 2011 +0300
@@ -32,11 +32,39 @@
 	return (TestCaseReference **)testSuite;
 }
 
+/* Function prototypes */
+SDL_Surface *_CreateTestSurface();
+
+
+/* Create test fixture */
+
+static SDL_Surface *testsur = NULL;
+
+
+void
+SetUp(void *arg)
+{
+	int ret = SDL_Init(SDL_INIT_VIDEO);
+	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
+
+	testsur = _CreateTestSurface();
+	AssertTrue(testsur != NULL, "_CreateTestSurface()");
+}
+
+void
+TearDown(void *arg)
+{
+	SDL_FreeSurface( testsur );
+
+	SDL_Quit();
+}
+
 /* Helper functions for the test cases */
 
 #define TEST_SURFACE_WIDTH 80
 #define TEST_SURFACE_HEIGHT 60
 
+
 /*!
  * Creates test surface
  */
@@ -66,7 +94,7 @@
 /**
  * @brief Tests a blend mode.
  */
-int _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
+void _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
 {
 	int ret;
 	int i, j, ni, nj;
@@ -102,8 +130,6 @@
 		 ret = SDL_BlitSurface( face, NULL, testsur, &rect );
 		 AssertTrue(ret != 0, "SDL_BlitSurface");	  }
 	}
-
-	return 0;
 }
 
 /* Test case functions */
@@ -115,13 +141,8 @@
 	int ret;
     SDL_Surface *face, *rface;
 
-	ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-	SDL_Surface *testsur = _CreateTestSurface();
-
-   /* Clear surface. */
-   ret = SDL_FillRect( testsur, NULL,
+    /* Clear surface. */
+    ret = SDL_FillRect( testsur, NULL,
          SDL_MapRGB( testsur->format, 0, 0, 0 ) );
 	AssertTrue(ret == 0,  "SDL_FillRect");
 
@@ -151,10 +172,6 @@
    /* Clean up. */
    SDL_FreeSurface( rface );
    SDL_FreeSurface( face );
-
-   SDL_FreeSurface( testsur );
-
-   SDL_Quit();
 }
 
 
@@ -163,14 +180,8 @@
  */
 void surface_testLoadFailure(void *arg)
 {
-	int ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
 	SDL_Surface *face = SDL_LoadBMP("nonexistant.bmp");
-
 	AssertTrue(face == NULL, "SDL_CreateLoadBmp");
-
-	SDL_Quit();
 }
 
 
@@ -184,11 +195,6 @@
    SDL_Surface *face;
    int i, j, ni, nj;
 
-	ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-   SDL_Surface *testsur = _CreateTestSurface();
-
    /* Clear surface. */
    ret = SDL_FillRect( testsur, NULL,
          SDL_MapRGB( testsur->format, 0, 0, 0 ) );
@@ -292,9 +298,6 @@
 
    /* Clean up. */
    SDL_FreeSurface( face );
-   SDL_FreeSurface( testsur );
-
-   SDL_Quit();
 }
 
 /**
@@ -308,11 +311,6 @@
    int i, j, ni, nj;
    int mode;
 
-	ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-   SDL_Surface *testsur = _CreateTestSurface();
-
    /* Clear surface. */
    ret = SDL_FillRect( testsur, NULL,
          SDL_MapRGB( testsur->format, 0, 0, 0 ) );
@@ -415,7 +413,4 @@
 
    /* Clean up. */
    SDL_FreeSurface( face );
-   SDL_FreeSurface( testsur );
-
-   SDL_Quit();
 }
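
With the fixture in place, every test case in testsurface.c can assume a live video subsystem and a valid testsur on entry; a new case only needs its own body. A sketch following the pattern above (surface_testSomething is a made-up name):

    void surface_testSomething(void *arg)
    {
        /* testsur was created in SetUp(); TearDown() frees it and calls SDL_Quit() */
        int ret = SDL_FillRect(testsur, NULL,
                SDL_MapRGB(testsur->format, 0, 0, 0));
        AssertTrue(ret == 0, "SDL_FillRect");
    }
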
--- a/test/test-automation/xml_logger.c	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/xml_logger.c	Mon Jul 11 21:09:28 2011 +0300
@@ -38,6 +38,7 @@
 const char *numTestElementName = "numTests";
 const char *numPassedTestsElementName = "numPassedTests";
 const char *numFailedTestsElementName = "numFailedTests";
+const char *numSkippedTestsElementName = "numSkippedTests";
 const char *endTimeElementName = "endTime";
 const char *totalRuntimeElementName = "totalRuntime";
 const char *suiteElementName = "suite";
@@ -145,7 +146,7 @@
 
 void
 XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-            time_t endTime, double totalRuntime)
+			int testSkippedCount, time_t endTime, double totalRuntime)
 {
 	// log suite count
 	char *output = XMLOpenElement(numSuitesElementName);
@@ -187,7 +188,17 @@
 	output = XMLCloseElement(numFailedTestsElementName);
 	XMLOutputter(--indentLevel, YES, output);
 
-	// log end timte
+	// log skipped test count
+	output = XMLOpenElement(numSkippedTestsElementName);
+	XMLOutputter(indentLevel++, NO, output);
+
+	output = XMLAddContent(IntToString(testSkippedCount));
+	XMLOutputter(indentLevel, NO, output);
+
+	output = XMLCloseElement(numSkippedTestsElementName);
+	XMLOutputter(--indentLevel, YES, output);
+
+	// log end time
 	output = XMLOpenElement(endTimeElementName);
 	XMLOutputter(indentLevel++, NO, output);
 
@@ -342,6 +353,9 @@
 	if(testResult) {
 		if(testResult == 2) {
 			output = XMLAddContent("failed. No assert");
+		}
+		else if(testResult == 3) {
+			output = XMLAddContent("skipped");
 		} else {
 			output = XMLAddContent("failed");
 		}
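
In the XML log the run summary now carries a skipped count next to the pass/fail counts. The emitted fragment looks roughly like this (illustrative values; exact indentation comes from XMLOutputter()):

    <numPassedTests>10</numPassedTests>
    <numFailedTests>1</numFailedTests>
    <numSkippedTests>2</numSkippedTests>
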
--- a/test/test-automation/xml_logger.h	Mon Jul 11 17:55:35 2011 +0300
+++ b/test/test-automation/xml_logger.h	Mon Jul 11 21:09:28 2011 +0300
@@ -24,7 +24,7 @@
  * \param totalRuntime How long the execution took
  */
 void XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                 time_t endTime, double totalRuntime);
+				 int testSkippedCount, time_t endTime, double totalRuntime);
 
 /*!
  * Prints the data about the test suite that'll be executed next in XML