PocketInsanity / libSDL · Commits

Commit 6d3454e1
authored Jul 11, 2011 by Markus Kauppila
If any assert in SetUp function fails that test will be skipped.
parent 3efe0fed

Showing 11 changed files with 377 additions and 293 deletions (+377 −293)
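In outline, the change adds a _CountFailedAsserts() hook to each test library and has the runner query it right after the suite's SetUp function has run: if any SetUp assert failed, the runner returns result code 3 and the loggers report the test as "skipped" instead of executing the test body. A condensed sketch of that flow, based on the RunTest() function added in runner.c below (not a verbatim copy):

    /* Condensed from RunTest() in the runner.c hunk below. */
    int
    RunTest(TestCase *testItem)
    {
        testItem->initTestEnvironment();

        if(testItem->testSetUp) {
            testItem->testSetUp(0x0);           /* the suite's SetUp may call Assert*() */
        }

        if(testItem->countFailedAsserts() != 0) {
            return 3;                           /* loggers map result code 3 to "skipped" */
        }

        testItem->testCase(0x0);                /* only reached if SetUp's asserts passed */

        if(testItem->testTearDown) {
            testItem->testTearDown(0x0);
        }

        return testItem->quitTestEnvironment();
    }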
SDL_test.c      test/test-automation/SDL_test.c                  +7   −2
SDL_test.h      test/test-automation/SDL_test.h                  +16  −6
logger.h        test/test-automation/logger.h                    +1   −1
plain_logger.c  test/test-automation/plain_logger.c              +7   −3
plain_logger.h  test/test-automation/plain_logger.h              +1   −1
runner.c        test/test-automation/runner.c                    +289 −241
style.xsl       test/test-automation/style.xsl                   +5   −0
testdummy.c     test/test-automation/testdummy/testdummy.c       +3   −0
testsurface.c   test/test-automation/testsurface/testsurface.c   +31  −36
xml_logger.c    test/test-automation/xml_logger.c                +16  −2
xml_logger.h    test/test-automation/xml_logger.h                +1   −1
test/test-automation/SDL_test.c

@@ -36,7 +36,7 @@ int _testAssertsFailed;
 int _testAssertsPassed;
 
 void
-_InitTestEnvironment() // InitTestEnvironment
+_InitTestEnvironment()
 {
 	_testReturnValue = 0;
 	_testAssertsFailed = 0;

@@ -56,8 +56,13 @@ _QuitTestEnvironment()
 	return _testReturnValue;
 }
 
+int
+_CountFailedAsserts() {
+	return _testAssertsFailed;
+}
+
 void
-AssertEquals(const int expected, const int actual, char *message, ...)
+AssertEquals(int expected, int actual, char *message, ...)
 {
 	va_list args;
 	char buf[256];
test/test-automation/SDL_test.h

@@ -69,13 +69,19 @@ void _InitTestEnvironment();
  */
 int _QuitTestEnvironment();
 
+/*!
+ * Can be used to query the number of failed asserts
+ * \return Returns the failed assert count.
+ */
+int _CountFailedAsserts();
+
 /*!
  * Assert function. Tests if the expected value equals the actual value, then
  * the test assert succeeds, otherwise it fails and warns about it.
  *
  * \param expected Value user expects to have
  * \param actual The actual value of tested variable
- * \param message Message that will be printed if assert fails
+ * \param message Message that will be printed
  */
 void AssertEquals(const int expected, const int actual, char *message, ...);

@@ -85,18 +91,22 @@ void AssertEquals(const int expected, const int actual, char *message, ...);
  * assert passes, otherwise it fails.
  *
  * \param condition Condition which will be evaluated
- * \param message Message that will be printed if assert fails
+ * \param message Message that will be printed
  */
 void AssertTrue(int condition, char *message, ...);
 
 /*!
-\todo add markup
+ * Assert function which will always fail
+ *
+ * \param message Message that will be printed
  */
 void AssertFail(char *message, ...);
 
 /*!
-\todo add markup
+ * Assert function which will always pass
+ *
+ * \param message Message that will be printed
  */
 void AssertPass(char *message, ...);
 
 #endif
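For orientation, a minimal test case written against the assert API declared above could look like the sketch below; the test function and its values are hypothetical and not part of this commit:

    /* Hypothetical test case using the asserts declared in SDL_test.h. */
    void
    dummy_testArithmetic(void *arg)
    {
        AssertEquals(4, 2 + 2, "2 + 2 should be 4");
        AssertTrue(1 < 2, "1 is less than 2");
        /* The runner can later query _CountFailedAsserts() to see whether
           any of the asserts above failed. */
    }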
test/test-automation/logger.h

@@ -30,7 +30,7 @@
  */
 typedef void (*RunStartedFp)(int parameterCount, char *runnerParameters[], time_t eventTime,
                              void *data);
 typedef void (*RunEndedFp)(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                           time_t endTime, double totalRuntime);
+                           int testSkippedCount, time_t endTime, double totalRuntime);
 typedef void (*SuiteStartedFp)(const char *suiteName, time_t eventTime);
 typedef void (*SuiteEndedFp)(int testsPassed, int testsFailed, int testsSkipped,
test/test-automation/plain_logger.c

@@ -54,13 +54,14 @@ PlainRunStarted(int parameterCount, char *runnerParameters[], time_t eventTime,
 void
 PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-              time_t endTime, double totalRuntime)
+              int testSkippedCount, time_t endTime, double totalRuntime)
 {
 	Output(indentLevel, "Ran %d tests in %0.5f seconds from %d suites.",
 	       testCount, totalRuntime, suiteCount);
 
 	Output(indentLevel, "%d tests passed", testPassCount);
 	Output(indentLevel, "%d tests failed", testFailCount);
+	Output(indentLevel, "%d tests skipped", testSkippedCount);
 }
 
 void

@@ -91,6 +92,9 @@ PlainTestEnded(const char *testName, const char *suiteName,
 	if(testResult) {
 		if(testResult == 2) {
 			Output(--indentLevel, "%s: failed -> no assert", testName);
 		}
+		else if(testResult == 3) {
+			Output(--indentLevel, "%s: skipped", testName);
+		}
 		else {
 			Output(--indentLevel, "%s: failed", testName);
 		}

@@ -104,7 +108,7 @@ PlainAssert(const char *assertName, int assertResult, const char *assertMessage,
 	   time_t eventTime)
 {
 	const char *result = (assertResult) ? "passed" : "failed";
-	Output(indentLevel, "%s: %s; %s", assertName, result, assertMessage);
+	Output(indentLevel, "%s: %s - %s", assertName, result, assertMessage);
 }
 
 void

@@ -112,7 +116,7 @@ PlainAssertWithValues(const char *assertName, int assertResult, const char *asse
 	   int actualValue, int expected, time_t eventTime)
 {
 	const char *result = (assertResult) ? "passed" : "failed";
-	Output(indentLevel, "%s %s (expected %d, actualValue &d): %s",
+	Output(indentLevel, "%s: %s (expected %d, actualValue &d) - %s",
 	       assertName, result, expected, actualValue, assertMessage);
 }
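With the extra Output() call added above, the plain logger's end-of-run summary now includes a skipped-test line; a run might end with something like this (counts are hypothetical, the wording comes from the format strings in the hunk above):

    Ran 12 tests in 0.03456 seconds from 3 suites.
    10 tests passed
    1 tests failed
    1 tests skipped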
test/test-automation/plain_logger.h

@@ -26,7 +26,7 @@ void PlainRunStarted(int parameterCount, char *runnerParameters[], time_t eventT
  * \param totalRuntime How long the execution took
  */
 void PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                   time_t endTime, double totalRuntime);
+                   int testSkippedCount, time_t endTime, double totalRuntime);
 
 /*!
  * Prints the data about the test suite that'll be executed next
test/test-automation/runner.c

@@ -46,6 +46,8 @@ typedef int (*QuitTestInvironmentFp)(void);
 typedef void (*TestCaseSetUpFp)(void *arg);
 //!< Function pointer to a test case tear down function
 typedef void (*TestCaseTearDownFp)(void *arg);
+//!< Function pointer to a function which returns the failed assert count
+typedef int (*CountFailedAssertsFp)(void);
 
 //!< Flag for executing tests in-process

@@ -115,6 +117,8 @@ typedef struct TestCaseItem {
 	TestCaseTearDownFp testTearDown;
 	QuitTestInvironmentFp quitTestEnvironment;
+	CountFailedAssertsFp countFailedAsserts;
+
 	struct TestCaseItem *next;
 } TestCase;

@@ -126,6 +130,7 @@ QuitTestInvironmentFp LoadQuitTestInvironmentFunction(void *suite);
 TestCaseReference **QueryTestCaseReferences(void *library);
 TestCaseSetUpFp LoadTestSetUpFunction(void *suite);
 TestCaseTearDownFp LoadTestTearDownFunction(void *suite);
+CountFailedAssertsFp LoadCountFailedAssertsFunction(void *suite);
 
 /*! Pointers to selected logger implementation */
@@ -141,143 +146,6 @@ AssertSummaryFp AssertSummary = NULL;
 LogFp Log = NULL;
 
 (Removed here: the definitions of LoadTestCases(), UnloadTestCases() and FilterTestCase().
  They are re-added unchanged further down in the file, after UnloadTestSuites(), with the
  only difference that LoadTestCases() now also loads and stores the countFailedAsserts
  function pointer; see the +297,147 hunk below.)
 
 /*!
  * Scans the tests/ directory and returns the names
  * of the dynamic libraries implementing the test suites.
@@ -298,10 +166,9 @@ ScanForTestSuites(char *directoryName, char *extension)
 {
 	typedef struct dirent Entry;
 	DIR *directory = opendir(directoryName);
 
 	TestSuiteReference *suites = NULL;
 	Entry *entry = NULL;
 
 	if(!directory) {
 		fprintf(stderr, "Failed to open test suite directory: %s\n", directoryName);
 		perror("Error message");
@@ -323,8 +190,11 @@ ScanForTestSuites(char *directoryName, char *extension)
 		if(ok && SDL_strcmp(ext, extension) == 0) {
 			// create test suite reference
 			TestSuiteReference *reference = (TestSuiteReference *) SDL_malloc(sizeof(TestSuiteReference));
-			memset(reference, 0, sizeof(TestSuiteReference));
+			if(reference == NULL) {
+				fprintf(stderr, "Allocating TestSuiteReference failed\n");
+			}
+
+			memset(reference, 0, sizeof(TestSuiteReference));
 
 			const int dirSize = SDL_strlen(directoryName);
 			const int extSize = SDL_strlen(ext);
@@ -427,6 +297,147 @@ UnloadTestSuites(TestSuiteReference *suites)
 }
 
+/*!
+ * Goes through the previously loaded test suites and
+ * loads test cases from them. Test cases are filtered
+ * during the process. Function will only return the
+ * test cases which aren't filtered out.
+ *
+ * \param suites previously loaded test suites
+ *
+ * \return Test cases that survived filtering process.
+ */
+TestCase *
+LoadTestCases(TestSuiteReference *suites)
+{
+	TestCase *testCases = NULL;
+
+	TestSuiteReference *suiteReference = NULL;
+	for(suiteReference = suites; suiteReference; suiteReference = suiteReference->next) {
+		TestCaseReference **tests = QueryTestCaseReferences(suiteReference->library);
+
+		TestCaseReference *testReference = NULL;
+		int counter = 0;
+		for(testReference = tests[counter]; testReference; testReference = tests[++counter]) {
+
+			void *suite = suiteReference->library;
+
+			// Load test case functions
+			InitTestInvironmentFp initTestEnvironment = LoadInitTestInvironmentFunction(suiteReference->library);
+			QuitTestInvironmentFp quitTestEnvironment = LoadQuitTestInvironmentFunction(suiteReference->library);
+
+			TestCaseSetUpFp testSetUp = LoadTestSetUpFunction(suiteReference->library);
+			TestCaseTearDownFp testTearDown = LoadTestTearDownFunction(suiteReference->library);
+
+			TestCaseFp testCase = LoadTestCaseFunction(suiteReference->library, testReference->name);
+			CountFailedAssertsFp countFailedAsserts = LoadCountFailedAssertsFunction(suiteReference->library);
+
+			// Do the filtering
+			if(FilterTestCase(testReference)) {
+				TestCase *item = SDL_malloc(sizeof(TestCase));
+				memset(item, 0, sizeof(TestCase));
+
+				item->initTestEnvironment = initTestEnvironment;
+				item->quitTestEnvironment = quitTestEnvironment;
+
+				item->testSetUp = testSetUp;
+				item->testTearDown = testTearDown;
+
+				item->testCase = testCase;
+
+				item->countFailedAsserts = countFailedAsserts;
+
+				// copy suite name
+				int length = SDL_strlen(suiteReference->name) + 1;
+				item->suiteName = SDL_malloc(length);
+				strncpy(item->suiteName, suiteReference->name, length);
+
+				// copy test name
+				length = SDL_strlen(testReference->name) + 1;
+				item->testName = SDL_malloc(length);
+				strncpy(item->testName, testReference->name, length);
+
+				// copy test description
+				length = SDL_strlen(testReference->description) + 1;
+				item->description = SDL_malloc(length);
+				strncpy(item->description, testReference->description, length);
+
+				item->requirements = testReference->requirements;
+				item->timeout = testReference->timeout;
+
+				// prepend the list
+				item->next = testCases;
+				testCases = item;
+
+				//printf("Added test: %s\n", testReference->name);
+			}
+		}
+	}
+
+	return testCases;
+}
+
+/*!
+ * Unloads the given TestCases. Frees all the resources
+ * allocated for test cases.
+ *
+ * \param testCases Test cases to be deallocated
+ */
+void
+UnloadTestCases(TestCase *testCases)
+{
+	TestCase *ref = testCases;
+	while(ref) {
+		SDL_free(ref->testName);
+		SDL_free(ref->suiteName);
+		SDL_free(ref->description);
+
+		TestCase *temp = ref->next;
+		SDL_free(ref);
+
+		ref = temp;
+	}
+
+	testCases = NULL;
+}
+
+/*!
+ * Filters a test case based on its properties in TestCaseReference and user
+ * preference.
+ *
+ * \return Non-zero means test will be added to execution list, zero means opposite
+ */
+int
+FilterTestCase(TestCaseReference *testReference)
+{
+	int retVal = 1;
+
+	if(testReference->enabled == TEST_DISABLED) {
+		retVal = 0;
+	}
+
+	if(only_selected_test) {
+		if(SDL_strncmp(testReference->name, selected_test_name, NAME_BUFFER_SIZE) == 0) {
+			retVal = 1;
+		} else {
+			retVal = 0;
+		}
+	}
+
+	if(only_tests_with_string) {
+		if(strstr(testReference->name, testcase_name_substring) != NULL) {
+			retVal = 1;
+		} else {
+			retVal = 0;
+		}
+	}
+
+	return retVal;
+}
 
 /*!
  * Loads the test case references from the given test suite.
@@ -436,21 +447,21 @@ UnloadTestSuites(TestSuiteReference *suites)
 (Both sides of this hunk contain the same statements; only the formatting differs.)
 
 TestCaseReference **
 QueryTestCaseReferences(void *library)
 {
 	TestCaseReference **(*suite)(void);
 
 	suite = (TestCaseReference **(*)(void)) SDL_LoadFunction(library, "QueryTestSuite");
 	if(suite == NULL) {
 		fprintf(stderr, "Loading QueryTestCaseReferences() failed.\n");
 		fprintf(stderr, "%s\n", SDL_GetError());
 	}
 
 	TestCaseReference **tests = suite();
 	if(tests == NULL) {
 		fprintf(stderr, "Failed to load test references.\n");
 		fprintf(stderr, "%s\n", SDL_GetError());
 	}
 
 	return tests;
 }
@@ -554,6 +565,81 @@ LoadQuitTestInvironmentFunction(void *suite) {
 	return testEnvQuit;
 }
 
+/*!
+ * Loads function that returns failed assert count in the current
+ * test environment
+ *
+ * \param suite Used test suite
+ *
+ * \return Function pointer to _CountFailedAsserts function
+ */
+CountFailedAssertsFp
+LoadCountFailedAssertsFunction(void *suite) {
+	CountFailedAssertsFp countFailedAssert = (CountFailedAssertsFp)
+			SDL_LoadFunction(suite, "_CountFailedAsserts");
+	if(countFailedAssert == NULL) {
+		fprintf(stderr, "Loading _CountFailedAsserts function failed, countFailedAssert == NULL\n");
+		fprintf(stderr, "%s\n", SDL_GetError());
+	}
+
+	return countFailedAssert;
+}
+
+/*
+ * Execute the test
+ *
+ * \param testItem Test to be executed
+ */
+int
+RunTest(TestCase *testItem) {
+	testItem->initTestEnvironment();
+
+	if(testItem->testSetUp) {
+		testItem->testSetUp(0x0);
+	}
+
+	int cntFailedAsserts = testItem->countFailedAsserts();
+	if(cntFailedAsserts != 0) {
+		return 3;
+	}
+
+	testItem->testCase(0x0);
+
+	if(testItem->testTearDown) {
+		testItem->testTearDown(0x0);
+	}
+
+	return testItem->quitTestEnvironment();
+}
+
+/*!
+ * Executes a test case. Loads the test, executes it and
+ * returns the tests return value to the caller.
+ *
+ * \param testItem The test case that will be executed
+ * \return The return value of the test. Zero means success, non-zero failure.
+ */
+int
+ExecuteTest(TestCase *testItem) {
+	int retVal = 1;
+	if(execute_inproc) {
+		retVal = RunTest(testItem);
+	} else {
+		int childpid = fork();
+		if(childpid == 0) {
+			exit(RunTest(testItem));
+		} else {
+			int stat_lock = -1;
+			int child = wait(&stat_lock);
+
+			retVal = HandleChildProcessReturnValue(stat_lock);
+		}
+	}
+
+	return retVal;
+}
 
 /*!
  * If using out-of-proc execution of tests. This function
@@ -584,56 +670,58 @@ HandleChildProcessReturnValue(int stat_lock)
 /*!
- * Executes a test case. Loads the test, executes it and
- * returns the tests return value to the caller.
+ * Sets up the logger.
  *
- * \param testItem The test case that will be executed
- * \return The return value of the test. Zero means success, non-zero failure.
+ * \return Some special data that will be passed to StartRun() logger call
  */
-int
-ExecuteTest(TestCase *testItem) {
-	int retVal = 1;
-	if(execute_inproc) {
-		testItem->initTestEnvironment();
-
-		if(testItem->testSetUp) {
-			testItem->testSetUp(0x0);
-		}
-
-		testItem->testCase(0x0);
-
-		if(testItem->testTearDown) {
-			testItem->testTearDown(0x0);
-		}
-
-		retVal = testItem->quitTestEnvironment();
-	} else {
-		int childpid = fork();
-		if(childpid == 0) {
-			testItem->initTestEnvironment();
-
-			if(testItem->testSetUp) {
-				testItem->testSetUp(0x0);
-			}
-
-			testItem->testCase(0x0);
-
-			// note: if test case is is aborted by some signal
-			// then TearDown function won't be called
-			if(testItem->testTearDown) {
-				testItem->testTearDown(0x0);
-			}
-
-			exit(testItem->quitTestEnvironment());
-		} else {
-			int stat_lock = -1;
-			int child = wait(&stat_lock);
-
-			retVal = HandleChildProcessReturnValue(stat_lock);
-		}
-	}
-
-	return retVal;
-}
+void *
+SetUpLogger()
+{
+	void *loggerData = NULL;
+	if(xml_enabled) {
+		RunStarted = XMLRunStarted;
+		RunEnded = XMLRunEnded;
+		SuiteStarted = XMLSuiteStarted;
+		SuiteEnded = XMLSuiteEnded;
+		TestStarted = XMLTestStarted;
+		TestEnded = XMLTestEnded;
+		Assert = XMLAssert;
+		AssertWithValues = XMLAssertWithValues;
+		AssertSummary = XMLAssertSummary;
+		Log = XMLLog;
+
+		char *sheet = NULL;
+		if(xsl_enabled) {
+			sheet = "style.xsl"; // default style sheet;
+		}
+
+		if(custom_xsl_enabled) {
+			sheet = xsl_stylesheet_name;
+		}
+
+		loggerData = sheet;
+	} else {
+		RunStarted = PlainRunStarted;
+		RunEnded = PlainRunEnded;
+		SuiteStarted = PlainSuiteStarted;
+		SuiteEnded = PlainSuiteEnded;
+		TestStarted = PlainTestStarted;
+		TestEnded = PlainTestEnded;
+		Assert = PlainAssert;
+		AssertWithValues = PlainAssertWithValues;
+		AssertSummary = PlainAssertSummary;
+		Log = PlainLog;
+	}
+
+	return loggerData;
+}
@@ -771,7 +859,7 @@ main(int argc, char *argv[])
 	// print: Testing against SDL version fuu (rev: bar) if verbose == true
 
-	int totalTestfailureCount = 0, totalTestPassCount = 0;
+	int totalTestFailureCount = 0, totalTestPassCount = 0, totalTestSkipCount = 0;
+	int testFailureCount = 0, testPassCount = 0, testSkipCount = 0;
 	char *testSuiteName = NULL;
 	int suiteCounter = 0;
@@ -782,49 +870,7 @@ main(int argc, char *argv[])
 	char *extension = "dylib";
 #endif
 
-	void *loggerData = NULL;
-	if(xml_enabled) {
-		RunStarted = XMLRunStarted;
-		RunEnded = XMLRunEnded;
-		SuiteStarted = XMLSuiteStarted;
-		SuiteEnded = XMLSuiteEnded;
-		TestStarted = XMLTestStarted;
-		TestEnded = XMLTestEnded;
-		Assert = XMLAssert;
-		AssertWithValues = XMLAssertWithValues;
-		AssertSummary = XMLAssertSummary;
-		Log = XMLLog;
-
-		char *sheet = NULL;
-		if(xsl_enabled) {
-			sheet = "style.xsl"; // default style sheet;
-		}
-
-		if(custom_xsl_enabled) {
-			sheet = xsl_stylesheet_name;
-		}
-
-		loggerData = sheet;
-	} else {
-		RunStarted = PlainRunStarted;
-		RunEnded = PlainRunEnded;
-		SuiteStarted = PlainSuiteStarted;
-		SuiteEnded = PlainSuiteEnded;
-		TestStarted = PlainTestStarted;
-		TestEnded = PlainTestEnded;
-		Assert = PlainAssert;
-		AssertWithValues = PlainAssertWithValues;
-		AssertSummary = PlainAssertSummary;
-		Log = PlainLog;
-	}
+	void *loggerData = SetUpLogger();
 
 	const Uint32 startTicks = SDL_GetTicks();
@@ -845,9 +891,7 @@ main(int argc, char *argv[])
 	RunStarted(argc, argv, time(0), loggerData);
 
 	char *currentSuiteName = NULL;
-	int suiteStartTime = SDL_GetTicks();
 
 	TestCase *testItem = NULL;
@@ -856,7 +900,7 @@ main(int argc, char *argv[])
 			currentSuiteName = testItem->suiteName;
 			SuiteStarted(currentSuiteName, time(0));
-			testFailureCount = testPassCount = 0;
+			testFailureCount = testPassCount = testSkipCount = 0;
 			suiteCounter++;
 		}
@@ -871,7 +915,7 @@ main(int argc, char *argv[])
 			currentSuiteName = testItem->suiteName;
 			SuiteStarted(currentSuiteName, time(0));
-			testFailureCount = testPassCount = 0;
+			testFailureCount = testPassCount = testSkipCount = 0;
 			suiteCounter++;
 		}
@@ -882,8 +926,12 @@ main(int argc, char *argv[])
 		const Uint32 testTimeStart = SDL_GetTicks();
 
 		int retVal = ExecuteTest(testItem);
 
-		if(retVal) {
-			totalTestfailureCount++;
+		if(retVal == 3) {
+			testSkipCount++;
+			totalTestSkipCount++;
+		}
+		else if(retVal) {
+			totalTestFailureCount++;
+			testFailureCount++;
+		}
 		else {
 			totalTestPassCount++;

@@ -906,8 +954,8 @@ main(int argc, char *argv[])
 	const Uint32 endTicks = SDL_GetTicks();
 	const double totalRunTime = (endTicks - startTicks) / 1000.0f;
 
-	RunEnded(totalTestPassCount + totalTestfailureCount, suiteCounter,
-			totalTestPassCount, totalTestfailureCount, time(0), totalRunTime);
+	RunEnded(totalTestPassCount + totalTestFailureCount, suiteCounter,
+			totalTestPassCount, totalTestFailureCount, totalTestSkipCount, time(0), totalRunTime);
 
-	return (totalTestfailureCount ? 1 : 0);
+	return (totalTestFailureCount ? 1 : 0);
 }
test/test-automation/style.xsl

@@ -104,6 +104,7 @@ $(document).ready(function() {
 	/* Color the tests based on the result */
 	$("span.testResult[result='passed']").addClass('passed');
 	$("span.testResult[result='failed']").addClass('failed');
+	$("span.testResult[result='skipped']").addClass('skipped');
 
 	/* Color the asserts based on the result */
 	$("span.assertResult[result='pass']").addClass('passed');

@@ -157,6 +158,10 @@ div, h1 {
 	color: red;
 }
 
+.skipped {
+	color: gray;
+}
+
 </style>
 </head>
test/test-automation/testdummy/testdummy.c

@@ -56,6 +56,9 @@ TestCaseReference **QueryTestSuite() {
  * SetUp function can be used to create a test fixture for test cases.
  * The function will be called right before executing the test case.
  *
+ * Note: If any assert in the function fails then the test will be skipped.
+ * In practice, the entire suite will be skipped if assert failure happens.
+ *
  * Note: this function is optional.
  *
  * \param arg parameters given to test. Usually NULL
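To make the note above concrete, here is a sketch of a SetUp function whose failing assert triggers the skip path; the fixture itself is hypothetical, and the skip comes from the countFailedAsserts() check in the new RunTest() in runner.c:

    /* Hypothetical fixture; illustrates the behaviour described in the note. */
    static SDL_Surface *fixture = NULL;

    void
    SetUp(void *arg)
    {
        fixture = SDL_CreateRGBSurface(0, 80, 60, 32, 0, 0, 0, 0);
        /* If this assert fails, _CountFailedAsserts() becomes non-zero and
           RunTest() returns 3 before the test case body runs, so the test
           (and in practice every test in the suite) is reported as skipped. */
        AssertTrue(fixture != NULL, "SDL_CreateRGBSurface");
    }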
test/test-automation/testsurface/testsurface.c

@@ -32,11 +32,39 @@ TestCaseReference **QueryTestSuite() {
 	return (TestCaseReference **)testSuite;
 }
 
+/* Function prototypes */
+SDL_Surface *_CreateTestSurface();
+
+/* Create test fixture */
+
+static SDL_Surface *testsur = NULL;
+
+void
+SetUp(void *arg)
+{
+	int ret = SDL_Init(SDL_INIT_VIDEO);
+	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
+
+	testsur = _CreateTestSurface();
+	AssertTrue(testsur != NULL, "SDL_Init(SDL_INIT_VIDEO)");
+}
+
+void
+TearDown(void *arg)
+{
+	SDL_FreeSurface(testsur);
+
+	SDL_Quit();
+}
+
+/* Helper functions for the test cases */
+
 #define TEST_SURFACE_WIDTH 80
 #define TEST_SURFACE_HEIGHT 60
 
 /*!
  * Creates test surface
  */

@@ -66,7 +94,7 @@ _CreateTestSurface()
 /**
  * @brief Tests a blend mode.
  */
-int _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
+void _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
 {
 	int ret;
 	int i, j, ni, nj;

@@ -102,8 +130,6 @@ int _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
 			ret = SDL_BlitSurface(face, NULL, testsur, &rect);
 			AssertTrue(ret != 0, "SDL_BlitSurface");
 		}
 	}
-
-	return 0;
 }
 
 /* Test case functions */

@@ -115,13 +141,8 @@ void surface_testLoad(void *arg)
 	int ret;
 	SDL_Surface *face, *rface;
 
-	ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-	SDL_Surface *testsur = _CreateTestSurface();
-
 	/* Clear surface. */
 	ret = SDL_FillRect(testsur, NULL,
 		SDL_MapRGB(testsur->format, 0, 0, 0));
 	AssertTrue(ret == 0, "SDL_FillRect");

@@ -151,10 +172,6 @@ void surface_testLoad(void *arg)
 	/* Clean up. */
 	SDL_FreeSurface(rface);
 	SDL_FreeSurface(face);
-
-	SDL_FreeSurface(testsur);
-	SDL_Quit();
 }

@@ -163,14 +180,8 @@ void surface_testLoad(void *arg)
  */
 void surface_testLoadFailure(void *arg)
 {
-	int ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
 	SDL_Surface *face = SDL_LoadBMP("nonexistant.bmp");
 	AssertTrue(face == NULL, "SDL_CreateLoadBmp");
-
-	SDL_Quit();
 }

@@ -184,11 +195,6 @@ void surface_testBlit(void *arg)
 	SDL_Surface *face;
 	int i, j, ni, nj;
 
-	ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-	SDL_Surface *testsur = _CreateTestSurface();
-
 	/* Clear surface. */
 	ret = SDL_FillRect(testsur, NULL,
 		SDL_MapRGB(testsur->format, 0, 0, 0));

@@ -292,9 +298,6 @@ void surface_testBlit(void *arg)
 	/* Clean up. */
 	SDL_FreeSurface(face);
-
-	SDL_FreeSurface(testsur);
-	SDL_Quit();
 }
 
 /**

@@ -308,11 +311,6 @@ void surface_testBlitBlend(void *arg)
 	int i, j, ni, nj;
 	int mode;
 
-	ret = SDL_Init(SDL_INIT_VIDEO);
-	AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-	SDL_Surface *testsur = _CreateTestSurface();
-
 	/* Clear surface. */
 	ret = SDL_FillRect(testsur, NULL,
 		SDL_MapRGB(testsur->format, 0, 0, 0));

@@ -415,7 +413,4 @@ void surface_testBlitBlend(void *arg)
 	/* Clean up. */
 	SDL_FreeSurface(face);
-
-	SDL_FreeSurface(testsur);
-	SDL_Quit();
 }
test/test-automation/xml_logger.c

@@ -38,6 +38,7 @@ const char *numSuitesElementName = "numSuites";
 const char *numTestElementName = "numTests";
 const char *numPassedTestsElementName = "numPassedTests";
 const char *numFailedTestsElementName = "numFailedTests";
+const char *numSkippedTestsElementName = "numSkippedTests";
 const char *endTimeElementName = "endTime";
 const char *totalRuntimeElementName = "totalRuntime";
 const char *suiteElementName = "suite";

@@ -145,7 +146,7 @@ XMLRunStarted(int parameterCount, char *runnerParameters[], time_t eventTime,
 void
 XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-            time_t endTime, double totalRuntime)
+            int testSkippedCount, time_t endTime, double totalRuntime)
 {
 	// log suite count
 	char *output = XMLOpenElement(numSuitesElementName);

@@ -187,7 +188,17 @@ XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
 	output = XMLCloseElement(numFailedTestsElementName);
 	XMLOutputter(--indentLevel, YES, output);
 
-	// log end timte
+	// log skipped test count
+	output = XMLOpenElement(numSkippedTestsElementName);
+	XMLOutputter(indentLevel++, NO, output);
+
+	output = XMLAddContent(IntToString(testSkippedCount));
+	XMLOutputter(indentLevel, NO, output);
+
+	output = XMLCloseElement(numSkippedTestsElementName);
+	XMLOutputter(--indentLevel, YES, output);
+
+	// log end tite
 	output = XMLOpenElement(endTimeElementName);
 	XMLOutputter(indentLevel++, NO, output);

@@ -342,6 +353,9 @@ XMLTestEnded(const char *testName, const char *suiteName,
 	if(testResult) {
 		if(testResult == 2) {
 			output = XMLAddContent("failed. No assert");
 		}
+		else if(testResult == 3) {
+			output = XMLAddContent("skipped");
+		}
 		else {
 			output = XMLAddContent("failed");
 		}
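With the element added above, the run summary in the XML log now carries a skipped-test count alongside the passed and failed counts; the emitted fragment looks roughly like this (values hypothetical, element names from the constants in the hunk above):

    <numPassedTests>10</numPassedTests>
    <numFailedTests>1</numFailedTests>
    <numSkippedTests>1</numSkippedTests>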
test/test-automation/xml_logger.h

@@ -24,7 +24,7 @@ void XMLRunStarted(int parameterCount, char *runnerParameters[], time_t eventTim
  * \param totalRuntime How long the execution took
  */
 void XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                 time_t endTime, double totalRuntime);
+                 int testSkippedCount, time_t endTime, double totalRuntime);
 
 /*!
  * Prints the data about the test suite that'll be executed next in XML