/*
  Simple DirectMedia Layer
  Copyright (C) 1997-2025 Sam Lantinga <slouken@libsdl.org>

  This software is provided 'as-is', without any express or implied
  warranty. In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely, subject to the following restrictions:

  1. The origin of this software must not be misrepresented; you must not
     claim that you wrote the original software. If you use this software
     in a product, an acknowledgment in the product documentation would be
     appreciated but is not required.
  2. Altered source versions must be plainly marked as such, and must not be
     misrepresented as being the original software.
  3. This notice may not be removed or altered from any source distribution.
*/
#include <SDL3/SDL_test.h>

#include <stdlib.h> /* Needed for exit() */

/* Enable to have color in logs */
#if 1
#define COLOR_RED "\033[0;31m"
#define COLOR_GREEN "\033[0;32m"
#define COLOR_YELLOW "\033[0;93m"
#define COLOR_BLUE "\033[0;94m"
#define COLOR_END "\033[0m"
#else
#define COLOR_RED ""
#define COLOR_GREEN ""
#define COLOR_BLUE ""
#define COLOR_YELLOW ""
#define COLOR_END ""
#endif
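
/*
 * Note: the COLOR_* values above are raw ANSI escape sequences; they render as
 * colors only on ANSI-capable terminals and show up as literal escape codes
 * elsewhere. Flip the "#if 1" above to "#if 0" to disable coloring entirely.
 */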

/* Invalid test name/description message format */
#define SDLTEST_INVALID_NAME_FORMAT "(Invalid)"

/* Log summary message format */
#define SDLTEST_LOG_SUMMARY_FORMAT "%s Summary: Total=%d " COLOR_GREEN "Passed=%d" COLOR_END " " COLOR_RED "Failed=%d" COLOR_END " " COLOR_BLUE "Skipped=%d" COLOR_END
#define SDLTEST_LOG_SUMMARY_FORMAT_OK "%s Summary: Total=%d " COLOR_GREEN "Passed=%d" COLOR_END " " COLOR_GREEN "Failed=%d" COLOR_END " " COLOR_BLUE "Skipped=%d" COLOR_END

/* Final result message format */
#define SDLTEST_FINAL_RESULT_FORMAT COLOR_YELLOW ">>> %s '%s':" COLOR_END " %s\n"

struct SDLTest_TestSuiteRunner {
    struct
    {
        SDLTest_TestSuiteReference **testSuites;
        char *runSeed;
        Uint64 execKey;
        char *filter;
        int testIterations;
        bool randomOrder;
    } user;

    SDLTest_ArgumentParser argparser;
};

/*! Timeout for single test case execution */
static Uint32 SDLTest_TestCaseTimeout = 3600;

static const char *common_harness_usage[] = {
    "[--iterations #]",
    "[--execKey #]",
    "[--seed string]",
    "[--filter suite_name|test_name]",
    "[--random-order]",
    NULL
};
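
/*
 * Illustrative invocations of a test program that uses this harness (the
 * program name and filter value below are placeholders, not real targets):
 *
 *   ./mytestprogram --seed ABCDEF0123456789 --iterations 5
 *   ./mytestprogram --filter <suite_name_or_test_name> --execKey 12345
 *   ./mytestprogram --random-order
 */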

char *SDLTest_GenerateRunSeed(char *buffer, int length)
{
    Uint64 randomContext = SDL_GetPerformanceCounter();
    int counter;

    if (!buffer) {
        SDLTest_LogError("Input buffer must not be NULL.");
        return NULL;
    }

    /* Sanity check input */
    if (length <= 0) {
        SDLTest_LogError("The length of the harness seed must be >0.");
        return NULL;
    }

    /* Generate a random string of alphanumeric characters */
    for (counter = 0; counter < length; counter++) {
        char ch;
        int v = SDL_rand_r(&randomContext, 10 + 26);
        if (v < 10) {
            ch = (char)('0' + v);
        } else {
            ch = (char)('A' + v - 10);
        }
        buffer[counter] = ch;
    }
    buffer[length] = '\0';

    return buffer;
}
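
/*
 * Illustrative caller sketch: the buffer is caller-owned and must hold
 * 'length' characters plus the terminating NUL written above.
 *
 *   char seed[16 + 1];
 *   if (SDLTest_GenerateRunSeed(seed, 16)) {
 *       SDLTest_Log("Using generated run seed '%s'", seed);
 *   }
 */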

/**
 * Generates an execution key for the fuzzer.
 *
 * \param runSeed The run seed to use
 * \param suiteName The name of the test suite
 * \param testName The name of the test
 * \param iteration The iteration count
 *
 * \returns The generated execution key to initialize the fuzzer with.
 *
 */
static Uint64 SDLTest_GenerateExecKey(const char *runSeed, const char *suiteName, const char *testName, int iteration)
{
    SDLTest_Md5Context md5Context;
    Uint64 *keys;
    char iterationString[16];
    size_t runSeedLength;
    size_t suiteNameLength;
    size_t testNameLength;
    size_t iterationStringLength;
    size_t entireStringLength;
    char *buffer;

    if (!runSeed || runSeed[0] == '\0') {
        SDLTest_LogError("Invalid runSeed string.");
        return 0;
    }

    if (!suiteName || suiteName[0] == '\0') {
        SDLTest_LogError("Invalid suiteName string.");
        return 0;
    }

    if (!testName || testName[0] == '\0') {
        SDLTest_LogError("Invalid testName string.");
        return 0;
    }

    if (iteration <= 0) {
        SDLTest_LogError("Invalid iteration count.");
        return 0;
    }

    /* Convert iteration number into a string */
    SDL_memset(iterationString, 0, sizeof(iterationString));
    (void)SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);

    /* Combine the parameters into single string */
    runSeedLength = SDL_strlen(runSeed);
    suiteNameLength = SDL_strlen(suiteName);
    testNameLength = SDL_strlen(testName);
    iterationStringLength = SDL_strlen(iterationString);
    entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
    buffer = (char *)SDL_malloc(entireStringLength);
    if (!buffer) {
        SDLTest_LogError("Failed to allocate buffer for execKey generation.");
        return 0;
    }
    (void)SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);

    /* Hash string and use half of the digest as 64bit exec key */
    SDLTest_Md5Init(&md5Context);
    SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, (unsigned int)entireStringLength);
    SDLTest_Md5Final(&md5Context);
    SDL_free(buffer);
    keys = (Uint64 *)md5Context.digest;

    return keys[0];
}
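
/*
 * In other words, the exec key is the first 64 bits of
 * MD5(runSeed ++ suiteName ++ testName ++ iteration), so a given
 * (seed, suite, test, iteration) tuple always reproduces the same fuzzer
 * stream. This is how the harness uses it further below, and why the
 * "--seed ... --filter ..." repro lines printed after a failed run work:
 *
 *   execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
 *   SDLTest_FuzzerInit(execKey);
 */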

/**
 * Set timeout handler for test.
 *
 * \param timeout Timeout interval in seconds.
 * \param callback Function that will be called after timeout has elapsed.
 *
 * \return Timer id, or 0 on failure.
 */
static SDL_TimerID SDLTest_SetTestTimeout(int timeout, SDL_TimerCallback callback)
{
    Uint32 timeoutInMilliseconds;
    SDL_TimerID timerID;

    if (!callback) {
        SDLTest_LogError("Timeout callback can't be NULL");
        return 0;
    }

    if (timeout < 0) {
| 197 | SDLTest_LogError("Timeout value must be bigger than zero." ); |
        return 0;
    }

    /* Set timer */
    timeoutInMilliseconds = timeout * 1000;
    timerID = SDL_AddTimer(timeoutInMilliseconds, callback, 0x0);
    if (timerID == 0) {
        SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
        return 0;
    }

    return timerID;
}
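
/*
 * Sketch of how the timeout is used by SDLTest_RunTest() below: arm the
 * per-test timer before running the test body, and cancel it if the test
 * returns in time.
 *
 *   SDL_TimerID timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
 *   ...run the test case...
 *   if (timer) {
 *       SDL_RemoveTimer(timer);
 *   }
 */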

/**
 * Timeout handler. Aborts test run and exits harness process.
 */
static Uint32 SDLCALL SDLTest_BailOut(void *userdata, SDL_TimerID timerID, Uint32 interval)
{
    SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
    exit(TEST_ABORTED); /* bail out from the test */
    return 0;
}

/**
 * Execute a test using the given execution key.
 *
 * \param testSuite Suite containing the test case.
 * \param testCase Case to execute.
 * \param execKey Execution key for the fuzzer.
 * \param forceTestRun Force test to run even if test was disabled in suite.
 *
 * \returns Test case result.
 */
static int SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_TestCaseReference *testCase, Uint64 execKey, bool forceTestRun)
{
    SDL_TimerID timer = 0;
    int testCaseResult = 0;
    int testResult = 0;
    int fuzzerCount;
    void *data = NULL;

    if (!testSuite || !testCase || !testSuite->name || !testCase->name) {
        SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
        return TEST_RESULT_SETUP_FAILURE;
    }

    if (!testCase->enabled && forceTestRun == false) {
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
        return TEST_RESULT_SKIPPED;
    }

    /* Initialize fuzzer */
    SDLTest_FuzzerInit(execKey);

    /* Reset assert tracker */
    SDLTest_ResetAssertSummary();

    /* Set timeout timer */
    timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);

    /* Maybe run suite initializer function */
    if (testSuite->testSetUp) {
        testSuite->testSetUp(&data);
        if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
            SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite Setup", testSuite->name, COLOR_RED "Failed" COLOR_END);
            return TEST_RESULT_SETUP_FAILURE;
        }
    }

    /* Run test case function */
    testCaseResult = testCase->testCase(data);

    /* Convert test execution result into harness result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        testResult = TEST_RESULT_SKIPPED;
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else {
        /* Perform failure analysis based on asserts */
        testResult = SDLTest_AssertSummaryToTestResult();
    }

    /* Maybe run suite cleanup function (ignore failed asserts) */
    if (testSuite->testTearDown) {
        testSuite->testTearDown(data);
    }

    /* Cancel timeout timer */
    if (timer) {
        SDL_RemoveTimer(timer);
    }

    /* Report on asserts and fuzzer usage */
    fuzzerCount = SDLTest_GetFuzzerInvocationCount();
    if (fuzzerCount > 0) {
        SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
    }

    /* Final log based on test execution result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_BLUE "Skipped (Programmatically)" COLOR_END);
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_RED "Failed (test started, but did not return TEST_COMPLETED)" COLOR_END);
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_RED "Failed (Aborted)" COLOR_END);
    } else {
        SDLTest_LogAssertSummary();
    }

    return testResult;
}

/* Prints summary of all suites/tests contained in the given reference */
#if 0
static void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
{
    int suiteCounter;
    int testCounter;
    SDLTest_TestSuiteReference *testSuite;
    SDLTest_TestCaseReference *testCase;

    /* Loop over all suites */
    suiteCounter = 0;
    while (&testSuites[suiteCounter]) {
        testSuite = &testSuites[suiteCounter];
        suiteCounter++;
        SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
                    (testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);

        /* Loop over all test cases */
        testCounter = 0;
        while (testSuite->testCases[testCounter]) {
            testCase = (SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
            testCounter++;
            SDLTest_Log(" Test Case %i - %s: %s", testCounter,
                        (testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT,
                        (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
        }
    }
}
#endif

/* Gets a timer value in seconds */
static float GetClock(void)
{
    float currentClock = SDL_GetPerformanceCounter() / (float)SDL_GetPerformanceFrequency();
    return currentClock;
}

/**
 * Execute a test suite using the given run seed and execution key.
 *
 * The filter string is matched to the suite name (full comparison) to select a single suite,
 * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
 *
 * \param runner The runner to execute.
 *
 * \returns Test run result: 0 when all tests passed, 1 if any test failed,
 *          2 when the filter matched nothing or seed generation failed, and a negative value on setup/allocation failure.
 */
int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
{
    int totalNumberOfTests = 0;
    int failedNumberOfTests = 0;
    int suiteCounter;
    int testCounter;
    int iterationCounter;
    SDLTest_TestSuiteReference *testSuite;
    const SDLTest_TestCaseReference *testCase;
    const char *runSeed = NULL;
    const char *currentSuiteName;
    const char *currentTestName;
    Uint64 execKey;
    float runStartSeconds;
    float suiteStartSeconds;
    float testStartSeconds;
    float runEndSeconds;
    float suiteEndSeconds;
    float testEndSeconds;
    float runtime;
    int suiteFilter = 0;
    const char *suiteFilterName = NULL;
    int testFilter = 0;
    const char *testFilterName = NULL;
    bool forceTestRun = false;
    int testResult = 0;
    int runResult = 0;
    int totalTestFailedCount = 0;
    int totalTestPassedCount = 0;
    int totalTestSkippedCount = 0;
    int testFailedCount = 0;
    int testPassedCount = 0;
    int testSkippedCount = 0;
    int countSum = 0;
    const SDLTest_TestCaseReference **failedTests;
    char generatedSeed[16 + 1];
    int nbSuites = 0;
    int i = 0;
    int *arraySuites = NULL;

    /* Sanitize test iterations */
    if (runner->user.testIterations < 1) {
        runner->user.testIterations = 1;
    }

    /* Generate a run seed if we don't have one already */
| 412 | if (!runner->user.runSeed || runner->user.runSeed[0] == '\0') { |
| 413 | runSeed = SDLTest_GenerateRunSeed(generatedSeed, 16); |
| 414 | if (!runSeed) { |
| 415 | SDLTest_LogError("Generating a random seed failed" ); |
| 416 | return 2; |
| 417 | } |
| 418 | } else { |
| 419 | runSeed = runner->user.runSeed; |
| 420 | } |
| 421 | |
| 422 | /* Reset per-run counters */ |
| 423 | totalTestFailedCount = 0; |
| 424 | totalTestPassedCount = 0; |
| 425 | totalTestSkippedCount = 0; |
| 426 | |
| 427 | /* Take time - run start */ |
| 428 | runStartSeconds = GetClock(); |
| 429 | |
| 430 | /* Log run with fuzzer parameters */ |
| 431 | SDLTest_Log("::::: Test Run /w seed '%s' started\n" , runSeed); |
| 432 | |
| 433 | /* Count the total number of tests */ |
| 434 | suiteCounter = 0; |
| 435 | while (runner->user.testSuites[suiteCounter]) { |
| 436 | testSuite = runner->user.testSuites[suiteCounter]; |
| 437 | suiteCounter++; |
| 438 | testCounter = 0; |
| 439 | while (testSuite->testCases[testCounter]) { |
| 440 | testCounter++; |
| 441 | totalNumberOfTests++; |
| 442 | } |
| 443 | } |
| 444 | |
| 445 | if (totalNumberOfTests == 0) { |
| 446 | SDLTest_LogError("No tests to run?" ); |
| 447 | return -1; |
| 448 | } |
| 449 | |
| 450 | /* Pre-allocate an array for tracking failed tests (potentially all test cases) */ |
| 451 | failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *)); |
| 452 | if (!failedTests) { |
| 453 | SDLTest_LogError("Unable to allocate cache for failed tests" ); |
| 454 | return -1; |
| 455 | } |
| 456 | |
| 457 | /* Initialize filtering */ |
| 458 | if (runner->user.filter && runner->user.filter[0] != '\0') { |
| 459 | /* Loop over all suites to check if we have a filter match */ |
| 460 | suiteCounter = 0; |
| 461 | while (runner->user.testSuites[suiteCounter] && suiteFilter == 0) { |
| 462 | testSuite = runner->user.testSuites[suiteCounter]; |
| 463 | suiteCounter++; |
| 464 | if (testSuite->name && SDL_strcasecmp(runner->user.filter, testSuite->name) == 0) { |
| 465 | /* Matched a suite name */ |
| 466 | suiteFilter = 1; |
| 467 | suiteFilterName = testSuite->name; |
| 468 | SDLTest_Log("Filtering: running only suite '%s'" , suiteFilterName); |
| 469 | break; |
| 470 | } |
| 471 | |
| 472 | /* Within each suite, loop over all test cases to check if we have a filter match */ |
| 473 | testCounter = 0; |
| 474 | while (testSuite->testCases[testCounter] && testFilter == 0) { |
| 475 | testCase = testSuite->testCases[testCounter]; |
| 476 | testCounter++; |
| 477 | if (testCase->name && SDL_strcasecmp(runner->user.filter, testCase->name) == 0) { |
| 478 | /* Matched a test name */ |
| 479 | suiteFilter = 1; |
| 480 | suiteFilterName = testSuite->name; |
| 481 | testFilter = 1; |
| 482 | testFilterName = testCase->name; |
| 483 | SDLTest_Log("Filtering: running only test '%s' in suite '%s'" , testFilterName, suiteFilterName); |
| 484 | break; |
| 485 | } |
| 486 | } |
| 487 | } |
| 488 | |
| 489 | if (suiteFilter == 0 && testFilter == 0) { |
| 490 | SDLTest_LogError("Filter '%s' did not match any test suite/case." , runner->user.filter); |
| 491 | for (suiteCounter = 0; runner->user.testSuites[suiteCounter]; ++suiteCounter) { |
| 492 | testSuite = runner->user.testSuites[suiteCounter]; |
| 493 | if (testSuite->name) { |
| 494 | SDLTest_Log("Test suite: %s" , testSuite->name); |
| 495 | } |
| 496 | |
                /* Within each suite, list all of its test cases so a valid filter can be chosen */
                for (testCounter = 0; testSuite->testCases[testCounter]; ++testCounter) {
                    testCase = testSuite->testCases[testCounter];
                    SDLTest_Log(" test: %s%s", testCase->name, testCase->enabled ? "" : " (disabled)");
                }
            }
            SDLTest_Log("Exit code: 2");
            SDL_free((void *)failedTests);
            return 2;
        }

        runner->user.randomOrder = false;
    }

    /* Number of test suites */
    while (runner->user.testSuites[nbSuites]) {
        nbSuites++;
    }

    arraySuites = SDL_malloc(nbSuites * sizeof(int));
    if (!arraySuites) {
        SDL_free((void *)failedTests);
        SDL_OutOfMemory(); /* sets the out-of-memory error string */
        return -1;         /* match the other allocation-failure return paths */
    }
    for (i = 0; i < nbSuites; i++) {
        arraySuites[i] = i;
    }

    /* Mix the list of suites to run them in random order */
    {
        /* Exclude the last suite ("subsystemsTestSuite") from the shuffle; it reportedly interferes with other suites */
        nbSuites--;

        if (runner->user.execKey != 0) {
            execKey = runner->user.execKey;
        } else {
            /* dummy values to have random numbers working */
            execKey = SDLTest_GenerateExecKey(runSeed, "random testSuites", "initialisation", 1);
        }

        /* Initialize fuzzer */
        SDLTest_FuzzerInit(execKey);

        i = 100;
        while (i--) {
            int a, b;
            int tmp;
            a = SDLTest_RandomIntegerInRange(0, nbSuites - 1);
            b = SDLTest_RandomIntegerInRange(0, nbSuites - 1);
            /*
             * NB: only the swap below is conditional on randomOrder; the two random draws
             * above always happen, so the same number of SDLTest_RandomIntegerInRange()
             * calls is consumed whether or not the suites are shuffled. If random values
             * were consumed only in one mode, the same --seed would no longer reproduce
             * the same run with and without --random-order.
             */
            /* Swap */
            if (runner->user.randomOrder) {
                tmp = arraySuites[b];
                arraySuites[b] = arraySuites[a];
                arraySuites[a] = tmp;
            }
        }

        /* Re-add the last suite */
        nbSuites++;
    }

    /* Loop over all suites */
    for (i = 0; i < nbSuites; i++) {
        suiteCounter = arraySuites[i];
        testSuite = runner->user.testSuites[suiteCounter];
        currentSuiteName = (testSuite->name ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
        suiteCounter++;

        /* Filter suite if flag set and we have a name */
        if (suiteFilter == 1 && suiteFilterName && testSuite->name &&
            SDL_strcasecmp(suiteFilterName, testSuite->name) != 0) {
            /* Skip suite */
            SDLTest_Log("===== Test Suite %i: '%s' " COLOR_BLUE "skipped" COLOR_END "\n",
                        suiteCounter,
                        currentSuiteName);
        } else {

            int nbTestCases = 0;
            int *arrayTestCases;
            int j;
            while (testSuite->testCases[nbTestCases]) {
                nbTestCases++;
            }

            arrayTestCases = SDL_malloc(nbTestCases * sizeof(int));
            if (!arrayTestCases) {
                SDL_free(arraySuites);
                SDL_free((void *)failedTests);
                SDL_OutOfMemory(); /* sets the out-of-memory error string */
                return -1;         /* match the other allocation-failure return paths */
            }
            for (j = 0; j < nbTestCases; j++) {
                arrayTestCases[j] = j;
            }

            /* Mix the list of testCases to run them in random order */
            j = 100;
            while (j--) {
                int a, b;
                int tmp;
                a = SDLTest_RandomIntegerInRange(0, nbTestCases - 1);
                b = SDLTest_RandomIntegerInRange(0, nbTestCases - 1);
                /* Swap */
                /* See previous note */
                if (runner->user.randomOrder) {
                    tmp = arrayTestCases[b];
                    arrayTestCases[b] = arrayTestCases[a];
                    arrayTestCases[a] = tmp;
                }
            }

            /* Reset per-suite counters */
            testFailedCount = 0;
            testPassedCount = 0;
            testSkippedCount = 0;

            /* Take time - suite start */
            suiteStartSeconds = GetClock();

            /* Log suite started */
            SDLTest_Log("===== Test Suite %i: '%s' started\n",
                        suiteCounter,
                        currentSuiteName);

            /* Loop over all test cases */
            for (j = 0; j < nbTestCases; j++) {
                testCounter = arrayTestCases[j];
                testCase = testSuite->testCases[testCounter];
                currentTestName = (testCase->name ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
                testCounter++;

                /* Filter tests if flag set and we have a name */
                if (testFilter == 1 && testFilterName && testCase->name &&
                    SDL_strcasecmp(testFilterName, testCase->name) != 0) {
                    /* Skip test */
                    SDLTest_Log("===== Test Case %i.%i: '%s' " COLOR_BLUE "skipped" COLOR_END "\n",
                                suiteCounter,
                                testCounter,
                                currentTestName);
                } else {
                    /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                    if (testFilter == 1 && !testCase->enabled) {
                        SDLTest_Log("Force run of disabled test since test filter was set");
                        forceTestRun = true;
                    }

                    /* Take time - test start */
                    testStartSeconds = GetClock();

                    /* Log test started */
                    SDLTest_Log(COLOR_YELLOW "----- Test Case %i.%i: '%s' started" COLOR_END,
                                suiteCounter,
                                testCounter,
                                currentTestName);
                    if (testCase->description && testCase->description[0] != '\0') {
                        SDLTest_Log("Test Description: '%s'",
                                    (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
                    }

                    /* Loop over all iterations */
                    iterationCounter = 0;
                    while (iterationCounter < runner->user.testIterations) {
                        iterationCounter++;

                        if (runner->user.execKey != 0) {
                            execKey = runner->user.execKey;
                        } else {
                            execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
                        }

                        SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
                        testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);

                        if (testResult == TEST_RESULT_PASSED) {
                            testPassedCount++;
                            totalTestPassedCount++;
                        } else if (testResult == TEST_RESULT_SKIPPED) {
                            testSkippedCount++;
                            totalTestSkippedCount++;
                        } else {
                            testFailedCount++;
                            totalTestFailedCount++;
                        }
                    }

                    /* Take time - test end */
                    testEndSeconds = GetClock();
                    runtime = testEndSeconds - testStartSeconds;
                    if (runtime < 0.0f) {
                        runtime = 0.0f;
                    }

                    if (runner->user.testIterations > 1) {
                        /* Log test runtime */
                        SDLTest_Log("Runtime of %i iterations: %.1f sec", runner->user.testIterations, runtime);
                        SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)runner->user.testIterations);
                    } else {
                        /* Log test runtime */
                        SDLTest_Log("Total Test runtime: %.1f sec", runtime);
                    }

                    /* Log final test result */
                    switch (testResult) {
                    case TEST_RESULT_PASSED:
                        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_GREEN "Passed" COLOR_END);
                        break;
                    case TEST_RESULT_FAILED:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_RED "Failed" COLOR_END);
                        break;
                    case TEST_RESULT_NO_ASSERT:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_BLUE "No Asserts" COLOR_END);
                        break;
                    }

                    /* Collect failed test case references for repro-step display */
                    if (testResult == TEST_RESULT_FAILED) {
                        failedTests[failedNumberOfTests] = testCase;
                        failedNumberOfTests++;
                    }
                }
            }

            /* Take time - suite end */
            suiteEndSeconds = GetClock();
            runtime = suiteEndSeconds - suiteStartSeconds;
            if (runtime < 0.0f) {
                runtime = 0.0f;
            }

            /* Log suite runtime */
            SDLTest_Log("Total Suite runtime: %.1f sec", runtime);

            /* Log summary and final Suite result */
            countSum = testPassedCount + testFailedCount + testSkippedCount;
            if (testFailedCount == 0) {
                SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT_OK, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, COLOR_GREEN "Passed" COLOR_END);
            } else {
                SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, COLOR_RED "Failed" COLOR_END);
            }

            SDL_free(arrayTestCases);
        }
    }

    SDL_free(arraySuites);

    /* Take time - run end */
    runEndSeconds = GetClock();
    runtime = runEndSeconds - runStartSeconds;
    if (runtime < 0.0f) {
        runtime = 0.0f;
    }

    /* Log total runtime */
    SDLTest_Log("Total Run runtime: %.1f sec", runtime);

    /* Log summary and final run result */
    countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
    if (totalTestFailedCount == 0) {
        runResult = 0;
        SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT_OK, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, COLOR_GREEN "Passed" COLOR_END);
    } else {
        runResult = 1;
        SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, COLOR_RED "Failed" COLOR_END);
    }

    /* Print repro steps for failed tests */
    if (failedNumberOfTests > 0) {
        SDLTest_Log("Harness input to repro failures:");
        for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
            SDLTest_Log(COLOR_RED " --seed %s --filter %s" COLOR_END, runSeed, failedTests[testCounter]->name);
        }
    }
    SDL_free((void *)failedTests);

    SDLTest_Log("Exit code: %d", runResult);
    return runResult;
}

static int SDLCALL SDLTest_TestSuiteCommonArg(void *data, char **argv, int index)
{
    SDLTest_TestSuiteRunner *runner = data;

    if (SDL_strcasecmp(argv[index], "--iterations") == 0) {
        if (argv[index + 1]) {
            runner->user.testIterations = SDL_atoi(argv[index + 1]);
            if (runner->user.testIterations < 1) {
                runner->user.testIterations = 1;
            }
            return 2;
        }
    } else if (SDL_strcasecmp(argv[index], "--execKey") == 0) {
        if (argv[index + 1]) {
            (void)SDL_sscanf(argv[index + 1], "%" SDL_PRIu64, &runner->user.execKey);
            return 2;
        }
    } else if (SDL_strcasecmp(argv[index], "--seed") == 0) {
        if (argv[index + 1]) {
            runner->user.runSeed = SDL_strdup(argv[index + 1]);
            return 2;
        }
    } else if (SDL_strcasecmp(argv[index], "--filter") == 0) {
        if (argv[index + 1]) {
            runner->user.filter = SDL_strdup(argv[index + 1]);
            return 2;
        }
    } else if (SDL_strcasecmp(argv[index], "--random-order") == 0) {
        runner->user.randomOrder = true;
        return 1;
    }
    return 0;
}

SDLTest_TestSuiteRunner *SDLTest_CreateTestSuiteRunner(SDLTest_CommonState *state, SDLTest_TestSuiteReference *testSuites[])
{
    SDLTest_TestSuiteRunner *runner;
    SDLTest_ArgumentParser *argparser;

    if (!state) {
        SDLTest_LogError("SDL Test Suites require a common state");
        return NULL;
    }

    runner = SDL_calloc(1, sizeof(SDLTest_TestSuiteRunner));
    if (!runner) {
        SDLTest_LogError("Failed to allocate memory for test suite runner");
        return NULL;
    }
    runner->user.testSuites = testSuites;

    runner->argparser.parse_arguments = SDLTest_TestSuiteCommonArg;
    runner->argparser.usage = common_harness_usage;
    runner->argparser.data = runner;

    /* Find last argument description and append our description */
    argparser = state->argparser;
    for (;;) {
        if (argparser->next == NULL) {
            argparser->next = &runner->argparser;
            break;
        }
        argparser = argparser->next;
    }

    return runner;
}

void SDLTest_DestroyTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
{
    SDL_free(runner->user.filter);
    SDL_free(runner->user.runSeed);
    SDL_free(runner);
}
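
/*
 * Illustrative wiring (a sketch, not part of this file): a typical test
 * program creates the common state, registers its NULL-terminated suite
 * array ("testSuites" below is a placeholder for that array), parses the
 * command line, runs the suites, and tears everything down:
 *
 *   SDLTest_CommonState *state = SDLTest_CommonCreateState(argv, 0);
 *   SDLTest_TestSuiteRunner *runner = SDLTest_CreateTestSuiteRunner(state, testSuites);
 *   if (!SDLTest_CommonDefaultArgs(state, argc, argv)) {
 *       return 1;
 *   }
 *   result = SDLTest_ExecuteTestSuiteRunner(runner);
 *   SDLTest_DestroyTestSuiteRunner(runner);
 *   SDLTest_CommonDestroyState(state);
 */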