/*
  Simple DirectMedia Layer
  Copyright (C) 1997-2021 Sam Lantinga <slouken@libsdl.org>

  This software is provided 'as-is', without any express or implied
  warranty. In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely, subject to the following restrictions:

  1. The origin of this software must not be misrepresented; you must not
     claim that you wrote the original software. If you use this software
     in a product, an acknowledgment in the product documentation would be
     appreciated but is not required.
  2. Altered source versions must be plainly marked as such, and must not be
     misrepresented as being the original software.
  3. This notice may not be removed or altered from any source distribution.
*/

#include "SDL_config.h"

#include "SDL_test.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

/* Invalid test name/description message format */
#define SDLTEST_INVALID_NAME_FORMAT "(Invalid)"

/* Log summary message format */
#define SDLTEST_LOG_SUMMARY_FORMAT "%s Summary: Total=%d Passed=%d Failed=%d Skipped=%d"

/* Final result message format */
#define SDLTEST_FINAL_RESULT_FORMAT ">>> %s '%s': %s\n"
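
/*
 * For reference, these formats expand to log lines like the following
 * (the counts and test name here are only illustrative):
 *
 *   "Suite Summary: Total=10 Passed=8 Failed=1 Skipped=1"
 *   ">>> Test 'rect_testIntersectRectAndLine': Passed"
 */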

/*! \brief Timeout for a single test case execution, in seconds */
static Uint32 SDLTest_TestCaseTimeout = 3600;

/**
* Generates a random run seed string for the harness. The generated seed
* will contain alphanumeric characters (0-9A-Z).
*
* Note: The returned string needs to be deallocated with SDL_free() by the caller.
*
* \param length The length of the seed string to generate
*
* \returns The generated seed string
*/
char *
SDLTest_GenerateRunSeed(const int length)
{
    char *seed = NULL;
    SDLTest_RandomContext randomContext;
    int counter;

    /* Sanity check input */
    if (length <= 0) {
        SDLTest_LogError("The length of the harness seed must be >0.");
        return NULL;
    }

    /* Allocate output buffer */
    seed = (char *)SDL_malloc((length + 1) * sizeof(char));
    if (seed == NULL) {
        SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
        SDL_Error(SDL_ENOMEM);
        return NULL;
    }

    /* Generate a random string of alphanumeric characters */
    SDLTest_RandomInitTime(&randomContext);
    for (counter = 0; counter < length; counter++) {
        unsigned int number = SDLTest_Random(&randomContext);
        /* Map the random value into the ASCII range '0' (48) .. 'Z' (90) */
        char ch = (char) (number % (91 - 48)) + 48;
        /* Remap the punctuation range ':' .. '@' (58..64) to 'A' so only 0-9A-Z appear */
        if (ch >= 58 && ch <= 64) {
            ch = 65;
        }
        seed[counter] = ch;
    }
    seed[length] = '\0';

    return seed;
}
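
/*
 * Usage sketch (illustrative, not part of the harness itself): the caller owns
 * the returned buffer and should release it with SDL_free() when done.
 *
 *     char *seed = SDLTest_GenerateRunSeed(16);
 *     if (seed != NULL) {
 *         SDLTest_Log("Using run seed '%s'", seed);
 *         SDL_free(seed);
 *     }
 */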

/**
* Generates an execution key for the fuzzer.
*
* \param runSeed The run seed to use
* \param suiteName The name of the test suite
* \param testName The name of the test
* \param iteration The iteration count
*
* \returns The generated execution key to initialize the fuzzer with.
*
*/
static Uint64
SDLTest_GenerateExecKey(const char *runSeed, char *suiteName, char *testName, int iteration)
{
    SDLTest_Md5Context md5Context;
    Uint64 *keys;
    char iterationString[16];
    size_t runSeedLength;
    size_t suiteNameLength;
    size_t testNameLength;
    size_t iterationStringLength;
    size_t entireStringLength;
    char *buffer;

    if (runSeed == NULL || runSeed[0] == '\0') {
        SDLTest_LogError("Invalid runSeed string.");
        return -1;
    }

    if (suiteName == NULL || suiteName[0] == '\0') {
        SDLTest_LogError("Invalid suiteName string.");
        return -1;
    }

    if (testName == NULL || testName[0] == '\0') {
        SDLTest_LogError("Invalid testName string.");
        return -1;
    }

    if (iteration <= 0) {
        SDLTest_LogError("Invalid iteration count.");
        return -1;
    }

    /* Convert iteration number into a string */
    SDL_memset(iterationString, 0, sizeof(iterationString));
    SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);

    /* Combine the parameters into a single string */
    runSeedLength = SDL_strlen(runSeed);
    suiteNameLength = SDL_strlen(suiteName);
    testNameLength = SDL_strlen(testName);
    iterationStringLength = SDL_strlen(iterationString);
    entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
    buffer = (char *)SDL_malloc(entireStringLength);
    if (buffer == NULL) {
        SDLTest_LogError("Failed to allocate buffer for execKey generation.");
        SDL_Error(SDL_ENOMEM);
        return 0;
    }
    SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);

    /* Hash the string and use half of the digest as a 64-bit exec key */
    SDLTest_Md5Init(&md5Context);
    SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, (unsigned int) entireStringLength);
    SDLTest_Md5Final(&md5Context);
    SDL_free(buffer);
    keys = (Uint64 *)md5Context.digest;

    return keys[0];
}
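
/*
 * Illustrative example (the suite/test names are hypothetical): the exec key
 * ties a fuzzer sequence to a specific run/suite/test/iteration so a failure
 * can be replayed deterministically.
 *
 *     Uint64 key = SDLTest_GenerateExecKey("5XZJ2P0QK9RA34BC", "Rect", "rect_testIntersectRectAndLine", 1);
 *     SDLTest_FuzzerInit(key);
 */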

/**
* \brief Set timeout handler for test.
*
* Note: SDL_Init(SDL_INIT_TIMER) will be called if it hasn't been called already.
*
* \param timeout Timeout interval in seconds.
* \param callback Function that will be called after timeout has elapsed.
*
* \return Timer id or -1 on failure.
*/
static SDL_TimerID
SDLTest_SetTestTimeout(int timeout, void (*callback)(void))
{
    Uint32 timeoutInMilliseconds;
    SDL_TimerID timerID;

    if (callback == NULL) {
        SDLTest_LogError("Timeout callback can't be NULL");
        return -1;
    }

    if (timeout < 0) {
        SDLTest_LogError("Timeout value must not be negative.");
        return -1;
    }

    /* Init SDL timer if not initialized before */
    if (SDL_WasInit(SDL_INIT_TIMER) == 0) {
        if (SDL_InitSubSystem(SDL_INIT_TIMER)) {
            SDLTest_LogError("Failed to init timer subsystem: %s", SDL_GetError());
            return -1;
        }
    }

    /* Set timer */
    timeoutInMilliseconds = timeout * 1000;
    timerID = SDL_AddTimer(timeoutInMilliseconds, (SDL_TimerCallback)callback, 0x0);
    if (timerID == 0) {
        SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
        return -1;
    }

    return timerID;
}
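
/*
 * Within the harness the timeout is armed right before a test case runs and
 * cancelled once it returns (see SDLTest_RunTest below); a sketch of the pattern:
 *
 *     SDL_TimerID timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
 *     // ... run the test case ...
 *     if (timer) {
 *         SDL_RemoveTimer(timer);
 *     }
 */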

/**
* \brief Timeout handler. Aborts test run and exits harness process.
*/
#if defined(__WATCOMC__)
#pragma aux SDLTest_BailOut aborts;
#endif
static SDL_NORETURN void
SDLTest_BailOut(void)
{
    SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
    exit(TEST_ABORTED); /* bail out from the test */
}

/**
* \brief Execute a test using the given execution key.
*
* \param testSuite Suite containing the test case.
* \param testCase Case to execute.
* \param execKey Execution key for the fuzzer.
* \param forceTestRun Force test to run even if test was disabled in suite.
*
* \returns Test case result.
*/
static int
SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_TestCaseReference *testCase, Uint64 execKey, SDL_bool forceTestRun)
{
    SDL_TimerID timer = 0;
    int testCaseResult = 0;
    int testResult = 0;
    int fuzzerCount;

    if (testSuite == NULL || testCase == NULL || testSuite->name == NULL || testCase->name == NULL)
    {
        SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
        return TEST_RESULT_SETUP_FAILURE;
    }

    if (!testCase->enabled && forceTestRun == SDL_FALSE)
    {
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
        return TEST_RESULT_SKIPPED;
    }

    /* Initialize fuzzer */
    SDLTest_FuzzerInit(execKey);

    /* Reset assert tracker */
    SDLTest_ResetAssertSummary();

    /* Set timeout timer */
    timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);

    /* Maybe run suite initializer function */
    if (testSuite->testSetUp) {
        testSuite->testSetUp(0x0);
        if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
            SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite Setup", testSuite->name, "Failed");
            /* Cancel timeout timer before bailing out, so it can't abort the whole harness later */
            if (timer) {
                SDL_RemoveTimer(timer);
            }
            return TEST_RESULT_SETUP_FAILURE;
        }
    }

    /* Run test case function */
    testCaseResult = testCase->testCase(0x0);

    /* Convert test execution result into harness result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        testResult = TEST_RESULT_SKIPPED;
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else {
        /* Perform failure analysis based on asserts */
        testResult = SDLTest_AssertSummaryToTestResult();
    }

    /* Maybe run suite cleanup function (ignore failed asserts) */
    if (testSuite->testTearDown) {
        testSuite->testTearDown(0x0);
    }

    /* Cancel timeout timer */
    if (timer) {
        SDL_RemoveTimer(timer);
    }

    /* Report on asserts and fuzzer usage */
    fuzzerCount = SDLTest_GetFuzzerInvocationCount();
    if (fuzzerCount > 0) {
        SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
    }

    /* Final log based on test execution result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Programmatically)");
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (test started, but did not return TEST_COMPLETED)");
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (Aborted)");
    } else {
        SDLTest_LogAssertSummary();
    }

    return testResult;
}

/* Prints summary of all suites/tests contained in the given reference */
#if 0
static void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites[])
{
    int suiteCounter;
    int testCounter;
    SDLTest_TestSuiteReference *testSuite;
    const SDLTest_TestCaseReference *testCase;

    /* Loop over all suites (array is NULL-terminated) */
    suiteCounter = 0;
    while (testSuites[suiteCounter]) {
        testSuite = testSuites[suiteCounter];
        suiteCounter++;
        SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
            (testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);

        /* Loop over all test cases */
        testCounter = 0;
        while (testSuite->testCases[testCounter])
        {
            testCase = testSuite->testCases[testCounter];
            testCounter++;
            SDLTest_Log(" Test Case %i - %s: %s", testCounter,
                (testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT,
                (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
        }
    }
}
#endif

/* Gets a timer value in seconds */
static float GetClock(void)
{
    float currentClock = clock() / (float) CLOCKS_PER_SEC;
    return currentClock;
}
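
/* Note: on most platforms clock() measures processor time consumed by the
 * process rather than wall-clock time, so the "runtime" figures logged below
 * are typically CPU seconds, not elapsed seconds. */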

/**
* \brief Execute a test suite using the given run seed and execution key.
*
* The filter string is matched to the suite name (full comparison) to select a single suite,
* or if no suite matches, it is matched to the test names (full comparison) to select a single test.
*
* \param testSuites NULL-terminated array of suites containing the test cases.
* \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
* \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
* \param filter Filter specification. NULL disables. Case sensitive.
* \param testIterations Number of iterations to run each test case.
*
* \returns Test run result; 0 when all tests passed, 1 if any tests failed,
*          2 if the filter did not match (or seed generation failed), and -1 on allocation failure.
*/
int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
{
    int totalNumberOfTests = 0;
    int failedNumberOfTests = 0;
    int suiteCounter;
    int testCounter;
    int iterationCounter;
    SDLTest_TestSuiteReference *testSuite;
    const SDLTest_TestCaseReference *testCase;
    const char *runSeed = NULL;
    char *currentSuiteName;
    char *currentTestName;
    Uint64 execKey;
    float runStartSeconds;
    float suiteStartSeconds;
    float testStartSeconds;
    float runEndSeconds;
    float suiteEndSeconds;
    float testEndSeconds;
    float runtime;
    int suiteFilter = 0;
    char *suiteFilterName = NULL;
    int testFilter = 0;
    char *testFilterName = NULL;
    SDL_bool forceTestRun = SDL_FALSE;
    int testResult = 0;
    int runResult = 0;
    int totalTestFailedCount = 0;
    int totalTestPassedCount = 0;
    int totalTestSkippedCount = 0;
    int testFailedCount = 0;
    int testPassedCount = 0;
    int testSkippedCount = 0;
    int countSum = 0;
    const SDLTest_TestCaseReference **failedTests;

    /* Sanitize test iterations */
    if (testIterations < 1) {
        testIterations = 1;
    }

    /* Generate a run seed if we don't have one already */
    if (userRunSeed == NULL || userRunSeed[0] == '\0') {
        runSeed = SDLTest_GenerateRunSeed(16);
        if (runSeed == NULL) {
            SDLTest_LogError("Generating a random seed failed");
            return 2;
        }
    } else {
        runSeed = userRunSeed;
    }

    /* Reset per-run counters */
    totalTestFailedCount = 0;
    totalTestPassedCount = 0;
    totalTestSkippedCount = 0;

    /* Take time - run start */
    runStartSeconds = GetClock();

    /* Log run with fuzzer parameters */
    SDLTest_Log("::::: Test Run w/ seed '%s' started\n", runSeed);

    /* Count the total number of tests */
    suiteCounter = 0;
    while (testSuites[suiteCounter]) {
        testSuite = testSuites[suiteCounter];
        suiteCounter++;
        testCounter = 0;
        while (testSuite->testCases[testCounter])
        {
            testCounter++;
            totalNumberOfTests++;
        }
    }

    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
    failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
    if (failedTests == NULL) {
        SDLTest_LogError("Unable to allocate cache for failed tests");
        SDL_Error(SDL_ENOMEM);
        return -1;
    }

    /* Initialize filtering */
    if (filter != NULL && filter[0] != '\0') {
        /* Loop over all suites to check if we have a filter match */
        suiteCounter = 0;
        while (testSuites[suiteCounter] && suiteFilter == 0) {
            testSuite = testSuites[suiteCounter];
            suiteCounter++;
            if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
                /* Matched a suite name */
                suiteFilter = 1;
                suiteFilterName = testSuite->name;
                SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
                break;
            }

            /* Within each suite, loop over all test cases to check if we have a filter match */
            testCounter = 0;
            while (testSuite->testCases[testCounter] && testFilter == 0)
            {
                testCase = testSuite->testCases[testCounter];
                testCounter++;
                if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
                    /* Matched a test name */
                    suiteFilter = 1;
                    suiteFilterName = testSuite->name;
                    testFilter = 1;
                    testFilterName = testCase->name;
                    SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
                    break;
                }
            }
        }

        if (suiteFilter == 0 && testFilter == 0) {
            SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
            SDLTest_Log("Exit code: 2");
            SDL_free((void *) failedTests);
            return 2;
        }
    }

    /* Loop over all suites */
    suiteCounter = 0;
    while (testSuites[suiteCounter]) {
        testSuite = testSuites[suiteCounter];
        currentSuiteName = (testSuite->name ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
        suiteCounter++;

        /* Filter suite if flag set and we have a name */
        if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
            SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
            /* Skip suite */
            SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
                suiteCounter,
                currentSuiteName);
        } else {

            /* Reset per-suite counters */
            testFailedCount = 0;
            testPassedCount = 0;
            testSkippedCount = 0;

            /* Take time - suite start */
            suiteStartSeconds = GetClock();

            /* Log suite started */
            SDLTest_Log("===== Test Suite %i: '%s' started\n",
                suiteCounter,
                currentSuiteName);

            /* Loop over all test cases */
            testCounter = 0;
            while (testSuite->testCases[testCounter])
            {
                testCase = testSuite->testCases[testCounter];
                currentTestName = (testCase->name ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
                testCounter++;

                /* Filter tests if flag set and we have a name */
                if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
                    SDL_strcmp(testFilterName, testCase->name) != 0) {
                    /* Skip test */
                    SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
                        suiteCounter,
                        testCounter,
                        currentTestName);
                } else {
                    /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                    if (testFilter == 1 && !testCase->enabled) {
                        SDLTest_Log("Force run of disabled test since test filter was set");
                        forceTestRun = SDL_TRUE;
                    }

                    /* Take time - test start */
                    testStartSeconds = GetClock();

                    /* Log test started */
                    SDLTest_Log("----- Test Case %i.%i: '%s' started",
                        suiteCounter,
                        testCounter,
                        currentTestName);
                    if (testCase->description != NULL && testCase->description[0] != '\0') {
                        SDLTest_Log("Test Description: '%s'",
                            (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
                    }

                    /* Loop over all iterations */
                    iterationCounter = 0;
                    while (iterationCounter < testIterations)
                    {
                        iterationCounter++;

                        if (userExecKey != 0) {
                            execKey = userExecKey;
                        } else {
                            execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
                        }

                        SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
                        testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);

                        if (testResult == TEST_RESULT_PASSED) {
                            testPassedCount++;
                            totalTestPassedCount++;
                        } else if (testResult == TEST_RESULT_SKIPPED) {
                            testSkippedCount++;
                            totalTestSkippedCount++;
                        } else {
                            testFailedCount++;
                            totalTestFailedCount++;
                        }
                    }

                    /* Take time - test end */
                    testEndSeconds = GetClock();
                    runtime = testEndSeconds - testStartSeconds;
                    if (runtime < 0.0f) runtime = 0.0f;

                    if (testIterations > 1) {
                        /* Log test runtime */
                        SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
                        SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
                    } else {
                        /* Log test runtime */
                        SDLTest_Log("Total Test runtime: %.1f sec", runtime);
                    }

                    /* Log final test result */
                    switch (testResult) {
                    case TEST_RESULT_PASSED:
                        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Passed");
                        break;
                    case TEST_RESULT_FAILED:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Failed");
                        break;
                    case TEST_RESULT_NO_ASSERT:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "No Asserts");
                        break;
                    }

                    /* Collect failed test case references for repro-step display */
                    if (testResult == TEST_RESULT_FAILED) {
                        failedTests[failedNumberOfTests] = testCase;
                        failedNumberOfTests++;
                    }
                }
            }

            /* Take time - suite end */
            suiteEndSeconds = GetClock();
            runtime = suiteEndSeconds - suiteStartSeconds;
            if (runtime < 0.0f) runtime = 0.0f;

            /* Log suite runtime */
            SDLTest_Log("Total Suite runtime: %.1f sec", runtime);

            /* Log summary and final Suite result */
            countSum = testPassedCount + testFailedCount + testSkippedCount;
            if (testFailedCount == 0)
            {
                SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Passed");
            }
            else
            {
                SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Failed");
            }

        }
    }

    /* Take time - run end */
    runEndSeconds = GetClock();
    runtime = runEndSeconds - runStartSeconds;
    if (runtime < 0.0f) runtime = 0.0f;

    /* Log total runtime */
    SDLTest_Log("Total Run runtime: %.1f sec", runtime);

    /* Log summary and final run result */
    countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
    if (totalTestFailedCount == 0)
    {
        runResult = 0;
        SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run w/ seed", runSeed, "Passed");
    }
    else
    {
        runResult = 1;
        SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run w/ seed", runSeed, "Failed");
    }

    /* Print repro steps for failed tests */
    if (failedNumberOfTests > 0) {
        SDLTest_Log("Harness input to repro failures:");
        for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
            SDLTest_Log(" --seed %s --filter %s", runSeed, failedTests[testCounter]->name);
        }
    }
    SDL_free((void *) failedTests);

    SDLTest_Log("Exit code: %d", runResult);
    return runResult;
}
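
/*
 * Usage sketch (illustrative; the suite symbols are hypothetical): a test
 * program passes a NULL-terminated array of suite references and lets the
 * harness autogenerate the run seed and exec keys, with no filter and a
 * single iteration per test.
 *
 *     static SDLTest_TestSuiteReference *testSuites[] = {
 *         &rectTestSuite,
 *         &audioTestSuite,
 *         NULL
 *     };
 *
 *     int result = SDLTest_RunSuites(testSuites, NULL, 0, NULL, 1);
 */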

/* vi: set ts=4 sw=4 expandtab: */