Atlas - SDL_test_harness.c
Home / ext / SDL / src / test Lines: 6 | Size: 31070 bytes [Download] [Show on GitHub] [Search similar files] [Raw] [Raw (proxy)][FILE BEGIN]1/* 2 Simple DirectMedia Layer 3 Copyright (C) 1997-2025 Sam Lantinga <[email protected]> 4 5 This software is provided 'as-is', without any express or implied 6 warranty. In no event will the authors be held liable for any damages 7 arising from the use of this software. 8 9 Permission is granted to anyone to use this software for any purpose, 10 including commercial applications, and to alter it and redistribute it 11 freely, subject to the following restrictions: 12 13 1. The origin of this software must not be misrepresented; you must not 14 claim that you wrote the original software. If you use this software 15 in a product, an acknowledgment in the product documentation would be 16 appreciated but is not required. 17 2. Altered source versions must be plainly marked as such, and must not be 18 misrepresented as being the original software. 19 3. This notice may not be removed or altered from any source distribution. 20*/ 21#include <SDL3/SDL_test.h> 22#include "SDL_test_internal.h" 23 24#include <stdlib.h> /* Needed for exit() */ 25 26/* Invalid test name/description message format */ 27#define SDLTEST_INVALID_NAME_FORMAT "(Invalid)" 28 29static void SDLTest_LogSummary(bool success, const char *name, int total, int passed, int failed, int skipped) 30{ 31 SDLTest_LogMessage(success ? SDL_LOG_PRIORITY_INFO : SDL_LOG_PRIORITY_ERROR, 32 "%s Summary: Total=%d " "%s" "Passed=%d" "%s" " " "%s" "Failed=%d" "%s" " " "%s" "Skipped=%d" "%s", 33 name, total, COLOR_GREEN, passed, COLOR_END, success ? COLOR_GREEN : COLOR_RED, failed, COLOR_END, COLOR_BLUE, skipped, COLOR_END); 34} 35 36static void SDLTest_LogFinalResult(bool success, const char *stage, const char *name, const char *color_message, const char *message) 37{ 38 SDL_LogPriority priority = success ? 
SDL_LOG_PRIORITY_INFO : SDL_LOG_PRIORITY_ERROR; 39 SDLTest_LogMessage(priority, "%s>>> %s '%s':" "%s" " " "%s" "%s" "%s", COLOR_YELLOW, stage, name, COLOR_END, color_message ? color_message : "", message, color_message ? COLOR_END : ""); 40} 41 42struct SDLTest_TestSuiteRunner { 43 struct 44 { 45 SDLTest_TestSuiteReference **testSuites; 46 char *runSeed; 47 Uint64 execKey; 48 char *filter; 49 int testIterations; 50 bool randomOrder; 51 } user; 52 53 SDLTest_ArgumentParser argparser; 54}; 55 56/* ! Timeout for single test case execution */ 57static Uint32 SDLTest_TestCaseTimeout = 3600; 58 59static const char *common_harness_usage[] = { 60 "[--iterations #]", 61 "[--execKey #]", 62 "[--seed string]", 63 "[--filter suite_name|test_name]", 64 "[--random-order]", 65 NULL 66}; 67 68char *SDLTest_GenerateRunSeed(char *buffer, int length) 69{ 70 Uint64 randomContext = SDL_GetPerformanceCounter(); 71 int counter; 72 73 if (!buffer) { 74 SDLTest_LogError("Input buffer must not be NULL."); 75 return NULL; 76 } 77 78 /* Sanity check input */ 79 if (length <= 0) { 80 SDLTest_LogError("The length of the harness seed must be >0."); 81 return NULL; 82 } 83 84 /* Generate a random string of alphanumeric characters */ 85 for (counter = 0; counter < length; counter++) { 86 char ch; 87 int v = SDL_rand_r(&randomContext, 10 + 26); 88 if (v < 10) { 89 ch = (char)('0' + v); 90 } else { 91 ch = (char)('A' + v - 10); 92 } 93 buffer[counter] = ch; 94 } 95 buffer[length] = '\0'; 96 97 return buffer; 98} 99 100/** 101 * Generates an execution key for the fuzzer. 102 * 103 * \param runSeed The run seed to use 104 * \param suiteName The name of the test suite 105 * \param testName The name of the test 106 * \param iteration The iteration count 107 * 108 * \returns The generated execution key to initialize the fuzzer with. 
109 * 110 */ 111static Uint64 SDLTest_GenerateExecKey(const char *runSeed, const char *suiteName, const char *testName, int iteration) 112{ 113 SDLTest_Md5Context md5Context; 114 Uint64 *keys; 115 char iterationString[16]; 116 size_t runSeedLength; 117 size_t suiteNameLength; 118 size_t testNameLength; 119 size_t iterationStringLength; 120 size_t entireStringLength; 121 char *buffer; 122 123 if (!runSeed || runSeed[0] == '\0') { 124 SDLTest_LogError("Invalid runSeed string."); 125 return 0; 126 } 127 128 if (!suiteName || suiteName[0] == '\0') { 129 SDLTest_LogError("Invalid suiteName string."); 130 return 0; 131 } 132 133 if (!testName || testName[0] == '\0') { 134 SDLTest_LogError("Invalid testName string."); 135 return 0; 136 } 137 138 if (iteration <= 0) { 139 SDLTest_LogError("Invalid iteration count."); 140 return 0; 141 } 142 143 /* Convert iteration number into a string */ 144 SDL_memset(iterationString, 0, sizeof(iterationString)); 145 (void)SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration); 146 147 /* Combine the parameters into single string */ 148 runSeedLength = SDL_strlen(runSeed); 149 suiteNameLength = SDL_strlen(suiteName); 150 testNameLength = SDL_strlen(testName); 151 iterationStringLength = SDL_strlen(iterationString); 152 entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1; 153 buffer = (char *)SDL_malloc(entireStringLength); 154 if (!buffer) { 155 SDLTest_LogError("Failed to allocate buffer for execKey generation."); 156 return 0; 157 } 158 (void)SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration); 159 160 /* Hash string and use half of the digest as 64bit exec key */ 161 SDLTest_Md5Init(&md5Context); 162 SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, (unsigned int)entireStringLength); 163 SDLTest_Md5Final(&md5Context); 164 SDL_free(buffer); 165 keys = (Uint64 *)md5Context.digest; 166 167 return keys[0]; 168} 169 170/** 171 
* Set timeout handler for test. 172 * 173 * \param timeout Timeout interval in seconds. 174 * \param callback Function that will be called after timeout has elapsed. 175 * 176 * \return Timer id or -1 on failure. 177 */ 178static SDL_TimerID SDLTest_SetTestTimeout(int timeout, SDL_TimerCallback callback) 179{ 180 Uint32 timeoutInMilliseconds; 181 SDL_TimerID timerID; 182 183 if (!callback) { 184 SDLTest_LogError("Timeout callback can't be NULL"); 185 return 0; 186 } 187 if (timeout < 0) { 188 SDLTest_LogError("Timeout value must be bigger than zero."); 189 return 0; 190 } 191 192 /* Set timer */ 193 timeoutInMilliseconds = timeout * 1000; 194 timerID = SDL_AddTimer(timeoutInMilliseconds, callback, NULL); 195 if (timerID == 0) { 196 SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError()); 197 return 0; 198 } 199 200 return timerID; 201} 202 203/** 204 * Timeout handler. Aborts test run and exits harness process. 205 */ 206static Uint32 SDLCALL SDLTest_BailOut(void *userdata, SDL_TimerID timerID, Uint32 interval) 207{ 208 SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run."); 209 exit(TEST_ABORTED); /* bail out from the test */ 210 return 0; 211} 212 213/** 214 * Execute a test using the given execution key. 215 * 216 * \param testSuite Suite containing the test case. 217 * \param testCase Case to execute. 218 * \param execKey Execution key for the fuzzer. 219 * \param forceTestRun Force test to run even if test was disabled in suite. 220 * 221 * \returns Test case result. 
222 */ 223static int SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_TestCaseReference *testCase, Uint64 execKey, bool forceTestRun) 224{ 225 SDL_TimerID timer = 0; 226 int testCaseResult = 0; 227 int testResult = 0; 228 int fuzzerCount; 229 void *data = NULL; 230 231 if (!testSuite || !testCase || !testSuite->name || !testCase->name) { 232 SDLTest_LogError("Setup failure: testSuite or testCase references NULL"); 233 return TEST_RESULT_SETUP_FAILURE; 234 } 235 236 if (!testCase->enabled && forceTestRun == false) { 237 SDLTest_LogFinalResult(true, "Test", testCase->name, NULL, "Skipped (Disabled)"); 238 return TEST_RESULT_SKIPPED; 239 } 240 241 /* Initialize fuzzer */ 242 SDLTest_FuzzerInit(execKey); 243 244 /* Reset assert tracker */ 245 SDLTest_ResetAssertSummary(); 246 247 /* Set timeout timer */ 248 timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut); 249 250 /* Maybe run suite initializer function */ 251 if (testSuite->testSetUp) { 252 testSuite->testSetUp(&data); 253 if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) { 254 SDLTest_LogFinalResult(false, "Suite Setup", testSuite->name, COLOR_RED, "Failed"); 255 return TEST_RESULT_SETUP_FAILURE; 256 } 257 } 258 259 /* Run test case function */ 260 testCaseResult = testCase->testCase(data); 261 262 /* Convert test execution result into harness result */ 263 if (testCaseResult == TEST_SKIPPED) { 264 /* Test was programmatically skipped */ 265 testResult = TEST_RESULT_SKIPPED; 266 } else if (testCaseResult == TEST_STARTED) { 267 /* Test did not return a TEST_COMPLETED value; assume it failed */ 268 testResult = TEST_RESULT_FAILED; 269 } else if (testCaseResult == TEST_ABORTED) { 270 /* Test was aborted early; assume it failed */ 271 testResult = TEST_RESULT_FAILED; 272 } else { 273 /* Perform failure analysis based on asserts */ 274 testResult = SDLTest_AssertSummaryToTestResult(); 275 } 276 277 /* Maybe run suite cleanup function (ignore failed asserts) */ 278 
if (testSuite->testTearDown) { 279 testSuite->testTearDown(data); 280 } 281 282 /* Cancel timeout timer */ 283 if (timer) { 284 SDL_RemoveTimer(timer); 285 } 286 287 /* Report on asserts and fuzzer usage */ 288 fuzzerCount = SDLTest_GetFuzzerInvocationCount(); 289 if (fuzzerCount > 0) { 290 SDLTest_Log("Fuzzer invocations: %d", fuzzerCount); 291 } 292 293 /* Final log based on test execution result */ 294 if (testCaseResult == TEST_SKIPPED) { 295 /* Test was programmatically skipped */ 296 SDLTest_LogFinalResult(true, "Test", testCase->name, COLOR_BLUE, "Skipped (Programmatically)"); 297 } else if (testCaseResult == TEST_STARTED) { 298 /* Test did not return a TEST_COMPLETED value; assume it failed */ 299 SDLTest_LogFinalResult(false, "Test", testCase->name, COLOR_RED, "Skipped (test started, but did not return TEST_COMPLETED)"); 300 } else if (testCaseResult == TEST_ABORTED) { 301 /* Test was aborted early; assume it failed */ 302 SDLTest_LogFinalResult(false, "Test", testCase->name, COLOR_RED, "Failed (Aborted)"); 303 } else { 304 SDLTest_LogAssertSummary(); 305 } 306 307 return testResult; 308} 309 310/* Prints summary of all suites/tests contained in the given reference */ 311#if 0 312static void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites) 313{ 314 int suiteCounter; 315 int testCounter; 316 SDLTest_TestSuiteReference *testSuite; 317 SDLTest_TestCaseReference *testCase; 318 319 /* Loop over all suites */ 320 suiteCounter = 0; 321 while (&testSuites[suiteCounter]) { 322 testSuite=&testSuites[suiteCounter]; 323 suiteCounter++; 324 SDLTest_Log("Test Suite %i - %s\n", suiteCounter, 325 (testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT); 326 327 /* Loop over all test cases */ 328 testCounter = 0; 329 while (testSuite->testCases[testCounter]) { 330 testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter]; 331 testCounter++; 332 SDLTest_Log(" Test Case %i - %s: %s", testCounter, 333 (testCase->name) ? 
testCase->name : SDLTEST_INVALID_NAME_FORMAT,
                (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
        }
    }
}
#endif

/* Gets a timer value in seconds */
/* NOTE(review): float has ~7 significant digits; with a large performance
   counter the absolute value is imprecise, but elapsed-time differences of a
   test run remain adequate for the %.1f logging below. */
static float GetClock(void)
{
    float currentClock = SDL_GetPerformanceCounter() / (float)SDL_GetPerformanceFrequency();
    return currentClock;
}

/**
 * Execute a test suite using the given run seed and execution key.
 *
 * The filter string is matched to the suite name (full comparison) to select a single suite,
 * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
 *
 * \param runner The runner to execute.
 *
 * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
 */
int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
{
    int totalNumberOfTests = 0;
    int failedNumberOfTests = 0;
    int suiteCounter;
    int testCounter;
    int iterationCounter;
    SDLTest_TestSuiteReference *testSuite;
    const SDLTest_TestCaseReference *testCase;
    const char *runSeed = NULL;
    const char *currentSuiteName;
    const char *currentTestName;
    Uint64 execKey;
    float runStartSeconds;
    float suiteStartSeconds;
    float testStartSeconds;
    float runEndSeconds;
    float suiteEndSeconds;
    float testEndSeconds;
    float runtime;
    int suiteFilter = 0;               /* 1 when --filter matched a suite name */
    const char *suiteFilterName = NULL;
    int testFilter = 0;                /* 1 when --filter matched a test name */
    const char *testFilterName = NULL;
    bool forceTestRun = false;
    int testResult = 0;
    int runResult = 0;
    int totalTestFailedCount = 0;
    int totalTestPassedCount = 0;
    int totalTestSkippedCount = 0;
    int testFailedCount = 0;
    int testPassedCount = 0;
    int testSkippedCount = 0;
    int countSum = 0;
    const SDLTest_TestCaseReference **failedTests;
    char generatedSeed[16 + 1];        /* room for 16 seed chars plus NUL */
    int nbSuites = 0;
    int i = 0;
    int *arraySuites = NULL;           /* suite execution order (possibly shuffled) */

    /* Sanitize test iterations */
    if (runner->user.testIterations < 1) {
        runner->user.testIterations = 1;
    }

    /* Generate run seed if we don't have one already */
    if (!runner->user.runSeed || runner->user.runSeed[0] == '\0') {
        runSeed = SDLTest_GenerateRunSeed(generatedSeed, 16);
        if (!runSeed) {
            SDLTest_LogError("Generating a random seed failed");
            return 2;
        }
    } else {
        runSeed = runner->user.runSeed;
    }

    /* Reset per-run counters */
    totalTestFailedCount = 0;
    totalTestPassedCount = 0;
    totalTestSkippedCount = 0;

    /* Take time - run start */
    runStartSeconds = GetClock();

    /* Log run with fuzzer parameters */
    SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);

    /* Count the total number of tests */
    suiteCounter = 0;
    while (runner->user.testSuites[suiteCounter]) {
        testSuite = runner->user.testSuites[suiteCounter];
        suiteCounter++;
        testCounter = 0;
        while (testSuite->testCases[testCounter]) {
            testCounter++;
            totalNumberOfTests++;
        }
    }

    if (totalNumberOfTests == 0) {
        SDLTest_LogError("No tests to run?");
        return -1;
    }

    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
    failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
    if (!failedTests) {
        SDLTest_LogError("Unable to allocate cache for failed tests");
        return -1;
    }

    /* Initialize filtering */
    if (runner->user.filter && runner->user.filter[0] != '\0') {
        /* Loop over all suites to check if we have a filter match */
        suiteCounter = 0;
        while (runner->user.testSuites[suiteCounter] && suiteFilter == 0) {
            testSuite = runner->user.testSuites[suiteCounter];
            suiteCounter++;
            if (testSuite->name && SDL_strcasecmp(runner->user.filter, testSuite->name) == 0) {
                /* Matched a suite name */
                suiteFilter = 1;
                suiteFilterName = testSuite->name;
                SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
                break;
            }

            /* Within each suite, loop over all test cases to check if we have a filter match */
            testCounter = 0;
            while (testSuite->testCases[testCounter] && testFilter == 0) {
                testCase = testSuite->testCases[testCounter];
                testCounter++;
                if (testCase->name && SDL_strcasecmp(runner->user.filter, testCase->name) == 0) {
                    /* Matched a test name */
                    suiteFilter = 1;
                    suiteFilterName = testSuite->name;
                    testFilter = 1;
                    testFilterName = testCase->name;
                    SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
                    break;
                }
            }
        }

        if (suiteFilter == 0 && testFilter == 0) {
            /* No match: list everything that exists so the user can fix the filter. */
            SDLTest_LogError("Filter '%s' did not match any test suite/case.", runner->user.filter);
            for (suiteCounter = 0; runner->user.testSuites[suiteCounter]; ++suiteCounter) {
                testSuite = runner->user.testSuites[suiteCounter];
                if (testSuite->name) {
                    SDLTest_Log("Test suite: %s", testSuite->name);
                }

                /* Within each suite, loop over all test cases to check if we have a filter match */
                for (testCounter = 0; testSuite->testCases[testCounter]; ++testCounter) {
                    testCase = testSuite->testCases[testCounter];
                    SDLTest_Log(" test: %s%s", testCase->name, testCase->enabled ? "" : " (disabled)");
                }
            }
            SDLTest_Log("Exit code: 2");
            SDL_free((void *)failedTests);
            return 2;
        }

        /* Filtering selects specific suites/tests, so a randomized order makes no sense. */
        runner->user.randomOrder = false;
    }

    /* Number of test suites */
    while (runner->user.testSuites[nbSuites]) {
        nbSuites++;
    }

    arraySuites = SDL_malloc(nbSuites * sizeof(int));
    if (!arraySuites) {
        SDL_free((void *)failedTests);
        return SDL_OutOfMemory();
    }
    for (i = 0; i < nbSuites; i++) {
        arraySuites[i] = i;
    }

    /* Mix the list of suites to run them in random order */
    {
        /* Exclude last test "subsystemsTestSuite" which is said to interfere with other tests */
        nbSuites--;

        if (runner->user.execKey != 0) {
            execKey = runner->user.execKey;
        } else {
            /* dummy values to have random numbers working */
            execKey = SDLTest_GenerateExecKey(runSeed, "random testSuites", "initialisation", 1);
        }

        /* Initialize fuzzer */
        SDLTest_FuzzerInit(execKey);

        i = 100;
        while (i--) {
            int a, b;
            int tmp;
            a = SDLTest_RandomIntegerInRange(0, nbSuites - 1);
            b = SDLTest_RandomIntegerInRange(0, nbSuites - 1);
            /*
             * NB: prevent swapping here to make sure the tests start with the same
             * random seed (whether they are run in order or not).
             * So we consume same number of SDLTest_RandomIntegerInRange() in all cases.
             *
             * If some random value were used at initialization before the tests start, the --seed wouldn't do the same with or without randomOrder.
             */
            /* Swap */
            if (runner->user.randomOrder) {
                tmp = arraySuites[b];
                arraySuites[b] = arraySuites[a];
                arraySuites[a] = tmp;
            }
        }

        /* re-add last test */
        nbSuites++;
    }

    /* Loop over all suites */
    for (i = 0; i < nbSuites; i++) {
        suiteCounter = arraySuites[i];
        testSuite = runner->user.testSuites[suiteCounter];
        currentSuiteName = (testSuite->name ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
        suiteCounter++;  /* from here on, suiteCounter is the 1-based display number */

        /* Filter suite if flag set and we have a name */
        if (suiteFilter == 1 && suiteFilterName && testSuite->name &&
            SDL_strcasecmp(suiteFilterName, testSuite->name) != 0) {
            /* Skip suite */
            SDLTest_Log("===== Test Suite %i: '%s' " "%s" "skipped" "%s" "\n",
                        suiteCounter,
                        currentSuiteName,
                        COLOR_BLUE,
                        COLOR_END);
        } else {

            int nbTestCases = 0;
            int *arrayTestCases;    /* test-case execution order (possibly shuffled) */
            int j;
            while (testSuite->testCases[nbTestCases]) {
                nbTestCases++;
            }

            arrayTestCases = SDL_malloc(nbTestCases * sizeof(int));
            if (!arrayTestCases) {
                SDL_free(arraySuites);
                SDL_free((void *)failedTests);
                return SDL_OutOfMemory();
            }
            for (j = 0; j < nbTestCases; j++) {
                arrayTestCases[j] = j;
            }

            /* Mix the list of testCases to run them in random order */
            j = 100;
            while (j--) {
                int a, b;
                int tmp;
                a = SDLTest_RandomIntegerInRange(0, nbTestCases - 1);
                b = SDLTest_RandomIntegerInRange(0, nbTestCases - 1);
                /* Swap */
                /* See previous note */
                if (runner->user.randomOrder) {
                    tmp = arrayTestCases[b];
                    arrayTestCases[b] = arrayTestCases[a];
                    arrayTestCases[a] = tmp;
                }
            }

            /* Reset per-suite counters */
            testFailedCount = 0;
            testPassedCount = 0;
            testSkippedCount = 0;

            /* Take time - suite start */
            suiteStartSeconds = GetClock();

            /* Log suite started */
            SDLTest_Log("===== Test Suite %i: '%s' started\n",
                        suiteCounter,
                        currentSuiteName);

            /* Loop over all test cases */
            for (j = 0; j < nbTestCases; j++) {
                testCounter = arrayTestCases[j];
                testCase = testSuite->testCases[testCounter];
                currentTestName = (testCase->name ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
                testCounter++;  /* 1-based display number for this test */

                /* Filter tests if flag set and we have a name */
                if (testFilter == 1 && testFilterName && testCase->name &&
                    SDL_strcasecmp(testFilterName, testCase->name) != 0) {
                    /* Skip test */
                    SDLTest_Log("===== Test Case %i.%i: '%s' " "%s" "skipped" "%s" "\n",
                                suiteCounter,
                                testCounter,
                                currentTestName,
                                COLOR_BLUE,
                                COLOR_END);
                } else {
                    /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                    if (testFilter == 1 && !testCase->enabled) {
                        SDLTest_Log("Force run of disabled test since test filter was set");
                        forceTestRun = true;
                    }

                    /* Take time - test start */
                    testStartSeconds = GetClock();

                    /* Log test started */
                    SDLTest_Log("%s" "----- Test Case %i.%i: '%s' started" "%s",
                                COLOR_YELLOW,
                                suiteCounter,
                                testCounter,
                                currentTestName,
                                COLOR_END);
                    if (testCase->description && testCase->description[0] != '\0') {
                        SDLTest_Log("Test Description: '%s'",
                                    (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
                    }

                    /* Loop over all iterations */
                    iterationCounter = 0;
                    while (iterationCounter < runner->user.testIterations) {
                        iterationCounter++;

                        /* A user-supplied execKey overrides the derived per-iteration key. */
                        if (runner->user.execKey != 0) {
                            execKey = runner->user.execKey;
                        } else {
                            execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
                        }

                        SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
                        testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);

                        if (testResult == TEST_RESULT_PASSED) {
                            testPassedCount++;
                            totalTestPassedCount++;
                        } else if (testResult == TEST_RESULT_SKIPPED) {
                            testSkippedCount++;
                            totalTestSkippedCount++;
                        } else {
                            testFailedCount++;
                            totalTestFailedCount++;
                        }
                    }

                    /* Take time - test end */
                    testEndSeconds = GetClock();
                    runtime = testEndSeconds - testStartSeconds;
                    if (runtime < 0.0f) {
                        runtime = 0.0f;
                    }

                    if (runner->user.testIterations > 1) {
                        /* Log test runtime */
                        SDLTest_Log("Runtime of %i iterations: %.1f sec", runner->user.testIterations, runtime);
                        SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)runner->user.testIterations);
                    } else {
                        /* Log test runtime */
                        SDLTest_Log("Total Test runtime: %.1f sec", runtime);
                    }

                    /* Log final test result (reflects the last iteration only) */
                    switch (testResult) {
                    case TEST_RESULT_PASSED:
                        SDLTest_LogFinalResult(true, "Test", currentTestName, COLOR_GREEN, "Passed");
                        break;
                    case TEST_RESULT_FAILED:
                        SDLTest_LogFinalResult(false, "Test", currentTestName, COLOR_RED, "Failed");
                        break;
                    case TEST_RESULT_NO_ASSERT:
                        SDLTest_LogFinalResult(false, "Test", currentTestName, COLOR_BLUE, "No Asserts");
                        break;
                    }

                    /* Collect failed test case references for repro-step display */
                    if (testResult == TEST_RESULT_FAILED) {
                        failedTests[failedNumberOfTests] = testCase;
                        failedNumberOfTests++;
                    }
                }
            }

            /* Take time - suite end */
            suiteEndSeconds = GetClock();
            runtime = suiteEndSeconds - suiteStartSeconds;
            if (runtime < 0.0f) {
                runtime = 0.0f;
            }

            /* Log suite runtime */
            SDLTest_Log("Total Suite runtime: %.1f sec", runtime);

            /* Log summary and final Suite result */
            countSum = testPassedCount + testFailedCount + testSkippedCount;
            if (testFailedCount == 0) {
                SDLTest_LogSummary(true, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogFinalResult(true, "Suite", currentSuiteName, COLOR_GREEN, "Passed");
            } else {
                SDLTest_LogSummary(false, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogFinalResult(false, "Suite", currentSuiteName, COLOR_RED, "Failed");
            }

            SDL_free(arrayTestCases);
        }
    }

    SDL_free(arraySuites);

    /* Take time - run end */
    runEndSeconds = GetClock();
    runtime = runEndSeconds - runStartSeconds;
    if (runtime < 0.0f) {
        runtime = 0.0f;
    }

    /* Log total runtime */
    SDLTest_Log("Total Run runtime: %.1f sec", runtime);

    /* Log summary and final run result */
    countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
    if (totalTestFailedCount == 0) {
        runResult = 0;
        SDLTest_LogSummary(true, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogFinalResult(true, "Run /w seed", runSeed, COLOR_GREEN, "Passed");
    } else {
        runResult = 1;
        SDLTest_LogSummary(false, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogFinalResult(false, "Run /w seed", runSeed, COLOR_RED, "Failed");
    }

    /* Print repro steps for failed tests */
    if (failedNumberOfTests > 0) {
        SDLTest_Log("Harness input to repro failures:");
        for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
            SDLTest_Log("%s" " --seed %s --filter %s" "%s", COLOR_RED, runSeed, failedTests[testCounter]->name, COLOR_END);
        }
    }
    SDL_free((void *)failedTests);

    SDLTest_Log("Exit code: %d", runResult);
    return runResult;
}

/* Argument-parser callback for the common harness options.
   Returns the number of argv entries consumed (1 or 2), or 0 when the
   argument at 'index' is not recognized (or is missing its value). */
static int SDLCALL SDLTest_TestSuiteCommonArg(void *data, char **argv, int index)
{
    SDLTest_TestSuiteRunner *runner = data;

    if (SDL_strcasecmp(argv[index], "--iterations") == 0) {
        if (argv[index + 1]) {
            runner->user.testIterations = SDL_atoi(argv[index + 1]);
            if (runner->user.testIterations < 1) {
                runner->user.testIterations = 1;
            }
            return 2;
        }
    }
    else if (SDL_strcasecmp(argv[index], "--execKey") == 0) {
        if (argv[index + 1]) {
            (void)SDL_sscanf(argv[index + 1], "%" SDL_PRIu64, &runner->user.execKey);
            return 2;
        }
    }
    else if (SDL_strcasecmp(argv[index], "--seed") == 0) {
        if (argv[index + 1]) {
            /* Duplicated because the runner owns (and later frees) this string. */
            runner->user.runSeed = SDL_strdup(argv[index + 1]);
            return 2;
        }
    }
    else if (SDL_strcasecmp(argv[index], "--filter") == 0) {
        if (argv[index + 1]) {
            runner->user.filter = SDL_strdup(argv[index + 1]);
            return 2;
        }
    }
    else if (SDL_strcasecmp(argv[index], "--random-order") == 0) {
        runner->user.randomOrder = true;
        return 1;
    }
    return 0;
}

/* Allocate a test-suite runner bound to 'testSuites' and hook its argument
   parser into the common state's parser chain.
   NOTE(review): assumes state->argparser is non-NULL — the chain walk below
   dereferences it unconditionally; confirm against SDLTest_CommonCreateState. */
SDLTest_TestSuiteRunner *SDLTest_CreateTestSuiteRunner(SDLTest_CommonState *state, SDLTest_TestSuiteReference *testSuites[])
{
    SDLTest_TestSuiteRunner *runner;
    SDLTest_ArgumentParser *argparser;

    if (!state) {
        SDLTest_LogError("SDL Test Suites require a common state");
        return NULL;
    }

    runner = SDL_calloc(1, sizeof(SDLTest_TestSuiteRunner));
    if (!runner) {
        SDLTest_LogError("Failed to allocate memory for test suite runner");
        return NULL;
    }
    runner->user.testSuites = testSuites;

    runner->argparser.parse_arguments = SDLTest_TestSuiteCommonArg;
    runner->argparser.usage = common_harness_usage;
    runner->argparser.data = runner;

    /* Find last argument description and append our description */
    argparser = state->argparser;
    for (;;) {
        if (argparser->next == NULL) {
            argparser->next = &runner->argparser;
            break;
        }
        argparser = argparser->next;

    }

    return runner;
}

/* Free a runner created by SDLTest_CreateTestSuiteRunner.
   NOTE(review): 'runner' must not be NULL — the fields are dereferenced
   without a check. Does not unlink argparser from the common state's chain. */
void SDLTest_DestroyTestSuiteRunner(SDLTest_TestSuiteRunner *runner) {

    SDL_free(runner->user.filter);
    SDL_free(runner->user.runSeed);
    SDL_free(runner);
}