925 lines
41 KiB
C
925 lines
41 KiB
C
|
|
#include "unittest.h"
|
||
|
|
#include <math.h>
|
||
|
|
#include <unistd.h>
|
||
|
|
|
||
|
|
#define ANSI_RED "\x1b[31m"
|
||
|
|
#define ANSI_GREEN "\x1b[32m"
|
||
|
|
#define ANSI_YELLOW "\x1b[33m"
|
||
|
|
#define ANSI_BLUE "\x1b[34m"
|
||
|
|
#define ANSI_RESET "\x1b[0m"
|
||
|
|
|
||
|
|
/* strdup() that tolerates a NULL input: returns NULL instead of crashing.
 * Caller owns (and must free) any non-NULL return value. */
static char* _strdup_safe(const char *str) {
    return str ? strdup(str) : NULL;
}
|
||
|
|
|
||
|
|
/* Free a heap pointer, accepting NULL.
 * The previous `if (ptr)` guard was redundant: free(NULL) is defined as a
 * no-op by the C standard (C11 7.22.3.3), so we call free() directly. */
static void _free_safe(void *ptr) {
    free(ptr);
}
|
||
|
|
|
||
|
|
/* Current monotonic-clock reading, expressed in milliseconds.
 * Suitable for measuring elapsed time; not related to wall-clock time. */
double unittest_get_time_ms(void) {
    struct timespec now;
    clock_gettime(CLOCK_MONOTONIC, &now);
    double ms = now.tv_sec * 1000.0;
    ms += now.tv_nsec / 1000000.0;
    return ms;
}
|
||
|
|
|
||
|
|
/* Render an int as a heap-allocated decimal string; caller frees.
 * Returns NULL on allocation failure (previously the malloc result was
 * passed to snprintf unchecked -> NULL dereference on OOM). Downstream
 * consumers (_strdup_safe / free) accept NULL safely. */
static char* _format_int(int value) {
    char *buf = malloc(32);  /* 32 > longest decimal int with sign + NUL */
    if (!buf) return NULL;
    snprintf(buf, 32, "%d", value);
    return buf;
}
|
||
|
|
|
||
|
|
/* Render a long as a heap-allocated decimal string; caller frees.
 * Returns NULL on allocation failure (malloc was previously unchecked). */
static char* _format_long(long value) {
    char *buf = malloc(32);  /* 32 > longest 64-bit decimal with sign + NUL */
    if (!buf) return NULL;
    snprintf(buf, 32, "%ld", value);
    return buf;
}
|
||
|
|
|
||
|
|
/* Render a double with up to 15 significant digits ("%.15g"); caller frees.
 * Returns NULL on allocation failure (malloc was previously unchecked). */
static char* _format_double(double value) {
    char *buf = malloc(64);
    if (!buf) return NULL;
    snprintf(buf, 64, "%.15g", value);
    return buf;
}
|
||
|
|
|
||
|
|
/* Render a pointer as "%p", or the literal string "NULL" for a null pointer.
 * Heap-allocated; caller frees. Returns NULL on allocation failure
 * (malloc was previously unchecked). */
static char* _format_ptr(void *ptr) {
    char *buf = malloc(32);
    if (!buf) return NULL;
    if (ptr) {
        snprintf(buf, 32, "%p", ptr);
    } else {
        snprintf(buf, 32, "NULL");
    }
    return buf;
}
|
||
|
|
|
||
|
|
/* Heap-allocated "true"/"false" for a bool; caller frees. */
static char* _format_bool(bool value) {
    const char *text = value ? "true" : "false";
    return strdup(text);
}
|
||
|
|
|
||
|
|
/* Escape a C string as a quoted JSON string literal (RFC 8259):
 * quote, backslash, \n, \r, \t get two-char escapes; other control chars
 * (< 0x20, including \b and \f) become \u00XX. NULL maps to the bare JSON
 * token "null" (unquoted). Heap-allocated; caller frees.
 * Returns NULL on allocation failure (malloc was previously unchecked). */
static char* _escape_json(const char *str) {
    if (!str) return strdup("null");
    size_t len = strlen(str);
    /* Worst case: every byte expands to \u00XX (6 bytes), plus 2 quotes + NUL. */
    char *buf = malloc(len * 6 + 3);
    if (!buf) return NULL;
    char *p = buf;
    *p++ = '"';
    for (size_t i = 0; i < len; i++) {
        switch (str[i]) {
            case '"': *p++ = '\\'; *p++ = '"'; break;
            case '\\': *p++ = '\\'; *p++ = '\\'; break;
            case '\n': *p++ = '\\'; *p++ = 'n'; break;
            case '\r': *p++ = '\\'; *p++ = 'r'; break;
            case '\t': *p++ = '\\'; *p++ = 't'; break;
            default:
                if ((unsigned char)str[i] < 32) {
                    p += sprintf(p, "\\u%04x", (unsigned char)str[i]);
                } else {
                    *p++ = str[i];
                }
        }
    }
    *p++ = '"';
    *p = '\0';
    return buf;
}
|
||
|
|
|
||
|
|
/* Escape a C string for XML text/attribute content using the five
 * predefined entities. NULL maps to "". Heap-allocated; caller frees.
 * Returns NULL on allocation failure.
 * BUG FIX: the entity strings had been corrupted to bare characters
 * ("<" copied but p advanced by 4, etc.), leaving uninitialized bytes in
 * the output. The pointer increments (4/4/5/6/6) match the entity lengths
 * restored here: &lt; &gt; &amp; &quot; &apos;. */
static char* _escape_xml(const char *str) {
    if (!str) return strdup("");
    size_t len = strlen(str);
    /* Worst case per byte is a 6-char entity ("&quot;"/"&apos;"), plus NUL. */
    char *buf = malloc(len * 6 + 1);
    if (!buf) return NULL;
    char *p = buf;
    for (size_t i = 0; i < len; i++) {
        switch (str[i]) {
            case '<': strcpy(p, "&lt;"); p += 4; break;
            case '>': strcpy(p, "&gt;"); p += 4; break;
            case '&': strcpy(p, "&amp;"); p += 5; break;
            case '"': strcpy(p, "&quot;"); p += 6; break;
            case '\'': strcpy(p, "&apos;"); p += 6; break;
            default: *p++ = str[i];
        }
    }
    *p = '\0';
    return buf;
}
|
||
|
|
|
||
|
|
/* Escape a string for HTML output. The XML entity set (&lt; &gt; &amp;
 * &quot; &apos;) is also valid HTML, so this simply delegates to
 * _escape_xml. Returns a newly allocated string the caller must free;
 * NULL input yields "" (see _escape_xml). */
static char* _escape_html(const char *str) {
    return _escape_xml(str);
}
|
||
|
|
|
||
|
|
UnittestConfig_t* unittest_config_create(void) {
|
||
|
|
UnittestConfig_t *config = calloc(1, sizeof(UnittestConfig_t));
|
||
|
|
config->output_format = UNITTEST_FORMAT_TEXT;
|
||
|
|
config->verbosity = 1;
|
||
|
|
config->output_stream = stdout;
|
||
|
|
config->track_execution_time = true;
|
||
|
|
config->show_full_traceback = true;
|
||
|
|
config->max_traceback_depth = 10;
|
||
|
|
config->use_colors = isatty(STDOUT_FILENO);
|
||
|
|
return config;
|
||
|
|
}
|
||
|
|
|
||
|
|
/* Release a config and every string it owns. NULL is accepted.
 * free(NULL) is a no-op, so each field is released unconditionally. */
void unittest_config_destroy(UnittestConfig_t *config) {
    if (!config) return;
    free(config->output_file);
    if (config->test_names_to_run) {
        for (size_t i = 0; i < config->test_names_count; i++)
            free(config->test_names_to_run[i]);
        free(config->test_names_to_run);
    }
    free(config->test_pattern);
    free(config->test_runner_name);
    free(config->test_environment);
    free(config);
}
|
||
|
|
|
||
|
|
UnittestTestSuite_t* unittest_test_suite_create(const char *suite_name) {
|
||
|
|
UnittestTestSuite_t *suite = calloc(1, sizeof(UnittestTestSuite_t));
|
||
|
|
suite->test_suite_name = _strdup_safe(suite_name);
|
||
|
|
suite->test_case_capacity = 16;
|
||
|
|
suite->test_cases = calloc(suite->test_case_capacity, sizeof(UnittestTestCase_t*));
|
||
|
|
suite->start_time = time(NULL);
|
||
|
|
return suite;
|
||
|
|
}
|
||
|
|
|
||
|
|
/* Append a test case to the suite (the suite takes ownership) and fold its
 * per-case counters into the suite-wide totals.
 * FIX: the previous `p = realloc(p, ...)` pattern overwrote the only
 * pointer to the array; on realloc failure that leaks the array and the
 * subsequent store dereferences NULL. We realloc into a temporary and,
 * on failure, leave the suite unchanged (the case is not added). */
void unittest_test_suite_add_test_case(UnittestTestSuite_t *suite, UnittestTestCase_t *test_case) {
    if (!suite || !test_case) return;
    if (suite->test_case_count >= suite->test_case_capacity) {
        size_t new_capacity = suite->test_case_capacity * 2;
        UnittestTestCase_t **grown = realloc(suite->test_cases, new_capacity * sizeof(UnittestTestCase_t*));
        if (!grown) return;  /* old array is still valid; drop the add on OOM */
        suite->test_cases = grown;
        suite->test_case_capacity = new_capacity;
    }
    suite->test_cases[suite->test_case_count++] = test_case;
    suite->total_passed += test_case->passed_count;
    suite->total_failed += test_case->failed_count;
    suite->total_errors += test_case->error_count;
    suite->total_skipped += test_case->skipped_count;
    suite->total_xfail += test_case->xfail_count;
    suite->total_xpass += test_case->xpass_count;
    suite->total_suite_time_ms += test_case->total_time_ms;
}
|
||
|
|
|
||
|
|
/* Tear down a suite: destroy every owned test case, then the case array,
 * the suite name, and the suite itself. NULL is accepted. */
void unittest_test_suite_destroy(UnittestTestSuite_t *suite) {
    if (!suite) return;
    for (size_t i = 0; i < suite->test_case_count; i++)
        unittest_test_case_destroy(suite->test_cases[i]);
    free(suite->test_cases);
    free(suite->test_suite_name);  /* free(NULL) is a no-op */
    free(suite);
}
|
||
|
|
|
||
|
|
UnittestTestCase_t* unittest_test_case_create(const char *class_name) {
|
||
|
|
UnittestTestCase_t *tc = calloc(1, sizeof(UnittestTestCase_t));
|
||
|
|
tc->class_name = _strdup_safe(class_name);
|
||
|
|
tc->result_capacity = 16;
|
||
|
|
tc->results = calloc(tc->result_capacity, sizeof(UnittestTestResult_t*));
|
||
|
|
return tc;
|
||
|
|
}
|
||
|
|
|
||
|
|
/* Append a result to a test case (the case takes ownership), accumulate its
 * execution time, and bump the counter matching its result type.
 * FIX: realloc result of the growth step is now checked via a temporary
 * (the old `p = realloc(p, ...)` leaked and then dereferenced NULL on OOM);
 * on failure the result is not added and the case is left unchanged.
 * Also adds an explicit `default` to the switch. */
void unittest_test_case_add_result(UnittestTestCase_t *test_case, UnittestTestResult_t *result) {
    if (!test_case || !result) return;
    if (test_case->result_count >= test_case->result_capacity) {
        size_t new_capacity = test_case->result_capacity * 2;
        UnittestTestResult_t **grown = realloc(test_case->results, new_capacity * sizeof(UnittestTestResult_t*));
        if (!grown) return;  /* old array stays valid; drop the add on OOM */
        test_case->results = grown;
        test_case->result_capacity = new_capacity;
    }
    test_case->results[test_case->result_count++] = result;
    test_case->total_time_ms += result->execution_time_ms;
    switch (result->result_type) {
        case UNITTEST_PASS: test_case->passed_count++; break;
        case UNITTEST_FAIL: test_case->failed_count++; break;
        case UNITTEST_ERROR: test_case->error_count++; break;
        case UNITTEST_SKIP: test_case->skipped_count++; break;
        case UNITTEST_XFAIL: test_case->xfail_count++; break;
        case UNITTEST_XPASS: test_case->xpass_count++; break;
        default: break;  /* unknown type: counted in totals via result_count only */
    }
}
|
||
|
|
|
||
|
|
/* Tear down a test case: destroy every owned result, then the result array,
 * the class name, and the case itself. NULL is accepted. */
void unittest_test_case_destroy(UnittestTestCase_t *test_case) {
    if (!test_case) return;
    for (size_t i = 0; i < test_case->result_count; i++)
        unittest_test_result_destroy(test_case->results[i]);
    free(test_case->results);
    free(test_case->class_name);  /* free(NULL) is a no-op */
    free(test_case);
}
|
||
|
|
|
||
|
|
UnittestTestResult_t* unittest_test_result_create(const char *test_class, const char *test_method, int line_number, const char *file_name) {
|
||
|
|
UnittestTestResult_t *result = calloc(1, sizeof(UnittestTestResult_t));
|
||
|
|
result->test_class = _strdup_safe(test_class);
|
||
|
|
result->test_method = _strdup_safe(test_method);
|
||
|
|
result->line_number = line_number;
|
||
|
|
result->file_name = _strdup_safe(file_name);
|
||
|
|
result->result_type = UNITTEST_PASS;
|
||
|
|
result->assertion_capacity = 16;
|
||
|
|
result->assertions = calloc(result->assertion_capacity, sizeof(UnittestAssertionInfo_t*));
|
||
|
|
if (test_class && test_method) {
|
||
|
|
size_t len = strlen(test_class) + strlen(test_method) + 2;
|
||
|
|
result->test_name = malloc(len);
|
||
|
|
snprintf(result->test_name, len, "%s.%s", test_class, test_method);
|
||
|
|
}
|
||
|
|
return result;
|
||
|
|
}
|
||
|
|
|
||
|
|
/* Mark a result as skipped with an optional reason (may be NULL).
 * FIX: a repeated call previously leaked the old skip_reason; it is now
 * freed before being replaced, matching unittest_test_result_set_error. */
void unittest_test_result_set_skip(UnittestTestResult_t *result, const char *reason) {
    if (!result) return;
    result->result_type = UNITTEST_SKIP;
    _free_safe(result->skip_reason);
    result->skip_reason = _strdup_safe(reason);
}
|
||
|
|
|
||
|
|
/* Mark a result as an expected failure, storing the reason in
 * error_message (may be NULL).
 * FIX: a repeated call previously leaked the old error_message; it is now
 * freed before being replaced, matching unittest_test_result_set_error. */
void unittest_test_result_set_xfail(UnittestTestResult_t *result, const char *reason) {
    if (!result) return;
    result->result_type = UNITTEST_XFAIL;
    _free_safe(result->error_message);
    result->error_message = _strdup_safe(reason);
}
|
||
|
|
|
||
|
|
/* Mark a result as errored, replacing any previous message/traceback.
 * Safe to call repeatedly: old strings are released before overwriting.
 * Either argument may be NULL. */
void unittest_test_result_set_error(UnittestTestResult_t *result, const char *error_message, const char *traceback) {
    if (!result) return;
    result->result_type = UNITTEST_ERROR;
    _free_safe(result->error_message);
    result->error_message = _strdup_safe(error_message);
    _free_safe(result->traceback);
    result->traceback = _strdup_safe(traceback);
}
|
||
|
|
|
||
|
|
/* Tear down a result: all owned strings, every assertion record (and the
 * strings each one owns), the assertion array, and the result itself.
 * NULL is accepted; free(NULL) is a no-op so fields are freed directly. */
void unittest_test_result_destroy(UnittestTestResult_t *result) {
    if (!result) return;
    free(result->test_name);
    free(result->test_method);
    free(result->test_class);
    free(result->error_message);
    free(result->file_name);
    free(result->traceback);
    free(result->skip_reason);
    for (size_t i = 0; i < result->assertion_count; i++) {
        UnittestAssertionInfo_t *info = result->assertions[i];
        free(info->assertion_type);
        free(info->message);
        free(info->file_name);
        free(info->expected_str);
        free(info->actual_str);
        free(info);
    }
    free(result->assertions);
    free(result);
}
|
||
|
|
|
||
|
|
/* Record one assertion outcome on a result. Copies every string argument.
 * The first failing assertion flips the result from PASS to FAIL and
 * synthesizes an "AssertionError: ..." message.
 * FIX: the growth step used `p = realloc(p, ...)` (leak + NULL deref on
 * OOM) and the calloc for the record was unchecked. On OOM the record is
 * dropped but the pass/fail status is still updated, so a failing test
 * can never be misreported as passing. */
void unittest_record_assertion(UnittestTestResult_t *result, const char *assertion_type, const char *expected_str, const char *actual_str, const char *message, int line_number, const char *file_name, bool passed) {
    if (!result) return;
    if (result->assertion_count >= result->assertion_capacity) {
        size_t new_capacity = result->assertion_capacity * 2;
        UnittestAssertionInfo_t **grown = realloc(result->assertions, new_capacity * sizeof(UnittestAssertionInfo_t*));
        if (grown) {
            result->assertions = grown;
            result->assertion_capacity = new_capacity;
        }
    }
    if (result->assertion_count < result->assertion_capacity) {
        UnittestAssertionInfo_t *info = calloc(1, sizeof(UnittestAssertionInfo_t));
        if (info) {
            info->assertion_type = _strdup_safe(assertion_type);
            info->expected_str = _strdup_safe(expected_str);
            info->actual_str = _strdup_safe(actual_str);
            info->message = _strdup_safe(message);
            info->line_number = line_number;
            info->file_name = _strdup_safe(file_name);
            info->passed = passed;
            result->assertions[result->assertion_count++] = info;
        }
    }
    if (!passed && result->result_type == UNITTEST_PASS) {
        result->result_type = UNITTEST_FAIL;
        char buf[512];
        snprintf(buf, sizeof(buf), "AssertionError: %s - Expected %s, got %s", message ? message : assertion_type, expected_str ? expected_str : "?", actual_str ? actual_str : "?");
        _free_safe(result->error_message);
        result->error_message = strdup(buf);
    }
}
|
||
|
|
|
||
|
|
/* assertEqual for ints: records the comparison; returns whether it held. */
bool unittest_assert_int_equal(UnittestTestResult_t *result, int expected, int actual, const char *message, int line, const char *file) {
    char *lhs = _format_int(expected);
    char *rhs = _format_int(actual);
    const bool ok = (expected == actual);
    unittest_record_assertion(result, "assertEqual", lhs, rhs, message, line, file, ok);
    free(rhs);
    free(lhs);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertNotEqual for ints: records the comparison; returns whether it held. */
bool unittest_assert_int_not_equal(UnittestTestResult_t *result, int expected, int actual, const char *message, int line, const char *file) {
    char *lhs = _format_int(expected);
    char *rhs = _format_int(actual);
    const bool ok = (expected != actual);
    unittest_record_assertion(result, "assertNotEqual", lhs, rhs, message, line, file, ok);
    free(rhs);
    free(lhs);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertGreater: passes when actual > threshold. The threshold is recorded
 * in the "expected" slot of the assertion record. */
bool unittest_assert_int_greater(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) {
    char *bound = _format_int(threshold);
    char *value = _format_int(actual);
    const bool ok = (actual > threshold);
    unittest_record_assertion(result, "assertGreater", bound, value, message, line, file, ok);
    free(value);
    free(bound);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertLess: passes when actual < threshold. */
bool unittest_assert_int_less(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) {
    char *bound = _format_int(threshold);
    char *value = _format_int(actual);
    const bool ok = (actual < threshold);
    unittest_record_assertion(result, "assertLess", bound, value, message, line, file, ok);
    free(value);
    free(bound);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertGreaterEqual: passes when actual >= threshold. */
bool unittest_assert_int_greater_equal(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) {
    char *bound = _format_int(threshold);
    char *value = _format_int(actual);
    const bool ok = (actual >= threshold);
    unittest_record_assertion(result, "assertGreaterEqual", bound, value, message, line, file, ok);
    free(value);
    free(bound);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertLessEqual: passes when actual <= threshold. */
bool unittest_assert_int_less_equal(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) {
    char *bound = _format_int(threshold);
    char *value = _format_int(actual);
    const bool ok = (actual <= threshold);
    unittest_record_assertion(result, "assertLessEqual", bound, value, message, line, file, ok);
    free(value);
    free(bound);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertEqual for longs: records the comparison; returns whether it held. */
bool unittest_assert_long_equal(UnittestTestResult_t *result, long expected, long actual, const char *message, int line, const char *file) {
    char *lhs = _format_long(expected);
    char *rhs = _format_long(actual);
    const bool ok = (expected == actual);
    unittest_record_assertion(result, "assertEqual", lhs, rhs, message, line, file, ok);
    free(rhs);
    free(lhs);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertNotEqual for longs: records the comparison; returns whether it held. */
bool unittest_assert_long_not_equal(UnittestTestResult_t *result, long expected, long actual, const char *message, int line, const char *file) {
    char *lhs = _format_long(expected);
    char *rhs = _format_long(actual);
    const bool ok = (expected != actual);
    unittest_record_assertion(result, "assertNotEqual", lhs, rhs, message, line, file, ok);
    free(rhs);
    free(lhs);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertAlmostEqual for doubles: passes when |expected - actual| <= epsilon. */
bool unittest_assert_double_equal(UnittestTestResult_t *result, double expected, double actual, double epsilon, const char *message, int line, const char *file) {
    char *lhs = _format_double(expected);
    char *rhs = _format_double(actual);
    const bool ok = fabs(expected - actual) <= epsilon;
    unittest_record_assertion(result, "assertAlmostEqual", lhs, rhs, message, line, file, ok);
    free(rhs);
    free(lhs);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertNotAlmostEqual for doubles: passes when |expected - actual| > epsilon. */
bool unittest_assert_double_not_equal(UnittestTestResult_t *result, double expected, double actual, double epsilon, const char *message, int line, const char *file) {
    char *lhs = _format_double(expected);
    char *rhs = _format_double(actual);
    const bool ok = fabs(expected - actual) > epsilon;
    unittest_record_assertion(result, "assertNotAlmostEqual", lhs, rhs, message, line, file, ok);
    free(rhs);
    free(lhs);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertEqual for C strings. NULL handling: two NULLs compare equal; a
 * NULL against a non-NULL fails. NULL operands are recorded as "NULL". */
bool unittest_assert_string_equal(UnittestTestResult_t *result, const char *expected, const char *actual, const char *message, int line, const char *file) {
    bool ok;
    if (!expected || !actual) {
        ok = (expected == actual);  /* equal only when both are NULL */
    } else {
        ok = (strcmp(expected, actual) == 0);
    }
    unittest_record_assertion(result, "assertEqual", expected ? expected : "NULL", actual ? actual : "NULL", message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertNotEqual for C strings. NULL handling mirrors assert_string_equal:
 * two NULLs are equal (fails), a NULL against non-NULL differs (passes). */
bool unittest_assert_string_not_equal(UnittestTestResult_t *result, const char *expected, const char *actual, const char *message, int line, const char *file) {
    bool ok;
    if (!expected || !actual) {
        ok = (expected != actual);  /* differ unless both are NULL */
    } else {
        ok = (strcmp(expected, actual) != 0);
    }
    unittest_record_assertion(result, "assertNotEqual", expected ? expected : "NULL", actual ? actual : "NULL", message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertIn: passes when substring occurs in string. Fails whenever either
 * argument is NULL. */
bool unittest_assert_string_contains(UnittestTestResult_t *result, const char *substring, const char *string, const char *message, int line, const char *file) {
    const bool ok = (string && substring) ? (strstr(string, substring) != NULL) : false;
    unittest_record_assertion(result, "assertIn", substring ? substring : "NULL", string ? string : "NULL", message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertNotIn: passes when substring does NOT occur in string. Vacuously
 * passes when either argument is NULL. */
bool unittest_assert_string_not_contains(UnittestTestResult_t *result, const char *substring, const char *string, const char *message, int line, const char *file) {
    const bool ok = (string && substring) ? (strstr(string, substring) == NULL) : true;
    unittest_record_assertion(result, "assertNotIn", substring ? substring : "NULL", string ? string : "NULL", message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertStartsWith: passes when string begins with prefix. Fails whenever
 * either argument is NULL. */
bool unittest_assert_string_starts_with(UnittestTestResult_t *result, const char *prefix, const char *string, const char *message, int line, const char *file) {
    bool ok = false;
    if (prefix && string)
        ok = (strncmp(string, prefix, strlen(prefix)) == 0);
    unittest_record_assertion(result, "assertStartsWith", prefix ? prefix : "NULL", string ? string : "NULL", message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertEndsWith: passes when string ends with suffix. Fails whenever
 * either argument is NULL or the suffix is longer than the string. */
bool unittest_assert_string_ends_with(UnittestTestResult_t *result, const char *suffix, const char *string, const char *message, int line, const char *file) {
    bool ok = false;
    if (suffix && string) {
        const size_t n = strlen(string);
        const size_t m = strlen(suffix);
        ok = (n >= m) && (strcmp(string + n - m, suffix) == 0);
    }
    unittest_record_assertion(result, "assertEndsWith", suffix ? suffix : "NULL", string ? string : "NULL", message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertTrue: passes iff condition is true; records the actual value. */
bool unittest_assert_true(UnittestTestResult_t *result, bool condition, const char *message, int line, const char *file) {
    char *shown = _format_bool(condition);
    unittest_record_assertion(result, "assertTrue", "true", shown, message, line, file, condition);
    free(shown);
    return condition;
}
|
||
|
|
|
||
|
|
/* assertFalse: passes iff condition is false; records the actual value. */
bool unittest_assert_false(UnittestTestResult_t *result, bool condition, const char *message, int line, const char *file) {
    char *shown = _format_bool(condition);
    const bool ok = !condition;
    unittest_record_assertion(result, "assertFalse", "false", shown, message, line, file, ok);
    free(shown);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertIsNone: passes iff ptr is NULL; records the pointer's value. */
bool unittest_assert_null(UnittestTestResult_t *result, void *ptr, const char *message, int line, const char *file) {
    char *shown = _format_ptr(ptr);
    const bool ok = (ptr == NULL);
    unittest_record_assertion(result, "assertIsNone", "NULL", shown, message, line, file, ok);
    free(shown);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertIsNotNone: passes iff ptr is non-NULL; records the pointer's value. */
bool unittest_assert_not_null(UnittestTestResult_t *result, void *ptr, const char *message, int line, const char *file) {
    char *shown = _format_ptr(ptr);
    const bool ok = (ptr != NULL);
    unittest_record_assertion(result, "assertIsNotNone", "not NULL", shown, message, line, file, ok);
    free(shown);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertMemoryEqual: byte-wise comparison of two buffers of `length` bytes.
 * Two NULL pointers compare equal; NULL vs non-NULL fails. The record
 * describes both sides as "memory[N bytes]" rather than dumping contents. */
bool unittest_assert_memory_equal(UnittestTestResult_t *result, const void *expected, const void *actual, size_t length, const char *message, int line, const char *file) {
    bool ok;
    if (!expected || !actual) {
        ok = (expected == NULL && actual == NULL);
    } else {
        ok = (memcmp(expected, actual, length) == 0);
    }
    char desc[64];
    snprintf(desc, sizeof(desc), "memory[%zu bytes]", length);
    unittest_record_assertion(result, "assertMemoryEqual", desc, desc, message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* assertArrayEqual for int arrays: element-wise comparison of `length`
 * entries. If either pointer is NULL, the arrays are equal only when both
 * are NULL. The record describes both sides as "int[N]". */
bool unittest_assert_array_int_equal(UnittestTestResult_t *result, const int *expected, const int *actual, size_t length, const char *message, int line, const char *file) {
    bool ok;
    if (expected && actual) {
        ok = true;
        for (size_t k = 0; k < length; k++) {
            if (expected[k] != actual[k]) {
                ok = false;
                break;
            }
        }
    } else {
        ok = (expected == actual);
    }
    char desc[64];
    snprintf(desc, sizeof(desc), "int[%zu]", length);
    unittest_record_assertion(result, "assertArrayEqual", desc, desc, message, line, file, ok);
    return ok;
}
|
||
|
|
|
||
|
|
/* Record an unconditional failing assertion (Python unittest's `fail()`).
 * Always returns false, so callers may `return unittest_assert_fail(...)`. */
bool unittest_assert_fail(UnittestTestResult_t *result, const char *message, int line, const char *file) {
    unittest_record_assertion(result, "fail", "pass", "fail", message, line, file, false);
    return false;
}
|
||
|
|
|
||
|
|
/* Record an unconditional passing assertion (useful to note a checkpoint
 * was reached). Always returns true. */
bool unittest_assert_pass(UnittestTestResult_t *result, const char *message, int line, const char *file) {
    unittest_record_assertion(result, "pass", "pass", "pass", message, line, file, true);
    return true;
}
|
||
|
|
|
||
|
|
/* "Run" a suite. In this framework tests execute as their results are
 * recorded, so this only reports the outcome: the number of failed plus
 * errored tests (0 == all good), or -1 for a NULL suite.
 * NOTE(review): `config` is currently unused — presumably reserved for
 * future filtering/reporting options; confirm intended semantics. */
int unittest_run_suite(UnittestTestSuite_t *suite, UnittestConfig_t *config) {
    (void)config;
    if (!suite) return -1;
    return suite->total_failed + suite->total_errors;
}
|
||
|
|
|
||
|
|
/* Copy the suite's aggregate counters into any non-NULL out-parameters.
 * `total` is recomputed by summing result counts across all test cases;
 * the rest come straight from the suite's running totals. */
void unittest_get_summary(UnittestTestSuite_t *suite, int *total, int *passed, int *failed, int *errors, int *skipped) {
    if (!suite) return;
    int count = 0;
    for (size_t i = 0; i < suite->test_case_count; i++)
        count += (int)suite->test_cases[i]->result_count;
    if (total) *total = count;
    if (passed) *passed = suite->total_passed;
    if (failed) *failed = suite->total_failed;
    if (errors) *errors = suite->total_errors;
    if (skipped) *skipped = suite->total_skipped;
}
|
||
|
|
|
||
|
|
/* Human-readable label for a result type, as printed in text reports.
 * Casing follows unittest convention: quiet success ("ok"/"xfail"),
 * loud problems ("FAIL"/"ERROR"/"SKIP"/"XPASS"). */
static const char* _result_type_str(UnittestResultType_e type) {
    if (type == UNITTEST_PASS) return "ok";
    if (type == UNITTEST_FAIL) return "FAIL";
    if (type == UNITTEST_ERROR) return "ERROR";
    if (type == UNITTEST_SKIP) return "SKIP";
    if (type == UNITTEST_XFAIL) return "xfail";
    if (type == UNITTEST_XPASS) return "XPASS";
    return "UNKNOWN";
}
|
||
|
|
|
||
|
|
/* ANSI color prefix for a result type, or "" when colors are disabled:
 * green for pass, red for fail/error, yellow for everything expected-ish. */
static const char* _result_type_color(UnittestResultType_e type, bool use_colors) {
    if (!use_colors) return "";
    switch (type) {
        case UNITTEST_PASS:
            return ANSI_GREEN;
        case UNITTEST_FAIL:
        case UNITTEST_ERROR:
            return ANSI_RED;
        case UNITTEST_SKIP:
        case UNITTEST_XFAIL:
        case UNITTEST_XPASS:
            return ANSI_YELLOW;
        default:
            return "";
    }
}
|
||
|
|
|
||
|
|
/* Write the suite as a human-readable text report to `output`:
 * a header (suite name + local start time), one line per test result
 * (with optional timing and, at verbosity >= 2, assertion counts),
 * detail lines for failures/errors/skips, and a footer with totals and
 * an OK/FAILED verdict. Colors are applied per config->use_colors.
 * Output format is load-bearing for consumers; do not change strings. */
void _unittest_format_text(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
    bool colors = config->use_colors;
    const char *reset = colors ? ANSI_RESET : "";
    /* --- header --- */
    fprintf(output, "================================================================================\n");
    fprintf(output, "Test Suite: %s\n", suite->test_suite_name ? suite->test_suite_name : "Unnamed");
    char time_buf[64];
    struct tm *tm_info = localtime(&suite->start_time);
    strftime(time_buf, sizeof(time_buf), "%Y-%m-%d %H:%M:%S", tm_info);
    fprintf(output, "Started: %s\n", time_buf);
    fprintf(output, "================================================================================\n\n");
    /* --- one line per test result --- */
    for (size_t i = 0; i < suite->test_case_count; i++) {
        UnittestTestCase_t *tc = suite->test_cases[i];
        for (size_t j = 0; j < tc->result_count; j++) {
            UnittestTestResult_t *r = tc->results[j];
            const char *color = _result_type_color(r->result_type, colors);
            fprintf(output, "%s (%s) ... %s%s%s", r->test_method ? r->test_method : "?", tc->class_name ? tc->class_name : "?", color, _result_type_str(r->result_type), reset);
            if (config->track_execution_time) {
                fprintf(output, " (%.1fms)", r->execution_time_ms);
            }
            if (config->verbosity >= 2 && r->assertion_count > 0) {
                fprintf(output, " [%zu assertions]", r->assertion_count);
            }
            fprintf(output, "\n");
            /* failures/errors get message, location, optional traceback */
            if (r->result_type == UNITTEST_FAIL || r->result_type == UNITTEST_ERROR) {
                if (r->error_message) {
                    fprintf(output, " %s\n", r->error_message);
                }
                if (r->file_name && r->line_number > 0) {
                    fprintf(output, " File: %s, Line: %d\n", r->file_name, r->line_number);
                }
                if (config->show_full_traceback && r->traceback) {
                    fprintf(output, " Traceback:\n%s\n", r->traceback);
                }
                fprintf(output, "\n");
            } else if (r->result_type == UNITTEST_SKIP && r->skip_reason) {
                fprintf(output, " Reason: %s\n\n", r->skip_reason);
            }
        }
    }
    /* --- footer: totals and verdict --- */
    fprintf(output, "================================================================================\n");
    int total = 0;
    for (size_t i = 0; i < suite->test_case_count; i++) {
        total += (int)suite->test_cases[i]->result_count;
    }
    fprintf(output, "Ran %d tests in %.1fms\n", total, suite->total_suite_time_ms);
    fprintf(output, "Result: %d passed, %d failed, %d errors, %d skipped", suite->total_passed, suite->total_failed, suite->total_errors, suite->total_skipped);
    if (suite->total_xfail > 0) fprintf(output, ", %d xfail", suite->total_xfail);
    if (suite->total_xpass > 0) fprintf(output, ", %d xpass", suite->total_xpass);
    fprintf(output, "\n");
    if (suite->total_failed > 0 || suite->total_errors > 0) {
        fprintf(output, "%sFAILED%s (failures=%d, errors=%d)\n", colors ? ANSI_RED : "", reset, suite->total_failed, suite->total_errors);
    } else {
        fprintf(output, "%sOK%s\n", colors ? ANSI_GREEN : "", reset);
    }
    fprintf(output, "================================================================================\n");
}
|
||
|
|
|
||
|
|
/* Minimal dot-style report: one character per test result
 * (. F E S x X), a newline, then a one-line OK/FAILED verdict. */
void _unittest_format_quiet(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
    (void)config;
    for (size_t i = 0; i < suite->test_case_count; i++) {
        UnittestTestCase_t *tc = suite->test_cases[i];
        for (size_t j = 0; j < tc->result_count; j++) {
            char mark = 0;
            switch (tc->results[j]->result_type) {
                case UNITTEST_PASS: mark = '.'; break;
                case UNITTEST_FAIL: mark = 'F'; break;
                case UNITTEST_ERROR: mark = 'E'; break;
                case UNITTEST_SKIP: mark = 'S'; break;
                case UNITTEST_XFAIL: mark = 'x'; break;
                case UNITTEST_XPASS: mark = 'X'; break;
            }
            if (mark) fputc(mark, output);  /* unknown types print nothing */
        }
    }
    fputc('\n', output);
    if (suite->total_failed > 0 || suite->total_errors > 0) {
        fprintf(output, "FAILED (failures=%d, errors=%d)\n", suite->total_failed, suite->total_errors);
    } else {
        fprintf(output, "OK\n");
    }
}
|
||
|
|
|
||
|
|
/* Write the suite as a JSON document to `output`:
 * { testSuite, timestamp (UTC ISO-8601), duration, tests: [...], summary }.
 * Strings are escaped via _escape_json (NULL becomes the JSON token null).
 * Output format is load-bearing for consumers; do not change strings.
 * NOTE(review): _escape_json can return NULL on OOM, which would print
 * "(null)" via %s on glibc — acceptable for a reporter, but worth a look. */
void _unittest_format_json(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
    (void)config;
    /* UTC timestamp for the suite start */
    char time_buf[64];
    struct tm *tm_info = gmtime(&suite->start_time);
    strftime(time_buf, sizeof(time_buf), "%Y-%m-%dT%H:%M:%SZ", tm_info);
    char *suite_name = _escape_json(suite->test_suite_name);
    fprintf(output, "{\n");
    fprintf(output, " \"testSuite\": %s,\n", suite_name);
    fprintf(output, " \"timestamp\": \"%s\",\n", time_buf);
    fprintf(output, " \"duration\": %.1f,\n", suite->total_suite_time_ms);
    fprintf(output, " \"tests\": [\n");
    /* one JSON object per test result, comma-separated */
    bool first_test = true;
    for (size_t i = 0; i < suite->test_case_count; i++) {
        UnittestTestCase_t *tc = suite->test_cases[i];
        for (size_t j = 0; j < tc->result_count; j++) {
            if (!first_test) fprintf(output, ",\n");
            first_test = false;
            UnittestTestResult_t *r = tc->results[j];
            char *name = _escape_json(r->test_method);
            char *cls = _escape_json(tc->class_name);
            char *msg = _escape_json(r->error_message);
            char *file = _escape_json(r->file_name);
            const char *status;
            switch (r->result_type) {
                case UNITTEST_PASS: status = "PASS"; break;
                case UNITTEST_FAIL: status = "FAIL"; break;
                case UNITTEST_ERROR: status = "ERROR"; break;
                case UNITTEST_SKIP: status = "SKIP"; break;
                case UNITTEST_XFAIL: status = "XFAIL"; break;
                case UNITTEST_XPASS: status = "XPASS"; break;
                default: status = "UNKNOWN";
            }
            fprintf(output, " {\n");
            fprintf(output, " \"name\": %s,\n", name);
            fprintf(output, " \"className\": %s,\n", cls);
            fprintf(output, " \"status\": \"%s\",\n", status);
            fprintf(output, " \"duration\": %.1f,\n", r->execution_time_ms);
            fprintf(output, " \"assertions\": %zu,\n", r->assertion_count);
            fprintf(output, " \"message\": %s,\n", msg);
            fprintf(output, " \"file\": %s,\n", file);
            fprintf(output, " \"line\": %d\n", r->line_number);
            fprintf(output, " }");
            free(name);
            free(cls);
            free(msg);
            free(file);
        }
    }
    fprintf(output, "\n ],\n");
    /* summary block with aggregate counts */
    int total = 0;
    for (size_t i = 0; i < suite->test_case_count; i++) {
        total += (int)suite->test_cases[i]->result_count;
    }
    fprintf(output, " \"summary\": {\n");
    fprintf(output, " \"total\": %d,\n", total);
    fprintf(output, " \"passed\": %d,\n", suite->total_passed);
    fprintf(output, " \"failed\": %d,\n", suite->total_failed);
    fprintf(output, " \"errors\": %d,\n", suite->total_errors);
    fprintf(output, " \"skipped\": %d,\n", suite->total_skipped);
    fprintf(output, " \"xfail\": %d,\n", suite->total_xfail);
    fprintf(output, " \"xpass\": %d\n", suite->total_xpass);
    fprintf(output, " }\n");
    fprintf(output, "}\n");
    free(suite_name);
}
|
||
|
|
|
||
|
|
/*
 * Write the suite results as JUnit-style XML to `output`.
 *
 * Layout: one <testsuites> root carrying aggregate counters, one
 * <testsuite> per test case, one <testcase> per result.  Passing tests
 * are emitted as self-closing elements; FAIL/ERROR/SKIP results carry a
 * nested detail element.  `config` is accepted for signature parity with
 * the other formatters but not consulted.
 *
 * All user-controlled text is routed through _escape_xml(); every escaped
 * string is owned here and freed before return.
 */
void _unittest_format_xml(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
    (void)config; /* unused for XML output */

    /* Grand total of individual results across every test case. */
    int overall = 0;
    for (size_t ci = 0; ci < suite->test_case_count; ci++) {
        overall += (int)suite->test_cases[ci]->result_count;
    }

    char *escaped_suite = _escape_xml(suite->test_suite_name);
    fprintf(output, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
    fprintf(output, "<testsuites name=\"%s\" tests=\"%d\" failures=\"%d\" errors=\"%d\" skipped=\"%d\" time=\"%.3f\">\n", escaped_suite, overall, suite->total_failed, suite->total_errors, suite->total_skipped, suite->total_suite_time_ms / 1000.0);

    for (size_t ci = 0; ci < suite->test_case_count; ci++) {
        UnittestTestCase_t *casep = suite->test_cases[ci];
        char *escaped_cls = _escape_xml(casep->class_name);
        fprintf(output, " <testsuite name=\"%s\" tests=\"%zu\" failures=\"%d\" errors=\"%d\" skipped=\"%d\" time=\"%.3f\">\n", escaped_cls, casep->result_count, casep->failed_count, casep->error_count, casep->skipped_count, casep->total_time_ms / 1000.0);

        for (size_t ri = 0; ri < casep->result_count; ri++) {
            UnittestTestResult_t *res = casep->results[ri];
            char *escaped_method = _escape_xml(res->test_method);
            fprintf(output, " <testcase classname=\"%s\" name=\"%s\" time=\"%.3f\" assertions=\"%zu\"", escaped_cls, escaped_method, res->execution_time_ms / 1000.0, res->assertion_count);

            if (res->result_type == UNITTEST_PASS) {
                /* Passing test: self-closing element, nothing nested. */
                fprintf(output, "/>\n");
            } else {
                fprintf(output, ">\n");
                switch (res->result_type) {
                case UNITTEST_FAIL: {
                    char *escaped_msg = _escape_xml(res->error_message);
                    fprintf(output, " <failure message=\"%s\" type=\"AssertionError\">\n", escaped_msg);
                    if (res->file_name) fprintf(output, " File: %s, Line: %d\n", res->file_name, res->line_number);
                    fprintf(output, " </failure>\n");
                    free(escaped_msg);
                    break;
                }
                case UNITTEST_ERROR: {
                    char *escaped_msg = _escape_xml(res->error_message);
                    fprintf(output, " <error message=\"%s\" type=\"RuntimeError\">\n", escaped_msg);
                    if (res->traceback) {
                        char *escaped_tb = _escape_xml(res->traceback);
                        fprintf(output, " %s\n", escaped_tb);
                        free(escaped_tb);
                    }
                    fprintf(output, " </error>\n");
                    free(escaped_msg);
                    break;
                }
                case UNITTEST_SKIP: {
                    char *escaped_reason = _escape_xml(res->skip_reason);
                    fprintf(output, " <skipped message=\"%s\"/>\n", escaped_reason ? escaped_reason : "");
                    free(escaped_reason);
                    break;
                }
                default:
                    /* XFAIL / XPASS: element stays open but carries no detail child. */
                    break;
                }
                fprintf(output, " </testcase>\n");
            }
            free(escaped_method);
        }
        fprintf(output, " </testsuite>\n");
        free(escaped_cls);
    }

    fprintf(output, "</testsuites>\n");
    free(escaped_suite);
}
|
||
|
|
|
||
|
|
/*
 * Write the suite results as a self-contained HTML report to `output`.
 *
 * Emits a standalone page: inline CSS, a summary strip of aggregate
 * counters, then one card per test case listing each result with a
 * status badge; FAIL/ERROR results get an extra details pane with the
 * message, file, and line.  `config` is accepted for signature parity
 * with the other formatters but not consulted.
 *
 * All user-controlled text goes through _escape_html(); escaped strings
 * are owned locally and freed before return.
 *
 * Fix: the return of localtime() is now checked.  localtime() may return
 * NULL (e.g. for an unrepresentable time value), and passing NULL to
 * strftime() is undefined behavior; we now fall back to a placeholder
 * timestamp instead.
 */
void _unittest_format_html(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
    (void)config; /* unused for HTML output */

    /* Grand total of individual results across every test case. */
    int total = 0;
    for (size_t i = 0; i < suite->test_case_count; i++) {
        total += (int)suite->test_cases[i]->result_count;
    }

    char *suite_name = _escape_html(suite->test_suite_name);

    /* Format the suite start time; guard against localtime() failure. */
    char time_buf[64] = "unknown";
    struct tm *tm_info = localtime(&suite->start_time);
    if (tm_info) {
        strftime(time_buf, sizeof(time_buf), "%Y-%m-%d %H:%M:%S", tm_info);
    }

    /* Document head and inline stylesheet. */
    fprintf(output, "<!DOCTYPE html>\n<html>\n<head>\n");
    fprintf(output, "<meta charset=\"UTF-8\">\n");
    fprintf(output, "<title>Test Report - %s</title>\n", suite_name);
    fprintf(output, "<style>\n");
    fprintf(output, "body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; margin: 20px; background: #f5f5f5; }\n");
    fprintf(output, ".container { max-width: 1200px; margin: 0 auto; background: white; padding: 20px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); }\n");
    fprintf(output, "h1 { color: #333; border-bottom: 2px solid #333; padding-bottom: 10px; }\n");
    fprintf(output, ".summary { display: flex; gap: 20px; margin: 20px 0; flex-wrap: wrap; }\n");
    fprintf(output, ".stat { padding: 15px 25px; border-radius: 6px; color: white; font-weight: bold; }\n");
    fprintf(output, ".stat-pass { background: #28a745; }\n");
    fprintf(output, ".stat-fail { background: #dc3545; }\n");
    fprintf(output, ".stat-error { background: #fd7e14; }\n");
    fprintf(output, ".stat-skip { background: #6c757d; }\n");
    fprintf(output, ".stat-total { background: #007bff; }\n");
    fprintf(output, ".test-case { margin: 15px 0; border: 1px solid #ddd; border-radius: 6px; overflow: hidden; }\n");
    fprintf(output, ".test-case-header { background: #f8f9fa; padding: 10px 15px; font-weight: bold; border-bottom: 1px solid #ddd; }\n");
    fprintf(output, ".test-result { padding: 10px 15px; border-bottom: 1px solid #eee; display: flex; align-items: center; gap: 10px; }\n");
    fprintf(output, ".test-result:last-child { border-bottom: none; }\n");
    fprintf(output, ".badge { padding: 3px 8px; border-radius: 4px; font-size: 12px; font-weight: bold; color: white; }\n");
    fprintf(output, ".badge-pass { background: #28a745; }\n");
    fprintf(output, ".badge-fail { background: #dc3545; }\n");
    fprintf(output, ".badge-error { background: #fd7e14; }\n");
    fprintf(output, ".badge-skip { background: #6c757d; }\n");
    fprintf(output, ".test-name { flex: 1; }\n");
    fprintf(output, ".test-time { color: #666; font-size: 14px; }\n");
    fprintf(output, ".error-details { background: #fff3cd; padding: 10px 15px; font-family: monospace; font-size: 13px; white-space: pre-wrap; }\n");
    fprintf(output, ".meta { color: #666; font-size: 14px; margin-bottom: 20px; }\n");
    fprintf(output, "</style>\n</head>\n<body>\n<div class=\"container\">\n");

    /* Header and summary strip. */
    fprintf(output, "<h1>Test Report: %s</h1>\n", suite_name);
    fprintf(output, "<div class=\"meta\">Started: %s | Duration: %.1fms</div>\n", time_buf, suite->total_suite_time_ms);
    fprintf(output, "<div class=\"summary\">\n");
    fprintf(output, "<div class=\"stat stat-total\">Total: %d</div>\n", total);
    fprintf(output, "<div class=\"stat stat-pass\">Passed: %d</div>\n", suite->total_passed);
    fprintf(output, "<div class=\"stat stat-fail\">Failed: %d</div>\n", suite->total_failed);
    fprintf(output, "<div class=\"stat stat-error\">Errors: %d</div>\n", suite->total_errors);
    fprintf(output, "<div class=\"stat stat-skip\">Skipped: %d</div>\n", suite->total_skipped);
    fprintf(output, "</div>\n");

    /* One card per test case, one row per result. */
    for (size_t i = 0; i < suite->test_case_count; i++) {
        UnittestTestCase_t *tc = suite->test_cases[i];
        char *cls_name = _escape_html(tc->class_name);
        fprintf(output, "<div class=\"test-case\">\n");
        fprintf(output, "<div class=\"test-case-header\">%s (%zu tests)</div>\n", cls_name, tc->result_count);

        for (size_t j = 0; j < tc->result_count; j++) {
            UnittestTestResult_t *r = tc->results[j];
            char *method = _escape_html(r->test_method);

            /* Map the result type to a badge style and label.  XFAIL/XPASS
             * intentionally reuse the neutral skip styling. */
            const char *badge_class;
            const char *badge_text;
            switch (r->result_type) {
                case UNITTEST_PASS: badge_class = "badge-pass"; badge_text = "PASS"; break;
                case UNITTEST_FAIL: badge_class = "badge-fail"; badge_text = "FAIL"; break;
                case UNITTEST_ERROR: badge_class = "badge-error"; badge_text = "ERROR"; break;
                case UNITTEST_SKIP: badge_class = "badge-skip"; badge_text = "SKIP"; break;
                case UNITTEST_XFAIL: badge_class = "badge-skip"; badge_text = "XFAIL"; break;
                case UNITTEST_XPASS: badge_class = "badge-skip"; badge_text = "XPASS"; break;
                default: badge_class = "badge-skip"; badge_text = "?"; break;
            }

            fprintf(output, "<div class=\"test-result\">\n");
            fprintf(output, "<span class=\"badge %s\">%s</span>\n", badge_class, badge_text);
            fprintf(output, "<span class=\"test-name\">%s</span>\n", method);
            fprintf(output, "<span class=\"test-time\">%.1fms</span>\n", r->execution_time_ms);
            fprintf(output, "</div>\n");

            /* Failures and errors get a details pane below the row. */
            if (r->result_type == UNITTEST_FAIL || r->result_type == UNITTEST_ERROR) {
                char *msg = _escape_html(r->error_message);
                fprintf(output, "<div class=\"error-details\">%s\nFile: %s, Line: %d</div>\n", msg ? msg : "", r->file_name ? r->file_name : "?", r->line_number);
                free(msg);
            }
            free(method);
        }

        fprintf(output, "</div>\n");
        free(cls_name);
    }

    fprintf(output, "</div>\n</body>\n</html>\n");
    free(suite_name);
}
|
||
|
|
|
||
|
|
/*
 * Write the suite results in TAP (Test Anything Protocol) version 13 to
 * `output`.
 *
 * Emits the plan line (1..N), then one "ok"/"not ok" line per result.
 * PASS and XFAIL count as "ok"; XPASS (unexpected pass) counts as
 * "not ok".  SKIP results carry a "# SKIP reason" directive; otherwise,
 * when config->track_execution_time is set, the per-test duration is
 * appended as a comment.  FAIL/ERROR results are followed by a YAML-ish
 * diagnostic block with message, severity, and (when known) file/line.
 *
 * Fix: removed the stale `(void)config;` cast — config IS used (the
 * track_execution_time check below), so the cast was misleading.
 *
 * NOTE(review): test names and messages are printed unescaped; a name
 * containing '#' or a newline would corrupt the TAP stream — consider
 * sanitizing.  Also, this formatter reads r->test_name while the other
 * formatters read r->test_method; confirm both fields are populated.
 */
void _unittest_format_tap(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
    /* Grand total of individual results, for the TAP plan line. */
    int total = 0;
    for (size_t i = 0; i < suite->test_case_count; i++) {
        total += (int)suite->test_cases[i]->result_count;
    }

    fprintf(output, "TAP version 13\n");
    fprintf(output, "1..%d\n", total);

    int test_num = 0; /* TAP test numbers are 1-based and global across cases */
    for (size_t i = 0; i < suite->test_case_count; i++) {
        UnittestTestCase_t *tc = suite->test_cases[i];
        for (size_t j = 0; j < tc->result_count; j++) {
            test_num++;
            UnittestTestResult_t *r = tc->results[j];

            /* Expected failures are "ok"; everything else must pass outright. */
            bool ok = (r->result_type == UNITTEST_PASS || r->result_type == UNITTEST_XFAIL);
            fprintf(output, "%s %d - %s", ok ? "ok" : "not ok", test_num, r->test_name ? r->test_name : "?");

            if (r->result_type == UNITTEST_SKIP) {
                fprintf(output, " # SKIP %s", r->skip_reason ? r->skip_reason : "");
            } else if (config->track_execution_time) {
                fprintf(output, " # %.1fms", r->execution_time_ms);
            }
            fprintf(output, "\n");

            /* Diagnostic block for failures and errors. */
            if (r->result_type == UNITTEST_FAIL || r->result_type == UNITTEST_ERROR) {
                fprintf(output, " ---\n");
                fprintf(output, " message: %s\n", r->error_message ? r->error_message : "");
                fprintf(output, " severity: %s\n", r->result_type == UNITTEST_FAIL ? "fail" : "error");
                if (r->file_name) {
                    fprintf(output, " file: %s\n", r->file_name);
                    fprintf(output, " line: %d\n", r->line_number);
                }
                fprintf(output, " ...\n");
            }
        }
    }

    /* Trailing summary comment (ignored by TAP consumers). */
    fprintf(output, "# Tests run: %d, Passed: %d, Failed: %d, Errors: %d, Skipped: %d\n", total, suite->total_passed, suite->total_failed, suite->total_errors, suite->total_skipped);
}
|
||
|
|
|
||
|
|
/*
 * Render the suite's results in the format selected by
 * config->output_format, dispatching to the matching formatter.
 *
 * Output destination, in priority order: config->output_file (opened
 * here, closed before return), else config->output_stream, else stdout.
 * NULL suite or config is a no-op.
 *
 * Fixes:
 *  - A failed fopen() of config->output_file previously fell back to the
 *    default stream *silently*; it now emits a diagnostic on stderr so a
 *    missing report file is noticed.
 *  - The return of fclose() was ignored, losing buffered-write errors;
 *    it is now checked and reported.
 *  - The switch gained an explicit default (unknown formats produce no
 *    output, as before, but the choice is now deliberate).
 */
void unittest_generate_report(UnittestTestSuite_t *suite, UnittestConfig_t *config) {
    if (!suite || !config) return;

    FILE *output = config->output_stream ? config->output_stream : stdout;
    FILE *file_output = NULL;
    if (config->output_file) {
        file_output = fopen(config->output_file, "w");
        if (file_output) {
            output = file_output;
        } else {
            /* Don't lose the report: warn and fall back to the default stream. */
            fprintf(stderr, "unittest: cannot open report file '%s'; writing to default stream\n",
                    config->output_file);
        }
    }

    switch (config->output_format) {
        case UNITTEST_FORMAT_TEXT:
            _unittest_format_text(suite, config, output);
            break;
        case UNITTEST_FORMAT_QUIET:
            _unittest_format_quiet(suite, config, output);
            break;
        case UNITTEST_FORMAT_JSON:
            _unittest_format_json(suite, config, output);
            break;
        case UNITTEST_FORMAT_XML:
            _unittest_format_xml(suite, config, output);
            break;
        case UNITTEST_FORMAT_HTML:
            _unittest_format_html(suite, config, output);
            break;
        case UNITTEST_FORMAT_TAP:
            _unittest_format_tap(suite, config, output);
            break;
        default:
            /* Unknown format: emit nothing (matches prior behavior). */
            break;
    }

    /* fclose flushes; a failure here means the report is incomplete. */
    if (file_output && fclose(file_output) == EOF) {
        fprintf(stderr, "unittest: error finalizing report file '%s'\n",
                config->output_file);
    }
}
|