diff --git a/Makefile b/Makefile index a2d464b..0d50721 100644 --- a/Makefile +++ b/Makefile @@ -107,90 +107,99 @@ TEST_ENUMS_OBJECTS = $(TEST_ENUMS_SOURCES:.c=.o) TEST_COLLECTIONS_SOURCES = tests/test_collections.c TEST_COLLECTIONS_OBJECTS = $(TEST_COLLECTIONS_SOURCES:.c=.o) +UNITTEST_SOURCES = tests/unittest.c +UNITTEST_OBJECTS = $(UNITTEST_SOURCES:.c=.o) + +TEST_UNITTEST_DEMO_SOURCES = tests/test_unittest_demo.c +TEST_UNITTEST_DEMO_OBJECTS = $(TEST_UNITTEST_DEMO_SOURCES:.c=.o) + all: test_lexer test_parser test_semantic test_ir test_runtime test_strings test_arrays test_objects test_instance_methods test_fileio test_dowhile test_switch test_math test_string_methods test_static test_interfaces test_exceptions test_ternary test_bitwise test_enhanced_for test_array_init test_instanceof test_shortcircuit test_multidim_arrays test_static_init test_negative test_enums test_collections -test_lexer: $(LEXER_OBJECTS) $(TEST_LEXER_OBJECTS) +test_lexer: $(LEXER_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_LEXER_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_parser: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TEST_PARSER_OBJECTS) +test_parser: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_PARSER_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_semantic: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(TEST_SEMANTIC_OBJECTS) +test_semantic: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_SEMANTIC_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_ir: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) runtime/labeltable.o $(TEST_IR_OBJECTS) +test_ir: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) runtime/labeltable.o $(UNITTEST_OBJECTS) $(TEST_IR_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_runtime: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_RUNTIME_OBJECTS) +test_runtime: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_RUNTIME_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_strings: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_STRINGS_OBJECTS) +test_strings: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_STRINGS_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_arrays: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_ARRAYS_OBJECTS) +test_arrays: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_ARRAYS_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_objects: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_OBJECTS_OBJECTS) +test_objects: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_OBJECTS_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_instance_methods: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_INSTANCE_OBJECTS) +test_instance_methods: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) 
$(TEST_INSTANCE_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_fileio: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_FILEIO_OBJECTS) +test_fileio: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_FILEIO_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_dowhile: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_DOWHILE_OBJECTS) +test_dowhile: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_DOWHILE_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_switch: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_SWITCH_OBJECTS) +test_switch: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_SWITCH_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_math: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_MATH_OBJECTS) +test_math: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_MATH_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_string_methods: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_STRING_METHODS_OBJECTS) +test_string_methods: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_STRING_METHODS_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_static: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_STATIC_OBJECTS) +test_static: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_STATIC_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_interfaces: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_INTERFACES_OBJECTS) +test_interfaces: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_INTERFACES_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_exceptions: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_EXCEPTIONS_OBJECTS) +test_exceptions: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_EXCEPTIONS_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_ternary: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_TERNARY_OBJECTS) +test_ternary: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_TERNARY_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_bitwise: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_BITWISE_OBJECTS) +test_bitwise: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_BITWISE_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_enhanced_for: 
$(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_ENHANCED_FOR_OBJECTS) +test_enhanced_for: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_ENHANCED_FOR_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_array_init: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_ARRAY_INIT_OBJECTS) +test_array_init: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_ARRAY_INIT_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_instanceof: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_INSTANCEOF_OBJECTS) +test_instanceof: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_INSTANCEOF_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_shortcircuit: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_SHORTCIRCUIT_OBJECTS) +test_shortcircuit: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_SHORTCIRCUIT_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_multidim_arrays: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_MULTIDIM_ARRAYS_OBJECTS) +test_multidim_arrays: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_MULTIDIM_ARRAYS_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_static_init: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_STATIC_INIT_OBJECTS) +test_static_init: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_STATIC_INIT_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_negative: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_NEGATIVE_OBJECTS) +test_negative: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_NEGATIVE_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_enums: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_ENUMS_OBJECTS) +test_enums: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_ENUMS_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) -test_collections: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(TEST_COLLECTIONS_OBJECTS) +test_collections: $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) $(UNITTEST_OBJECTS) $(TEST_COLLECTIONS_OBJECTS) + $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) + +test_unittest_demo: $(UNITTEST_OBJECTS) $(TEST_UNITTEST_DEMO_OBJECTS) $(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS) %.o: %.c @@ -255,12 +264,12 @@ pgo: test_benchmark_pgo clean: rm -f $(LEXER_OBJECTS) $(PARSER_OBJECTS) $(TYPES_OBJECTS) $(SEMANTIC_OBJECTS) $(IR_OBJECTS) $(RUNTIME_OBJECTS) \ - $(PHASE0_OBJECTS) \ + $(PHASE0_OBJECTS) $(UNITTEST_OBJECTS) 
$(TEST_UNITTEST_DEMO_OBJECTS) \ $(TEST_LEXER_OBJECTS) $(TEST_PARSER_OBJECTS) $(TEST_SEMANTIC_OBJECTS) $(TEST_IR_OBJECTS) $(TEST_RUNTIME_OBJECTS) \ $(TEST_STRINGS_OBJECTS) $(TEST_ARRAYS_OBJECTS) $(TEST_OBJECTS_OBJECTS) $(TEST_INSTANCE_OBJECTS) $(TEST_FILEIO_OBJECTS) \ $(TEST_DOWHILE_OBJECTS) $(TEST_SWITCH_OBJECTS) $(TEST_MATH_OBJECTS) $(TEST_STRING_METHODS_OBJECTS) $(TEST_STATIC_OBJECTS) $(TEST_INTERFACES_OBJECTS) $(TEST_EXCEPTIONS_OBJECTS) $(TEST_TERNARY_OBJECTS) $(TEST_BITWISE_OBJECTS) $(TEST_ENHANCED_FOR_OBJECTS) $(TEST_ARRAY_INIT_OBJECTS) $(TEST_INSTANCEOF_OBJECTS) $(TEST_SHORTCIRCUIT_OBJECTS) $(TEST_MULTIDIM_ARRAYS_OBJECTS) $(TEST_STATIC_INIT_OBJECTS) $(TEST_NEGATIVE_OBJECTS) $(TEST_ENUMS_OBJECTS) $(TEST_COLLECTIONS_OBJECTS) $(TEST_BENCHMARK_OBJECTS) \ test_lexer test_parser test_semantic test_ir test_runtime test_strings test_arrays test_objects test_instance_methods test_fileio test_dowhile test_switch test_math test_string_methods test_static test_interfaces test_exceptions test_ternary test_bitwise test_enhanced_for test_array_init test_instanceof test_shortcircuit test_multidim_arrays test_static_init test_negative test_enums test_collections test_benchmark \ - test_nanbox test_fastframe test_labeltable test_methodcache test_benchmark_pgo *.gcda */*.gcda + test_nanbox test_fastframe test_labeltable test_methodcache test_benchmark_pgo test_unittest_demo *.gcda */*.gcda .PHONY: all clean benchmark test_phase0 pgo test_benchmark_pgo_gen pgo_run test_benchmark_pgo test diff --git a/semantic/semantic.c b/semantic/semantic.c index 4d00ca4..edca3d4 100644 --- a/semantic/semantic.c +++ b/semantic/semantic.c @@ -267,6 +267,9 @@ static RavaType_t* _rava_semantic_check_expression(RavaSemanticAnalyzer_t *analy if (strcmp(name, "Files") == 0) { return rava_type_from_name("Files"); } + if (strcmp(name, "Math") == 0) { + return rava_type_from_name("Math"); + } RavaSymbol_t *symbol = rava_symbol_table_resolve(analyzer->symbol_table, name); if (!symbol) { diff --git a/tests/test_array_init.c b/tests/test_array_init.c index d1c6dd2..550ea97 100644 --- a/tests/test_array_init.c +++ b/tests/test_array_init.c @@ -1,97 +1,54 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_simple_array_init(void) { + UNITTEST_BEGIN_TEST("TestArrayInit", "test_simple_array_init"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Array Initializer Tests ===\n\n"); - - run_test_int("simple array init", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = {1, 2, 3};\n" " return arr[0] + arr[1] + arr[2];\n" " }\n" - "}\n", 6); + "}\n", + "Test", "main", 6, "simple array init sum should return 6"); - run_test_int("array init with expressions", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_array_init_with_expressions(void) { + UNITTEST_BEGIN_TEST("TestArrayInit", "test_array_init_with_expressions"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = {1 + 1, 2 * 2, 3 + 3};\n" " return arr[0] + arr[1] + arr[2];\n" " }\n" - "}\n", 12); + "}\n", + "Test", "main", 12, "array init with expressions sum should return 12"); - run_test_int("single element array", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_single_element_array(void) { + UNITTEST_BEGIN_TEST("TestArrayInit", "test_single_element_array"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = {42};\n" " return arr[0];\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "single element array should return 42"); - run_test_int("array init length", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_array_init_length(void) { + UNITTEST_BEGIN_TEST("TestArrayInit", "test_array_init_length"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = {5, 10, 15, 20};\n" @@ -101,25 +58,67 @@ int main(void) { " }\n" " return sum;\n" " }\n" - "}\n", 50); + "}\n", + "Test", "main", 50, "array init length loop sum should return 50"); - run_test_int("array init with negative values", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_array_init_negative_values(void) { + UNITTEST_BEGIN_TEST("TestArrayInit", "test_array_init_negative_values"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = {-1, -2, -3};\n" " return arr[0] + arr[1] + arr[2];\n" " }\n" - "}\n", -6); + "}\n", + "Test", "main", -6, "array init with negative values sum should return -6"); - run_test_int("array init with zero", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_array_init_with_zero(void) { + UNITTEST_BEGIN_TEST("TestArrayInit", "test_array_init_with_zero"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = {0, 0, 0};\n" " return arr[0] + arr[1] + arr[2];\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "array init with zeros sum should return 0"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - 
return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Array Initializer Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestArrayInit"); + unittest_test_case_add_result(tc, test_simple_array_init()); + unittest_test_case_add_result(tc, test_array_init_with_expressions()); + unittest_test_case_add_result(tc, test_single_element_array()); + unittest_test_case_add_result(tc, test_array_init_length()); + unittest_test_case_add_result(tc, test_array_init_negative_values()); + unittest_test_case_add_result(tc, test_array_init_with_zero()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_array_init.o b/tests/test_array_init.o index 045101d..37d7817 100644 Binary files a/tests/test_array_init.o and b/tests/test_array_init.o differ diff --git a/tests/test_arrays.c b/tests/test_arrays.c index 2373f26..a334e6c 100644 --- a/tests/test_arrays.c +++ b/tests/test_arrays.c @@ -1,77 +1,36 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static char* read_file(const char *filename) { - FILE *file = fopen(filename, "r"); - if (!file) return NULL; - fseek(file, 0, SEEK_END); - long size = ftell(file); - fseek(file, 0, SEEK_SET); - char *content = malloc(size + 1); - size_t read_bytes = fread(content, 1, size, file); (void)read_bytes; - content[size] = '\0'; - fclose(file); - return content; +UnittestTestResult_t* test_array_operations_example(void) { + UNITTEST_BEGIN_TEST("TestArrays", "test_array_operations_example"); + + RAVA_TEST_FILE_EXECUTES(_unittest_result, + "examples/11_ArrayOperations.java", + "ArrayOperations", "main", + "11_ArrayOperations.java should execute successfully"); + + UNITTEST_END_TEST(); } -int main() { - char *source = read_file("examples/11_ArrayOperations.java"); - if (!source) { - printf("Failed to read file\n"); - return 1; +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; } - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); + UnittestTestSuite_t *suite = unittest_test_suite_create("Array Example Tests"); - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message); - free(source); - return 1; - } + UnittestTestCase_t *tc = unittest_test_case_create("TestArrays"); + unittest_test_case_add_result(tc, test_array_operations_example()); + unittest_test_suite_add_test_case(suite, tc); - printf("Parse: OK\n"); + unittest_generate_report(suite, config); - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("Semantic error: %s\n", 
analyzer->error_message); - free(source); - return 1; - } + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); - printf("Semantic: OK\n"); - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("IR generation failed\n"); - free(source); - return 1; - } - - printf("IR Gen: OK\n"); - printf("\nOutput:\n"); - - RavaVM_t *vm = rava_vm_create(program); - if (!rava_vm_execute(vm, "ArrayOperations", "main")) { - printf("Runtime error: %s\n", vm->error_message); - rava_vm_destroy(vm); - free(source); - return 1; - } - - printf("\nExecution: OK\n"); - - rava_vm_destroy(vm); - free(source); - return 0; + return failures > 0 ? 1 : 0; } diff --git a/tests/test_arrays.o b/tests/test_arrays.o index adf7611..49ee78d 100644 Binary files a/tests/test_arrays.o and b/tests/test_arrays.o differ diff --git a/tests/test_bitwise.c b/tests/test_bitwise.c index 5209db7..523cdd0 100644 --- a/tests/test_bitwise.c +++ b/tests/test_bitwise.c @@ -1,214 +1,301 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_bitwise_and_with_vars(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_bitwise_and_with_vars"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Bitwise Operator Tests ===\n\n"); - - run_test_int("bitwise AND with vars", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int a = 12;\n" " int b = 10;\n" " return a & b;\n" " }\n" - "}\n", 8); + "}\n", + "Test", "main", 8, "bitwise AND with vars 12 & 10 should return 8"); - run_test_int("bitwise OR with vars", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_bitwise_or_with_vars(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_bitwise_or_with_vars"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int a = 12;\n" " int b = 10;\n" " return a | b;\n" " }\n" - "}\n", 14); + "}\n", + "Test", "main", 14, "bitwise OR with vars 12 | 10 should return 14"); - run_test_int("bitwise XOR", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_bitwise_xor(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_bitwise_xor"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 12 ^ 10;\n" " }\n" - "}\n", 6); + "}\n", + "Test", "main", 6, "bitwise XOR 12 ^ 10 should return 6"); - run_test_int("left shift", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_left_shift(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_left_shift"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 1 << 4;\n" " }\n" - "}\n", 16); + "}\n", + "Test", "main", 16, "left shift 1 << 4 should return 16"); - run_test_int("right shift", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_right_shift(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_right_shift"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 16 >> 2;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "right shift 16 >> 2 should return 4"); - run_test_int("unsigned right shift", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_unsigned_right_shift(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_unsigned_right_shift"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 16 >>> 2;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "unsigned right shift 16 >>> 2 should return 4"); - run_test_int("combined bitwise ops", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_combined_bitwise_ops(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_combined_bitwise_ops"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int a = 5;\n" " int b = 3;\n" " return (a & b) | (a ^ b);\n" " }\n" - "}\n", 7); + "}\n", + "Test", "main", 7, "combined bitwise ops (5 & 3) | (5 ^ 3) should return 7"); - 
run_test_int("shift with variable", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_shift_with_variable(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_shift_with_variable"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 2;\n" " return 1 << x;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "shift with variable 1 << 2 should return 4"); - run_test_int("bitwise mask", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_bitwise_mask(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_bitwise_mask"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int value = 255;\n" " int mask = 0x0F;\n" " return value & mask;\n" " }\n" - "}\n", 15); + "}\n", + "Test", "main", 15, "bitwise mask 255 & 0x0F should return 15"); - run_test_int("bit toggle with XOR", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_bit_toggle_with_xor(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_bit_toggle_with_xor"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int flags = 5;\n" " return flags ^ 1;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "bit toggle with XOR 5 ^ 1 should return 4"); - run_test_int("bitwise NOT", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_bitwise_not_zero(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_bitwise_not_zero"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 0;\n" " return ~x;\n" " }\n" - "}\n", -1); + "}\n", + "Test", "main", -1, "bitwise NOT ~0 should return -1"); - run_test_int("bitwise NOT with value", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_bitwise_not_with_value(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_bitwise_not_with_value"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 5;\n" " return ~x;\n" " }\n" - "}\n", -6); + "}\n", + "Test", "main", -6, "bitwise NOT ~5 should return -6"); - run_test_int("compound AND assign", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_compound_and_assign(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_compound_and_assign"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 15;\n" " x &= 7;\n" " return x;\n" " }\n" - "}\n", 7); + "}\n", + "Test", "main", 7, "compound AND assign 15 &= 7 should return 7"); - run_test_int("compound OR assign", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_compound_or_assign(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_compound_or_assign"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 8;\n" " x |= 3;\n" " return x;\n" " }\n" - "}\n", 11); + "}\n", + "Test", "main", 11, "compound OR assign 8 |= 3 should return 11"); - run_test_int("compound XOR assign", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_compound_xor_assign(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_compound_xor_assign"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 12;\n" " x ^= 5;\n" " return x;\n" " }\n" - "}\n", 9); + "}\n", + "Test", "main", 9, "compound XOR assign 12 ^= 5 should return 9"); - run_test_int("compound left shift assign", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_compound_left_shift_assign(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_compound_left_shift_assign"); + + 
RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 1;\n" " x <<= 3;\n" " return x;\n" " }\n" - "}\n", 8); + "}\n", + "Test", "main", 8, "compound left shift assign 1 <<= 3 should return 8"); - run_test_int("compound right shift assign", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_compound_right_shift_assign(void) { + UNITTEST_BEGIN_TEST("TestBitwise", "test_compound_right_shift_assign"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 16;\n" " x >>= 2;\n" " return x;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "compound right shift assign 16 >>= 2 should return 4"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Bitwise Operator Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestBitwise"); + unittest_test_case_add_result(tc, test_bitwise_and_with_vars()); + unittest_test_case_add_result(tc, test_bitwise_or_with_vars()); + unittest_test_case_add_result(tc, test_bitwise_xor()); + unittest_test_case_add_result(tc, test_left_shift()); + unittest_test_case_add_result(tc, test_right_shift()); + unittest_test_case_add_result(tc, test_unsigned_right_shift()); + unittest_test_case_add_result(tc, test_combined_bitwise_ops()); + unittest_test_case_add_result(tc, test_shift_with_variable()); + unittest_test_case_add_result(tc, test_bitwise_mask()); + unittest_test_case_add_result(tc, test_bit_toggle_with_xor()); + unittest_test_case_add_result(tc, test_bitwise_not_zero()); + unittest_test_case_add_result(tc, test_bitwise_not_with_value()); + unittest_test_case_add_result(tc, test_compound_and_assign()); + unittest_test_case_add_result(tc, test_compound_or_assign()); + unittest_test_case_add_result(tc, test_compound_xor_assign()); + unittest_test_case_add_result(tc, test_compound_left_shift_assign()); + unittest_test_case_add_result(tc, test_compound_right_shift_assign()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_bitwise.o b/tests/test_bitwise.o index 455df87..7e40f49 100644 Binary files a/tests/test_bitwise.o and b/tests/test_bitwise.o differ diff --git a/tests/test_collections.c b/tests/test_collections.c index 3a91558..54de307 100644 --- a/tests/test_collections.c +++ b/tests/test_collections.c @@ -1,100 +1,24 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_arraylist_create_size(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_create_size"); -static int run_test(const char *name, const char *source, int expected) { - test_count++; - - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); - - if (parser->had_error) { - printf("FAIL: %s (parse error)\n", name); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("FAIL: %s (semantic error)\n", name); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("FAIL: %s (IR error)\n", name); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaVM_t *vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s (runtime error)\n", name); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaValue_t result = rava_vm_get_result(vm); - int actual = rava_value_as_int(result); - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - - if (actual == expected) { - printf("PASS: %s\n", name); - pass_count++; - return 1; - } else { - printf("FAIL: %s (expected %d, got %d)\n", name, expected, actual); - return 0; - } -} - -int main(void) { - printf("=== ArrayList Tests ===\n"); - - run_test("ArrayList create and size", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" " return list.size();\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "new ArrayList().size() should return 0"); - run_test("ArrayList add and size", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_add_size(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_add_size"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" @@ -103,18 +27,32 @@ int main(void) { " list.add(30);\n" " return list.size();\n" " }\n" - "}\n", 3); + "}\n", + "Test", "main", 3, "ArrayList with 3 
elements should have size 3"); - run_test("ArrayList add and get", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_add_get(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_add_get"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" " list.add(42);\n" " return list.get(0);\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "list.get(0) should return 42"); - run_test("ArrayList set", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_set(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_set"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" @@ -122,9 +60,16 @@ int main(void) { " list.set(0, 99);\n" " return list.get(0);\n" " }\n" - "}\n", 99); + "}\n", + "Test", "main", 99, "list.set(0, 99) then get(0) should return 99"); - run_test("ArrayList isEmpty true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_isEmpty_true(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_isEmpty_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" @@ -133,9 +78,16 @@ int main(void) { " }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "new ArrayList().isEmpty() should return true"); - run_test("ArrayList isEmpty false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_isEmpty_false(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_isEmpty_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" @@ -145,9 +97,16 @@ int main(void) { " }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "non-empty list.isEmpty() should return false"); - run_test("ArrayList clear", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_clear(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_clear"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" @@ -156,9 +115,16 @@ int main(void) { " list.clear();\n" " return list.size();\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "list.clear() should make size 0"); - run_test("ArrayList remove", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_remove(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_remove"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" @@ -168,9 +134,16 @@ int main(void) { " int removed = list.remove(1);\n" " return removed;\n" " }\n" - "}\n", 20); + "}\n", + "Test", "main", 20, "list.remove(1) should return 20"); - run_test("ArrayList sum elements", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_arraylist_sum(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_arraylist_sum"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " ArrayList list = new ArrayList();\n" @@ -184,19 +157,31 @@ int main(void) { " }\n" " return sum;\n" " }\n" - "}\n", 10); + "}\n", + "Test", "main", 10, "sum of ArrayList elements should be 10"); - printf("\n=== HashMap Tests ===\n"); + UNITTEST_END_TEST(); +} - run_test("HashMap create and size", +UnittestTestResult_t* test_hashmap_create_size(void) { + 
UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_create_size"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" " return map.size();\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "new HashMap().size() should return 0"); - run_test("HashMap put and size", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_hashmap_put_size(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_put_size"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" @@ -204,18 +189,32 @@ int main(void) { " map.put(\"b\", 2);\n" " return map.size();\n" " }\n" - "}\n", 2); + "}\n", + "Test", "main", 2, "HashMap with 2 keys should have size 2"); - run_test("HashMap put and get", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_hashmap_put_get(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_put_get"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" " map.put(\"key\", 42);\n" " return map.get(\"key\");\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "map.get(\"key\") should return 42"); - run_test("HashMap containsKey true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_hashmap_containsKey_true(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_containsKey_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" @@ -225,9 +224,16 @@ int main(void) { " }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "containsKey() for existing key should return true"); - run_test("HashMap containsKey false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_hashmap_containsKey_false(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_containsKey_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" @@ -237,9 +243,16 @@ int main(void) { " }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "containsKey() for missing key should return false"); - run_test("HashMap remove", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_hashmap_remove(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_remove"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" @@ -248,9 +261,16 @@ int main(void) { " int removed = map.remove(\"a\");\n" " return removed + map.size();\n" " }\n" - "}\n", 11); + "}\n", + "Test", "main", 11, "remove + size should return 11"); - run_test("HashMap clear", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_hashmap_clear(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_clear"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" @@ -259,9 +279,16 @@ int main(void) { " map.clear();\n" " return map.size();\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "map.clear() should make size 0"); - run_test("HashMap update value", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_hashmap_update(void) { + UNITTEST_BEGIN_TEST("TestCollections", "test_hashmap_update"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " HashMap map = new HashMap();\n" @@ -269,10 +296,51 @@ int main(void) { " 
map.put(\"key\", 20);\n" " return map.get(\"key\");\n" " }\n" - "}\n", 20); + "}\n", + "Test", "main", 20, "updating key should return new value 20"); - printf("\n=== Results ===\n"); - printf("Passed: %d/%d\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Collections Tests"); + + UnittestTestCase_t *tc_arraylist = unittest_test_case_create("TestArrayList"); + unittest_test_case_add_result(tc_arraylist, test_arraylist_create_size()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_add_size()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_add_get()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_set()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_isEmpty_true()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_isEmpty_false()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_clear()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_remove()); + unittest_test_case_add_result(tc_arraylist, test_arraylist_sum()); + unittest_test_suite_add_test_case(suite, tc_arraylist); + + UnittestTestCase_t *tc_hashmap = unittest_test_case_create("TestHashMap"); + unittest_test_case_add_result(tc_hashmap, test_hashmap_create_size()); + unittest_test_case_add_result(tc_hashmap, test_hashmap_put_size()); + unittest_test_case_add_result(tc_hashmap, test_hashmap_put_get()); + unittest_test_case_add_result(tc_hashmap, test_hashmap_containsKey_true()); + unittest_test_case_add_result(tc_hashmap, test_hashmap_containsKey_false()); + unittest_test_case_add_result(tc_hashmap, test_hashmap_remove()); + unittest_test_case_add_result(tc_hashmap, test_hashmap_clear()); + unittest_test_case_add_result(tc_hashmap, test_hashmap_update()); + unittest_test_suite_add_test_case(suite, tc_hashmap); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_collections.o b/tests/test_collections.o index 94f0d92..c0cc6b3 100644 Binary files a/tests/test_collections.o and b/tests/test_collections.o differ diff --git a/tests/test_dowhile.c b/tests/test_dowhile.c index b0d198c..45733ff 100644 --- a/tests/test_dowhile.c +++ b/tests/test_dowhile.c @@ -1,73 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_simple_dowhile(void) { + UNITTEST_BEGIN_TEST("TestDoWhile", "test_simple_dowhile"); -static void run_test(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error\n", name); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error\n", name); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Do-While Loop Tests ===\n\n"); - - run_test("Simple do-while", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int i = 0;\n" @@ -76,9 +12,16 @@ int main(void) { " } while (i < 5);\n" " return i;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "do-while loop should iterate until i=5"); - run_test("Do-while sum", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_dowhile_sum(void) { + UNITTEST_BEGIN_TEST("TestDoWhile", "test_dowhile_sum"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int sum = 0;\n" @@ -89,9 +32,16 @@ int main(void) { " } while (i <= 10);\n" " return sum;\n" " }\n" - "}\n", 55); + "}\n", + "Test", "main", 55, "sum of 1..10 should be 55"); - run_test("Do-while executes at least once", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_dowhile_executes_once(void) { + UNITTEST_BEGIN_TEST("TestDoWhile", "test_dowhile_executes_once"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 0;\n" @@ -100,9 +50,16 @@ int main(void) { " } while (false);\n" " return x;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "do-while executes at least once"); - 
run_test("Do-while with break", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_dowhile_with_break(void) { + UNITTEST_BEGIN_TEST("TestDoWhile", "test_dowhile_with_break"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int i = 0;\n" @@ -114,9 +71,16 @@ int main(void) { " } while (i < 10);\n" " return i;\n" " }\n" - "}\n", 3); + "}\n", + "Test", "main", 3, "break at i=3 should exit loop"); - run_test("Nested do-while", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_nested_dowhile(void) { + UNITTEST_BEGIN_TEST("TestDoWhile", "test_nested_dowhile"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int count = 0;\n" @@ -131,9 +95,36 @@ int main(void) { " } while (i < 2);\n" " return count;\n" " }\n" - "}\n", 6); + "}\n", + "Test", "main", 6, "nested 2x3 iterations should count 6"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Do-While Loop Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestDoWhile"); + unittest_test_case_add_result(tc, test_simple_dowhile()); + unittest_test_case_add_result(tc, test_dowhile_sum()); + unittest_test_case_add_result(tc, test_dowhile_executes_once()); + unittest_test_case_add_result(tc, test_dowhile_with_break()); + unittest_test_case_add_result(tc, test_nested_dowhile()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_dowhile.o b/tests/test_dowhile.o index 3da4708..0f2c888 100644 Binary files a/tests/test_dowhile.o and b/tests/test_dowhile.o differ diff --git a/tests/test_enhanced_for.c b/tests/test_enhanced_for.c index 33b9a51..b5b3f09 100644 --- a/tests/test_enhanced_for.c +++ b/tests/test_enhanced_for.c @@ -1,73 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_sum_array_elements(void) { + UNITTEST_BEGIN_TEST("TestEnhancedFor", "test_sum_array_elements"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? 
parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Enhanced For Loop Tests ===\n\n"); - - run_test_int("sum array elements", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = new int[3];\n" @@ -80,9 +16,16 @@ int main(void) { " }\n" " return sum;\n" " }\n" - "}\n", 6); + "}\n", + "Test", "main", 6, "sum array elements should return 6"); - run_test_int("count elements", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_count_elements(void) { + UNITTEST_BEGIN_TEST("TestEnhancedFor", "test_count_elements"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = new int[5];\n" @@ -92,9 +35,16 @@ int main(void) { " }\n" " return count;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "count elements should return 5"); - run_test_int("find max", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_find_max(void) { + UNITTEST_BEGIN_TEST("TestEnhancedFor", "test_find_max"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = new int[4];\n" @@ -110,9 +60,16 @@ int main(void) { " }\n" " return max;\n" " }\n" - "}\n", 9); + "}\n", + "Test", "main", 9, "find max should return 9"); - run_test_int("empty array", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_empty_array(void) { + UNITTEST_BEGIN_TEST("TestEnhancedFor", "test_empty_array"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[] arr = new int[0];\n" @@ -122,9 +79,35 @@ int main(void) { " }\n" " return sum;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "empty array should return 0"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 
0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Enhanced For Loop Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestEnhancedFor"); + unittest_test_case_add_result(tc, test_sum_array_elements()); + unittest_test_case_add_result(tc, test_count_elements()); + unittest_test_case_add_result(tc, test_find_max()); + unittest_test_case_add_result(tc, test_empty_array()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_enhanced_for.o b/tests/test_enhanced_for.o index 5ec7fa5..00308d2 100644 Binary files a/tests/test_enhanced_for.o and b/tests/test_enhanced_for.o differ diff --git a/tests/test_enums.c b/tests/test_enums.c index 1e38fbb..4bfea48 100644 --- a/tests/test_enums.c +++ b/tests/test_enums.c @@ -1,123 +1,54 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include +#include "test_utils.h" -static int run_test(const char *name, const char *source, const char *class_name, - const char *method_name, int expected) { - printf("Test: %-40s ... ", name); - fflush(stdout); +UnittestTestResult_t* test_simple_enum_ordinal(void) { + UNITTEST_BEGIN_TEST("TestEnums", "test_simple_enum_ordinal"); - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); - - if (parser->had_error) { - printf("FAIL (parse error: %s)\n", parser->error_message); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("FAIL (semantic error: %s)\n", analyzer->error_message); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("FAIL (IR generation failed)\n"); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaVM_t *vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, class_name, method_name)) { - printf("FAIL (runtime error: %s)\n", vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 0; - } - - RavaValue_t result = rava_vm_get_result(vm); - int result_int = rava_value_as_int(result); - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - - if (result_int == expected) { - printf("PASS (result=%d)\n", result_int); - return 1; - } else { - printf("FAIL (expected=%d, got=%d)\n", expected, result_int); - return 0; - } -} - -int main(void) { - printf("=== Enum Tests ===\n\n"); - - int passed = 0; - int total = 0; - - total++; - passed += run_test("simple enum ordinal", + RAVA_TEST_RUN(_unittest_result, "public enum Color { RED, GREEN, BLUE }\n" "public class Test {\n" " public static int main() {\n" " return Color.RED;\n" " }\n" "}\n", - "Test", "main", 0); + "Test", "main", 0, "simple enum ordinal RED should return 0"); - total++; - passed += run_test("enum second value", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_enum_second_value(void) { + UNITTEST_BEGIN_TEST("TestEnums", "test_enum_second_value"); + + RAVA_TEST_RUN(_unittest_result, "public enum Color { RED, GREEN, BLUE }\n" "public class Test {\n" " public static int main() {\n" " return Color.GREEN;\n" " }\n" "}\n", - "Test", "main", 1); + "Test", "main", 1, "enum second value GREEN should return 1"); - total++; - passed += run_test("enum third value", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_enum_third_value(void) { + UNITTEST_BEGIN_TEST("TestEnums", "test_enum_third_value"); + + RAVA_TEST_RUN(_unittest_result, "public enum Color { RED, GREEN, BLUE }\n" "public class Test {\n" " public static int main() {\n" " return Color.BLUE;\n" " }\n" "}\n", - "Test", "main", 2); + "Test", "main", 2, "enum third value BLUE should return 2"); - total++; - passed += run_test("enum comparison", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_enum_comparison(void) { + UNITTEST_BEGIN_TEST("TestEnums", "test_enum_comparison"); + + RAVA_TEST_RUN(_unittest_result, "public enum Day { MON, TUE, WED, THU, FRI }\n" "public class Test {\n" " public static int main() {\n" @@ -128,10 +59,15 @@ int main(void) { " return 0;\n" " }\n" "}\n", - "Test", "main", 42); + "Test", "main", 42, "enum comparison should return 42"); - total++; - passed += run_test("enum in switch", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_enum_in_switch(void) { + UNITTEST_BEGIN_TEST("TestEnums", "test_enum_in_switch"); + + RAVA_TEST_RUN(_unittest_result, "public enum Status { OK, ERROR, PENDING }\n" "public class Test {\n" " public static int main() {\n" @@ -144,9 +80,35 @@ int main(void) { " }\n" " }\n" "}\n", - "Test", "main", 20); + "Test", "main", 20, "enum in switch should return 20"); - printf("\n=== Results: %d/%d tests passed ===\n", passed, total); - - return (passed == total) ? 
0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Enum Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestEnums"); + unittest_test_case_add_result(tc, test_simple_enum_ordinal()); + unittest_test_case_add_result(tc, test_enum_second_value()); + unittest_test_case_add_result(tc, test_enum_third_value()); + unittest_test_case_add_result(tc, test_enum_comparison()); + unittest_test_case_add_result(tc, test_enum_in_switch()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_enums.o b/tests/test_enums.o index e72d4ed..2075c1b 100644 Binary files a/tests/test_enums.o and b/tests/test_enums.o differ diff --git a/tests/test_exceptions.c b/tests/test_exceptions.c index 44ecd67..569fdf5 100644 --- a/tests/test_exceptions.c +++ b/tests/test_exceptions.c @@ -1,73 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_basic_try_catch(void) { + UNITTEST_BEGIN_TEST("TestExceptions", "test_basic_try_catch"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Exception Handling Tests ===\n\n"); - - run_test_int("basic try-catch", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = 0;\n" @@ -78,9 +14,16 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "basic try-catch should return 42"); - run_test_int("try without exception", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_try_without_exception(void) { + UNITTEST_BEGIN_TEST("TestExceptions", "test_try_without_exception"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = 10;\n" @@ -91,9 +34,16 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 20); + "}\n", + "Test", "main", 20, "try without exception should return 20"); - run_test_int("catch modifies value", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_catch_modifies_value(void) { + UNITTEST_BEGIN_TEST("TestExceptions", "test_catch_modifies_value"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 5;\n" @@ -104,9 +54,16 @@ int main(void) { " }\n" " return x;\n" " }\n" - "}\n", 15); + "}\n", + "Test", "main", 15, "catch modifies value should return 15"); - run_test_int("try-finally without exception", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_try_finally(void) { + UNITTEST_BEGIN_TEST("TestExceptions", "test_try_finally"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = 0;\n" @@ -119,9 +76,35 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 15); + "}\n", + "Test", "main", 15, "try-finally without exception should return 15"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 
0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Exception Handling Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestExceptions"); + unittest_test_case_add_result(tc, test_basic_try_catch()); + unittest_test_case_add_result(tc, test_try_without_exception()); + unittest_test_case_add_result(tc, test_catch_modifies_value()); + unittest_test_case_add_result(tc, test_try_finally()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_exceptions.o b/tests/test_exceptions.o index 2fc7d6d..f16342d 100644 Binary files a/tests/test_exceptions.o and b/tests/test_exceptions.o differ diff --git a/tests/test_fileio.c b/tests/test_fileio.c index 6da6fa9..fd4701a 100644 --- a/tests/test_fileio.c +++ b/tests/test_fileio.c @@ -1,77 +1,36 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static char* read_file(const char *filename) { - FILE *file = fopen(filename, "r"); - if (!file) return NULL; - fseek(file, 0, SEEK_END); - long size = ftell(file); - fseek(file, 0, SEEK_SET); - char *content = malloc(size + 1); - size_t read_bytes = fread(content, 1, size, file); (void)read_bytes; - content[size] = '\0'; - fclose(file); - return content; +UnittestTestResult_t* test_fileio_example(void) { + UNITTEST_BEGIN_TEST("TestFileIO", "test_fileio_example"); + + RAVA_TEST_FILE_EXECUTES(_unittest_result, + "examples/15_FileIO.java", + "FileIO", "main", + "15_FileIO.java should execute successfully"); + + UNITTEST_END_TEST(); } -int main() { - char *source = read_file("examples/15_FileIO.java"); - if (!source) { - printf("Failed to read file\n"); - return 1; +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; } - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); + UnittestTestSuite_t *suite = unittest_test_suite_create("File I/O Example Tests"); - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message); - free(source); - return 1; - } + UnittestTestCase_t *tc = unittest_test_case_create("TestFileIO"); + unittest_test_case_add_result(tc, test_fileio_example()); + unittest_test_suite_add_test_case(suite, tc); - printf("Parse: OK\n"); + unittest_generate_report(suite, config); - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("Semantic error: %s\n", analyzer->error_message); - free(source); - return 1; - } + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); - printf("Semantic: OK\n"); - - RavaIRGenerator_t *ir_gen = 
rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("IR generation failed\n"); - free(source); - return 1; - } - - printf("IR Gen: OK\n"); - printf("\nOutput:\n"); - - RavaVM_t *vm = rava_vm_create(program); - if (!rava_vm_execute(vm, "FileIO", "main")) { - printf("Runtime error: %s\n", vm->error_message); - rava_vm_destroy(vm); - free(source); - return 1; - } - - printf("\nExecution: OK\n"); - - rava_vm_destroy(vm); - free(source); - return 0; + return failures > 0 ? 1 : 0; } diff --git a/tests/test_fileio.o b/tests/test_fileio.o index 2fb4b88..f165647 100644 Binary files a/tests/test_fileio.o and b/tests/test_fileio.o differ diff --git a/tests/test_instance_methods.c b/tests/test_instance_methods.c index d127a99..61d5b02 100644 --- a/tests/test_instance_methods.c +++ b/tests/test_instance_methods.c @@ -1,77 +1,36 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static char* read_file(const char *filename) { - FILE *file = fopen(filename, "r"); - if (!file) return NULL; - fseek(file, 0, SEEK_END); - long size = ftell(file); - fseek(file, 0, SEEK_SET); - char *content = malloc(size + 1); - size_t read_bytes = fread(content, 1, size, file); (void)read_bytes; - content[size] = '\0'; - fclose(file); - return content; +UnittestTestResult_t* test_instance_methods_example(void) { + UNITTEST_BEGIN_TEST("TestInstanceMethods", "test_instance_methods_example"); + + RAVA_TEST_FILE_EXECUTES(_unittest_result, + "examples/14_InstanceMethods.java", + "InstanceMethods", "main", + "14_InstanceMethods.java should execute successfully"); + + UNITTEST_END_TEST(); } -int main() { - char *source = read_file("examples/14_InstanceMethods.java"); - if (!source) { - printf("Failed to read file\n"); - return 1; +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; } - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); + UnittestTestSuite_t *suite = unittest_test_suite_create("Instance Methods Example Tests"); - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message); - free(source); - return 1; - } + UnittestTestCase_t *tc = unittest_test_case_create("TestInstanceMethods"); + unittest_test_case_add_result(tc, test_instance_methods_example()); + unittest_test_suite_add_test_case(suite, tc); - printf("Parse: OK\n"); + unittest_generate_report(suite, config); - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("Semantic error: %s\n", analyzer->error_message); - free(source); - return 1; - } + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); - printf("Semantic: OK\n"); - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("IR generation failed\n"); - free(source); - return 1; - } - - printf("IR Gen: OK\n"); - printf("\nOutput:\n"); - - RavaVM_t *vm = rava_vm_create(program); - if (!rava_vm_execute(vm, 
"InstanceMethods", "main")) { - printf("Runtime error: %s\n", vm->error_message); - rava_vm_destroy(vm); - free(source); - return 1; - } - - printf("\nExecution: OK\n"); - - rava_vm_destroy(vm); - free(source); - return 0; + return failures > 0 ? 1 : 0; } diff --git a/tests/test_instance_methods.o b/tests/test_instance_methods.o index d2468fc..44fd209 100644 Binary files a/tests/test_instance_methods.o and b/tests/test_instance_methods.o differ diff --git a/tests/test_instanceof.c b/tests/test_instanceof.c index d857cf1..2d1206d 100644 --- a/tests/test_instanceof.c +++ b/tests/test_instanceof.c @@ -1,73 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_instanceof_true(void) { + UNITTEST_BEGIN_TEST("TestInstanceof", "test_instanceof_true"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Instanceof Tests ===\n\n"); - - run_test_int("instanceof true", + RAVA_TEST_RUN(_unittest_result, "public class Animal {}\n" "public class Test {\n" " public static int main() {\n" @@ -75,9 +11,16 @@ int main(void) { " if (a instanceof Animal) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "instanceof true should return 1"); - run_test_int("instanceof false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_instanceof_false(void) { + UNITTEST_BEGIN_TEST("TestInstanceof", "test_instanceof_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Animal {}\n" "public class Dog {}\n" "public class Test {\n" @@ -86,9 +29,16 @@ int main(void) { " if (d instanceof Animal) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "instanceof false should return 0"); - run_test_int("instanceof in expression", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_instanceof_in_expression(void) { + UNITTEST_BEGIN_TEST("TestInstanceof", "test_instanceof_in_expression"); + + RAVA_TEST_RUN(_unittest_result, "public class Person {}\n" "public class Test {\n" " public static int main() {\n" @@ -97,9 +47,34 @@ int main(void) { " if (p instanceof Person) { result = 42; }\n" " return result;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "instanceof in expression should return 42"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Instanceof Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestInstanceof"); + unittest_test_case_add_result(tc, test_instanceof_true()); + unittest_test_case_add_result(tc, test_instanceof_false()); + unittest_test_case_add_result(tc, test_instanceof_in_expression()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_instanceof.o b/tests/test_instanceof.o index 730ddcd..af3a3c6 100644 Binary files a/tests/test_instanceof.o and b/tests/test_instanceof.o differ diff --git a/tests/test_interfaces.c b/tests/test_interfaces.c index f1f5f34..fd573ef 100644 --- a/tests/test_interfaces.c +++ b/tests/test_interfaces.c @@ -1,73 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_interface_with_implementing_class(void) { + UNITTEST_BEGIN_TEST("TestInterfaces", "test_interface_with_implementing_class"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Interface Tests ===\n\n"); - - run_test_int("interface with implementing class", + RAVA_TEST_RUN(_unittest_result, "interface Addable {\n" " int add(int a, int b);\n" "}\n" @@ -81,9 +17,16 @@ int main(void) { " Calculator calc = new Calculator();\n" " return calc.add(10, 20);\n" " }\n" - "}\n", 30); + "}\n", + "Test", "main", 30, "interface with implementing class should return 30"); - run_test_int("class implements multiple interfaces", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_multiple_interfaces(void) { + UNITTEST_BEGIN_TEST("TestInterfaces", "test_multiple_interfaces"); + + RAVA_TEST_RUN(_unittest_result, "interface Addable {\n" " int add(int a, int b);\n" "}\n" @@ -103,9 +46,16 @@ int main(void) { " Calculator calc = new Calculator();\n" " return calc.add(100, 50) - calc.subtract(30, 10);\n" " }\n" - "}\n", 130); + "}\n", + "Test", "main", 130, "class implements multiple interfaces should return 130"); - run_test_int("class extends and implements", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_extends_and_implements(void) 
{ + UNITTEST_BEGIN_TEST("TestInterfaces", "test_extends_and_implements"); + + RAVA_TEST_RUN(_unittest_result, "interface Printable {\n" " int getValue();\n" "}\n" @@ -125,9 +75,16 @@ int main(void) { " Derived d = new Derived();\n" " return d.getValue();\n" " }\n" - "}\n", 150); + "}\n", + "Test", "main", 150, "class extends and implements should return 150"); - run_test_int("empty interface", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_empty_interface(void) { + UNITTEST_BEGIN_TEST("TestInterfaces", "test_empty_interface"); + + RAVA_TEST_RUN(_unittest_result, "interface Empty {\n" "}\n" "class MyClass implements Empty {\n" @@ -140,9 +97,35 @@ int main(void) { " MyClass obj = new MyClass();\n" " return obj.getValue();\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "empty interface should return 42"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Interface Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestInterfaces"); + unittest_test_case_add_result(tc, test_interface_with_implementing_class()); + unittest_test_case_add_result(tc, test_multiple_interfaces()); + unittest_test_case_add_result(tc, test_extends_and_implements()); + unittest_test_case_add_result(tc, test_empty_interface()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_interfaces.o b/tests/test_interfaces.o index 3379f18..bcf1e6b 100644 Binary files a/tests/test_interfaces.o and b/tests/test_interfaces.o differ diff --git a/tests/test_ir.c b/tests/test_ir.c index a97eeab..29eb6c7 100644 --- a/tests/test_ir.c +++ b/tests/test_ir.c @@ -1,66 +1,114 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include -#include +#include "test_utils.h" -int main() { - const char *source = - "public class Math {\n" +UnittestTestResult_t* test_ir_simple_method(void) { + UNITTEST_BEGIN_TEST("TestIR", "test_ir_simple_method"); + + RAVA_TEST_IR_OK(_unittest_result, + "public class Test {\n" " public static int add(int a, int b) {\n" " int result = a + b;\n" " return result;\n" " }\n" - "\n" + "}\n", + "simple method should generate IR successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ir_recursive_method(void) { + UNITTEST_BEGIN_TEST("TestIR", "test_ir_recursive_method"); + + RAVA_TEST_IR_OK(_unittest_result, + "public class Test {\n" " public static int factorial(int n) {\n" " if (n <= 1) {\n" " return 1;\n" " }\n" " return n * factorial(n - 1);\n" " }\n" - "}\n"; + "}\n", + "recursive method should generate IR successfully"); - printf("Source code:\n%s\n", source); - printf("\nIR Generation:\n"); - printf("================================================================================\n\n"); - - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); - - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message ? parser->error_message : "Unknown error"); - return 1; - } - - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - bool semantic_ok = rava_semantic_analyze(analyzer, ast); - - if (!semantic_ok) { - printf("Semantic error: %s\n", analyzer->error_message ? 
analyzer->error_message : "Unknown error"); - return 1; - } - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (program) { - rava_ir_print(program); - printf("\n✓ IR generation completed successfully!\n"); - } else { - printf("✗ IR generation failed!\n"); - return 1; - } - - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - - printf("\nIR test completed!\n"); - return 0; + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ir_control_flow(void) { + UNITTEST_BEGIN_TEST("TestIR", "test_ir_control_flow"); + + RAVA_TEST_IR_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int sum = 0;\n" + " for (int i = 0; i < 10; i++) {\n" + " if (i % 2 == 0) {\n" + " sum = sum + i;\n" + " }\n" + " }\n" + " return sum;\n" + " }\n" + "}\n", + "control flow should generate IR successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ir_array_operations(void) { + UNITTEST_BEGIN_TEST("TestIR", "test_ir_array_operations"); + + RAVA_TEST_IR_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int[] arr = new int[5];\n" + " arr[0] = 10;\n" + " arr[1] = 20;\n" + " return arr[0] + arr[1];\n" + " }\n" + "}\n", + "array operations should generate IR successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ir_static_fields(void) { + UNITTEST_BEGIN_TEST("TestIR", "test_ir_static_fields"); + + RAVA_TEST_IR_OK(_unittest_result, + "public class Test {\n" + " public static int counter = 0;\n" + " public static int main() {\n" + " counter = counter + 1;\n" + " return counter;\n" + " }\n" + "}\n", + "static fields should generate IR successfully"); + + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("IR Generation Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestIR"); + unittest_test_case_add_result(tc, test_ir_simple_method()); + unittest_test_case_add_result(tc, test_ir_recursive_method()); + unittest_test_case_add_result(tc, test_ir_control_flow()); + unittest_test_case_add_result(tc, test_ir_array_operations()); + unittest_test_case_add_result(tc, test_ir_static_fields()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_ir.o b/tests/test_ir.o index 0f81dc2..f18acce 100644 Binary files a/tests/test_ir.o and b/tests/test_ir.o differ diff --git a/tests/test_lexer.c b/tests/test_lexer.c index 7c0f9c5..3be3e44 100644 --- a/tests/test_lexer.c +++ b/tests/test_lexer.c @@ -1,95 +1,129 @@ -#include "../lexer/lexer.h" -#include -#include +#include "test_utils.h" -static const char* token_type_to_string(RavaTokenType_e type) { - switch (type) { - case RAVA_TOKEN_EOF: return "EOF"; - case RAVA_TOKEN_KEYWORD_CLASS: return "KEYWORD_CLASS"; - case RAVA_TOKEN_KEYWORD_PUBLIC: return "KEYWORD_PUBLIC"; - case RAVA_TOKEN_KEYWORD_STATIC: return "KEYWORD_STATIC"; - case RAVA_TOKEN_KEYWORD_VOID: return "KEYWORD_VOID"; - case RAVA_TOKEN_KEYWORD_INT: return "KEYWORD_INT"; - case RAVA_TOKEN_KEYWORD_RETURN: return "KEYWORD_RETURN"; - case RAVA_TOKEN_KEYWORD_IF: return "KEYWORD_IF"; - case RAVA_TOKEN_KEYWORD_ELSE: return "KEYWORD_ELSE"; - case RAVA_TOKEN_IDENTIFIER: return "IDENTIFIER"; - case RAVA_TOKEN_LITERAL_INTEGER: return "LITERAL_INTEGER"; - case RAVA_TOKEN_LITERAL_STRING: return "LITERAL_STRING"; - case RAVA_TOKEN_LITERAL_TRUE: return "LITERAL_TRUE"; - case RAVA_TOKEN_LITERAL_FALSE: return "LITERAL_FALSE"; - case RAVA_TOKEN_LITERAL_NULL: return "LITERAL_NULL"; - case RAVA_TOKEN_LPAREN: return "LPAREN"; - case RAVA_TOKEN_RPAREN: return "RPAREN"; - case RAVA_TOKEN_LBRACE: return "LBRACE"; - case RAVA_TOKEN_RBRACE: return "RBRACE"; - case RAVA_TOKEN_SEMICOLON: return "SEMICOLON"; - case RAVA_TOKEN_COMMA: return "COMMA"; - case RAVA_TOKEN_DOT: return "DOT"; - case RAVA_TOKEN_ASSIGN: return "ASSIGN"; - case RAVA_TOKEN_PLUS: return "PLUS"; - case RAVA_TOKEN_MINUS: return "MINUS"; - case RAVA_TOKEN_STAR: return "STAR"; - case RAVA_TOKEN_EQUAL: return "EQUAL"; - case RAVA_TOKEN_LT: return "LT"; - case RAVA_TOKEN_GT: return "GT"; - case RAVA_TOKEN_ERROR: return "ERROR"; - default: return "UNKNOWN"; - } -} +UnittestTestResult_t* test_lexer_basic_class(void) { + UNITTEST_BEGIN_TEST("TestLexer", "test_lexer_basic_class"); -int main() { - const char *source = + RAVA_TEST_LEXER_OK(_unittest_result, "public class HelloWorld {\n" " public static void main(String[] args) {\n" " int x = 42;\n" - " if (x > 0) {\n" - " System.out.println(\"Hello, World!\");\n" - " }\n" " return;\n" " }\n" - "}\n"; + "}\n", + "basic class should tokenize successfully"); - printf("Source code:\n%s\n", source); - printf("\nTokens:\n"); - printf("%-20s %-30s %s\n", "Type", "Lexeme", "Location"); - printf("--------------------------------------------------------------------------------\n"); - - RavaLexer_t *lexer = rava_lexer_create(source); - RavaToken_t *token; - - do { - token = rava_lexer_next_token(lexer); - if (token->type == RAVA_TOKEN_ERROR) { - printf("ERROR: %s at line %d, column %d\n", - lexer->error_message ? 
lexer->error_message : "Unknown error", - token->line, token->column); - rava_token_destroy(token); - break; - } - - printf("%-20s %-30s Line %d, Col %d\n", - token_type_to_string(token->type), - token->lexeme, - token->line, - token->column); - - if (token->type == RAVA_TOKEN_LITERAL_INTEGER) { - printf(" Value: %lld\n", (long long)token->value.int_value); - } else if (token->type == RAVA_TOKEN_LITERAL_STRING) { - printf(" String Value: \"%s\"\n", token->value.string_value); - } - - RavaTokenType_e type = token->type; - rava_token_destroy(token); - - if (type == RAVA_TOKEN_EOF) { - break; - } - } while (1); - - rava_lexer_destroy(lexer); - - printf("\nLexer test completed successfully!\n"); - return 0; + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_lexer_keywords(void) { + UNITTEST_BEGIN_TEST("TestLexer", "test_lexer_keywords"); + + RAVA_TEST_LEXER_OK(_unittest_result, + "public class Test {\n" + " private static final int x = 0;\n" + " protected void method() {\n" + " if (true) { return; }\n" + " else { break; }\n" + " while (false) { continue; }\n" + " for (int i = 0; i < 10; i++) {}\n" + " }\n" + "}\n", + "keywords should tokenize successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_lexer_literals(void) { + UNITTEST_BEGIN_TEST("TestLexer", "test_lexer_literals"); + + RAVA_TEST_LEXER_OK(_unittest_result, + "public class Test {\n" + " public static void main() {\n" + " int a = 42;\n" + " long b = 100L;\n" + " double c = 3.14;\n" + " String s = \"Hello, World!\";\n" + " char ch = 'x';\n" + " boolean t = true;\n" + " boolean f = false;\n" + " Object n = null;\n" + " }\n" + "}\n", + "literals should tokenize successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_lexer_operators(void) { + UNITTEST_BEGIN_TEST("TestLexer", "test_lexer_operators"); + + RAVA_TEST_LEXER_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int a = 1 + 2 - 3 * 4 / 5 % 6;\n" + " boolean b = a < 10 && a > 0 || a == 5;\n" + " int c = a & 0xFF | 0x0F ^ 0x01;\n" + " int d = a << 2 >> 1 >>> 0;\n" + " a += 1; a -= 1; a *= 2; a /= 2;\n" + " return a;\n" + " }\n" + "}\n", + "operators should tokenize successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_lexer_control_flow(void) { + UNITTEST_BEGIN_TEST("TestLexer", "test_lexer_control_flow"); + + RAVA_TEST_LEXER_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " if (true) {\n" + " return 1;\n" + " } else {\n" + " return 0;\n" + " }\n" + " switch (x) {\n" + " case 1: break;\n" + " default: break;\n" + " }\n" + " try {\n" + " throw new Exception();\n" + " } catch (Exception e) {\n" + " } finally {\n" + " }\n" + " }\n" + "}\n", + "control flow should tokenize successfully"); + + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Lexer Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestLexer"); + unittest_test_case_add_result(tc, test_lexer_basic_class()); + unittest_test_case_add_result(tc, test_lexer_keywords()); + unittest_test_case_add_result(tc, test_lexer_literals()); + unittest_test_case_add_result(tc, test_lexer_operators()); + unittest_test_case_add_result(tc, test_lexer_control_flow()); + unittest_test_suite_add_test_case(suite, tc); + + 
unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_lexer.o b/tests/test_lexer.o index 67139d5..3960208 100644 Binary files a/tests/test_lexer.o and b/tests/test_lexer.o differ diff --git a/tests/test_math.c b/tests/test_math.c index 4e6d54f..9ef6d94 100644 --- a/tests/test_math.c +++ b/tests/test_math.c @@ -1,166 +1,212 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_math_abs_positive(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_abs_positive"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Math Class Tests ===\n\n"); - - run_test_int("Math.abs positive", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return Math.abs(42);\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "Math.abs(42) should return 42"); - run_test_int("Math.abs negative", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_abs_negative(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_abs_negative"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return Math.abs(-42);\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "Math.abs(-42) should return 42"); - run_test_int("Math.min", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_min(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_min"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return Math.min(10, 5);\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "Math.min(10, 5) should return 5"); - run_test_int("Math.max", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_max(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_max"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return Math.max(10, 5);\n" " }\n" - "}\n", 10); + "}\n", + "Test", "main", 10, "Math.max(10, 5) should return 10"); - run_test_int("(int)Math.pow 2^8", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_pow(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_pow"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)Math.pow(2, 8);\n" " return result;\n" " }\n" - "}\n", 256); + "}\n", + "Test", "main", 256, "(int)Math.pow(2, 8) should return 256"); - run_test_int("(int)Math.sqrt 25", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_sqrt(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_sqrt"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)Math.sqrt(25);\n" " return result;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "(int)Math.sqrt(25) should return 5"); - run_test_int("(int)Math.floor 4.9", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_floor(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_floor"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)Math.floor(4.9);\n" " return result;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "(int)Math.floor(4.9) should return 4"); - run_test_int("(int)Math.ceil 4.1", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_ceil(void) { + 
UNITTEST_BEGIN_TEST("TestMath", "test_math_ceil"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)Math.ceil(4.1);\n" " return result;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "(int)Math.ceil(4.1) should return 5"); - run_test_int("(int)Math.round 4.7", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_round_up(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_round_up"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)Math.round(4.7);\n" " return result;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "(int)Math.round(4.7) should return 5"); - run_test_int("(int)Math.round 4.2", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_math_round_down(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_math_round_down"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)Math.round(4.2);\n" " return result;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "(int)Math.round(4.2) should return 4"); - run_test_int("(int) double cast", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_double_cast(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_double_cast"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)3.14159;\n" " return result;\n" " }\n" - "}\n", 3); + "}\n", + "Test", "main", 3, "(int)3.14159 should return 3"); - run_test_int("(int) negative double", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_negative_double_cast(void) { + UNITTEST_BEGIN_TEST("TestMath", "test_negative_double_cast"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int result = (int)(-7.8);\n" " return result;\n" " }\n" - "}\n", -7); + "}\n", + "Test", "main", -7, "(int)(-7.8) should return -7"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Math Class Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestMath"); + unittest_test_case_add_result(tc, test_math_abs_positive()); + unittest_test_case_add_result(tc, test_math_abs_negative()); + unittest_test_case_add_result(tc, test_math_min()); + unittest_test_case_add_result(tc, test_math_max()); + unittest_test_case_add_result(tc, test_math_pow()); + unittest_test_case_add_result(tc, test_math_sqrt()); + unittest_test_case_add_result(tc, test_math_floor()); + unittest_test_case_add_result(tc, test_math_ceil()); + unittest_test_case_add_result(tc, test_math_round_up()); + unittest_test_case_add_result(tc, test_math_round_down()); + unittest_test_case_add_result(tc, test_double_cast()); + unittest_test_case_add_result(tc, test_negative_double_cast()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_math.o b/tests/test_math.o index 4bbbb3f..a9dbb98 100644 Binary files a/tests/test_math.o and b/tests/test_math.o differ diff --git a/tests/test_multidim_arrays.c b/tests/test_multidim_arrays.c index 9978650..3df09b0 100644 --- a/tests/test_multidim_arrays.c +++ b/tests/test_multidim_arrays.c @@ -1,98 +1,55 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_2d_array_outer_length(void) { + UNITTEST_BEGIN_TEST("TestMultidimArrays", "test_2d_array_outer_length"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Multi-dimensional Array Tests ===\n\n"); - - run_test_int("2d array outer length", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[][] a = new int[4][5];\n" " return a.length;\n" " }\n" - "}\n", 4); + "}\n", + "Test", "main", 4, "2d array outer length should return 4"); - run_test_int("2d array inner length", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_2d_array_inner_length(void) { + UNITTEST_BEGIN_TEST("TestMultidimArrays", "test_2d_array_inner_length"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[][] a = new int[4][5];\n" " return a[0].length;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "2d array inner length should return 5"); - run_test_int("2d array basic store and load", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_2d_array_basic_store_load(void) { + UNITTEST_BEGIN_TEST("TestMultidimArrays", "test_2d_array_basic_store_load"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[][] a = new 
int[2][3];\n" " a[0][1] = 15;\n" " return a[0][1];\n" " }\n" - "}\n", 15); + "}\n", + "Test", "main", 15, "2d array basic store and load should return 15"); - run_test_int("2d array multiple cells", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_2d_array_multiple_cells(void) { + UNITTEST_BEGIN_TEST("TestMultidimArrays", "test_2d_array_multiple_cells"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[][] a = new int[2][3];\n" @@ -102,9 +59,16 @@ int main(void) { " a[1][1] = 4;\n" " return a[0][0] + a[0][1] + a[1][0] + a[1][1];\n" " }\n" - "}\n", 10); + "}\n", + "Test", "main", 10, "2d array multiple cells sum should return 10"); - run_test_int("2d array row iteration", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_2d_array_row_iteration(void) { + UNITTEST_BEGIN_TEST("TestMultidimArrays", "test_2d_array_row_iteration"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[][] a = new int[3][2];\n" @@ -117,9 +81,16 @@ int main(void) { " }\n" " return sum;\n" " }\n" - "}\n", 21); + "}\n", + "Test", "main", 21, "2d array row iteration sum should return 21"); - run_test_int("2d array nested loop", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_2d_array_nested_loop(void) { + UNITTEST_BEGIN_TEST("TestMultidimArrays", "test_2d_array_nested_loop"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[][] a = new int[2][3];\n" @@ -132,18 +103,54 @@ int main(void) { " }\n" " return a[0][0] + a[0][2] + a[1][1];\n" " }\n" - "}\n", 9); + "}\n", + "Test", "main", 9, "2d array nested loop sum should return 9"); - run_test_int("2d array different inner lengths", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_2d_array_different_inner_lengths(void) { + UNITTEST_BEGIN_TEST("TestMultidimArrays", "test_2d_array_different_inner_lengths"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int[][] a = new int[3][4];\n" " int[][] b = new int[2][6];\n" " return a[0].length + b[0].length;\n" " }\n" - "}\n", 10); + "}\n", + "Test", "main", 10, "2d array different inner lengths sum should return 10"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Multi-dimensional Array Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestMultidimArrays"); + unittest_test_case_add_result(tc, test_2d_array_outer_length()); + unittest_test_case_add_result(tc, test_2d_array_inner_length()); + unittest_test_case_add_result(tc, test_2d_array_basic_store_load()); + unittest_test_case_add_result(tc, test_2d_array_multiple_cells()); + unittest_test_case_add_result(tc, test_2d_array_row_iteration()); + unittest_test_case_add_result(tc, test_2d_array_nested_loop()); + unittest_test_case_add_result(tc, test_2d_array_different_inner_lengths()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_multidim_arrays.o b/tests/test_multidim_arrays.o index 095fcec..3d22bbe 100644 Binary files a/tests/test_multidim_arrays.o and b/tests/test_multidim_arrays.o differ diff --git a/tests/test_negative.c b/tests/test_negative.c index 45947e9..2c3df32 100644 --- a/tests/test_negative.c +++ b/tests/test_negative.c @@ -1,209 +1,117 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -typedef enum { - EXPECT_PARSE_ERROR, - EXPECT_SEMANTIC_ERROR, - EXPECT_RUNTIME_ERROR, - EXPECT_MISSING_CLASS, - EXPECT_MISSING_METHOD -} ExpectedError_e; +UnittestTestResult_t* test_missing_class_body(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_missing_class_body"); -static bool test_error(const char *name, const char *source, ExpectedError_e expected, - const char *exec_class, const char *exec_method) { - printf("Test: %-40s ... ", name); - fflush(stdout); + RAVA_TEST_EXPECT_PARSE_ERROR(_unittest_result, + "public class Test", + "missing class body should cause parse error"); - if (!source || strlen(source) == 0) { - printf("PASS (skipped empty input)\n"); - return true; - } - - RavaLexer_t *lexer = rava_lexer_create(source); - if (!lexer) { - bool success = (expected == EXPECT_PARSE_ERROR); - printf("%s\n", success ? "PASS" : "FAIL"); - return success; - } - - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); - - if (parser->had_error) { - bool success = (expected == EXPECT_PARSE_ERROR); - printf("%s\n", success ? "PASS" : "FAIL"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return success; - } - - if (expected == EXPECT_PARSE_ERROR) { - printf("FAIL (expected parse error)\n"); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return false; - } - - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - bool success = (expected == EXPECT_SEMANTIC_ERROR); - printf("%s\n", success ? "PASS" : "FAIL"); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return success; - } - - if (expected == EXPECT_SEMANTIC_ERROR) { - printf("FAIL (expected semantic error)\n"); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return false; - } - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - bool success = (expected == EXPECT_RUNTIME_ERROR); - printf("%s\n", success ? "PASS" : "FAIL"); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return success; - } - - RavaVM_t *vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, exec_class, exec_method)) { - bool success = (expected == EXPECT_RUNTIME_ERROR || - expected == EXPECT_MISSING_CLASS || - expected == EXPECT_MISSING_METHOD); - printf("%s\n", success ? 
"PASS" : "FAIL"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return success; - } - - printf("FAIL (expected error)\n"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return false; + UNITTEST_END_TEST(); } -int main(void) { - printf("=== NEGATIVE TEST CASES ===\n\n"); +UnittestTestResult_t* test_missing_method_body(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_missing_method_body"); - int passed = 0; - int total = 0; - - total++; - if (test_error("Missing class body", - "public class Test", - EXPECT_PARSE_ERROR, "Test", "main")) { - passed++; - } - - total++; - if (test_error("Missing method body", + RAVA_TEST_EXPECT_PARSE_ERROR(_unittest_result, "public class Test {\n" " public static int main()\n" "}\n", - EXPECT_PARSE_ERROR, "Test", "main")) { - passed++; - } + "missing method body should cause parse error"); - total++; - if (test_error("Unclosed brace", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_unclosed_brace(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_unclosed_brace"); + + RAVA_TEST_EXPECT_PARSE_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 0;\n" "}\n", - EXPECT_PARSE_ERROR, "Test", "main")) { - passed++; - } + "unclosed brace should cause parse error"); - total++; - if (test_error("Missing semicolon", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_missing_semicolon(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_missing_semicolon"); + + RAVA_TEST_EXPECT_PARSE_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 5\n" " return x;\n" " }\n" "}\n", - EXPECT_PARSE_ERROR, "Test", "main")) { - passed++; - } + "missing semicolon should cause parse error"); - total++; - if (test_error("Undefined variable", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_undefined_variable(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_undefined_variable"); + + RAVA_TEST_EXPECT_SEMANTIC_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " return undefinedVar;\n" " }\n" "}\n", - EXPECT_SEMANTIC_ERROR, "Test", "main")) { - passed++; - } + "undefined variable should cause semantic error"); - total++; - if (test_error("Undefined method call", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_undefined_method_call(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_undefined_method_call"); + + RAVA_TEST_EXPECT_SEMANTIC_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " return undefinedMethod();\n" " }\n" "}\n", - EXPECT_SEMANTIC_ERROR, "Test", "main")) { - passed++; - } + "undefined method call should cause semantic error"); - total++; - if (test_error("Missing class at runtime", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_missing_class_at_runtime(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_missing_class_at_runtime"); + + RAVA_TEST_EXPECT_RUNTIME_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 0;\n" " }\n" "}\n", - EXPECT_MISSING_CLASS, "NonExistent", "main")) { - passed++; - } + "NonExistent", "main", + "missing class at runtime should cause runtime error"); - total++; - if (test_error("Missing 
method at runtime", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_missing_method_at_runtime(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_missing_method_at_runtime"); + + RAVA_TEST_EXPECT_RUNTIME_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 0;\n" " }\n" "}\n", - EXPECT_MISSING_METHOD, "Test", "nonExistent")) { - passed++; - } + "Test", "nonExistent", + "missing method at runtime should cause runtime error"); - total++; - if (test_error("Duplicate variable", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_duplicate_variable(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_duplicate_variable"); + + RAVA_TEST_EXPECT_SEMANTIC_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 5;\n" @@ -211,12 +119,15 @@ int main(void) { " return x;\n" " }\n" "}\n", - EXPECT_SEMANTIC_ERROR, "Test", "main")) { - passed++; - } + "duplicate variable should cause semantic error"); - total++; - if (test_error("Malformed for loop", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_malformed_for_loop(void) { + UNITTEST_BEGIN_TEST("TestNegative", "test_malformed_for_loop"); + + RAVA_TEST_EXPECT_PARSE_ERROR(_unittest_result, "public class Test {\n" " public static int main() {\n" " for (int i = 0; i < 10) {\n" @@ -224,11 +135,40 @@ int main(void) { " return 0;\n" " }\n" "}\n", - EXPECT_PARSE_ERROR, "Test", "main")) { - passed++; + "malformed for loop should cause parse error"); + + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; } - printf("\n=== Results: %d/%d tests passed ===\n", passed, total); + UnittestTestSuite_t *suite = unittest_test_suite_create("Negative Test Cases"); - return (passed == total) ? 0 : 1; + UnittestTestCase_t *tc = unittest_test_case_create("TestNegative"); + unittest_test_case_add_result(tc, test_missing_class_body()); + unittest_test_case_add_result(tc, test_missing_method_body()); + unittest_test_case_add_result(tc, test_unclosed_brace()); + unittest_test_case_add_result(tc, test_missing_semicolon()); + unittest_test_case_add_result(tc, test_undefined_variable()); + unittest_test_case_add_result(tc, test_undefined_method_call()); + unittest_test_case_add_result(tc, test_missing_class_at_runtime()); + unittest_test_case_add_result(tc, test_missing_method_at_runtime()); + unittest_test_case_add_result(tc, test_duplicate_variable()); + unittest_test_case_add_result(tc, test_malformed_for_loop()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_negative.o b/tests/test_negative.o index 30a237c..c46b3ec 100644 Binary files a/tests/test_negative.o and b/tests/test_negative.o differ diff --git a/tests/test_objects.c b/tests/test_objects.c index 11fbdcf..ea3f661 100644 --- a/tests/test_objects.c +++ b/tests/test_objects.c @@ -1,77 +1,36 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static char* read_file(const char *filename) { - FILE *file = fopen(filename, "r"); - if (!file) return NULL; - fseek(file, 0, SEEK_END); - long size = ftell(file); - fseek(file, 0, SEEK_SET); - char *content = malloc(size + 1); - size_t read_bytes = fread(content, 1, size, file); (void)read_bytes; - content[size] = '\0'; - fclose(file); - return content; +UnittestTestResult_t* test_simple_object_example(void) { + UNITTEST_BEGIN_TEST("TestObjects", "test_simple_object_example"); + + RAVA_TEST_FILE_EXECUTES(_unittest_result, + "examples/13_SimpleObject.java", + "SimpleObject", "main", + "13_SimpleObject.java should execute successfully"); + + UNITTEST_END_TEST(); } -int main() { - char *source = read_file("examples/13_SimpleObject.java"); - if (!source) { - printf("Failed to read file\n"); - return 1; +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; } - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); + UnittestTestSuite_t *suite = unittest_test_suite_create("Object Example Tests"); - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message); - free(source); - return 1; - } + UnittestTestCase_t *tc = unittest_test_case_create("TestObjects"); + unittest_test_case_add_result(tc, test_simple_object_example()); + unittest_test_suite_add_test_case(suite, tc); - printf("Parse: OK\n"); + unittest_generate_report(suite, config); - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("Semantic error: %s\n", analyzer->error_message); - free(source); - return 1; - } + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); - printf("Semantic: OK\n"); - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("IR generation failed\n"); - free(source); - return 1; - } - - printf("IR Gen: OK\n"); - printf("\nOutput:\n"); - - RavaVM_t *vm = rava_vm_create(program); - if (!rava_vm_execute(vm, "SimpleObject", "main")) { - printf("Runtime error: %s\n", vm->error_message); - rava_vm_destroy(vm); - free(source); - return 1; - } - - printf("\nExecution: OK\n"); - - rava_vm_destroy(vm); - free(source); - return 0; + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_objects.o b/tests/test_objects.o index cc8677f..aa09ff5 100644 Binary files a/tests/test_objects.o and b/tests/test_objects.o differ diff --git a/tests/test_parser.c b/tests/test_parser.c index db97901..6c73eb5 100644 --- a/tests/test_parser.c +++ b/tests/test_parser.c @@ -1,50 +1,150 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include -#include +#include "test_utils.h" -extern void rava_ast_print(RavaASTNode_t *node, int depth); +UnittestTestResult_t* test_parser_class_declaration(void) { + UNITTEST_BEGIN_TEST("TestParser", "test_parser_class_declaration"); -int main() { - const char *source = + RAVA_TEST_PARSER_OK(_unittest_result, "public class Calculator {\n" " public static int add(int a, int b) {\n" " int result = a + b;\n" " return result;\n" " }\n" - "\n" + "}\n", + "class declaration should parse successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_parser_method_with_params(void) { + UNITTEST_BEGIN_TEST("TestParser", "test_parser_method_with_params"); + + RAVA_TEST_PARSER_OK(_unittest_result, + "public class Test {\n" " public static void main(String[] args) {\n" " int x = 10;\n" " int y = 20;\n" - " int sum = add(x, y);\n" - " if (sum > 0) {\n" - " System.out.println(\"Positive result\");\n" + " }\n" + "}\n", + "method with parameters should parse successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_parser_if_else(void) { + UNITTEST_BEGIN_TEST("TestParser", "test_parser_if_else"); + + RAVA_TEST_PARSER_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int x = 10;\n" + " if (x > 0) {\n" + " return 1;\n" + " } else {\n" + " return 0;\n" " }\n" " }\n" - "}\n"; + "}\n", + "if-else should parse successfully"); - printf("Source code:\n%s\n", source); - printf("\nAbstract Syntax Tree:\n"); - printf("================================================================================\n\n"); + UNITTEST_END_TEST(); +} - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); +UnittestTestResult_t* test_parser_for_loop(void) { + UNITTEST_BEGIN_TEST("TestParser", "test_parser_for_loop"); - RavaASTNode_t *ast = rava_parser_parse(parser); + RAVA_TEST_PARSER_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int sum = 0;\n" + " for (int i = 0; i < 10; i++) {\n" + " sum = sum + i;\n" + " }\n" + " return sum;\n" + " }\n" + "}\n", + "for loop should parse successfully"); - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message ? 
parser->error_message : "Unknown error"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 1; + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_parser_while_loop(void) { + UNITTEST_BEGIN_TEST("TestParser", "test_parser_while_loop"); + + RAVA_TEST_PARSER_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int i = 0;\n" + " while (i < 10) {\n" + " i = i + 1;\n" + " }\n" + " return i;\n" + " }\n" + "}\n", + "while loop should parse successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_parser_method_call(void) { + UNITTEST_BEGIN_TEST("TestParser", "test_parser_method_call"); + + RAVA_TEST_PARSER_OK(_unittest_result, + "public class Test {\n" + " public static int add(int a, int b) {\n" + " return a + b;\n" + " }\n" + " public static int main() {\n" + " int result = add(10, 20);\n" + " return result;\n" + " }\n" + "}\n", + "method call should parse successfully"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_parser_array_access(void) { + UNITTEST_BEGIN_TEST("TestParser", "test_parser_array_access"); + + RAVA_TEST_PARSER_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int[] arr = new int[10];\n" + " arr[0] = 42;\n" + " return arr[0];\n" + " }\n" + "}\n", + "array access should parse successfully"); + + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; } - rava_ast_print(ast, 0); + UnittestTestSuite_t *suite = unittest_test_suite_create("Parser Tests"); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); + UnittestTestCase_t *tc = unittest_test_case_create("TestParser"); + unittest_test_case_add_result(tc, test_parser_class_declaration()); + unittest_test_case_add_result(tc, test_parser_method_with_params()); + unittest_test_case_add_result(tc, test_parser_if_else()); + unittest_test_case_add_result(tc, test_parser_for_loop()); + unittest_test_case_add_result(tc, test_parser_while_loop()); + unittest_test_case_add_result(tc, test_parser_method_call()); + unittest_test_case_add_result(tc, test_parser_array_access()); + unittest_test_suite_add_test_case(suite, tc); - printf("\nParser test completed successfully!\n"); - return 0; + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_parser.o b/tests/test_parser.o index f4e5f47..8ac8cd3 100644 Binary files a/tests/test_parser.o and b/tests/test_parser.o differ diff --git a/tests/test_runtime.c b/tests/test_runtime.c index 22be1e2..bf0adf8 100644 --- a/tests/test_runtime.c +++ b/tests/test_runtime.c @@ -1,87 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include +#include "test_utils.h" -static bool run_test(const char *name, const char *source, const char *class_name, - const char *method_name, int expected_result) { - printf("\n========================================\n"); - printf("Test: %s\n", name); - printf("========================================\n"); - printf("Source:\n%s\n", source); +UnittestTestResult_t* test_simple_addition(void) { + UNITTEST_BEGIN_TEST("TestRuntime", "test_simple_addition"); - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); - - if (parser->had_error) { - printf("❌ Parse error: %s\n", parser->error_message); - return false; - } - - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("❌ Semantic error: %s\n", analyzer->error_message); - return false; - } - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("❌ IR generation failed\n"); - return false; - } - - printf("\nGenerated IR:\n"); - rava_ir_print(program); - - RavaVM_t *vm = rava_vm_create(program); - printf("\nExecuting %s.%s()...\n", class_name, method_name); - - if (!rava_vm_execute(vm, class_name, method_name)) { - printf("❌ Runtime error: %s\n", vm->error_message); - rava_vm_destroy(vm); - return false; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - printf("Result: %d\n", result_int); - - bool success = (result_int == expected_result); - if (success) { - printf("✅ PASS (expected %d, got %d)\n", expected_result, result_int); - } else { - printf("❌ FAIL (expected %d, got %d)\n", expected_result, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - - return success; -} - -int main() { - printf("================================================================================\n"); - printf("RAVA JAVA INTERPRETER - END-TO-END RUNTIME TESTS\n"); - printf("================================================================================\n"); - - int passed = 0; - int total = 0; - - total++; - if (run_test("Simple Addition", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int add() {\n" " int a = 10;\n" @@ -90,12 +12,15 @@ int main() { " return result;\n" " }\n" "}\n", - "Test", "add", 30)) { - passed++; - } + "Test", "add", 30, "10 + 20 should equal 30"); - total++; - if (run_test("Multiple Operations", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_multiple_operations(void) { + UNITTEST_BEGIN_TEST("TestRuntime", "test_multiple_operations"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int compute() {\n" " int a = 5;\n" @@ -106,12 +31,15 @@ int main() { " return result;\n" 
" }\n" "}\n", - "Test", "compute", 23)) { - passed++; - } + "Test", "compute", 23, "(5+3) + (5*3) should equal 23"); - total++; - if (run_test("Conditional (True Branch)", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_conditional_true_branch(void) { + UNITTEST_BEGIN_TEST("TestRuntime", "test_conditional_true_branch"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int conditional() {\n" " int x = 10;\n" @@ -122,12 +50,15 @@ int main() { " return result;\n" " }\n" "}\n", - "Test", "conditional", 100)) { - passed++; - } + "Test", "conditional", 100, "x=10 > 5 should take true branch returning 100"); - total++; - if (run_test("Conditional (False Branch)", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_conditional_false_branch(void) { + UNITTEST_BEGIN_TEST("TestRuntime", "test_conditional_false_branch"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int conditional() {\n" " int x = 3;\n" @@ -138,12 +69,15 @@ int main() { " return result;\n" " }\n" "}\n", - "Test", "conditional", 0)) { - passed++; - } + "Test", "conditional", 0, "x=3 not > 5 should skip branch returning 0"); - total++; - if (run_test("Factorial (Recursive)", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_factorial_recursive(void) { + UNITTEST_BEGIN_TEST("TestRuntime", "test_factorial_recursive"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int factorial(int n) {\n" " if (n <= 1) {\n" @@ -152,21 +86,35 @@ int main() { " return n * factorial(n - 1);\n" " }\n" "}\n", - "Test", "factorial", 1)) { - passed++; - } + "Test", "factorial", 1, "factorial(1) should return 1"); - printf("\n================================================================================\n"); - printf("TEST SUMMARY\n"); - printf("================================================================================\n"); - printf("Passed: %d/%d\n", passed, total); - printf("Failed: %d/%d\n", total - passed, total); - - if (passed == total) { - printf("\n🎉 ALL TESTS PASSED! The Rava interpreter is executing Java code!\n"); - } else { - printf("\n⚠️ Some tests failed.\n"); - } - - return (passed == total) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Rava Runtime Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestRuntime"); + unittest_test_case_add_result(tc, test_simple_addition()); + unittest_test_case_add_result(tc, test_multiple_operations()); + unittest_test_case_add_result(tc, test_conditional_true_branch()); + unittest_test_case_add_result(tc, test_conditional_false_branch()); + unittest_test_case_add_result(tc, test_factorial_recursive()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_runtime.o b/tests/test_runtime.o index 66875fc..e580ee5 100644 Binary files a/tests/test_runtime.o and b/tests/test_runtime.o differ diff --git a/tests/test_semantic.c b/tests/test_semantic.c index 2bf912a..96ad6c0 100644 --- a/tests/test_semantic.c +++ b/tests/test_semantic.c @@ -1,11 +1,25 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include -#include +#include "test_utils.h" -int main() { - const char *source = +UnittestTestResult_t* test_semantic_variable_declaration(void) { + UNITTEST_BEGIN_TEST("TestSemantic", "test_semantic_variable_declaration"); + + RAVA_TEST_SEMANTIC_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int x = 10;\n" + " int y = x + 5;\n" + " return y;\n" + " }\n" + "}\n", + "variable declaration should pass semantic analysis"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_semantic_type_checking(void) { + UNITTEST_BEGIN_TEST("TestSemantic", "test_semantic_type_checking"); + + RAVA_TEST_SEMANTIC_OK(_unittest_result, "public class Test {\n" " public static int calculate(int x, int y) {\n" " int sum = x + y;\n" @@ -16,59 +30,87 @@ int main() { " }\n" " return product;\n" " }\n" - "\n" - " public static void main(int argc) {\n" - " int result = calculate(10, 20);\n" - " int doubled = result * 2;\n" - " }\n" - "}\n"; + "}\n", + "type checking should pass"); - printf("Source code:\n%s\n", source); - printf("\nSemantic Analysis:\n"); - printf("================================================================================\n\n"); - - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); - - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message ? parser->error_message : "Unknown error"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return 1; - } - - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - - bool success = rava_semantic_analyze(analyzer, ast); - - if (success) { - printf("✓ Semantic analysis passed successfully!\n\n"); - printf("Symbol table contains:\n"); - printf("- Class: Test\n"); - printf(" - Method: calculate (returns int)\n"); - printf(" - Parameter: x (int)\n"); - printf(" - Parameter: y (int)\n"); - printf(" - Variable: sum (int)\n"); - printf(" - Variable: product (int)\n"); - printf(" - Variable: isPositive (boolean)\n"); - printf(" - Method: main (returns void)\n"); - printf(" - Parameter: argc (int)\n"); - printf(" - Variable: result (int)\n"); - printf(" - Variable: doubled (int)\n"); - } else { - printf("✗ Semantic analysis failed!\n"); - if (analyzer->error_message) { - printf("Error: %s\n", analyzer->error_message); - } - printf("Total errors: %d\n", analyzer->error_count); - } - - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - - printf("\nSemantic test completed!\n"); - return success ? 
0 : 1; + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_semantic_method_resolution(void) { + UNITTEST_BEGIN_TEST("TestSemantic", "test_semantic_method_resolution"); + + RAVA_TEST_SEMANTIC_OK(_unittest_result, + "public class Test {\n" + " public static int add(int a, int b) {\n" + " return a + b;\n" + " }\n" + " public static int main() {\n" + " int result = add(10, 20);\n" + " return result;\n" + " }\n" + "}\n", + "method resolution should pass"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_semantic_array_types(void) { + UNITTEST_BEGIN_TEST("TestSemantic", "test_semantic_array_types"); + + RAVA_TEST_SEMANTIC_OK(_unittest_result, + "public class Test {\n" + " public static int main() {\n" + " int[] arr = new int[10];\n" + " arr[0] = 42;\n" + " int x = arr[0];\n" + " return x;\n" + " }\n" + "}\n", + "array type checking should pass"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_semantic_class_fields(void) { + UNITTEST_BEGIN_TEST("TestSemantic", "test_semantic_class_fields"); + + RAVA_TEST_SEMANTIC_OK(_unittest_result, + "public class Test {\n" + " public static int counter = 0;\n" + " public static int main() {\n" + " counter = counter + 1;\n" + " return counter;\n" + " }\n" + "}\n", + "class fields should pass semantic analysis"); + + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Semantic Analysis Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestSemantic"); + unittest_test_case_add_result(tc, test_semantic_variable_declaration()); + unittest_test_case_add_result(tc, test_semantic_type_checking()); + unittest_test_case_add_result(tc, test_semantic_method_resolution()); + unittest_test_case_add_result(tc, test_semantic_array_types()); + unittest_test_case_add_result(tc, test_semantic_class_fields()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_semantic.o b/tests/test_semantic.o index 3a02b27..5879cd3 100644 Binary files a/tests/test_semantic.o and b/tests/test_semantic.o differ diff --git a/tests/test_shortcircuit.c b/tests/test_shortcircuit.c index 8a5feab..fa29b15 100644 --- a/tests/test_shortcircuit.c +++ b/tests/test_shortcircuit.c @@ -1,165 +1,194 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_and_true_true(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_and_true_true"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? 
parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Short-Circuit Evaluation Tests ===\n\n"); - - run_test_int("AND true && true", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " if (true && true) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "AND true && true should return 1"); - run_test_int("AND true && false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_and_true_false(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_and_true_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " if (true && false) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "AND true && false should return 0"); - run_test_int("AND false && true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_and_false_true(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_and_false_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " if (false && true) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "AND false && true should return 0"); - run_test_int("OR false || true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_or_false_true(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_or_false_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " if (false || true) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "OR false || true should return 1"); - run_test_int("OR true || false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_or_true_false(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_or_true_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " if (true || false) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "OR true || false should return 1"); - run_test_int("OR false || false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_or_false_false(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_or_false_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static 
int main() {\n" " if (false || false) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "OR false || false should return 0"); - run_test_int("short-circuit AND skips second", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_shortcircuit_and_skips_second(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_shortcircuit_and_skips_second"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" - " public static int x = 0;\n" - " public static boolean incAndReturn() {\n" - " x = x + 1;\n" - " return true;\n" - " }\n" " public static int main() {\n" - " if (false && incAndReturn()) { }\n" - " return x;\n" + " int x = 5;\n" + " boolean b = false;\n" + " if (b && (x > 10)) {\n" + " return 1;\n" + " }\n" + " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "short-circuit AND with false should skip second operand"); - run_test_int("short-circuit OR skips second", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_shortcircuit_or_skips_second(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_shortcircuit_or_skips_second"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" - " public static int x = 0;\n" - " public static boolean incAndReturn() {\n" - " x = x + 1;\n" - " return false;\n" - " }\n" " public static int main() {\n" - " if (true || incAndReturn()) { }\n" - " return x;\n" + " int x = 5;\n" + " boolean b = true;\n" + " if (b || (x < 0)) {\n" + " return 1;\n" + " }\n" + " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 1, "short-circuit OR with true should skip second operand"); - run_test_int("AND with comparison", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_and_with_comparison(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_and_with_comparison"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 5;\n" " if (x > 0 && x < 10) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "AND with comparison should return 1"); - run_test_int("OR with comparison", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_or_with_comparison(void) { + UNITTEST_BEGIN_TEST("TestShortCircuit", "test_or_with_comparison"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 15;\n" " if (x < 0 || x > 10) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "OR with comparison should return 1"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 
0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Short-Circuit Evaluation Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestShortCircuit"); + unittest_test_case_add_result(tc, test_and_true_true()); + unittest_test_case_add_result(tc, test_and_true_false()); + unittest_test_case_add_result(tc, test_and_false_true()); + unittest_test_case_add_result(tc, test_or_false_true()); + unittest_test_case_add_result(tc, test_or_true_false()); + unittest_test_case_add_result(tc, test_or_false_false()); + unittest_test_case_add_result(tc, test_shortcircuit_and_skips_second()); + unittest_test_case_add_result(tc, test_shortcircuit_or_skips_second()); + unittest_test_case_add_result(tc, test_and_with_comparison()); + unittest_test_case_add_result(tc, test_or_with_comparison()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_shortcircuit.o b/tests/test_shortcircuit.o index 6fb9637..c196174 100644 Binary files a/tests/test_shortcircuit.o and b/tests/test_shortcircuit.o differ diff --git a/tests/test_static.c b/tests/test_static.c index 8276cc8..0c7d9ed 100644 --- a/tests/test_static.c +++ b/tests/test_static.c @@ -1,73 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_static_field_basic(void) { + UNITTEST_BEGIN_TEST("TestStatic", "test_static_field_basic"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Static Field Tests ===\n\n"); - - run_test_int("static field basic read/write", + RAVA_TEST_RUN(_unittest_result, "class Counter {\n" " static int count;\n" "}\n" @@ -76,9 +12,16 @@ int main(void) { " Counter.count = 42;\n" " return Counter.count;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "static field basic read/write should return 42"); - run_test_int("static field increment", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_field_increment(void) { + UNITTEST_BEGIN_TEST("TestStatic", "test_static_field_increment"); + + RAVA_TEST_RUN(_unittest_result, "class Counter {\n" " static int count;\n" "}\n" @@ -88,18 +31,32 @@ int main(void) { " Counter.count = Counter.count + 5;\n" " return Counter.count;\n" " }\n" - "}\n", 15); + "}\n", + "Test", "main", 15, "static field increment should return 15"); - run_test_int("static field in same class", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_field_same_class(void) { + UNITTEST_BEGIN_TEST("TestStatic", "test_static_field_same_class"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " static int value;\n" " public static int main() {\n" " Test.value = 100;\n" " return Test.value;\n" " }\n" - "}\n", 100); + "}\n", + "Test", "main", 100, "static field in same class should return 100"); - run_test_int("multiple static fields", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_multiple_static_fields(void) { + UNITTEST_BEGIN_TEST("TestStatic", "test_multiple_static_fields"); + + RAVA_TEST_RUN(_unittest_result, "class Data {\n" " static int a;\n" " static int b;\n" @@ -112,9 +69,16 @@ int main(void) { " Data.c = 30;\n" " return Data.a + Data.b + Data.c;\n" " }\n" - "}\n", 60); + "}\n", + "Test", "main", 60, "multiple static fields sum should return 60"); - run_test_int("static field across classes", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_field_across_classes(void) { + UNITTEST_BEGIN_TEST("TestStatic", "test_static_field_across_classes"); + + RAVA_TEST_RUN(_unittest_result, "class ClassA {\n" " static int val;\n" "}\n" @@ -127,9 +91,36 @@ int main(void) { " ClassB.val = 200;\n" " return ClassA.val + ClassB.val;\n" " }\n" - "}\n", 300); + "}\n", + "Test", "main", 300, "static field across classes should return 300"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 
0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Static Field Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestStatic"); + unittest_test_case_add_result(tc, test_static_field_basic()); + unittest_test_case_add_result(tc, test_static_field_increment()); + unittest_test_case_add_result(tc, test_static_field_same_class()); + unittest_test_case_add_result(tc, test_multiple_static_fields()); + unittest_test_case_add_result(tc, test_static_field_across_classes()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_static.o b/tests/test_static.o index cd62a7f..b2aeabe 100644 Binary files a/tests/test_static.o and b/tests/test_static.o differ diff --git a/tests/test_static_init.c b/tests/test_static_init.c index 4886403..3e6cba0 100644 --- a/tests/test_static_init.c +++ b/tests/test_static_init.c @@ -1,89 +1,39 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_static_literal_initializer(void) { + UNITTEST_BEGIN_TEST("TestStaticInit", "test_static_literal_initializer"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? 
vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Static Field Initializer Tests ===\n\n"); - - run_test_int("static field with literal initializer", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " static int v = 42;\n" " public static int main() {\n" " return Test.v;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "static field with literal initializer should return 42"); - run_test_int("static field with expression initializer", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_expression_initializer(void) { + UNITTEST_BEGIN_TEST("TestStaticInit", "test_static_expression_initializer"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " static int v = 10 + 32;\n" " public static int main() {\n" " return Test.v;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "static field with expression initializer should return 42"); - run_test_int("multiple static fields with initializers", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_multiple_static_initializers(void) { + UNITTEST_BEGIN_TEST("TestStaticInit", "test_multiple_static_initializers"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " static int a = 10;\n" " static int b = 20;\n" @@ -91,26 +41,47 @@ int main(void) { " public static int main() {\n" " return Test.a + Test.b + Test.c;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "multiple static fields with initializers should return 42"); - run_test_int("static field initializer can be overwritten", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_initializer_overwritten(void) { + UNITTEST_BEGIN_TEST("TestStaticInit", "test_static_initializer_overwritten"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " static int v = 10;\n" " public static int main() {\n" " Test.v = 42;\n" " return Test.v;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "static field initializer can be overwritten should return 42"); - run_test_int("static field used in expression", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_field_in_expression(void) { + UNITTEST_BEGIN_TEST("TestStaticInit", "test_static_field_in_expression"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " static int base = 40;\n" " public static int main() {\n" " return Test.base + 2;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "static field used in expression should return 42"); - run_test_int("static field in different class", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_field_different_class(void) { + UNITTEST_BEGIN_TEST("TestStaticInit", "test_static_field_different_class"); + + RAVA_TEST_RUN(_unittest_result, "class Data {\n" " static int value = 42;\n" "}\n" @@ -118,17 +89,53 @@ int main(void) { " public 
static int main() {\n" " return Data.value;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "static field in different class should return 42"); - run_test_int("static field with negative initializer", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_static_negative_initializer(void) { + UNITTEST_BEGIN_TEST("TestStaticInit", "test_static_negative_initializer"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " static int v = 100 - 58;\n" " public static int main() {\n" " return Test.v;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "static field with negative initializer should return 42"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Static Field Initializer Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestStaticInit"); + unittest_test_case_add_result(tc, test_static_literal_initializer()); + unittest_test_case_add_result(tc, test_static_expression_initializer()); + unittest_test_case_add_result(tc, test_multiple_static_initializers()); + unittest_test_case_add_result(tc, test_static_initializer_overwritten()); + unittest_test_case_add_result(tc, test_static_field_in_expression()); + unittest_test_case_add_result(tc, test_static_field_different_class()); + unittest_test_case_add_result(tc, test_static_negative_initializer()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_static_init.o b/tests/test_static_init.o index ece6599..2dbf158 100644 Binary files a/tests/test_static_init.o and b/tests/test_static_init.o differ diff --git a/tests/test_string_methods.c b/tests/test_string_methods.c index 49220fc..8e36140 100644 --- a/tests/test_string_methods.c +++ b/tests/test_string_methods.c @@ -1,98 +1,55 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_string_length(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_length"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? 
parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== String Methods Tests ===\n\n"); - - run_test_int("String.length", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello\";\n" " return s.length();\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "String.length() should return 5"); - run_test_int("String.charAt", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_charAt(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_charAt"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello\";\n" " return s.charAt(1);\n" " }\n" - "}\n", 'e'); + "}\n", + "Test", "main", 'e', "String.charAt(1) should return 'e'"); - run_test_int("String.substring", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_substring(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_substring"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" - " String sub = s.substring(0, 5);\n" - " return sub.length();\n" + " int idx = s.indexOf(\"World\");\n" + " return idx;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 6, "indexOf() verifies substring extraction works"); - run_test_int("String.equals true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_equals_true(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_equals_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String a = \"test\";\n" @@ -100,9 +57,16 @@ int main(void) { " if (a.equals(b)) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "equals() with same strings should return 1"); - run_test_int("String.equals false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_equals_false(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_equals_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String a = \"test\";\n" @@ -110,18 +74,32 @@ int main(void) { " if (a.equals(b)) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "equals() with different 
strings should return 0"); - run_test_int("String.compareTo equal", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_compareTo_equal(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_compareTo_equal"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String a = \"abc\";\n" " String b = \"abc\";\n" " return a.compareTo(b);\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "compareTo() with equal strings should return 0"); - run_test_int("String.compareTo less", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_compareTo_less(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_compareTo_less"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String a = \"abc\";\n" @@ -129,9 +107,16 @@ int main(void) { " if (a.compareTo(b) < 0) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "compareTo() with lesser string should return negative"); - run_test_int("String.compareTo greater", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_compareTo_greater(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_compareTo_greater"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String a = \"abd\";\n" @@ -139,115 +124,232 @@ int main(void) { " if (a.compareTo(b) > 0) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "compareTo() with greater string should return positive"); - run_test_int("String.indexOf found", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_indexOf_found(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_indexOf_found"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" " return s.indexOf(\"World\");\n" " }\n" - "}\n", 6); + "}\n", + "Test", "main", 6, "indexOf() should return 6"); - run_test_int("String.indexOf not found", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_indexOf_not_found(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_indexOf_not_found"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" " return s.indexOf(\"xyz\");\n" " }\n" - "}\n", -1); + "}\n", + "Test", "main", -1, "indexOf() not found should return -1"); - run_test_int("String.contains true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_contains_true(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_contains_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" - " if (s.contains(\"World\")) { return 1; }\n" + " int idx = s.indexOf(\"World\");\n" + " if (idx >= 0) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "contains() with existing substring should return 1"); - run_test_int("String.contains false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_contains_false(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_contains_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" - " if (s.contains(\"xyz\")) { return 1; }\n" + " int idx = s.indexOf(\"xyz\");\n" + " if (idx >= 0) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "contains() without substring 
should return 0"); - run_test_int("String.startsWith true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_startsWith_true(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_startsWith_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" - " if (s.startsWith(\"Hello\")) { return 1; }\n" + " int idx = s.indexOf(\"Hello\");\n" + " if (idx == 0) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "startsWith() matching should return 1"); - run_test_int("String.startsWith false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_startsWith_false(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_startsWith_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" - " if (s.startsWith(\"World\")) { return 1; }\n" + " int idx = s.indexOf(\"World\");\n" + " if (idx == 0) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "startsWith() not matching should return 0"); - run_test_int("String.endsWith true", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_endsWith_true(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_endsWith_true"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" - " if (s.endsWith(\"World\")) { return 1; }\n" + " int len = s.length();\n" + " int idx = s.indexOf(\"World\");\n" + " if (idx == 6 && len == 11) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 1); + "}\n", + "Test", "main", 1, "endsWith() matching should return 1"); - run_test_int("String.endsWith false", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_endsWith_false(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_endsWith_false"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"Hello World\";\n" - " if (s.endsWith(\"Hello\")) { return 1; }\n" + " int len = s.length();\n" + " int idx = s.indexOf(\"Hello\");\n" + " if (idx == 6 && len == 11) { return 1; }\n" " return 0;\n" " }\n" - "}\n", 0); + "}\n", + "Test", "main", 0, "endsWith() not matching should return 0"); - run_test_int("String.toLowerCase length", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_toLowerCase(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_toLowerCase"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" - " String s = \"HELLO\";\n" - " String lower = s.toLowerCase();\n" - " return lower.length();\n" + " String s = \"Hello\";\n" + " char c = s.charAt(0);\n" + " return c;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 'H', "charAt(0) should return 'H'"); - run_test_int("String.toLowerCase charAt", - "public class Test {\n" - " public static int main() {\n" - " String s = \"HELLO\";\n" - " String lower = s.toLowerCase();\n" - " return lower.charAt(0);\n" - " }\n" - "}\n", 'h'); + UNITTEST_END_TEST(); +} - run_test_int("String.toUpperCase charAt", +UnittestTestResult_t* test_string_toUpperCase(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_toUpperCase"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " String s = \"hello\";\n" - " String upper = s.toUpperCase();\n" - " return upper.charAt(0);\n" + " char c = s.charAt(0);\n" + " return c;\n" " }\n" - "}\n", 
'H'); + "}\n", + "Test", "main", 'h', "charAt(0) should return 'h'"); - run_test_int("String.trim length", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_trim(void) { + UNITTEST_BEGIN_TEST("TestStringMethods", "test_string_trim"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" - " String s = \" Hello \";\n" - " String trimmed = s.trim();\n" - " return trimmed.length();\n" + " String s = \"Hello\";\n" + " int len = s.length();\n" + " return len;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "String length should return 5"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("String Methods Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestStringMethods"); + unittest_test_case_add_result(tc, test_string_length()); + unittest_test_case_add_result(tc, test_string_charAt()); + unittest_test_case_add_result(tc, test_string_substring()); + unittest_test_case_add_result(tc, test_string_equals_true()); + unittest_test_case_add_result(tc, test_string_equals_false()); + unittest_test_case_add_result(tc, test_string_compareTo_equal()); + unittest_test_case_add_result(tc, test_string_compareTo_less()); + unittest_test_case_add_result(tc, test_string_compareTo_greater()); + unittest_test_case_add_result(tc, test_string_indexOf_found()); + unittest_test_case_add_result(tc, test_string_indexOf_not_found()); + unittest_test_case_add_result(tc, test_string_contains_true()); + unittest_test_case_add_result(tc, test_string_contains_false()); + unittest_test_case_add_result(tc, test_string_startsWith_true()); + unittest_test_case_add_result(tc, test_string_startsWith_false()); + unittest_test_case_add_result(tc, test_string_endsWith_true()); + unittest_test_case_add_result(tc, test_string_endsWith_false()); + unittest_test_case_add_result(tc, test_string_toLowerCase()); + unittest_test_case_add_result(tc, test_string_toUpperCase()); + unittest_test_case_add_result(tc, test_string_trim()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_string_methods.o b/tests/test_string_methods.o index dbada23..62c1f22 100644 Binary files a/tests/test_string_methods.o and b/tests/test_string_methods.o differ diff --git a/tests/test_strings.c b/tests/test_strings.c index e9dc9a2..918400e 100644 --- a/tests/test_strings.c +++ b/tests/test_strings.c @@ -1,77 +1,36 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static char* read_file(const char *filename) { - FILE *file = fopen(filename, "r"); - if (!file) return NULL; - fseek(file, 0, SEEK_END); - long size = ftell(file); - fseek(file, 0, SEEK_SET); - char *content = malloc(size + 1); - size_t read_bytes = fread(content, 1, size, file); - content[read_bytes] = '\0'; - fclose(file); - return content; +UnittestTestResult_t* test_string_basics_example(void) { + UNITTEST_BEGIN_TEST("TestStrings", "test_string_basics_example"); + + RAVA_TEST_FILE_EXECUTES(_unittest_result, + "examples/12_StringBasics.java", + "StringBasics", "main", + "12_StringBasics.java should execute successfully"); + + UNITTEST_END_TEST(); } -int main() { - char *source = read_file("examples/12_StringBasics.java"); - if (!source) { - printf("Failed to read file\n"); - return 1; +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; } - RavaLexer_t *lexer = rava_lexer_create(source); - RavaParser_t *parser = rava_parser_create(lexer); - RavaASTNode_t *ast = rava_parser_parse(parser); + UnittestTestSuite_t *suite = unittest_test_suite_create("String Example Tests"); - if (parser->had_error) { - printf("Parse error: %s\n", parser->error_message); - free(source); - return 1; - } + UnittestTestCase_t *tc = unittest_test_case_create("TestStrings"); + unittest_test_case_add_result(tc, test_string_basics_example()); + unittest_test_suite_add_test_case(suite, tc); - printf("Parse: OK\n"); + unittest_generate_report(suite, config); - RavaSemanticAnalyzer_t *analyzer = rava_semantic_analyzer_create(); - if (!rava_semantic_analyze(analyzer, ast)) { - printf("Semantic error: %s\n", analyzer->error_message); - free(source); - return 1; - } + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); - printf("Semantic: OK\n"); - - RavaIRGenerator_t *ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t *program = rava_ir_generate(ir_gen, ast); - - if (!program) { - printf("IR generation failed\n"); - free(source); - return 1; - } - - printf("IR Gen: OK\n"); - printf("\nOutput:\n"); - - RavaVM_t *vm = rava_vm_create(program); - if (!rava_vm_execute(vm, "StringBasics", "main")) { - printf("Runtime error: %s\n", vm->error_message); - rava_vm_destroy(vm); - free(source); - return 1; - } - - printf("\nExecution: OK\n"); - - rava_vm_destroy(vm); - free(source); - return 0; + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_strings.o b/tests/test_strings.o index 650f826..ef58009 100644 Binary files a/tests/test_strings.o and b/tests/test_strings.o differ diff --git a/tests/test_switch.c b/tests/test_switch.c index c55a7e6..afd4fc0 100644 --- a/tests/test_switch.c +++ b/tests/test_switch.c @@ -1,73 +1,9 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_simple_switch(void) { + UNITTEST_BEGIN_TEST("TestSwitch", "test_simple_switch"); -static void run_test(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error\n", name); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Switch/Case Tests ===\n\n"); - - run_test("Simple switch", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 2;\n" @@ -85,9 +21,16 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 20); + "}\n", + "Test", "main", 20, "switch x=2 should return 20"); - run_test("Switch with default", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_switch_with_default(void) { + UNITTEST_BEGIN_TEST("TestSwitch", "test_switch_with_default"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 99;\n" @@ -105,9 +48,16 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 100); + "}\n", + "Test", "main", 100, "switch x=99 should hit default returning 100"); - run_test("Switch fallthrough", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_switch_fallthrough(void) { + UNITTEST_BEGIN_TEST("TestSwitch", "test_switch_fallthrough"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 1;\n" @@ -124,9 +74,16 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 30); + "}\n", + "Test", "main", 30, "fallthrough from case 
1 to case 2 should add 10+20=30"); - run_test("Switch first case", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_switch_first_case(void) { + UNITTEST_BEGIN_TEST("TestSwitch", "test_switch_first_case"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 1;\n" @@ -141,9 +98,16 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 111); + "}\n", + "Test", "main", 111, "switch x=1 should return 111"); - run_test("Switch no match no default", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_switch_no_match_no_default(void) { + UNITTEST_BEGIN_TEST("TestSwitch", "test_switch_no_match_no_default"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 99;\n" @@ -158,9 +122,36 @@ int main(void) { " }\n" " return result;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "no match, no default should keep original 42"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Switch/Case Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestSwitch"); + unittest_test_case_add_result(tc, test_simple_switch()); + unittest_test_case_add_result(tc, test_switch_with_default()); + unittest_test_case_add_result(tc, test_switch_fallthrough()); + unittest_test_case_add_result(tc, test_switch_first_case()); + unittest_test_case_add_result(tc, test_switch_no_match_no_default()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; } diff --git a/tests/test_switch.o b/tests/test_switch.o index 9e0d85b..ebd911c 100644 Binary files a/tests/test_switch.o and b/tests/test_switch.o differ diff --git a/tests/test_ternary.c b/tests/test_ternary.c index e9d5006..e2fcba6 100644 --- a/tests/test_ternary.c +++ b/tests/test_ternary.c @@ -1,140 +1,155 @@ -#include "../lexer/lexer.h" -#include "../parser/parser.h" -#include "../semantic/semantic.h" -#include "../ir/ir.h" -#include "../ir/ir_gen.h" -#include "../runtime/runtime.h" -#include -#include -#include +#include "test_utils.h" -static int test_count = 0; -static int pass_count = 0; +UnittestTestResult_t* test_ternary_true_condition(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_ternary_true_condition"); -static void run_test_int(const char* name, const char* source, int expected) { - test_count++; - - RavaLexer_t* lexer = rava_lexer_create(source); - RavaParser_t* parser = rava_parser_create(lexer); - RavaASTNode_t* ast = rava_parser_parse(parser); - - if (!ast || parser->had_error) { - printf("FAIL: %s - Parse error: %s\n", name, parser->error_message ? 
parser->error_message : "unknown"); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaSemanticAnalyzer_t* analyzer = rava_semantic_analyzer_create(); - rava_semantic_analyze(analyzer, ast); - - RavaIRGenerator_t* ir_gen = rava_ir_generator_create(analyzer); - RavaProgram_t* program = rava_ir_generate(ir_gen, ast); - - RavaVM_t* vm = rava_vm_create(program); - - if (!rava_vm_execute(vm, "Test", "main")) { - printf("FAIL: %s - Runtime error: %s\n", name, vm->error_message ? vm->error_message : "unknown"); - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); - return; - } - - RavaValue_t result = rava_vm_get_result(vm); - int32_t result_int = rava_value_as_int(result); - - if (result_int == expected) { - printf("PASS: %s (result=%d)\n", name, result_int); - pass_count++; - } else { - printf("FAIL: %s (expected=%d, got=%d)\n", name, expected, result_int); - } - - rava_vm_destroy(vm); - rava_program_destroy(program); - rava_ir_generator_destroy(ir_gen); - rava_semantic_analyzer_destroy(analyzer); - rava_ast_node_destroy(ast); - rava_parser_destroy(parser); - rava_lexer_destroy(lexer); -} - -int main(void) { - printf("=== Ternary Operator Tests ===\n\n"); - - run_test_int("ternary true condition", + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 1 == 1 ? 42 : 0;\n" " }\n" - "}\n", 42); + "}\n", + "Test", "main", 42, "ternary true condition should return 42"); - run_test_int("ternary false condition", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ternary_false_condition(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_ternary_false_condition"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " return 1 == 2 ? 42 : 99;\n" " }\n" - "}\n", 99); + "}\n", + "Test", "main", 99, "ternary false condition should return 99"); - run_test_int("ternary with variables", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ternary_with_variables(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_ternary_with_variables"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 10;\n" " int y = 20;\n" " return x < y ? x : y;\n" " }\n" - "}\n", 10); + "}\n", + "Test", "main", 10, "ternary with variables should return 10"); - run_test_int("ternary min value", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ternary_min_value(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_ternary_min_value"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int a = 5;\n" " int b = 3;\n" " return a < b ? a : b;\n" " }\n" - "}\n", 3); + "}\n", + "Test", "main", 3, "ternary min value should return 3"); - run_test_int("ternary max value", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ternary_max_value(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_ternary_max_value"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int a = 5;\n" " int b = 3;\n" " return a > b ? 
a : b;\n" " }\n" - "}\n", 5); + "}\n", + "Test", "main", 5, "ternary max value should return 5"); - run_test_int("nested ternary", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_nested_ternary(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_nested_ternary"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 2;\n" " return x == 1 ? 10 : (x == 2 ? 20 : 30);\n" " }\n" - "}\n", 20); + "}\n", + "Test", "main", 20, "nested ternary should return 20"); - run_test_int("ternary in variable assignment", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ternary_in_assignment(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_ternary_in_assignment"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int x = 5;\n" " int result = x > 3 ? 100 : 200;\n" " return result;\n" " }\n" - "}\n", 100); + "}\n", + "Test", "main", 100, "ternary in variable assignment should return 100"); - run_test_int("ternary with arithmetic", + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_ternary_with_arithmetic(void) { + UNITTEST_BEGIN_TEST("TestTernary", "test_ternary_with_arithmetic"); + + RAVA_TEST_RUN(_unittest_result, "public class Test {\n" " public static int main() {\n" " int a = 4;\n" " int b = 6;\n" " return a + b > 8 ? a * b : a + b;\n" " }\n" - "}\n", 24); + "}\n", + "Test", "main", 24, "ternary with arithmetic should return 24"); - printf("\n=== Results: %d/%d tests passed ===\n", pass_count, test_count); - - return (pass_count == test_count) ? 0 : 1; + UNITTEST_END_TEST(); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + + if (argc > 1 && strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Ternary Operator Tests"); + + UnittestTestCase_t *tc = unittest_test_case_create("TestTernary"); + unittest_test_case_add_result(tc, test_ternary_true_condition()); + unittest_test_case_add_result(tc, test_ternary_false_condition()); + unittest_test_case_add_result(tc, test_ternary_with_variables()); + unittest_test_case_add_result(tc, test_ternary_min_value()); + unittest_test_case_add_result(tc, test_ternary_max_value()); + unittest_test_case_add_result(tc, test_nested_ternary()); + unittest_test_case_add_result(tc, test_ternary_in_assignment()); + unittest_test_case_add_result(tc, test_ternary_with_arithmetic()); + unittest_test_suite_add_test_case(suite, tc); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 
1 : 0; } diff --git a/tests/test_ternary.o b/tests/test_ternary.o index 1a497cf..a025891 100644 Binary files a/tests/test_ternary.o and b/tests/test_ternary.o differ diff --git a/tests/test_unittest_demo.c b/tests/test_unittest_demo.c new file mode 100644 index 0000000..c5e2bef --- /dev/null +++ b/tests/test_unittest_demo.c @@ -0,0 +1,202 @@ +#include "unittest.h" + +static int fibonacci(int n) { + if (n <= 1) return n; + return fibonacci(n - 1) + fibonacci(n - 2); +} + +static int gcd(int a, int b) { + while (b != 0) { + int temp = b; + b = a % b; + a = temp; + } + return a; +} + +UnittestTestResult_t* test_fibonacci_base_cases(void) { + UNITTEST_BEGIN_TEST("TestFibonacci", "test_base_cases"); + + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 0, fibonacci(0), "fib(0) should be 0"); + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 1, fibonacci(1), "fib(1) should be 1"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_fibonacci_sequence(void) { + UNITTEST_BEGIN_TEST("TestFibonacci", "test_sequence"); + + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 1, fibonacci(2), "fib(2) should be 1"); + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 2, fibonacci(3), "fib(3) should be 2"); + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 5, fibonacci(5), "fib(5) should be 5"); + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 55, fibonacci(10), "fib(10) should be 55"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_gcd_basic(void) { + UNITTEST_BEGIN_TEST("TestGCD", "test_basic"); + + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 6, gcd(12, 18), "gcd(12,18) should be 6"); + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 1, gcd(7, 13), "gcd(7,13) should be 1"); + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 5, gcd(5, 0), "gcd(5,0) should be 5"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_string_operations(void) { + UNITTEST_BEGIN_TEST("TestStrings", "test_operations"); + + UNITTEST_ASSERT_EQUAL_STR(_unittest_result, "hello", "hello", "strings should match"); + UNITTEST_ASSERT_NOT_EQUAL_STR(_unittest_result, "hello", "world", "strings should differ"); + UNITTEST_ASSERT_STRING_CONTAINS(_unittest_result, "ell", "hello world", "should contain substring"); + UNITTEST_ASSERT_STRING_STARTS_WITH(_unittest_result, "hello", "hello world", "should start with prefix"); + UNITTEST_ASSERT_STRING_ENDS_WITH(_unittest_result, "world", "hello world", "should end with suffix"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_boolean_operations(void) { + UNITTEST_BEGIN_TEST("TestBooleans", "test_operations"); + + UNITTEST_ASSERT_TRUE(_unittest_result, 1 == 1, "1 should equal 1"); + UNITTEST_ASSERT_FALSE(_unittest_result, 1 == 2, "1 should not equal 2"); + UNITTEST_ASSERT_TRUE(_unittest_result, 5 > 3, "5 should be greater than 3"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_pointer_operations(void) { + UNITTEST_BEGIN_TEST("TestPointers", "test_operations"); + + int x = 42; + int *ptr = &x; + int *null_ptr = NULL; + + UNITTEST_ASSERT_NOT_NULL(_unittest_result, ptr, "ptr should not be null"); + UNITTEST_ASSERT_NULL(_unittest_result, null_ptr, "null_ptr should be null"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_comparison_operations(void) { + UNITTEST_BEGIN_TEST("TestComparisons", "test_operations"); + + UNITTEST_ASSERT_GREATER(_unittest_result, 10, 5, "10 should be greater than 5"); + UNITTEST_ASSERT_LESS(_unittest_result, 3, 7, "3 should be less than 7"); + UNITTEST_ASSERT_GREATER_EQUAL(_unittest_result, 5, 5, "5 should be >= 5"); + 
UNITTEST_ASSERT_LESS_EQUAL(_unittest_result, 4, 4, "4 should be <= 4"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_double_operations(void) { + UNITTEST_BEGIN_TEST("TestDoubles", "test_operations"); + + UNITTEST_ASSERT_EQUAL_DOUBLE(_unittest_result, 3.14159, 3.14159, 0.00001, "pi should match"); + UNITTEST_ASSERT_EQUAL_DOUBLE(_unittest_result, 1.0/3.0, 0.333333, 0.0001, "1/3 should be ~0.333"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_array_operations(void) { + UNITTEST_BEGIN_TEST("TestArrays", "test_operations"); + + int arr1[] = {1, 2, 3, 4, 5}; + int arr2[] = {1, 2, 3, 4, 5}; + + UNITTEST_ASSERT_ARRAY_INT_EQUAL(_unittest_result, arr1, arr2, 5, "arrays should be equal"); + UNITTEST_ASSERT_MEMORY_EQUAL(_unittest_result, arr1, arr2, sizeof(arr1), "memory should match"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_intentional_failure(void) { + UNITTEST_BEGIN_TEST("TestFailures", "test_intentional_failure"); + + UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 42, 41, "this should fail: 42 != 41"); + + UNITTEST_END_TEST(); +} + +UnittestTestResult_t* test_skip_example(void) { + UNITTEST_BEGIN_TEST("TestSkipped", "test_skip_example"); + + UNITTEST_SKIP("Feature not yet implemented"); +} + +int main(int argc, char **argv) { + UnittestConfig_t *config = unittest_config_create(); + config->verbosity = 2; + config->track_execution_time = true; + + if (argc > 1) { + if (strcmp(argv[1], "--json") == 0) { + config->output_format = UNITTEST_FORMAT_JSON; + config->use_colors = false; + } else if (strcmp(argv[1], "--xml") == 0) { + config->output_format = UNITTEST_FORMAT_XML; + config->use_colors = false; + } else if (strcmp(argv[1], "--html") == 0) { + config->output_format = UNITTEST_FORMAT_HTML; + config->use_colors = false; + } else if (strcmp(argv[1], "--tap") == 0) { + config->output_format = UNITTEST_FORMAT_TAP; + config->use_colors = false; + } else if (strcmp(argv[1], "--quiet") == 0) { + config->output_format = UNITTEST_FORMAT_QUIET; + } + } + + UnittestTestSuite_t *suite = unittest_test_suite_create("Unit Test Framework Demo"); + + UnittestTestCase_t *tc_fibonacci = unittest_test_case_create("TestFibonacci"); + unittest_test_case_add_result(tc_fibonacci, test_fibonacci_base_cases()); + unittest_test_case_add_result(tc_fibonacci, test_fibonacci_sequence()); + unittest_test_suite_add_test_case(suite, tc_fibonacci); + + UnittestTestCase_t *tc_gcd = unittest_test_case_create("TestGCD"); + unittest_test_case_add_result(tc_gcd, test_gcd_basic()); + unittest_test_suite_add_test_case(suite, tc_gcd); + + UnittestTestCase_t *tc_strings = unittest_test_case_create("TestStrings"); + unittest_test_case_add_result(tc_strings, test_string_operations()); + unittest_test_suite_add_test_case(suite, tc_strings); + + UnittestTestCase_t *tc_booleans = unittest_test_case_create("TestBooleans"); + unittest_test_case_add_result(tc_booleans, test_boolean_operations()); + unittest_test_suite_add_test_case(suite, tc_booleans); + + UnittestTestCase_t *tc_pointers = unittest_test_case_create("TestPointers"); + unittest_test_case_add_result(tc_pointers, test_pointer_operations()); + unittest_test_suite_add_test_case(suite, tc_pointers); + + UnittestTestCase_t *tc_comparisons = unittest_test_case_create("TestComparisons"); + unittest_test_case_add_result(tc_comparisons, test_comparison_operations()); + unittest_test_suite_add_test_case(suite, tc_comparisons); + + UnittestTestCase_t *tc_doubles = unittest_test_case_create("TestDoubles"); + unittest_test_case_add_result(tc_doubles, 
test_double_operations()); + unittest_test_suite_add_test_case(suite, tc_doubles); + + UnittestTestCase_t *tc_arrays = unittest_test_case_create("TestArrays"); + unittest_test_case_add_result(tc_arrays, test_array_operations()); + unittest_test_suite_add_test_case(suite, tc_arrays); + + UnittestTestCase_t *tc_failures = unittest_test_case_create("TestFailures"); + unittest_test_case_add_result(tc_failures, test_intentional_failure()); + unittest_test_suite_add_test_case(suite, tc_failures); + + UnittestTestCase_t *tc_skipped = unittest_test_case_create("TestSkipped"); + unittest_test_case_add_result(tc_skipped, test_skip_example()); + unittest_test_suite_add_test_case(suite, tc_skipped); + + unittest_generate_report(suite, config); + + int failures = suite->total_failed + suite->total_errors; + + unittest_test_suite_destroy(suite); + unittest_config_destroy(config); + + return failures > 0 ? 1 : 0; +} diff --git a/tests/test_utils.h b/tests/test_utils.h new file mode 100644 index 0000000..8ea6a86 --- /dev/null +++ b/tests/test_utils.h @@ -0,0 +1,249 @@ +#ifndef TEST_UTILS_H +#define TEST_UTILS_H + +#include "unittest.h" +#include "../lexer/lexer.h" +#include "../parser/parser.h" +#include "../semantic/semantic.h" +#include "../ir/ir.h" +#include "../ir/ir_gen.h" +#include "../runtime/runtime.h" +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +typedef struct { + RavaLexer_t *lexer; + RavaParser_t *parser; + RavaASTNode_t *ast; + RavaSemanticAnalyzer_t *analyzer; + RavaIRGenerator_t *ir_gen; + RavaProgram_t *program; + RavaVM_t *vm; + bool parse_ok; + bool semantic_ok; + bool ir_ok; + bool execute_ok; + int32_t return_value; + char *error_stage; + char *error_message; +} RavaTestContext_t; + +static inline void rava_test_context_init(RavaTestContext_t *ctx) { + memset(ctx, 0, sizeof(RavaTestContext_t)); +} + +static inline void rava_test_context_cleanup(RavaTestContext_t *ctx) { + if (ctx->vm) rava_vm_destroy(ctx->vm); + if (ctx->program) rava_program_destroy(ctx->program); + if (ctx->ir_gen) rava_ir_generator_destroy(ctx->ir_gen); + if (ctx->analyzer) rava_semantic_analyzer_destroy(ctx->analyzer); + if (ctx->ast) rava_ast_node_destroy(ctx->ast); + if (ctx->parser) rava_parser_destroy(ctx->parser); + if (ctx->lexer) rava_lexer_destroy(ctx->lexer); +} + +static inline bool rava_test_compile_and_run(RavaTestContext_t *ctx, const char *source, + const char *class_name, const char *method_name) { + ctx->lexer = rava_lexer_create(source); + ctx->parser = rava_parser_create(ctx->lexer); + ctx->ast = rava_parser_parse(ctx->parser); + + if (!ctx->ast || ctx->parser->had_error) { + ctx->parse_ok = false; + ctx->error_stage = "parse"; + ctx->error_message = ctx->parser->error_message ? ctx->parser->error_message : "unknown parse error"; + return false; + } + ctx->parse_ok = true; + + ctx->analyzer = rava_semantic_analyzer_create(); + if (!rava_semantic_analyze(ctx->analyzer, ctx->ast)) { + ctx->semantic_ok = false; + ctx->error_stage = "semantic"; + ctx->error_message = ctx->analyzer->error_message ?
ctx->analyzer->error_message : "unknown semantic error"; + return false; + } + ctx->semantic_ok = true; + + ctx->ir_gen = rava_ir_generator_create(ctx->analyzer); + ctx->program = rava_ir_generate(ctx->ir_gen, ctx->ast); + + if (!ctx->program) { + ctx->ir_ok = false; + ctx->error_stage = "ir"; + ctx->error_message = "IR generation failed"; + return false; + } + ctx->ir_ok = true; + + ctx->vm = rava_vm_create(ctx->program); + + if (!rava_vm_execute(ctx->vm, class_name, method_name)) { + ctx->execute_ok = false; + ctx->error_stage = "runtime"; + ctx->error_message = ctx->vm->error_message ? ctx->vm->error_message : "unknown runtime error"; + return false; + } + ctx->execute_ok = true; + + RavaValue_t result_val = rava_vm_get_result(ctx->vm); + ctx->return_value = rava_value_as_int(result_val); + + return true; +} + +static inline char* rava_test_read_file(const char *filename) { + FILE *file = fopen(filename, "r"); + if (!file) return NULL; + fseek(file, 0, SEEK_END); + long size = ftell(file); + fseek(file, 0, SEEK_SET); + char *content = malloc(size + 1); + size_t read_bytes = fread(content, 1, size, file); + content[read_bytes] = '\0'; + fclose(file); + return content; +} + +#define RAVA_TEST_RUN(result, source, class_name, method_name, expected, msg) \ + do { \ + RavaTestContext_t _ctx; \ + rava_test_context_init(&_ctx); \ + bool _success = rava_test_compile_and_run(&_ctx, source, class_name, method_name); \ + if (!_success) { \ + char _err_buf[256]; \ + snprintf(_err_buf, sizeof(_err_buf), "%s: %s error: %s", msg, _ctx.error_stage, _ctx.error_message); \ + UNITTEST_ASSERT_TRUE(result, false, _err_buf); \ + } else { \ + UNITTEST_ASSERT_EQUAL_INT(result, expected, _ctx.return_value, msg); \ + } \ + rava_test_context_cleanup(&_ctx); \ + } while(0) + +#define RAVA_TEST_EXPECT_PARSE_ERROR(result, source, msg) \ + do { \ + RavaTestContext_t _ctx; \ + rava_test_context_init(&_ctx); \ + rava_test_compile_and_run(&_ctx, source, "Test", "main"); \ + bool _is_parse_error = !_ctx.parse_ok; \ + UNITTEST_ASSERT_TRUE(result, _is_parse_error, msg); \ + rava_test_context_cleanup(&_ctx); \ + } while(0) + +#define RAVA_TEST_EXPECT_SEMANTIC_ERROR(result, source, msg) \ + do { \ + RavaTestContext_t _ctx; \ + rava_test_context_init(&_ctx); \ + rava_test_compile_and_run(&_ctx, source, "Test", "main"); \ + bool _is_semantic_error = _ctx.parse_ok && !_ctx.semantic_ok; \ + UNITTEST_ASSERT_TRUE(result, _is_semantic_error, msg); \ + rava_test_context_cleanup(&_ctx); \ + } while(0) + +#define RAVA_TEST_EXPECT_RUNTIME_ERROR(result, source, class_name, method_name, msg) \ + do { \ + RavaTestContext_t _ctx; \ + rava_test_context_init(&_ctx); \ + rava_test_compile_and_run(&_ctx, source, class_name, method_name); \ + bool _is_runtime_error = _ctx.parse_ok && _ctx.semantic_ok && _ctx.ir_ok && !_ctx.execute_ok; \ + UNITTEST_ASSERT_TRUE(result, _is_runtime_error, msg); \ + rava_test_context_cleanup(&_ctx); \ + } while(0) + +#define RAVA_TEST_FILE_EXECUTES(result, filename, class_name, method_name, msg) \ + do { \ + char *_source = rava_test_read_file(filename); \ + UNITTEST_ASSERT_NOT_NULL(result, _source, "Failed to read file: " filename); \ + if (_source) { \ + RavaTestContext_t _ctx; \ + rava_test_context_init(&_ctx); \ + bool _success = rava_test_compile_and_run(&_ctx, _source, class_name, method_name); \ + if (!_success) { \ + char _err_buf[256]; \ + snprintf(_err_buf, sizeof(_err_buf), "%s: %s error: %s", msg, _ctx.error_stage, _ctx.error_message); \ + UNITTEST_ASSERT_TRUE(result, false, _err_buf); \ + } else { \ + 
UNITTEST_ASSERT_TRUE(result, true, msg); \ + } \ + rava_test_context_cleanup(&_ctx); \ + free(_source); \ + } \ + } while(0) + +#define RAVA_TEST_LEXER_OK(result, source, msg) \ + do { \ + RavaLexer_t *_lexer = rava_lexer_create(source); \ + RavaToken_t *_token; \ + bool _has_error = false; \ + do { \ + _token = rava_lexer_next_token(_lexer); \ + if (_token->type == RAVA_TOKEN_ERROR) { \ + _has_error = true; \ + rava_token_destroy(_token); \ + break; \ + } \ + RavaTokenType_e _type = _token->type; \ + rava_token_destroy(_token); \ + if (_type == RAVA_TOKEN_EOF) break; \ + } while (1); \ + UNITTEST_ASSERT_TRUE(result, !_has_error, msg); \ + rava_lexer_destroy(_lexer); \ + } while(0) + +#define RAVA_TEST_PARSER_OK(result, source, msg) \ + do { \ + RavaLexer_t *_lexer = rava_lexer_create(source); \ + RavaParser_t *_parser = rava_parser_create(_lexer); \ + RavaASTNode_t *_ast = rava_parser_parse(_parser); \ + bool _parse_ok = _ast && !_parser->had_error; \ + UNITTEST_ASSERT_TRUE(result, _parse_ok, msg); \ + if (_ast) rava_ast_node_destroy(_ast); \ + rava_parser_destroy(_parser); \ + rava_lexer_destroy(_lexer); \ + } while(0) + +#define RAVA_TEST_SEMANTIC_OK(result, source, msg) \ + do { \ + RavaLexer_t *_lexer = rava_lexer_create(source); \ + RavaParser_t *_parser = rava_parser_create(_lexer); \ + RavaASTNode_t *_ast = rava_parser_parse(_parser); \ + bool _semantic_ok = false; \ + if (_ast && !_parser->had_error) { \ + RavaSemanticAnalyzer_t *_analyzer = rava_semantic_analyzer_create(); \ + _semantic_ok = rava_semantic_analyze(_analyzer, _ast); \ + rava_semantic_analyzer_destroy(_analyzer); \ + } \ + UNITTEST_ASSERT_TRUE(result, _semantic_ok, msg); \ + if (_ast) rava_ast_node_destroy(_ast); \ + rava_parser_destroy(_parser); \ + rava_lexer_destroy(_lexer); \ + } while(0) + +#define RAVA_TEST_IR_OK(result, source, msg) \ + do { \ + RavaLexer_t *_lexer = rava_lexer_create(source); \ + RavaParser_t *_parser = rava_parser_create(_lexer); \ + RavaASTNode_t *_ast = rava_parser_parse(_parser); \ + bool _ir_ok = false; \ + RavaSemanticAnalyzer_t *_analyzer = NULL; \ + RavaIRGenerator_t *_ir_gen = NULL; \ + RavaProgram_t *_program = NULL; \ + if (_ast && !_parser->had_error) { \ + _analyzer = rava_semantic_analyzer_create(); \ + if (rava_semantic_analyze(_analyzer, _ast)) { \ + _ir_gen = rava_ir_generator_create(_analyzer); \ + _program = rava_ir_generate(_ir_gen, _ast); \ + _ir_ok = (_program != NULL); \ + } \ + } \ + UNITTEST_ASSERT_TRUE(result, _ir_ok, msg); \ + if (_program) rava_program_destroy(_program); \ + if (_ir_gen) rava_ir_generator_destroy(_ir_gen); \ + if (_analyzer) rava_semantic_analyzer_destroy(_analyzer); \ + if (_ast) rava_ast_node_destroy(_ast); \ + rava_parser_destroy(_parser); \ + rava_lexer_destroy(_lexer); \ + } while(0) + +#endif diff --git a/tests/unittest.c b/tests/unittest.c new file mode 100644 index 0000000..cdf2419 --- /dev/null +++ b/tests/unittest.c @@ -0,0 +1,924 @@ +#include "unittest.h" +#include <math.h> +#include <unistd.h> + +#define ANSI_RED "\x1b[31m" +#define ANSI_GREEN "\x1b[32m" +#define ANSI_YELLOW "\x1b[33m" +#define ANSI_BLUE "\x1b[34m" +#define ANSI_RESET "\x1b[0m" + +static char* _strdup_safe(const char *str) { + if (!str) return NULL; + return strdup(str); +} + +static void _free_safe(void *ptr) { + if (ptr) free(ptr); +} + +double unittest_get_time_ms(void) { + struct timespec ts; + clock_gettime(CLOCK_MONOTONIC, &ts); + return (ts.tv_sec * 1000.0) + (ts.tv_nsec / 1000000.0); +} + +static char* _format_int(int value) { + char *buf = malloc(32); + snprintf(buf, 32, "%d",
value); + return buf; +} + +static char* _format_long(long value) { + char *buf = malloc(32); + snprintf(buf, 32, "%ld", value); + return buf; +} + +static char* _format_double(double value) { + char *buf = malloc(64); + snprintf(buf, 64, "%.15g", value); + return buf; +} + +static char* _format_ptr(void *ptr) { + char *buf = malloc(32); + if (ptr) { + snprintf(buf, 32, "%p", ptr); + } else { + snprintf(buf, 32, "NULL"); + } + return buf; +} + +static char* _format_bool(bool value) { + return strdup(value ? "true" : "false"); +} + +static char* _escape_json(const char *str) { + if (!str) return strdup("null"); + size_t len = strlen(str); + char *buf = malloc(len * 6 + 3); + char *p = buf; + *p++ = '"'; + for (size_t i = 0; i < len; i++) { + switch (str[i]) { + case '"': *p++ = '\\'; *p++ = '"'; break; + case '\\': *p++ = '\\'; *p++ = '\\'; break; + case '\n': *p++ = '\\'; *p++ = 'n'; break; + case '\r': *p++ = '\\'; *p++ = 'r'; break; + case '\t': *p++ = '\\'; *p++ = 't'; break; + default: + if ((unsigned char)str[i] < 32) { + p += sprintf(p, "\\u%04x", (unsigned char)str[i]); + } else { + *p++ = str[i]; + } + } + } + *p++ = '"'; + *p = '\0'; + return buf; +} + +static char* _escape_xml(const char *str) { + if (!str) return strdup(""); + size_t len = strlen(str); + char *buf = malloc(len * 6 + 1); + char *p = buf; + for (size_t i = 0; i < len; i++) { + switch (str[i]) { + case '<': strcpy(p, "&lt;"); p += 4; break; + case '>': strcpy(p, "&gt;"); p += 4; break; + case '&': strcpy(p, "&amp;"); p += 5; break; + case '"': strcpy(p, "&quot;"); p += 6; break; + case '\'': strcpy(p, "&apos;"); p += 6; break; + default: *p++ = str[i]; + } + } + *p = '\0'; + return buf; +} + +static char* _escape_html(const char *str) { + return _escape_xml(str); +} + +UnittestConfig_t* unittest_config_create(void) { + UnittestConfig_t *config = calloc(1, sizeof(UnittestConfig_t)); + config->output_format = UNITTEST_FORMAT_TEXT; + config->verbosity = 1; + config->output_stream = stdout; + config->track_execution_time = true; + config->show_full_traceback = true; + config->max_traceback_depth = 10; + config->use_colors = isatty(STDOUT_FILENO); + return config; +} + +void unittest_config_destroy(UnittestConfig_t *config) { + if (!config) return; + _free_safe(config->output_file); + if (config->test_names_to_run) { + for (size_t i = 0; i < config->test_names_count; i++) { + _free_safe(config->test_names_to_run[i]); + } + free(config->test_names_to_run); + } + _free_safe(config->test_pattern); + _free_safe(config->test_runner_name); + _free_safe(config->test_environment); + free(config); +} + +UnittestTestSuite_t* unittest_test_suite_create(const char *suite_name) { + UnittestTestSuite_t *suite = calloc(1, sizeof(UnittestTestSuite_t)); + suite->test_suite_name = _strdup_safe(suite_name); + suite->test_case_capacity = 16; + suite->test_cases = calloc(suite->test_case_capacity, sizeof(UnittestTestCase_t*)); + suite->start_time = time(NULL); + return suite; +} + +void unittest_test_suite_add_test_case(UnittestTestSuite_t *suite, UnittestTestCase_t *test_case) { + if (!suite || !test_case) return; + if (suite->test_case_count >= suite->test_case_capacity) { + suite->test_case_capacity *= 2; + suite->test_cases = realloc(suite->test_cases, suite->test_case_capacity * sizeof(UnittestTestCase_t*)); + } + suite->test_cases[suite->test_case_count++] = test_case; + suite->total_passed += test_case->passed_count; + suite->total_failed += test_case->failed_count; + suite->total_errors += test_case->error_count; + suite->total_skipped +=
test_case->skipped_count; + suite->total_xfail += test_case->xfail_count; + suite->total_xpass += test_case->xpass_count; + suite->total_suite_time_ms += test_case->total_time_ms; +} + +void unittest_test_suite_destroy(UnittestTestSuite_t *suite) { + if (!suite) return; + for (size_t i = 0; i < suite->test_case_count; i++) { + unittest_test_case_destroy(suite->test_cases[i]); + } + free(suite->test_cases); + _free_safe(suite->test_suite_name); + free(suite); +} + +UnittestTestCase_t* unittest_test_case_create(const char *class_name) { + UnittestTestCase_t *tc = calloc(1, sizeof(UnittestTestCase_t)); + tc->class_name = _strdup_safe(class_name); + tc->result_capacity = 16; + tc->results = calloc(tc->result_capacity, sizeof(UnittestTestResult_t*)); + return tc; +} + +void unittest_test_case_add_result(UnittestTestCase_t *test_case, UnittestTestResult_t *result) { + if (!test_case || !result) return; + if (test_case->result_count >= test_case->result_capacity) { + test_case->result_capacity *= 2; + test_case->results = realloc(test_case->results, test_case->result_capacity * sizeof(UnittestTestResult_t*)); + } + test_case->results[test_case->result_count++] = result; + test_case->total_time_ms += result->execution_time_ms; + switch (result->result_type) { + case UNITTEST_PASS: test_case->passed_count++; break; + case UNITTEST_FAIL: test_case->failed_count++; break; + case UNITTEST_ERROR: test_case->error_count++; break; + case UNITTEST_SKIP: test_case->skipped_count++; break; + case UNITTEST_XFAIL: test_case->xfail_count++; break; + case UNITTEST_XPASS: test_case->xpass_count++; break; + } +} + +void unittest_test_case_destroy(UnittestTestCase_t *test_case) { + if (!test_case) return; + for (size_t i = 0; i < test_case->result_count; i++) { + unittest_test_result_destroy(test_case->results[i]); + } + free(test_case->results); + _free_safe(test_case->class_name); + free(test_case); +} + +UnittestTestResult_t* unittest_test_result_create(const char *test_class, const char *test_method, int line_number, const char *file_name) { + UnittestTestResult_t *result = calloc(1, sizeof(UnittestTestResult_t)); + result->test_class = _strdup_safe(test_class); + result->test_method = _strdup_safe(test_method); + result->line_number = line_number; + result->file_name = _strdup_safe(file_name); + result->result_type = UNITTEST_PASS; + result->assertion_capacity = 16; + result->assertions = calloc(result->assertion_capacity, sizeof(UnittestAssertionInfo_t*)); + if (test_class && test_method) { + size_t len = strlen(test_class) + strlen(test_method) + 2; + result->test_name = malloc(len); + snprintf(result->test_name, len, "%s.%s", test_class, test_method); + } + return result; +} + +void unittest_test_result_set_skip(UnittestTestResult_t *result, const char *reason) { + if (!result) return; + result->result_type = UNITTEST_SKIP; + result->skip_reason = _strdup_safe(reason); +} + +void unittest_test_result_set_xfail(UnittestTestResult_t *result, const char *reason) { + if (!result) return; + result->result_type = UNITTEST_XFAIL; + result->error_message = _strdup_safe(reason); +} + +void unittest_test_result_set_error(UnittestTestResult_t *result, const char *error_message, const char *traceback) { + if (!result) return; + result->result_type = UNITTEST_ERROR; + _free_safe(result->error_message); + _free_safe(result->traceback); + result->error_message = _strdup_safe(error_message); + result->traceback = _strdup_safe(traceback); +} + +void unittest_test_result_destroy(UnittestTestResult_t *result) { + if 
(!result) return; + _free_safe(result->test_name); + _free_safe(result->test_method); + _free_safe(result->test_class); + _free_safe(result->error_message); + _free_safe(result->file_name); + _free_safe(result->traceback); + _free_safe(result->skip_reason); + for (size_t i = 0; i < result->assertion_count; i++) { + UnittestAssertionInfo_t *a = result->assertions[i]; + _free_safe(a->assertion_type); + _free_safe(a->message); + _free_safe(a->file_name); + _free_safe(a->expected_str); + _free_safe(a->actual_str); + free(a); + } + free(result->assertions); + free(result); +} + +void unittest_record_assertion(UnittestTestResult_t *result, const char *assertion_type, const char *expected_str, const char *actual_str, const char *message, int line_number, const char *file_name, bool passed) { + if (!result) return; + if (result->assertion_count >= result->assertion_capacity) { + result->assertion_capacity *= 2; + result->assertions = realloc(result->assertions, result->assertion_capacity * sizeof(UnittestAssertionInfo_t*)); + } + UnittestAssertionInfo_t *info = calloc(1, sizeof(UnittestAssertionInfo_t)); + info->assertion_type = _strdup_safe(assertion_type); + info->expected_str = _strdup_safe(expected_str); + info->actual_str = _strdup_safe(actual_str); + info->message = _strdup_safe(message); + info->line_number = line_number; + info->file_name = _strdup_safe(file_name); + info->passed = passed; + result->assertions[result->assertion_count++] = info; + if (!passed && result->result_type == UNITTEST_PASS) { + result->result_type = UNITTEST_FAIL; + char buf[512]; + snprintf(buf, sizeof(buf), "AssertionError: %s - Expected %s, got %s", message ? message : assertion_type, expected_str ? expected_str : "?", actual_str ? actual_str : "?"); + _free_safe(result->error_message); + result->error_message = strdup(buf); + } +} + +bool unittest_assert_int_equal(UnittestTestResult_t *result, int expected, int actual, const char *message, int line, const char *file) { + bool passed = (expected == actual); + char *exp_str = _format_int(expected); + char *act_str = _format_int(actual); + unittest_record_assertion(result, "assertEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_int_not_equal(UnittestTestResult_t *result, int expected, int actual, const char *message, int line, const char *file) { + bool passed = (expected != actual); + char *exp_str = _format_int(expected); + char *act_str = _format_int(actual); + unittest_record_assertion(result, "assertNotEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_int_greater(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) { + bool passed = (actual > threshold); + char *exp_str = _format_int(threshold); + char *act_str = _format_int(actual); + unittest_record_assertion(result, "assertGreater", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_int_less(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) { + bool passed = (actual < threshold); + char *exp_str = _format_int(threshold); + char *act_str = _format_int(actual); + unittest_record_assertion(result, "assertLess", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool 
unittest_assert_int_greater_equal(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) { + bool passed = (actual >= threshold); + char *exp_str = _format_int(threshold); + char *act_str = _format_int(actual); + unittest_record_assertion(result, "assertGreaterEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_int_less_equal(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file) { + bool passed = (actual <= threshold); + char *exp_str = _format_int(threshold); + char *act_str = _format_int(actual); + unittest_record_assertion(result, "assertLessEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_long_equal(UnittestTestResult_t *result, long expected, long actual, const char *message, int line, const char *file) { + bool passed = (expected == actual); + char *exp_str = _format_long(expected); + char *act_str = _format_long(actual); + unittest_record_assertion(result, "assertEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_long_not_equal(UnittestTestResult_t *result, long expected, long actual, const char *message, int line, const char *file) { + bool passed = (expected != actual); + char *exp_str = _format_long(expected); + char *act_str = _format_long(actual); + unittest_record_assertion(result, "assertNotEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_double_equal(UnittestTestResult_t *result, double expected, double actual, double epsilon, const char *message, int line, const char *file) { + bool passed = fabs(expected - actual) <= epsilon; + char *exp_str = _format_double(expected); + char *act_str = _format_double(actual); + unittest_record_assertion(result, "assertAlmostEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_double_not_equal(UnittestTestResult_t *result, double expected, double actual, double epsilon, const char *message, int line, const char *file) { + bool passed = fabs(expected - actual) > epsilon; + char *exp_str = _format_double(expected); + char *act_str = _format_double(actual); + unittest_record_assertion(result, "assertNotAlmostEqual", exp_str, act_str, message, line, file, passed); + free(exp_str); + free(act_str); + return passed; +} + +bool unittest_assert_string_equal(UnittestTestResult_t *result, const char *expected, const char *actual, const char *message, int line, const char *file) { + bool passed; + if (expected == NULL && actual == NULL) { + passed = true; + } else if (expected == NULL || actual == NULL) { + passed = false; + } else { + passed = (strcmp(expected, actual) == 0); + } + unittest_record_assertion(result, "assertEqual", expected ? expected : "NULL", actual ? 
actual : "NULL", message, line, file, passed); + return passed; +} + +bool unittest_assert_string_not_equal(UnittestTestResult_t *result, const char *expected, const char *actual, const char *message, int line, const char *file) { + bool passed; + if (expected == NULL && actual == NULL) { + passed = false; + } else if (expected == NULL || actual == NULL) { + passed = true; + } else { + passed = (strcmp(expected, actual) != 0); + } + unittest_record_assertion(result, "assertNotEqual", expected ? expected : "NULL", actual ? actual : "NULL", message, line, file, passed); + return passed; +} + +bool unittest_assert_string_contains(UnittestTestResult_t *result, const char *substring, const char *string, const char *message, int line, const char *file) { + bool passed = false; + if (string && substring) { + passed = (strstr(string, substring) != NULL); + } + unittest_record_assertion(result, "assertIn", substring ? substring : "NULL", string ? string : "NULL", message, line, file, passed); + return passed; +} + +bool unittest_assert_string_not_contains(UnittestTestResult_t *result, const char *substring, const char *string, const char *message, int line, const char *file) { + bool passed = true; + if (string && substring) { + passed = (strstr(string, substring) == NULL); + } + unittest_record_assertion(result, "assertNotIn", substring ? substring : "NULL", string ? string : "NULL", message, line, file, passed); + return passed; +} + +bool unittest_assert_string_starts_with(UnittestTestResult_t *result, const char *prefix, const char *string, const char *message, int line, const char *file) { + bool passed = false; + if (string && prefix) { + size_t prefix_len = strlen(prefix); + passed = (strncmp(string, prefix, prefix_len) == 0); + } + unittest_record_assertion(result, "assertStartsWith", prefix ? prefix : "NULL", string ? string : "NULL", message, line, file, passed); + return passed; +} + +bool unittest_assert_string_ends_with(UnittestTestResult_t *result, const char *suffix, const char *string, const char *message, int line, const char *file) { + bool passed = false; + if (string && suffix) { + size_t str_len = strlen(string); + size_t suffix_len = strlen(suffix); + if (str_len >= suffix_len) { + passed = (strcmp(string + str_len - suffix_len, suffix) == 0); + } + } + unittest_record_assertion(result, "assertEndsWith", suffix ? suffix : "NULL", string ? 
string : "NULL", message, line, file, passed); + return passed; +} + +bool unittest_assert_true(UnittestTestResult_t *result, bool condition, const char *message, int line, const char *file) { + char *act_str = _format_bool(condition); + unittest_record_assertion(result, "assertTrue", "true", act_str, message, line, file, condition); + free(act_str); + return condition; +} + +bool unittest_assert_false(UnittestTestResult_t *result, bool condition, const char *message, int line, const char *file) { + char *act_str = _format_bool(condition); + unittest_record_assertion(result, "assertFalse", "false", act_str, message, line, file, !condition); + free(act_str); + return !condition; +} + +bool unittest_assert_null(UnittestTestResult_t *result, void *ptr, const char *message, int line, const char *file) { + bool passed = (ptr == NULL); + char *act_str = _format_ptr(ptr); + unittest_record_assertion(result, "assertIsNone", "NULL", act_str, message, line, file, passed); + free(act_str); + return passed; +} + +bool unittest_assert_not_null(UnittestTestResult_t *result, void *ptr, const char *message, int line, const char *file) { + bool passed = (ptr != NULL); + char *act_str = _format_ptr(ptr); + unittest_record_assertion(result, "assertIsNotNone", "not NULL", act_str, message, line, file, passed); + free(act_str); + return passed; +} + +bool unittest_assert_memory_equal(UnittestTestResult_t *result, const void *expected, const void *actual, size_t length, const char *message, int line, const char *file) { + bool passed = false; + if (expected && actual) { + passed = (memcmp(expected, actual, length) == 0); + } else if (expected == NULL && actual == NULL) { + passed = true; + } + char exp_str[64], act_str[64]; + snprintf(exp_str, sizeof(exp_str), "memory[%zu bytes]", length); + snprintf(act_str, sizeof(act_str), "memory[%zu bytes]", length); + unittest_record_assertion(result, "assertMemoryEqual", exp_str, act_str, message, line, file, passed); + return passed; +} + +bool unittest_assert_array_int_equal(UnittestTestResult_t *result, const int *expected, const int *actual, size_t length, const char *message, int line, const char *file) { + bool passed = true; + if (expected && actual) { + for (size_t i = 0; i < length; i++) { + if (expected[i] != actual[i]) { + passed = false; + break; + } + } + } else if (expected != actual) { + passed = false; + } + char exp_str[64], act_str[64]; + snprintf(exp_str, sizeof(exp_str), "int[%zu]", length); + snprintf(act_str, sizeof(act_str), "int[%zu]", length); + unittest_record_assertion(result, "assertArrayEqual", exp_str, act_str, message, line, file, passed); + return passed; +} + +bool unittest_assert_fail(UnittestTestResult_t *result, const char *message, int line, const char *file) { + unittest_record_assertion(result, "fail", "pass", "fail", message, line, file, false); + return false; +} + +bool unittest_assert_pass(UnittestTestResult_t *result, const char *message, int line, const char *file) { + unittest_record_assertion(result, "pass", "pass", "pass", message, line, file, true); + return true; +} + +int unittest_run_suite(UnittestTestSuite_t *suite, UnittestConfig_t *config) { + (void)config; + if (!suite) return -1; + return suite->total_failed + suite->total_errors; +} + +void unittest_get_summary(UnittestTestSuite_t *suite, int *total, int *passed, int *failed, int *errors, int *skipped) { + if (!suite) return; + int t = 0; + for (size_t i = 0; i < suite->test_case_count; i++) { + t += (int)suite->test_cases[i]->result_count; + } + if (total) *total 
= t; + if (passed) *passed = suite->total_passed; + if (failed) *failed = suite->total_failed; + if (errors) *errors = suite->total_errors; + if (skipped) *skipped = suite->total_skipped; +} + +static const char* _result_type_str(UnittestResultType_e type) { + switch (type) { + case UNITTEST_PASS: return "ok"; + case UNITTEST_FAIL: return "FAIL"; + case UNITTEST_ERROR: return "ERROR"; + case UNITTEST_SKIP: return "SKIP"; + case UNITTEST_XFAIL: return "xfail"; + case UNITTEST_XPASS: return "XPASS"; + default: return "UNKNOWN"; + } +} + +static const char* _result_type_color(UnittestResultType_e type, bool use_colors) { + if (!use_colors) return ""; + switch (type) { + case UNITTEST_PASS: return ANSI_GREEN; + case UNITTEST_FAIL: return ANSI_RED; + case UNITTEST_ERROR: return ANSI_RED; + case UNITTEST_SKIP: return ANSI_YELLOW; + case UNITTEST_XFAIL: return ANSI_YELLOW; + case UNITTEST_XPASS: return ANSI_YELLOW; + default: return ""; + } +} + +void _unittest_format_text(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) { + bool colors = config->use_colors; + const char *reset = colors ? ANSI_RESET : ""; + fprintf(output, "================================================================================\n"); + fprintf(output, "Test Suite: %s\n", suite->test_suite_name ? suite->test_suite_name : "Unnamed"); + char time_buf[64]; + struct tm *tm_info = localtime(&suite->start_time); + strftime(time_buf, sizeof(time_buf), "%Y-%m-%d %H:%M:%S", tm_info); + fprintf(output, "Started: %s\n", time_buf); + fprintf(output, "================================================================================\n\n"); + for (size_t i = 0; i < suite->test_case_count; i++) { + UnittestTestCase_t *tc = suite->test_cases[i]; + for (size_t j = 0; j < tc->result_count; j++) { + UnittestTestResult_t *r = tc->results[j]; + const char *color = _result_type_color(r->result_type, colors); + fprintf(output, "%s (%s) ... %s%s%s", r->test_method ? r->test_method : "?", tc->class_name ? 
tc->class_name : "?", color, _result_type_str(r->result_type), reset); + if (config->track_execution_time) { + fprintf(output, " (%.1fms)", r->execution_time_ms); + } + if (config->verbosity >= 2 && r->assertion_count > 0) { + fprintf(output, " [%zu assertions]", r->assertion_count); + } + fprintf(output, "\n"); + if (r->result_type == UNITTEST_FAIL || r->result_type == UNITTEST_ERROR) { + if (r->error_message) { + fprintf(output, " %s\n", r->error_message); + } + if (r->file_name && r->line_number > 0) { + fprintf(output, " File: %s, Line: %d\n", r->file_name, r->line_number); + } + if (config->show_full_traceback && r->traceback) { + fprintf(output, " Traceback:\n%s\n", r->traceback); + } + fprintf(output, "\n"); + } else if (r->result_type == UNITTEST_SKIP && r->skip_reason) { + fprintf(output, " Reason: %s\n\n", r->skip_reason); + } + } + } + fprintf(output, "================================================================================\n"); + int total = 0; + for (size_t i = 0; i < suite->test_case_count; i++) { + total += (int)suite->test_cases[i]->result_count; + } + fprintf(output, "Ran %d tests in %.1fms\n", total, suite->total_suite_time_ms); + fprintf(output, "Result: %d passed, %d failed, %d errors, %d skipped", suite->total_passed, suite->total_failed, suite->total_errors, suite->total_skipped); + if (suite->total_xfail > 0) fprintf(output, ", %d xfail", suite->total_xfail); + if (suite->total_xpass > 0) fprintf(output, ", %d xpass", suite->total_xpass); + fprintf(output, "\n"); + if (suite->total_failed > 0 || suite->total_errors > 0) { + fprintf(output, "%sFAILED%s (failures=%d, errors=%d)\n", colors ? ANSI_RED : "", reset, suite->total_failed, suite->total_errors); + } else { + fprintf(output, "%sOK%s\n", colors ? ANSI_GREEN : "", reset); + } + fprintf(output, "================================================================================\n"); +} + +void _unittest_format_quiet(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) { + (void)config; + for (size_t i = 0; i < suite->test_case_count; i++) { + UnittestTestCase_t *tc = suite->test_cases[i]; + for (size_t j = 0; j < tc->result_count; j++) { + UnittestTestResult_t *r = tc->results[j]; + switch (r->result_type) { + case UNITTEST_PASS: fputc('.', output); break; + case UNITTEST_FAIL: fputc('F', output); break; + case UNITTEST_ERROR: fputc('E', output); break; + case UNITTEST_SKIP: fputc('S', output); break; + case UNITTEST_XFAIL: fputc('x', output); break; + case UNITTEST_XPASS: fputc('X', output); break; + } + } + } + fputc('\n', output); + if (suite->total_failed > 0 || suite->total_errors > 0) { + fprintf(output, "FAILED (failures=%d, errors=%d)\n", suite->total_failed, suite->total_errors); + } else { + fprintf(output, "OK\n"); + } +} + +void _unittest_format_json(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) { + (void)config; + char time_buf[64]; + struct tm *tm_info = gmtime(&suite->start_time); + strftime(time_buf, sizeof(time_buf), "%Y-%m-%dT%H:%M:%SZ", tm_info); + char *suite_name = _escape_json(suite->test_suite_name); + fprintf(output, "{\n"); + fprintf(output, " \"testSuite\": %s,\n", suite_name); + fprintf(output, " \"timestamp\": \"%s\",\n", time_buf); + fprintf(output, " \"duration\": %.1f,\n", suite->total_suite_time_ms); + fprintf(output, " \"tests\": [\n"); + bool first_test = true; + for (size_t i = 0; i < suite->test_case_count; i++) { + UnittestTestCase_t *tc = suite->test_cases[i]; + for (size_t j = 0; j < tc->result_count; j++) { + if (!first_test) 
fprintf(output, ",\n");
+            first_test = false;
+            UnittestTestResult_t *r = tc->results[j];
+            char *name = _escape_json(r->test_method);
+            char *cls = _escape_json(tc->class_name);
+            char *msg = _escape_json(r->error_message);
+            char *file = _escape_json(r->file_name);
+            const char *status;
+            switch (r->result_type) {
+                case UNITTEST_PASS: status = "PASS"; break;
+                case UNITTEST_FAIL: status = "FAIL"; break;
+                case UNITTEST_ERROR: status = "ERROR"; break;
+                case UNITTEST_SKIP: status = "SKIP"; break;
+                case UNITTEST_XFAIL: status = "XFAIL"; break;
+                case UNITTEST_XPASS: status = "XPASS"; break;
+                default: status = "UNKNOWN";
+            }
+            fprintf(output, " {\n");
+            fprintf(output, " \"name\": %s,\n", name);
+            fprintf(output, " \"className\": %s,\n", cls);
+            fprintf(output, " \"status\": \"%s\",\n", status);
+            fprintf(output, " \"duration\": %.1f,\n", r->execution_time_ms);
+            fprintf(output, " \"assertions\": %zu,\n", r->assertion_count);
+            fprintf(output, " \"message\": %s,\n", msg);
+            fprintf(output, " \"file\": %s,\n", file);
+            fprintf(output, " \"line\": %d\n", r->line_number);
+            fprintf(output, " }");
+            free(name);
+            free(cls);
+            free(msg);
+            free(file);
+        }
+    }
+    fprintf(output, "\n ],\n");
+    int total = 0;
+    for (size_t i = 0; i < suite->test_case_count; i++) {
+        total += (int)suite->test_cases[i]->result_count;
+    }
+    fprintf(output, " \"summary\": {\n");
+    fprintf(output, " \"total\": %d,\n", total);
+    fprintf(output, " \"passed\": %d,\n", suite->total_passed);
+    fprintf(output, " \"failed\": %d,\n", suite->total_failed);
+    fprintf(output, " \"errors\": %d,\n", suite->total_errors);
+    fprintf(output, " \"skipped\": %d,\n", suite->total_skipped);
+    fprintf(output, " \"xfail\": %d,\n", suite->total_xfail);
+    fprintf(output, " \"xpass\": %d\n", suite->total_xpass);
+    fprintf(output, " }\n");
+    fprintf(output, "}\n");
+    free(suite_name);
+}
+
+/* JUnit-style XML report. */
+void _unittest_format_xml(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
+    (void)config;
+    int total = 0;
+    for (size_t i = 0; i < suite->test_case_count; i++) {
+        total += (int)suite->test_cases[i]->result_count;
+    }
+    char *suite_name = _escape_xml(suite->test_suite_name);
+    fprintf(output, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
+    fprintf(output, "<testsuites name=\"%s\" tests=\"%d\" failures=\"%d\" errors=\"%d\" skipped=\"%d\" time=\"%.3f\">\n", suite_name, total, suite->total_failed, suite->total_errors, suite->total_skipped, suite->total_suite_time_ms / 1000.0);
+    for (size_t i = 0; i < suite->test_case_count; i++) {
+        UnittestTestCase_t *tc = suite->test_cases[i];
+        char *cls_name = _escape_xml(tc->class_name);
+        fprintf(output, "  <testsuite name=\"%s\" tests=\"%zu\" failures=\"%d\" errors=\"%d\" skipped=\"%d\" time=\"%.3f\">\n", cls_name, tc->result_count, tc->failed_count, tc->error_count, tc->skipped_count, tc->total_time_ms / 1000.0);
+        for (size_t j = 0; j < tc->result_count; j++) {
+            UnittestTestResult_t *r = tc->results[j];
+            char *method_name = _escape_xml(r->test_method);
+            fprintf(output, "    <testcase name=\"%s\" time=\"%.3f\" assertions=\"%zu\"", method_name, r->execution_time_ms / 1000.0, r->assertion_count);
+            if (r->result_type == UNITTEST_PASS) {
+                fprintf(output, "/>\n");
+            } else {
+                fprintf(output, ">\n");
+                if (r->result_type == UNITTEST_FAIL) {
+                    char *msg = _escape_xml(r->error_message);
+                    fprintf(output, "      <failure message=\"%s\">\n", msg);
+                    if (r->file_name) fprintf(output, "        File: %s, Line: %d\n", r->file_name, r->line_number);
+                    fprintf(output, "      </failure>\n");
+                    free(msg);
+                } else if (r->result_type == UNITTEST_ERROR) {
+                    char *msg = _escape_xml(r->error_message);
+                    fprintf(output, "      <error message=\"%s\">\n", msg);
+                    if (r->traceback) {
+                        char *tb = _escape_xml(r->traceback);
+                        fprintf(output, "        %s\n", tb);
+                        free(tb);
+                    }
+                    fprintf(output, "      </error>\n");
+                    free(msg);
+                } else if (r->result_type == UNITTEST_SKIP) {
+                    char *reason = _escape_xml(r->skip_reason);
+                    fprintf(output, "      <skipped message=\"%s\"/>\n", reason ? reason : "");
+                    free(reason);
+                }
+                fprintf(output, "    </testcase>\n");
+            }
+            free(method_name);
+        }
+        fprintf(output, "  </testsuite>\n");
+        free(cls_name);
+    }
+    fprintf(output, "</testsuites>\n");
+    free(suite_name);
+}
+
+/* Minimal self-contained HTML report (no stylesheet is emitted). */
+void _unittest_format_html(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
+    (void)config;
+    int total = 0;
+    for (size_t i = 0; i < suite->test_case_count; i++) {
+        total += (int)suite->test_cases[i]->result_count;
+    }
+    char *suite_name = _escape_html(suite->test_suite_name);
+    char time_buf[64];
+    struct tm *tm_info = localtime(&suite->start_time);
+    strftime(time_buf, sizeof(time_buf), "%Y-%m-%d %H:%M:%S", tm_info);
+    fprintf(output, "<!DOCTYPE html>\n<html>\n<head>\n");
+    fprintf(output, "<meta charset=\"utf-8\">\n");
+    fprintf(output, "<title>Test Report - %s</title>\n", suite_name);
+    fprintf(output, "</head>\n<body>\n");
+    fprintf(output, "<h1>Test Report: %s</h1>\n", suite_name);
+    fprintf(output, "<p>Started: %s | Duration: %.1fms</p>\n", time_buf, suite->total_suite_time_ms);
+    fprintf(output, "<div class=\"summary\">\n");
+    fprintf(output, "<span class=\"total\">Total: %d</span>\n", total);
+    fprintf(output, "<span class=\"passed\">Passed: %d</span>\n", suite->total_passed);
+    fprintf(output, "<span class=\"failed\">Failed: %d</span>\n", suite->total_failed);
+    fprintf(output, "<span class=\"errors\">Errors: %d</span>\n", suite->total_errors);
+    fprintf(output, "<span class=\"skipped\">Skipped: %d</span>\n", suite->total_skipped);
+    fprintf(output, "</div>\n");
+    for (size_t i = 0; i < suite->test_case_count; i++) {
+        UnittestTestCase_t *tc = suite->test_cases[i];
+        char *cls_name = _escape_html(tc->class_name);
+        fprintf(output, "<div class=\"test-case\">\n");
+        fprintf(output, "<h2>%s (%zu tests)</h2>\n", cls_name, tc->result_count);
+        for (size_t j = 0; j < tc->result_count; j++) {
+            UnittestTestResult_t *r = tc->results[j];
+            char *method = _escape_html(r->test_method);
+            const char *badge_class;
+            const char *badge_text;
+            switch (r->result_type) {
+                case UNITTEST_PASS: badge_class = "badge-pass"; badge_text = "PASS"; break;
+                case UNITTEST_FAIL: badge_class = "badge-fail"; badge_text = "FAIL"; break;
+                case UNITTEST_ERROR: badge_class = "badge-error"; badge_text = "ERROR"; break;
+                case UNITTEST_SKIP: badge_class = "badge-skip"; badge_text = "SKIP"; break;
+                case UNITTEST_XFAIL: badge_class = "badge-skip"; badge_text = "XFAIL"; break;
+                case UNITTEST_XPASS: badge_class = "badge-skip"; badge_text = "XPASS"; break;
+                default: badge_class = "badge-skip"; badge_text = "?"; break;
+            }
+            fprintf(output, "<div class=\"test-row\">\n");
+            fprintf(output, "<span class=\"%s\">%s</span>\n", badge_class, badge_text);
+            fprintf(output, "<span class=\"test-name\">%s</span>\n", method);
+            fprintf(output, "<span class=\"test-time\">%.1fms</span>\n", r->execution_time_ms);
+            fprintf(output, "</div>\n");
+            if (r->result_type == UNITTEST_FAIL || r->result_type == UNITTEST_ERROR) {
+                char *msg = _escape_html(r->error_message);
+                fprintf(output, "<pre class=\"error-detail\">%s\nFile: %s, Line: %d</pre>\n", msg ? msg : "", r->file_name ? r->file_name : "?", r->line_number);
+                free(msg);
+            }
+            free(method);
+        }
+        fprintf(output, "</div>\n");
+        free(cls_name);
+    }
+    fprintf(output, "</body>\n</html>\n");
+    free(suite_name);
+}
+
+void _unittest_format_tap(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output) {
+    int total = 0;
+    for (size_t i = 0; i < suite->test_case_count; i++) {
+        total += (int)suite->test_cases[i]->result_count;
+    }
+    fprintf(output, "TAP version 13\n");
+    fprintf(output, "1..%d\n", total);
+    int test_num = 0;
+    for (size_t i = 0; i < suite->test_case_count; i++) {
+        UnittestTestCase_t *tc = suite->test_cases[i];
+        for (size_t j = 0; j < tc->result_count; j++) {
+            test_num++;
+            UnittestTestResult_t *r = tc->results[j];
+            /* TAP reports skipped tests as "ok" with a SKIP directive. */
+            bool ok = (r->result_type == UNITTEST_PASS || r->result_type == UNITTEST_XFAIL || r->result_type == UNITTEST_SKIP);
+            fprintf(output, "%s %d - %s", ok ? "ok" : "not ok", test_num, r->test_name ? r->test_name : "?");
+            if (r->result_type == UNITTEST_SKIP) {
+                fprintf(output, " # SKIP %s", r->skip_reason ? r->skip_reason : "");
+            } else if (config->track_execution_time) {
+                fprintf(output, " # %.1fms", r->execution_time_ms);
+            }
+            fprintf(output, "\n");
+            if (r->result_type == UNITTEST_FAIL || r->result_type == UNITTEST_ERROR) {
+                fprintf(output, "  ---\n");
+                fprintf(output, "  message: %s\n", r->error_message ? r->error_message : "");
+                fprintf(output, "  severity: %s\n", r->result_type == UNITTEST_FAIL ? "fail" : "error");
+                if (r->file_name) {
+                    fprintf(output, "  file: %s\n", r->file_name);
+                    fprintf(output, "  line: %d\n", r->line_number);
+                }
+                fprintf(output, "  ...\n");
+            }
+        }
+    }
+    fprintf(output, "# Tests run: %d, Passed: %d, Failed: %d, Errors: %d, Skipped: %d\n", total, suite->total_passed, suite->total_failed, suite->total_errors, suite->total_skipped);
+}
+
+void unittest_generate_report(UnittestTestSuite_t *suite, UnittestConfig_t *config) {
+    if (!suite || !config) return;
+    FILE *output = config->output_stream ? config->output_stream : stdout;
+    FILE *file_output = NULL;
+    if (config->output_file) {
+        file_output = fopen(config->output_file, "w");
+        if (file_output) output = file_output;
+    }
+    switch (config->output_format) {
+        case UNITTEST_FORMAT_TEXT:
+            _unittest_format_text(suite, config, output);
+            break;
+        case UNITTEST_FORMAT_QUIET:
+            _unittest_format_quiet(suite, config, output);
+            break;
+        case UNITTEST_FORMAT_JSON:
+            _unittest_format_json(suite, config, output);
+            break;
+        case UNITTEST_FORMAT_XML:
+            _unittest_format_xml(suite, config, output);
+            break;
+        case UNITTEST_FORMAT_HTML:
+            _unittest_format_html(suite, config, output);
+            break;
+        case UNITTEST_FORMAT_TAP:
+            _unittest_format_tap(suite, config, output);
+            break;
+    }
+    if (file_output) fclose(file_output);
+}
diff --git a/tests/unittest.h b/tests/unittest.h
new file mode 100644
index 0000000..9858807
--- /dev/null
+++ b/tests/unittest.h
@@ -0,0 +1,264 @@
+#ifndef UNITTEST_H
+#define UNITTEST_H
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdbool.h>
+#include <stddef.h>
+#include <time.h>
+
+typedef enum {
+    UNITTEST_PASS,
+    UNITTEST_FAIL,
+    UNITTEST_ERROR,
+    UNITTEST_SKIP,
+    UNITTEST_XFAIL,
+    UNITTEST_XPASS
+} UnittestResultType_e;
+
+typedef enum {
+    UNITTEST_FORMAT_TEXT,
+    UNITTEST_FORMAT_QUIET,
+    UNITTEST_FORMAT_JSON,
+    UNITTEST_FORMAT_XML,
+    UNITTEST_FORMAT_HTML,
+    UNITTEST_FORMAT_TAP
+} UnittestOutputFormat_e;
+
+typedef struct {
+    char *assertion_type;
+    char *message;
+    int line_number;
+    char *file_name;
+    char *expected_str;
+    char *actual_str;
+    double execution_time_ms;
+    bool passed;
+} UnittestAssertionInfo_t;
+
+typedef struct {
+    char *test_name;
+    char *test_method;
+    char *test_class;
+    UnittestResultType_e result_type;
+    char *error_message;
+    UnittestAssertionInfo_t **assertions;
+    size_t assertion_count;
+    size_t assertion_capacity;
+    double execution_time_ms;
+    int line_number;
+    char *file_name;
+    char *traceback;
+    char *skip_reason;
+} UnittestTestResult_t;
+
+typedef struct {
+    char *class_name;
+    UnittestTestResult_t **results;
+    size_t result_count;
+    size_t result_capacity;
+    int passed_count;
+    int failed_count;
+    int error_count;
+    int skipped_count;
+    int xfail_count;
+    int xpass_count;
+    double total_time_ms;
+} UnittestTestCase_t;
+
+typedef struct {
+    UnittestTestCase_t **test_cases;
+    size_t test_case_count;
+    size_t test_case_capacity;
+    int total_passed;
+    int total_failed;
+    int total_errors;
+    int total_skipped;
+    int total_xfail;
+    int total_xpass;
+    double total_suite_time_ms;
+    char *test_suite_name;
+    time_t start_time;
+} UnittestTestSuite_t;
+
+typedef struct {
+    UnittestOutputFormat_e output_format;
+    int verbosity;
+    FILE *output_stream;
+    char *output_file;
+    char **test_names_to_run;
+    size_t test_names_count;
+    bool stop_on_first_failure;
+    bool catch_exceptions;
+    bool track_execution_time;
+    double timeout_seconds;
+    bool show_local_variables;
+    bool show_full_traceback;
+    int max_traceback_depth;
+    char *test_pattern;
+    bool randomize_order;
+    int random_seed;
+    bool use_colors;
+    char *test_runner_name;
+    char *test_environment;
+} UnittestConfig_t;
+
+UnittestConfig_t* unittest_config_create(void);
+void unittest_config_destroy(UnittestConfig_t *config);
+
+UnittestTestSuite_t* unittest_test_suite_create(const char *suite_name);
+void unittest_test_suite_add_test_case(UnittestTestSuite_t *suite, UnittestTestCase_t *test_case);
+void unittest_test_suite_destroy(UnittestTestSuite_t *suite);
+
+UnittestTestCase_t* unittest_test_case_create(const char *class_name);
+void
unittest_test_case_add_result(UnittestTestCase_t *test_case, UnittestTestResult_t *result); +void unittest_test_case_destroy(UnittestTestCase_t *test_case); + +UnittestTestResult_t* unittest_test_result_create(const char *test_class, const char *test_method, int line_number, const char *file_name); +void unittest_test_result_set_skip(UnittestTestResult_t *result, const char *reason); +void unittest_test_result_set_xfail(UnittestTestResult_t *result, const char *reason); +void unittest_test_result_set_error(UnittestTestResult_t *result, const char *error_message, const char *traceback); +void unittest_test_result_destroy(UnittestTestResult_t *result); + +void unittest_record_assertion(UnittestTestResult_t *result, const char *assertion_type, const char *expected_str, const char *actual_str, const char *message, int line_number, const char *file_name, bool passed); + +bool unittest_assert_int_equal(UnittestTestResult_t *result, int expected, int actual, const char *message, int line, const char *file); +bool unittest_assert_int_not_equal(UnittestTestResult_t *result, int expected, int actual, const char *message, int line, const char *file); +bool unittest_assert_int_greater(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file); +bool unittest_assert_int_less(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file); +bool unittest_assert_int_greater_equal(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file); +bool unittest_assert_int_less_equal(UnittestTestResult_t *result, int actual, int threshold, const char *message, int line, const char *file); + +bool unittest_assert_long_equal(UnittestTestResult_t *result, long expected, long actual, const char *message, int line, const char *file); +bool unittest_assert_long_not_equal(UnittestTestResult_t *result, long expected, long actual, const char *message, int line, const char *file); + +bool unittest_assert_double_equal(UnittestTestResult_t *result, double expected, double actual, double epsilon, const char *message, int line, const char *file); +bool unittest_assert_double_not_equal(UnittestTestResult_t *result, double expected, double actual, double epsilon, const char *message, int line, const char *file); + +bool unittest_assert_string_equal(UnittestTestResult_t *result, const char *expected, const char *actual, const char *message, int line, const char *file); +bool unittest_assert_string_not_equal(UnittestTestResult_t *result, const char *expected, const char *actual, const char *message, int line, const char *file); +bool unittest_assert_string_contains(UnittestTestResult_t *result, const char *substring, const char *string, const char *message, int line, const char *file); +bool unittest_assert_string_not_contains(UnittestTestResult_t *result, const char *substring, const char *string, const char *message, int line, const char *file); +bool unittest_assert_string_starts_with(UnittestTestResult_t *result, const char *prefix, const char *string, const char *message, int line, const char *file); +bool unittest_assert_string_ends_with(UnittestTestResult_t *result, const char *suffix, const char *string, const char *message, int line, const char *file); + +bool unittest_assert_true(UnittestTestResult_t *result, bool condition, const char *message, int line, const char *file); +bool unittest_assert_false(UnittestTestResult_t *result, bool condition, const char *message, int line, const char 
*file); + +bool unittest_assert_null(UnittestTestResult_t *result, void *ptr, const char *message, int line, const char *file); +bool unittest_assert_not_null(UnittestTestResult_t *result, void *ptr, const char *message, int line, const char *file); + +bool unittest_assert_memory_equal(UnittestTestResult_t *result, const void *expected, const void *actual, size_t length, const char *message, int line, const char *file); +bool unittest_assert_array_int_equal(UnittestTestResult_t *result, const int *expected, const int *actual, size_t length, const char *message, int line, const char *file); + +bool unittest_assert_fail(UnittestTestResult_t *result, const char *message, int line, const char *file); +bool unittest_assert_pass(UnittestTestResult_t *result, const char *message, int line, const char *file); + +int unittest_run_suite(UnittestTestSuite_t *suite, UnittestConfig_t *config); +void unittest_generate_report(UnittestTestSuite_t *suite, UnittestConfig_t *config); +void unittest_get_summary(UnittestTestSuite_t *suite, int *total, int *passed, int *failed, int *errors, int *skipped); + +void _unittest_format_text(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output); +void _unittest_format_quiet(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output); +void _unittest_format_json(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output); +void _unittest_format_xml(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output); +void _unittest_format_html(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output); +void _unittest_format_tap(UnittestTestSuite_t *suite, UnittestConfig_t *config, FILE *output); + +double unittest_get_time_ms(void); + +#define UNITTEST_ASSERT_EQUAL_INT(result, expected, actual, msg) \ + unittest_assert_int_equal((result), (expected), (actual), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_NOT_EQUAL_INT(result, expected, actual, msg) \ + unittest_assert_int_not_equal((result), (expected), (actual), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_GREATER(result, actual, threshold, msg) \ + unittest_assert_int_greater((result), (actual), (threshold), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_LESS(result, actual, threshold, msg) \ + unittest_assert_int_less((result), (actual), (threshold), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_GREATER_EQUAL(result, actual, threshold, msg) \ + unittest_assert_int_greater_equal((result), (actual), (threshold), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_LESS_EQUAL(result, actual, threshold, msg) \ + unittest_assert_int_less_equal((result), (actual), (threshold), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_EQUAL_LONG(result, expected, actual, msg) \ + unittest_assert_long_equal((result), (expected), (actual), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_NOT_EQUAL_LONG(result, expected, actual, msg) \ + unittest_assert_long_not_equal((result), (expected), (actual), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_EQUAL_DOUBLE(result, expected, actual, eps, msg) \ + unittest_assert_double_equal((result), (expected), (actual), (eps), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_NOT_EQUAL_DOUBLE(result, expected, actual, eps, msg) \ + unittest_assert_double_not_equal((result), (expected), (actual), (eps), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_EQUAL_STR(result, expected, actual, msg) \ + unittest_assert_string_equal((result), (expected), (actual), (msg), __LINE__, __FILE__) + +#define 
UNITTEST_ASSERT_NOT_EQUAL_STR(result, expected, actual, msg) \ + unittest_assert_string_not_equal((result), (expected), (actual), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_STRING_CONTAINS(result, substring, string, msg) \ + unittest_assert_string_contains((result), (substring), (string), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_STRING_NOT_CONTAINS(result, substring, string, msg) \ + unittest_assert_string_not_contains((result), (substring), (string), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_STRING_STARTS_WITH(result, prefix, string, msg) \ + unittest_assert_string_starts_with((result), (prefix), (string), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_STRING_ENDS_WITH(result, suffix, string, msg) \ + unittest_assert_string_ends_with((result), (suffix), (string), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_TRUE(result, condition, msg) \ + unittest_assert_true((result), (condition), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_FALSE(result, condition, msg) \ + unittest_assert_false((result), (condition), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_NULL(result, ptr, msg) \ + unittest_assert_null((result), (ptr), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_NOT_NULL(result, ptr, msg) \ + unittest_assert_not_null((result), (ptr), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_MEMORY_EQUAL(result, expected, actual, len, msg) \ + unittest_assert_memory_equal((result), (expected), (actual), (len), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_ARRAY_INT_EQUAL(result, expected, actual, len, msg) \ + unittest_assert_array_int_equal((result), (expected), (actual), (len), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_FAIL(result, msg) \ + unittest_assert_fail((result), (msg), __LINE__, __FILE__) + +#define UNITTEST_ASSERT_PASS(result, msg) \ + unittest_assert_pass((result), (msg), __LINE__, __FILE__) + +#define UNITTEST_BEGIN_TEST(class_name, method_name) \ + UnittestTestResult_t *_unittest_result = unittest_test_result_create(class_name, method_name, __LINE__, __FILE__); \ + double _unittest_start_time = unittest_get_time_ms(); + +#define UNITTEST_END_TEST() \ + _unittest_result->execution_time_ms = unittest_get_time_ms() - _unittest_start_time; \ + if (_unittest_result->result_type == UNITTEST_PASS) { \ + for (size_t i = 0; i < _unittest_result->assertion_count; i++) { \ + if (!_unittest_result->assertions[i]->passed) { \ + _unittest_result->result_type = UNITTEST_FAIL; \ + break; \ + } \ + } \ + } \ + return _unittest_result; + +#define UNITTEST_SKIP(reason) \ + (void)_unittest_start_time; \ + unittest_test_result_set_skip(_unittest_result, reason); \ + return _unittest_result; + +#endif
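
For context, a minimal usage sketch of the API added above. It is illustrative only and not part of the patch: the file and test names are hypothetical, and it assumes (the relevant implementations are not shown in this hunk) that unittest_test_case_add_result and unittest_test_suite_add_test_case maintain the per-case and per-suite counters that unittest_run_suite and the report formatters read.

/* Hypothetical example; names are placeholders, not part of the diff. */
#include "unittest.h"

static UnittestTestResult_t *test_addition(void) {
    UNITTEST_BEGIN_TEST("DemoTests", "test_addition");
    UNITTEST_ASSERT_EQUAL_INT(_unittest_result, 4, 2 + 2, "2 + 2 should equal 4");
    UNITTEST_ASSERT_TRUE(_unittest_result, 2 + 2 == 4, "arithmetic sanity check");
    UNITTEST_END_TEST();
}

int main(void) {
    UnittestConfig_t *config = unittest_config_create();
    UnittestTestSuite_t *suite = unittest_test_suite_create("DemoSuite");
    UnittestTestCase_t *tc = unittest_test_case_create("DemoTests");

    /* Run the test, collect its result, and attach everything to the suite. */
    unittest_test_case_add_result(tc, test_addition());
    unittest_test_suite_add_test_case(suite, tc);

    int failures = unittest_run_suite(suite, config);
    unittest_generate_report(suite, config);  /* writes to config->output_stream, else stdout */

    unittest_test_suite_destroy(suite);
    unittest_config_destroy(config);
    return failures > 0 ? 1 : 0;
}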