From c939f4f4d16834f3f32f2466ad559cb10871a62b Mon Sep 17 00:00:00 2001 From: JaDogg Date: Sat, 2 Mar 2024 14:05:37 +0000 Subject: [PATCH] test: do not save errors to a global and instead create an error_printer object --- compiler/CMakeLists.txt | 28 ++++--- compiler/Dockerfile | 14 +--- compiler/scripts/code-coverage.sh | 15 ++-- compiler/scripts/kabaraya.py | 27 ++++--- compiler/src/ast/codefiles.cpp | 10 +-- compiler/src/ast/codefiles.h | 4 +- compiler/src/ast_json.cpp | 4 +- compiler/src/comp_main.cpp | 10 ++- compiler/src/compiler/codegen.h | 21 ++--- compiler/src/compiler/codegen_c.cpp | 22 +++--- compiler/src/compiler/codegen_c.h | 20 ++--- compiler/src/compiler/codegen_json.cpp | 2 +- compiler/src/compiler/codegen_json.h | 2 +- compiler/src/compiler/multifile_compiler.cpp | 22 +++--- compiler/src/compiler/multifile_compiler.h | 4 +- compiler/src/dump.cpp | 4 +- compiler/src/html_gen.cpp | 3 +- compiler/src/lisp_main.cpp | 18 +++-- compiler/src/utilities/error_printer.cpp | 44 ++++++++++- compiler/src/utilities/error_printer.h | 82 +++++++------------- compiler/src/viz_main.cpp | 5 +- compiler/tests/fuzz_main.cpp | 12 ++- compiler/tests/test_parser.cpp | 6 +- compiler/tests/test_type_checker.cpp | 24 +++--- 24 files changed, 213 insertions(+), 190 deletions(-) diff --git a/compiler/CMakeLists.txt b/compiler/CMakeLists.txt index 17bd9c69..c07fcdd3 100644 --- a/compiler/CMakeLists.txt +++ b/compiler/CMakeLists.txt @@ -25,13 +25,15 @@ include_directories(${REPROC_DIR}/reproc++/include) set(REPROC++ ON) add_subdirectory(${REPROC_DIR}) -## Enable below to use address sanitizer -# add_compile_options(-O1 -g -fsanitize=address -fno-omit-frame-pointer) -# add_link_options(-g -fsanitize=address) +if(DEFINED ENV{YAKSHA_ADDRESS_SANITIZER}) + add_compile_options(-O1 -g -fsanitize=address -fno-omit-frame-pointer) + add_link_options(-g -fsanitize=address) +endif() -## Enable below to use undefined behaviour sanitizer -# add_compile_options(-O1 -g -fsanitize=undefined -fsanitize=integer -fsanitize=nullability -fno-omit-frame-pointer) -# add_link_options(-g -fsanitize=undefined -fsanitize=integer -fsanitize=nullability) +if(DEFINED ENV{YAKSHA_UNDEFINED_SANITIZER}) + add_compile_options(-O1 -g -fsanitize=undefined -fsanitize=integer -fsanitize=nullability -fno-omit-frame-pointer) + add_link_options(-g -fsanitize=undefined -fsanitize=integer -fsanitize=nullability) +endif() set(YAKSHA_SOURCE_FILES src/ast/ast.h src/ast/ast_printer.h src/ast/ast_vis.h src/ast/codefiles.h src/ast/environment.h src/ast/environment_stack.h src/ast/parser.h src/builtins/builtin.h src/builtins/builtins.h src/compiler/codegen.h src/compiler/codegen_c.h src/compiler/codegen_json.h src/compiler/comp_result.h src/compiler/compiler_utils.h src/compiler/const_fold.h src/compiler/datatype_compiler.h src/compiler/datatype_parser.h src/compiler/def_class_visitor.h src/compiler/delete_stack.h src/compiler/delete_stack_stack.h src/compiler/desugaring_compiler.h src/compiler/entry_struct_func_compiler.h src/compiler/function_datatype_extractor.h src/compiler/literal_utils.h src/compiler/multifile_compiler.h src/compiler/return_checker.h src/compiler/slot_matcher.h src/compiler/statement_writer.h src/compiler/to_c_compiler.h src/compiler/type_checker.h src/compiler/usage_analyser.h src/file_formats/ic_tokens_file.h src/file_formats/tokens_file.h src/ic2c/ic2c.h src/ic2c/ic_ast.h src/ic2c/ic_compiler.h src/ic2c/ic_level2_parser.h src/ic2c/ic_level2_tokenizer.h src/ic2c/ic_line_splicer.h src/ic2c/ic_optimizer.h src/ic2c/ic_parser.h 
src/ic2c/ic_peek_ahead_iter.h src/ic2c/ic_preprocessor.h src/ic2c/ic_simple_character_iter.h src/ic2c/ic_token.h src/ic2c/ic_tokenizer.h src/ic2c/ic_trigraph_translater.h src/tokenizer/block_analyzer.h src/tokenizer/string_utils.h src/tokenizer/token.h src/tokenizer/tokenizer.h src/utilities/annotation.h src/utilities/annotations.h src/utilities/argparser.h src/utilities/colours.h src/utilities/cpp_util.h src/utilities/defer_stack.h src/utilities/defer_stack_stack.h src/utilities/error_printer.h src/utilities/gc_pool.h src/utilities/human_id.h src/utilities/ykdatatype.h src/utilities/ykdt_pool.h src/utilities/ykobject.h src/yaksha_lisp/macro_processor.h src/yaksha_lisp/prelude.h src/yaksha_lisp/yaksha_lisp.h src/yaksha_lisp/yaksha_lisp_builtins.h src/ast/ast.cpp src/ast/ast_printer.cpp src/ast/ast_vis.cpp src/ast/codefiles.cpp src/ast/environment.cpp src/ast/environment_stack.cpp src/ast/parser.cpp src/builtins/builtins.cpp src/compiler/codegen_c.cpp src/compiler/codegen_json.cpp src/compiler/compiler_utils.cpp src/compiler/const_fold.cpp src/compiler/def_class_visitor.cpp src/compiler/delete_stack.cpp src/compiler/delete_stack_stack.cpp src/compiler/desugaring_compiler.cpp src/compiler/entry_struct_func_compiler.cpp src/compiler/literal_utils.cpp src/compiler/multifile_compiler.cpp src/compiler/return_checker.cpp src/compiler/to_c_compiler.cpp src/compiler/type_checker.cpp src/compiler/usage_analyser.cpp src/file_formats/ic_tokens_file.cpp src/file_formats/tokens_file.cpp src/ic2c/ic2c.cpp src/ic2c/ic_ast.cpp src/ic2c/ic_compiler.cpp src/ic2c/ic_level2_parser.cpp src/ic2c/ic_level2_tokenizer.cpp src/ic2c/ic_line_splicer.cpp src/ic2c/ic_optimizer.cpp src/ic2c/ic_parser.cpp src/ic2c/ic_peek_ahead_iter.cpp src/ic2c/ic_preprocessor.cpp src/ic2c/ic_tokenizer.cpp src/ic2c/ic_trigraph_translater.cpp src/tokenizer/block_analyzer.cpp src/tokenizer/string_utils.cpp src/tokenizer/tokenizer.cpp src/utilities/annotation.cpp src/utilities/annotations.cpp src/utilities/colours.cpp src/utilities/cpp_util.cpp src/utilities/defer_stack.cpp src/utilities/defer_stack_stack.cpp src/utilities/error_printer.cpp src/utilities/human_id.cpp src/utilities/ykdatatype.cpp src/utilities/ykdt_pool.cpp src/utilities/ykobject.cpp src/yaksha_lisp/yaksha_lisp.cpp src/yaksha_lisp/yaksha_lisp_builtins.cpp) # update_makefile.py SRC @@ -70,8 +72,8 @@ target_compile_features(yakshadmp PRIVATE cxx_std_17) # -- ykreload - compiler -- if(MSVC) -add_executable(ykreload src/reloader.cpp) -target_compile_features(yakshac PRIVATE cxx_std_17) + add_executable(ykreload src/reloader.cpp) + target_compile_features(yakshac PRIVATE cxx_std_17) endif (MSVC) # -- yaksha binary -- @@ -100,14 +102,16 @@ catch_discover_tests(YakshaTests) # Adding Fuzzer Binary # --------------------- +add_executable(YakshaFuzz ${YAKSHA_SOURCE_FILES} tests/fuzz_main.cpp runtime/whereami.c) if(DEFINED ENV{YAKSHA_FUZZ}) set(CMAKE_CXX_FLAGS "-fsanitize=fuzzer,address -g -O1") - add_executable(YakshaFuzz ${YAKSHA_SOURCE_FILES} tests/fuzz_main.cpp runtime/whereami.c) target_compile_definitions(YakshaFuzz PUBLIC YAKSHA_LLVM_FUZZ) - target_link_libraries(YakshaFuzz PUBLIC ${SYS_LIBS}) - target_link_libraries(YakshaFuzz PUBLIC reproc++) - target_compile_features(YakshaFuzz PRIVATE cxx_std_17) endif() +target_compile_definitions(YakshaFuzz PUBLIC TESTING) +target_link_libraries(YakshaFuzz PUBLIC ${SYS_LIBS}) +target_link_libraries(YakshaFuzz PUBLIC reproc++) +target_compile_features(YakshaFuzz PRIVATE cxx_std_17) + # CMake Build for carpntr 
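# A minimal usage sketch (not part of the patch; commands assume configuration
# is run from the compiler/ directory): the sanitizer and fuzzer builds above
# are opt-in via environment variables checked with DEFINED ENV{...}:
#   YAKSHA_ADDRESS_SANITIZER=1 cmake -S . -B build    # AddressSanitizer build
#   YAKSHA_UNDEFINED_SANITIZER=1 cmake -S . -B build  # UBSan build
#   YAKSHA_FUZZ=1 cmake -S . -B build                 # libFuzzer flags, defines YAKSHA_LLVM_FUZZ
#   cmake --build build --target YakshaFuzz           # YakshaFuzz target is now always defined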
add_compile_options("$<$:/utf-8>") diff --git a/compiler/Dockerfile b/compiler/Dockerfile index a2962b7f..59c1dcf8 100644 --- a/compiler/Dockerfile +++ b/compiler/Dockerfile @@ -1,21 +1,11 @@ FROM ubuntu:latest ENV DEBIAN_FRONTEND=noninteractive -ARG CMAKE_VERSION=3.22.0 ARG ZIG_VERSION=0.9.1 -RUN apt-get update && apt-get -y -q install clang-12 clang-tools git wget build-essential python3 libssl-dev lcov gcovr dos2unix - -RUN wget https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}.tar.gz \ - && tar -zxf cmake-${CMAKE_VERSION}.tar.gz \ - && cd cmake-${CMAKE_VERSION} \ - && ./bootstrap \ - && make -j4 \ - && make install - +RUN apt-get update && apt-get -y -q install clang-12 clang-tools git wget build-essential python3 libssl-dev lcov gcovr dos2unix cmake ninja-build RUN ln -s /usr/bin/clang-12 /usr/bin/clang \ && ln -s /usr/bin/clang++-12 /usr/bin/clang++ - RUN apt-get -y -q install python3-pip && pip3 install tqdm RUN wget https://ziglang.org/download/0.9.1/zig-linux-x86_64-${ZIG_VERSION}.tar.xz \ @@ -36,5 +26,7 @@ ADD carpntr /app/carpntr ADD test_libs /app/test_libs ADD bin/test.txt /app/test.txt RUN mkdir "/app/bin" +RUN dos2unix /app/scripts/*.py +RUN dos2unix /app/scripts/*.sh ENTRYPOINT ["/app/scripts/code-coverage.sh"] diff --git a/compiler/scripts/code-coverage.sh b/compiler/scripts/code-coverage.sh index 6907e66c..17540531 100755 --- a/compiler/scripts/code-coverage.sh +++ b/compiler/scripts/code-coverage.sh @@ -5,28 +5,31 @@ mkdir build mkdir bin mkdir bin/fuzz cp /app/test.txt bin/test.txt +find . -type f -exec dos2unix {} \; set -e cd build || exit 1 # Build project -cmake -S .. -B . -DCMAKE_CXX_FLAGS="-fprofile-arcs -ftest-coverage -fPIC -O0 -fsanitize=address -static-libasan -g" +cmake -G Ninja -S .. -B . -DCMAKE_CXX_FLAGS="-fprofile-arcs -ftest-coverage -fPIC -O0 -fsanitize=address -static-libasan -g" cmake --build . -- -j 24 -# Fix test data by converting them all to unix format -cd ../test_data || exit 1 -dos2unix * -cd ../bin || exit 1 # Run test cases echo "██ ██ ███ ██ ██ ████████ ████████ ███████ ███████ ████████" echo "██ ██ ████ ██ ██ ██ ██ ██ ██ ██" echo "██ ██ ██ ██ ██ ██ ██ ██ █████ ███████ ██" echo "██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██" echo " ██████ ██ ████ ██ ██ ██ ███████ ███████ ██" -./YakshaTests +ninja test # Build carpntr cd ../carpntr +rm -rf build +mkdir build python3 bootstrap_me.py +# Disable exit on error so we can run the rest of the commands +set +e cd ../scripts # Run e2e tests python3 e2e.py +# Run yaksha ast +../bin/yaksha ast ../carpntr/main.yaka # Run libdocs.py mkdir ../comp_output_test python3 libdocs.py diff --git a/compiler/scripts/kabaraya.py b/compiler/scripts/kabaraya.py index 63121540..fbb2f540 100755 --- a/compiler/scripts/kabaraya.py +++ b/compiler/scripts/kabaraya.py @@ -5,36 +5,36 @@ # Note: libs - MIT license, runtime/3rd - various # ============================================================================================== # GPLv3: -# +# # Yaksha - Programming Language. # Copyright (C) 2020 - 2024 Bhathiya Perera -# +# # This program is free software: you can redistribute it and/or modify it under the terms # of the GNU General Public License as published by the Free Software Foundation, # either version 3 of the License, or (at your option) any later version. -# +# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # or FITNESS FOR A PARTICULAR PURPOSE. 
# See the GNU General Public License for more details. -# +# # You should have received a copy of the GNU General Public License along with this program. # If not, see https://www.gnu.org/licenses/. -# +# # ============================================================================================== # Additional Terms: -# +# # Please note that any commercial use of the programming language's compiler source code # (everything except compiler/runtime, compiler/libs and compiler/3rd) require a written agreement # with author of the language (Bhathiya Perera). -# +# # If you are using it for an open source project, please give credits. # Your own project must use GPLv3 license with these additional terms. -# +# # You may use programs written in Yaksha/YakshaLisp for any legal purpose # (commercial, open-source, closed-source, etc) as long as it agrees # to the licenses of linked runtime libraries (see compiler/runtime/README.md). -# +# # ============================================================================================== # !/usr/bin/env python """ @@ -121,9 +121,10 @@ def fail(text): "assert", "del", "not", "elif", "if", "or", "await", "except", "raise", "finally", "lambda", "nonlocal", "global", "with", "async", - "yield", "in", "is"] + "yield", "in", "is", "macros!", "ccode", + "Const", "Array", "FixedArr"] -OPERATORS = "+ - * ** / // % @ << >> & | ^ ~ := < > <= >= == != ( ) " \ +OPERATORS = "! + - * ** / // % @ << >> & | ^ ~ := < > <= >= == != ( ) " \ "[ ] { } , : . ; @ = -> += -= *= /= //= %= @= &= |= ^= >>= <<= **=".split(" ") INPUT_DATA = [] @@ -291,7 +292,9 @@ def add_random_indent(input_text: List[str], _: List[List[str]]) -> List[str]: def execute(arg: str): - args = [os.path.abspath(BINARY), os.path.abspath(arg)] + return execute_item(arg, [os.path.abspath(BINARY), os.path.abspath(arg)]) + +def execute_item(arg: str, args: List[str]): fuzz_process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", universal_newlines=True) so, se = "", "" diff --git a/compiler/src/ast/codefiles.cpp b/compiler/src/ast/codefiles.cpp index e03e6b94..28105652 100644 --- a/compiler/src/ast/codefiles.cpp +++ b/compiler/src/ast/codefiles.cpp @@ -44,9 +44,9 @@ #include "utilities/error_printer.h" #include using namespace yaksha; -codefiles::codefiles(std::filesystem::path &libs_path) +codefiles::codefiles(std::filesystem::path &libs_path, errors::error_printer *ep) : current_path_(std::filesystem::current_path()), prefixes_(), - path_to_fi_(), libs_path_{libs_path}, pool_() { + path_to_fi_(), libs_path_{libs_path}, pool_(), ep_{ep} { esc_ = new entry_struct_func_compiler(&pool_); } codefiles::~codefiles() { @@ -217,14 +217,14 @@ file_data *codefiles::parse_or_null(const std::string &data, auto *t = new tokenizer{file_name, data, yaksha_macros_.get_yk_token_pool()}; t->tokenize(); if (!t->errors_.empty()) { - errors::print_errors(t->errors_); + ep_->print_errors(t->errors_); delete (t); return nullptr; } auto *b = new block_analyzer{t->tokens_, yaksha_macros_.get_yk_token_pool()}; b->analyze(); if (!b->errors_.empty()) { - errors::print_errors(b->errors_); + ep_->print_errors(b->errors_); delete (t); delete (b); return nullptr; @@ -232,7 +232,7 @@ file_data *codefiles::parse_or_null(const std::string &data, auto *p = new parser(file_name, b->tokens_, &pool_); p->step_1_parse_token_soup(); if (!b->errors_.empty()) { - errors::print_errors(b->errors_); + ep_->print_errors(b->errors_); delete (t); delete (b); delete (p); diff --git a/compiler/src/ast/codefiles.h 
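// A minimal usage sketch (not part of the patch; the file name and library
// path below are hypothetical): errors now flow through an
// errors::error_printer instance owned by multifile_compiler and handed to
// codefiles, instead of a global error_capture vector:
//
//   #include "compiler/codegen_c.h"
//   #include "compiler/multifile_compiler.h"
//   using namespace yaksha;
//
//   multifile_compiler mc{};   // owns public member errors::error_printer error_printer_
//   codegen_c cg{};
//   comp_result result = mc.compile("example.yaka", "../libs", &cg);
//   if (result.failed_) {
//     // errors were already printed through mc.error_printer_;
//     // when built with YAKSHA_TESTING it can also be queried:
//     //   mc.error_printer_.has_any_error()
//     //   mc.error_printer_.has_error("expected message")
//     //   mc.error_printer_.has_no_errors()
//   }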
b/compiler/src/ast/codefiles.h index 9933523e..0b17ade1 100644 --- a/compiler/src/ast/codefiles.h +++ b/compiler/src/ast/codefiles.h @@ -42,6 +42,7 @@ #include "ast/ast.h" #include "compiler/entry_struct_func_compiler.h" #include "tokenizer/tokenizer.h" +#include "utilities/error_printer.h" #include "utilities/ykdt_pool.h" #include "yaksha_lisp/yaksha_lisp.h" #include @@ -52,7 +53,7 @@ #include namespace yaksha { struct codefiles { - explicit codefiles(std::filesystem::path &libs_path); + codefiles(std::filesystem::path &libs_path, errors::error_printer *ep); ~codefiles(); file_info *get_or_null(const std::string &f); file_info *initialize_parsing_or_null(const std::string &filename); @@ -81,6 +82,7 @@ namespace yaksha { std::filesystem::path libs_path_{}; std::unordered_map import_to_path_cache_{}; + errors::error_printer *ep_{nullptr}; }; }// namespace yaksha #endif diff --git a/compiler/src/ast_json.cpp b/compiler/src/ast_json.cpp index 75bbf13a..077598e5 100644 --- a/compiler/src/ast_json.cpp +++ b/compiler/src/ast_json.cpp @@ -65,8 +65,8 @@ int main(int argc, char *argv[]) { return EXIT_FAILURE; } comp_result result; + multifile_compiler mc{}; try { - multifile_compiler mc{}; mc.main_required_ = check_main.is_set_; mc.check_types_ = check_types.is_set_; mc.usage_analysis_ = false; // disable usage analysis as JSON will dump all @@ -77,7 +77,7 @@ int main(int argc, char *argv[]) { result = mc.compile(code.value_, lib.value_, &cg); } if (result.failed_) { return EXIT_FAILURE; } - } catch (parsing_error &e) { errors::print_errors({e}); } + } catch (parsing_error &e) { mc.error_printer_.print_error(std::cerr, e); } std::cout << result.code_; return EXIT_SUCCESS; } diff --git a/compiler/src/comp_main.cpp b/compiler/src/comp_main.cpp index b3cfc5dc..a2d6727e 100644 --- a/compiler/src/comp_main.cpp +++ b/compiler/src/comp_main.cpp @@ -46,12 +46,14 @@ #include "utilities/argparser.h" using namespace yaksha; int main(int argc, char *argv[]) { - auto args = argparser::ARGS(PROGRAM_NAME, "Compile Yaksha code to C code", ""); + auto args = + argparser::ARGS(PROGRAM_NAME, "Compile Yaksha code to C code", ""); auto help = argparser::OP_BOOL('h', "--help", "Print this help message"); auto no_main = argparser::OP_BOOL('N', "--no-main", "Disable main() check"); args.optional_ = {&help, &no_main}; auto code = argparser::PO("mainfile.yaka", "Yaksha code file."); - auto lib = argparser::PO_OPT("[LIBS_PARENT_PATH]", "Path to the parent directory of the libraries"); + auto lib = argparser::PO_OPT("[LIBS_PARENT_PATH]", + "Path to the parent directory of the libraries"); args.positional_ = {&code, &lib}; argparser::parse_args(argc, argv, args); if (help.is_set_) { @@ -64,8 +66,8 @@ int main(int argc, char *argv[]) { return EXIT_FAILURE; } comp_result result; + multifile_compiler mc{}; try { - multifile_compiler mc{}; mc.main_required_ = !no_main.is_set_; codegen_c cg{}; if (!lib.is_set_) {// Just code.yaka is passed @@ -74,7 +76,7 @@ int main(int argc, char *argv[]) { result = mc.compile(code.value_, lib.value_, &cg); } if (result.failed_) { return EXIT_FAILURE; } - } catch (parsing_error &e) { errors::print_errors({e}); } + } catch (parsing_error &e) { mc.error_printer_.print_errors({e}); } std::cout << result.code_; return EXIT_SUCCESS; } diff --git a/compiler/src/compiler/codegen.h b/compiler/src/compiler/codegen.h index 2e5c7a2f..419198d9 100644 --- a/compiler/src/compiler/codegen.h +++ b/compiler/src/compiler/codegen.h @@ -5,40 +5,41 @@ // Note: libs - MIT license, runtime/3rd - various // 
============================================================================================== // GPLv3: -// +// // Yaksha - Programming Language. // Copyright (C) 2020 - 2024 Bhathiya Perera -// +// // This program is free software: you can redistribute it and/or modify it under the terms // of the GNU General Public License as published by the Free Software Foundation, // either version 3 of the License, or (at your option) any later version. -// +// // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY // or FITNESS FOR A PARTICULAR PURPOSE. // See the GNU General Public License for more details. -// +// // You should have received a copy of the GNU General Public License along with this program. // If not, see https://www.gnu.org/licenses/. -// +// // ============================================================================================== // Additional Terms: -// +// // Please note that any commercial use of the programming language's compiler source code // (everything except compiler/runtime, compiler/libs and compiler/3rd) require a written agreement // with author of the language (Bhathiya Perera). -// +// // If you are using it for an open source project, please give credits. // Your own project must use GPLv3 license with these additional terms. -// +// // You may use programs written in Yaksha/YakshaLisp for any legal purpose // (commercial, open-source, closed-source, etc) as long as it agrees // to the licenses of linked runtime libraries (see compiler/runtime/README.md). -// +// // ============================================================================================== // codegen.h #ifndef CODEGEN_H #define CODEGEN_H +#include "utilities/error_printer.h" #include "utilities/gc_pool.h" namespace yaksha { struct comp_result; @@ -46,7 +47,7 @@ namespace yaksha { struct token; struct codegen { virtual ~codegen() = default; - virtual comp_result emit(codefiles *cf, gc_pool *token_pool) = 0; + virtual comp_result emit(codefiles *cf, gc_pool *token_pool, errors::error_printer* ep) = 0; private: }; diff --git a/compiler/src/compiler/codegen_c.cpp b/compiler/src/compiler/codegen_c.cpp index 9d39fb3a..eee5fac1 100644 --- a/compiler/src/compiler/codegen_c.cpp +++ b/compiler/src/compiler/codegen_c.cpp @@ -5,36 +5,36 @@ // Note: libs - MIT license, runtime/3rd - various // ============================================================================================== // GPLv3: -// +// // Yaksha - Programming Language. // Copyright (C) 2020 - 2024 Bhathiya Perera -// +// // This program is free software: you can redistribute it and/or modify it under the terms // of the GNU General Public License as published by the Free Software Foundation, // either version 3 of the License, or (at your option) any later version. -// +// // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY // or FITNESS FOR A PARTICULAR PURPOSE. // See the GNU General Public License for more details. -// +// // You should have received a copy of the GNU General Public License along with this program. // If not, see https://www.gnu.org/licenses/. 
-// +// // ============================================================================================== // Additional Terms: -// +// // Please note that any commercial use of the programming language's compiler source code // (everything except compiler/runtime, compiler/libs and compiler/3rd) require a written agreement // with author of the language (Bhathiya Perera). -// +// // If you are using it for an open source project, please give credits. // Your own project must use GPLv3 license with these additional terms. -// +// // You may use programs written in Yaksha/YakshaLisp for any legal purpose // (commercial, open-source, closed-source, etc) as long as it agrees // to the licenses of linked runtime libraries (see compiler/runtime/README.md). -// +// // ============================================================================================== // codegen_c.cpp #include "codegen_c.h" @@ -45,7 +45,7 @@ using namespace yaksha; codegen_c::codegen_c() = default; codegen_c::~codegen_c() = default; -comp_result codegen_c::emit(codefiles *cf, gc_pool *token_pool) { +comp_result codegen_c::emit(codefiles *cf, gc_pool *token_pool, errors::error_printer* ep) { // Compile all files. bool has_errors = false; std::stringstream struct_forward_decls{}; @@ -75,7 +75,7 @@ comp_result codegen_c::emit(codefiles *cf, gc_pool *token_pool) { } // We found errors during compile time if (has_errors) { - errors::print_errors(compiler_errors_); + ep->print_errors(compiler_errors_); LOG_COMP("found compile time errors"); return {true, ""}; } diff --git a/compiler/src/compiler/codegen_c.h b/compiler/src/compiler/codegen_c.h index 5f945ee0..88b3afe7 100644 --- a/compiler/src/compiler/codegen_c.h +++ b/compiler/src/compiler/codegen_c.h @@ -5,36 +5,36 @@ // Note: libs - MIT license, runtime/3rd - various // ============================================================================================== // GPLv3: -// +// // Yaksha - Programming Language. // Copyright (C) 2020 - 2024 Bhathiya Perera -// +// // This program is free software: you can redistribute it and/or modify it under the terms // of the GNU General Public License as published by the Free Software Foundation, // either version 3 of the License, or (at your option) any later version. -// +// // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY // or FITNESS FOR A PARTICULAR PURPOSE. // See the GNU General Public License for more details. -// +// // You should have received a copy of the GNU General Public License along with this program. // If not, see https://www.gnu.org/licenses/. -// +// // ============================================================================================== // Additional Terms: -// +// // Please note that any commercial use of the programming language's compiler source code // (everything except compiler/runtime, compiler/libs and compiler/3rd) require a written agreement // with author of the language (Bhathiya Perera). -// +// // If you are using it for an open source project, please give credits. // Your own project must use GPLv3 license with these additional terms. -// +// // You may use programs written in Yaksha/YakshaLisp for any legal purpose // (commercial, open-source, closed-source, etc) as long as it agrees // to the licenses of linked runtime libraries (see compiler/runtime/README.md). 
-// +// // ============================================================================================== // codegen_c.h #ifndef CODEGEN_C_H @@ -44,7 +44,7 @@ namespace yaksha { struct codegen_c : codegen { codegen_c(); ~codegen_c() override; - comp_result emit(codefiles *cf, gc_pool *token_pool) override; + comp_result emit(codefiles *cf, gc_pool *token_pool, errors::error_printer* ep) override; private: }; diff --git a/compiler/src/compiler/codegen_json.cpp b/compiler/src/compiler/codegen_json.cpp index 1589a9c9..231cd226 100644 --- a/compiler/src/compiler/codegen_json.cpp +++ b/compiler/src/compiler/codegen_json.cpp @@ -44,7 +44,7 @@ using namespace yaksha; codegen_json::codegen_json() = default; codegen_json::~codegen_json() = default; -comp_result codegen_json::emit(codefiles *cf, gc_pool *token_pool) { +comp_result codegen_json::emit(codefiles *cf, gc_pool *token_pool, errors::error_printer* ep) { // return {false, to_json.compile(cf->main_file_info_)}; std::stringstream multi_file_json{}; multi_file_json << "{\n"; diff --git a/compiler/src/compiler/codegen_json.h b/compiler/src/compiler/codegen_json.h index 73046ecb..7b17e2c5 100644 --- a/compiler/src/compiler/codegen_json.h +++ b/compiler/src/compiler/codegen_json.h @@ -45,7 +45,7 @@ namespace yaksha { struct codegen_json : codegen { codegen_json(); ~codegen_json() override; - comp_result emit(codefiles *cf, gc_pool *token_pool) override; + comp_result emit(codefiles *cf, gc_pool *token_pool, errors::error_printer* ep) override; private: }; diff --git a/compiler/src/compiler/multifile_compiler.cpp b/compiler/src/compiler/multifile_compiler.cpp index 1bcdeb70..7de097d1 100644 --- a/compiler/src/compiler/multifile_compiler.cpp +++ b/compiler/src/compiler/multifile_compiler.cpp @@ -68,7 +68,7 @@ comp_result multifile_compiler::compile(const std::string &code, bool use_code, codegen *code_generator) { LOG_COMP("compile:" << main_file); std::filesystem::path library_parent{libs_path}; - cf_ = new codefiles{library_parent}; + cf_ = new codefiles{library_parent, &error_printer_}; // Step 0) First of all, we initialize parsing // In this step, we initialize all files we know of at this point file_info *main_file_info; @@ -179,7 +179,7 @@ comp_result multifile_compiler::compile_all(codegen *code_generator) { LOG_COMP("dsv extract:" << f->filepath_.string()); if (!f->data_->dsv_->errors_.empty()) { LOG_COMP("dsv errors"); - errors::print_errors(f->data_->dsv_->errors_); + error_printer_.print_errors(f->data_->dsv_->errors_); has_errors = true; } delete builtins_obj; @@ -210,7 +210,7 @@ comp_result multifile_compiler::compile_all(codegen *code_generator) { } f->data_->type_checker_->check(f->data_->parser_->stmts_); if (!f->data_->type_checker_->errors_.empty()) { - errors::print_errors(f->data_->type_checker_->errors_); + error_printer_.print_errors(f->data_->type_checker_->errors_); LOG_COMP("type checker found errors: " << f->filepath_.string()); has_errors = true; } @@ -226,12 +226,12 @@ comp_result multifile_compiler::compile_all(codegen *code_generator) { usage_analyser ua{main_file_info}; ua.analyse(); if (!ua.errors_.empty()) { - errors::print_errors(ua.errors_); + error_printer_.print_errors(ua.errors_); LOG_COMP("usage analyser found errors"); return {true, ""}; } } - return code_generator->emit(cf_, &token_pool_); + return code_generator->emit(cf_, &token_pool_, &error_printer_); } void multifile_compiler::step_5_parse() { LOG_COMP("parsing: parsing to yaksha AST"); @@ -246,7 +246,7 @@ void multifile_compiler::step_5_parse() { 
f->step_ = scanning_step::PARSE_DONE; } else { std::cerr << "Failed to parse:" << f->filepath_ << "\n"; - errors::print_errors(f->data_->parser_->errors_); + error_printer_.print_errors(f->data_->parser_->errors_); f->step_ = scanning_step::FAILURE; } } @@ -266,7 +266,7 @@ void multifile_compiler::step_4_expand_macros() { } else { std::cerr << "Failed to expand dsl!{} in lisp-macro env:" << f->filepath_ << "\n"; - errors::print_errors(f->data_->parser_->errors_); + error_printer_.print_errors(f->data_->parser_->errors_); f->step_ = scanning_step::FAILURE; } } @@ -285,7 +285,7 @@ void multifile_compiler::step_3_macros_setup() { } else { std::cerr << "Failed to execute macros!{} in lisp-macro env:" << f->filepath_ << "\n"; - errors::print_errors(f->data_->parser_->errors_); + error_printer_.print_errors(f->data_->parser_->errors_); f->step_ = scanning_step::FAILURE; } } @@ -306,7 +306,7 @@ void multifile_compiler::step_2_initialize_preprocessor_env() { } catch (const parsing_error &ex) { /* redefining imports, etc */ std::cerr << "Failed to initialize lisp-macro env:" << f->filepath_ << "\n"; - errors::print_error(std::cerr, ex); + error_printer_.print_error(std::cerr, ex); f->step_ = scanning_step::FAILURE; } } @@ -325,7 +325,7 @@ void multifile_compiler::step_1_scan_macro_soup() { } else { std::cerr << "Failed to parse non-preprocessed tokens:" << f->filepath_ << "\n"; - errors::print_errors(f->data_->parser_->errors_); + error_printer_.print_errors(f->data_->parser_->errors_); f->step_ = scanning_step::FAILURE; } } @@ -354,6 +354,6 @@ bool multifile_compiler::has_invalid_main_func( multifile_compiler::~multifile_compiler() { delete cf_; } codefiles &multifile_compiler::get_codefiles() const { return *cf_; } comp_result do_nothing_codegen::emit(codefiles *cf, - gc_pool *token_pool) { + gc_pool *token_pool, errors::error_printer* ep) { return comp_result{false, ""}; } diff --git a/compiler/src/compiler/multifile_compiler.h b/compiler/src/compiler/multifile_compiler.h index 08644e90..0d418c08 100644 --- a/compiler/src/compiler/multifile_compiler.h +++ b/compiler/src/compiler/multifile_compiler.h @@ -43,12 +43,13 @@ #include "codegen.h" #include "compiler/comp_result.h" #include "tokenizer/token.h" +#include "utilities/error_printer.h" #include "utilities/gc_pool.h" #include namespace yaksha { struct do_nothing_codegen : codegen { ~do_nothing_codegen() override = default; - comp_result emit(codefiles *cf, gc_pool *token_pool) override; + comp_result emit(codefiles *cf, gc_pool *token_pool, errors::error_printer* ep) override; }; struct multifile_compiler { multifile_compiler() = default; @@ -63,6 +64,7 @@ namespace yaksha { bool main_required_ = true; bool check_types_ = true; bool usage_analysis_ = true; + errors::error_printer error_printer_{}; private: gc_pool token_pool_{}; diff --git a/compiler/src/dump.cpp b/compiler/src/dump.cpp index ffa7f7d7..16b61206 100644 --- a/compiler/src/dump.cpp +++ b/compiler/src/dump.cpp @@ -348,7 +348,7 @@ int main(int argc, char *argv[]) { tokenizer token_extractor{file_name, data, &token_pool}; token_extractor.tokenize(); if (!token_extractor.errors_.empty()) { - errors::print_errors(token_extractor.errors_); + mc.error_printer_.print_errors(token_extractor.errors_); return EXIT_FAILURE; } auto result = mc.compile(file_name, &cg); @@ -365,7 +365,7 @@ int main(int argc, char *argv[]) { display(*(main_files->data_->dsv_), *(main_files->data_->parser_), token_extractor, macro_env); } catch (parsing_error &p) { - std::cerr << "Parsing failed " << p.message_ 
<< "\n"; + mc.error_printer_.print_error(std::cerr, p); return EXIT_FAILURE; } return EXIT_SUCCESS; diff --git a/compiler/src/html_gen.cpp b/compiler/src/html_gen.cpp index eca15051..5d505a38 100644 --- a/compiler/src/html_gen.cpp +++ b/compiler/src/html_gen.cpp @@ -157,9 +157,10 @@ int main(int argc, char *argv[]) { std::string data((std::istreambuf_iterator(script_file)), std::istreambuf_iterator()); tokenizer token_extractor{file_name, data, &token_pool}; + errors::error_printer ep{}; token_extractor.tokenize(); if (!token_extractor.errors_.empty()) { - errors::print_errors(token_extractor.errors_); + ep.print_errors(token_extractor.errors_); return EXIT_FAILURE; } // Now to print stuff diff --git a/compiler/src/lisp_main.cpp b/compiler/src/lisp_main.cpp index 75e51501..a3b2fab5 100644 --- a/compiler/src/lisp_main.cpp +++ b/compiler/src/lisp_main.cpp @@ -57,6 +57,7 @@ int main(int argc, char *argv[]) { return lisp_execute_file(argv[1]); } int lisp_execute_file(char *file_path) { + errors::error_printer ep{}; std::string file_name{file_path}; std::ifstream script_file(file_name); if (!script_file.good()) { @@ -69,13 +70,13 @@ int lisp_execute_file(char *file_path) { yaksha_lisp_tokenizer *tokenizer = mm.create_tokenizer(); tokenizer->tokenize(file_name, data, mm.get_yk_token_pool()); if (!tokenizer->errors_.empty()) { - errors::print_errors(tokenizer->errors_); + ep.print_errors(tokenizer->errors_); return EXIT_FAILURE; } yaksha_lisp_parser *parser = mm.create_parser(tokenizer); parser->parse(); if (!parser->errors_.empty()) { - errors::print_errors(parser->errors_); + ep.print_errors(parser->errors_); return EXIT_FAILURE; } std::string f_path{file_path}; @@ -85,29 +86,29 @@ int lisp_execute_file(char *file_path) { try { environment->eval(parser->exprs_); } catch (parsing_error &ex) { - errors::print_error(std::cerr, ex); + ep.print_error(std::cerr, ex); return EXIT_FAILURE; } return EXIT_SUCCESS; } void eval_line(const std::string &code, yaksha_envmap *environment, - yaksha_macros *mm) { + yaksha_macros *mm, errors::error_printer &ep) { yaksha_lisp_tokenizer *tokenizer = mm->create_tokenizer(); tokenizer->tokenize("repl.lisp", code, mm->get_yk_token_pool()); if (!tokenizer->errors_.empty()) { - errors::print_errors(tokenizer->errors_); + ep.print_errors(tokenizer->errors_); return; } yaksha_lisp_parser *parser = mm->create_parser(tokenizer); parser->parse(); if (!parser->errors_.empty()) { - errors::print_errors(parser->errors_); + ep.print_errors(parser->errors_); return; } try { auto result = environment->eval(parser->exprs_); if (result != nullptr) { std::cout << colours::cyan("-> ") << result; } - } catch (parsing_error &ex) { errors::print_error(std::cerr, ex); } + } catch (parsing_error &ex) { ep.print_error(std::cerr, ex); } } int lisp_repl() { std::cout << colours::red(" \n" @@ -140,6 +141,7 @@ int lisp_repl() { std::cout << "\n"; #endif bool exit = false; + errors::error_printer ep{}; while (!exit) { std::string line{}; std::cout << colours::green(">> "); @@ -149,7 +151,7 @@ int lisp_repl() { exit = true; continue; } - eval_line(line, environment, &mm); + eval_line(line, environment, &mm, ep); std::cout << "\n"; } return EXIT_SUCCESS; diff --git a/compiler/src/utilities/error_printer.cpp b/compiler/src/utilities/error_printer.cpp index 361a950e..d4a4b573 100644 --- a/compiler/src/utilities/error_printer.cpp +++ b/compiler/src/utilities/error_printer.cpp @@ -38,8 +38,46 @@ // ============================================================================================== // Note 
this file only has things to compile during TESTING #include "utilities/error_printer.h" -#ifdef YAKSHA_TESTING namespace yaksha::errors { - std::vector error_capture{}; -} + void error_printer::print_token(std::ostream &output, const token &tok) { + output << tok.file_ << colours::green(":") << tok.line_ + 1 + << colours::green(":") << tok.pos_; + if (tok.type_ == token_type::END_OF_FILE) { + output << " at EOF"; + } else if (tok.type_ != token_type::TK_UNKNOWN_TOKEN_DETECTED) { + output << " at " + << colours::cyan(string_utils::repr_string(tok.original_)); + } else { + output << " "; + } + } + void error_printer::print_error(std::ostream &output, + const parsing_error &err) { +#ifdef YAKSHA_TESTING + error_capture.push_back(err.message_); +#endif + if (!err.token_set_) { + output << colours::red(err.message_); + return; + } + auto tok = err.tok_; + print_token(output, tok); + output << " --> " << colours::red(err.message_); + } +#ifdef YAKSHA_TESTING + bool error_printer::has_error(const std::string &error_message) { + for (auto &e : error_capture) { + if (e == error_message) { return true; } + } + return false; + } + bool error_printer::has_any_error() { return !error_capture.empty(); } + bool error_printer::has_no_errors() { return error_capture.empty(); } #endif + void error_printer::print_errors(const std::vector &errors) { + for (auto &err : errors) { + print_error(std::cerr, err); + std::cerr << "\n"; + } + } +}// namespace yaksha::errors \ No newline at end of file diff --git a/compiler/src/utilities/error_printer.h b/compiler/src/utilities/error_printer.h index c1f4a23f..ae35fb38 100644 --- a/compiler/src/utilities/error_printer.h +++ b/compiler/src/utilities/error_printer.h @@ -47,63 +47,37 @@ #include #include namespace yaksha::errors { + struct error_printer { + error_printer() = default; + /** + * Print a problematic token + * @param output where to write the output to + * @param tok token object + */ + void print_token(std::ostream &output, const token &tok); + /** + * Print a single error + * @param output where to write the error message + * @param err error object + */ + void print_error(std::ostream &output, const parsing_error &err); #ifdef YAKSHA_TESTING - // Note: this is defined in error_printer.cpp - // This is used for type checker testing - extern std::vector error_capture; + bool has_error(const std::string &error_message); + bool has_any_error(); + bool has_no_errors(); #endif - /** - * Print a problematic token - * @param output where to write the output to - * @param tok token object - */ - static inline void print_token(std::ostream &output, const token &tok) { - output << tok.file_ << colours::green(":") << tok.line_ + 1 - << colours::green(":") << tok.pos_; - if (tok.type_ == token_type::END_OF_FILE) { - output << " at EOF"; - } else if (tok.type_ != token_type::TK_UNKNOWN_TOKEN_DETECTED) { - output << " at " - << colours::cyan(string_utils::repr_string(tok.original_)); - } else { - output << " "; - } - } - /** - * Print a single error - * @param output where to write the error message - * @param err error object - */ - static inline void print_error(std::ostream &output, - const parsing_error &err) { + /** + * Print a vector of errors to std::cerr + * @param errors errors to print + */ + void print_errors(const std::vector &errors); + +private: #ifdef YAKSHA_TESTING - error_capture.push_back(err.message_); + // Note: this is defined in error_printer.cpp + // This is used for type checker testing + std::vector error_capture{}; #endif - if (!err.token_set_) 
{ - output << colours::red(err.message_); - return; - } - auto tok = err.tok_; - print_token(output, tok); - output << " --> " << colours::red(err.message_); - } -#ifdef YAKSHA_TESTING - static bool has_error(const std::string &error_message) { - for (auto &e : error_capture) { - if (e == error_message) { return true; } - } - return false; - } -#endif - /** - * Print a vector of errors to std::cerr - * @param errors errors to print - */ - static inline void print_errors(const std::vector &errors) { - for (auto &err : errors) { - print_error(std::cerr, err); - std::cerr << "\n"; - } - } + }; }// namespace yaksha::errors #endif diff --git a/compiler/src/viz_main.cpp b/compiler/src/viz_main.cpp index 758301ac..0948a17c 100644 --- a/compiler/src/viz_main.cpp +++ b/compiler/src/viz_main.cpp @@ -63,9 +63,10 @@ int main(int argc, char *argv[]) { std::istreambuf_iterator()); gc_pool token_pool{}; tokenizer t{file_name, data, &token_pool}; + errors::error_printer ep{}; t.tokenize(); if (!t.errors_.empty()) { - errors::print_errors(t.errors_); + ep.print_errors(t.errors_); return EXIT_FAILURE; } block_analyzer b{t.tokens_, &token_pool}; @@ -75,7 +76,7 @@ int main(int argc, char *argv[]) { parser p{file_name, b.tokens_, &dt_pool}; auto tree = p.parse(); if (tree.empty() || !p.errors_.empty()) { - errors::print_errors(p.errors_); + ep.print_errors(p.errors_); write_token_dump(std::cerr, b.tokens_); return EXIT_FAILURE; } diff --git a/compiler/tests/fuzz_main.cpp b/compiler/tests/fuzz_main.cpp index bcf01e17..3556e782 100644 --- a/compiler/tests/fuzz_main.cpp +++ b/compiler/tests/fuzz_main.cpp @@ -39,6 +39,7 @@ #include "ast/ast_printer.h" #include "ast/ast_vis.h" #include "ast/parser.h" +#include "compiler/codegen_c.h" #include "compiler/multifile_compiler.h" #include "compiler/type_checker.h" #include "file_formats/tokens_file.h" @@ -47,12 +48,13 @@ #include "utilities/error_printer.h" using namespace yaksha; void test_ast(const std::string &data, const std::string &file_name) { + errors::error_printer ep{}; gc_pool token_pool{}; tokenizer t{file_name, data, &token_pool}; ykdt_pool dt_pool{}; t.tokenize(); if (!t.errors_.empty()) { - errors::print_errors(t.errors_); + ep.print_errors(t.errors_); return; } block_analyzer b{t.tokens_, &token_pool}; @@ -67,7 +69,7 @@ void test_ast(const std::string &data, const std::string &file_name) { ast_vis vr{}; vr.print(tree); } else { - errors::print_errors(p.errors_); + ep.print_errors(p.errors_); return; } } catch (parsing_error &p) { @@ -78,7 +80,8 @@ void test_ast(const std::string &data, const std::string &file_name) { } void test_compiler(const std::string &filepath) { multifile_compiler mc{}; - auto result = mc.compile(filepath, "/app/libs"); + codegen_c cg{}; + auto result = mc.compile(filepath, "/app/libs", &cg); std::cout << "Success : " << (result.failed_ ? 
"No\n" : "Yes\n"); std::cout << result.code_ << "\n"; } @@ -89,7 +92,8 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { std::string lib_path = "/app/libs"; test_ast(code, fname); multifile_compiler mc{}; - mc.compile(code, true, fname, lib_path); + codegen_c cg{}; + mc.compile(code, true, fname, lib_path, &cg); return 0; } #else diff --git a/compiler/tests/test_parser.cpp b/compiler/tests/test_parser.cpp index 0773744f..f02e30fb 100644 --- a/compiler/tests/test_parser.cpp +++ b/compiler/tests/test_parser.cpp @@ -112,8 +112,8 @@ static void TEST_SNIPPET_FULL(const std::string &S, const std::string &E) { const std::string &xa = S; auto result = mc.compile(xa, true, "dummy.yaka", ".", &cg); REQUIRE(result.failed_ == true); - REQUIRE(!yaksha::errors::error_capture.empty()); - REQUIRE(yaksha::errors::has_error(E)); + REQUIRE(mc.error_printer_.has_any_error()); + REQUIRE(mc.error_printer_.has_error(E)); } static void TEST_SNIPPET_FULL_OK(const std::string &S) { multifile_compiler mc{}; @@ -121,7 +121,7 @@ static void TEST_SNIPPET_FULL_OK(const std::string &S) { const std::string &xa = S; auto result = mc.compile(xa, true, "dummy.yaka", ".", &cg); REQUIRE(result.failed_ == false); - REQUIRE(yaksha::errors::error_capture.empty()); + REQUIRE(mc.error_printer_.has_no_errors()); } TEST_CASE("parser: Hello World") { test_parser_yaka_file("../test_data/compiler_tests/test1.yaka"); diff --git a/compiler/tests/test_type_checker.cpp b/compiler/tests/test_type_checker.cpp index 82dfe5d5..b68fead6 100644 --- a/compiler/tests/test_type_checker.cpp +++ b/compiler/tests/test_type_checker.cpp @@ -44,25 +44,22 @@ using namespace yaksha; static void test_typechecker_yaka_file(const std::string &A, const std::string &E) { - yaksha::errors::error_capture.clear(); multifile_compiler mc{}; codegen_c cg{}; auto result = mc.compile(A, &cg); REQUIRE(result.failed_ == true); - REQUIRE(!yaksha::errors::error_capture.empty()); - REQUIRE(yaksha::errors::has_error(E)); + REQUIRE(mc.error_printer_.has_any_error()); + REQUIRE(mc.error_printer_.has_error(E)); } static void test_typechecker_ok_yaka_file(const std::string &A) { - yaksha::errors::error_capture.clear(); multifile_compiler mc{}; codegen_c cg{}; auto result = mc.compile(A, &cg); REQUIRE(result.failed_ == false); - REQUIRE(yaksha::errors::error_capture.empty()); + REQUIRE(mc.error_printer_.has_no_errors()); } static void test_typechecker_snippet(const std::string &S, const std::string &E) { - yaksha::errors::error_capture.clear(); multifile_compiler mc{}; codegen_c cg{}; std::string xa = "def main() -> int:\n"; @@ -72,11 +69,10 @@ static void test_typechecker_snippet(const std::string &S, " return 0"; auto result = mc.compile(xa, true, "dummy.yaka", "../libs", &cg); REQUIRE(result.failed_ == true); - REQUIRE(!yaksha::errors::error_capture.empty()); - REQUIRE(yaksha::errors::has_error(E)); + REQUIRE(mc.error_printer_.has_any_error()); + REQUIRE(mc.error_printer_.has_error(E)); } static void test_typechecker_snippet_ok(const std::string &S) { - yaksha::errors::error_capture.clear(); multifile_compiler mc{}; codegen_c cg{}; std::string xa = "def main() -> int:\n"; @@ -86,27 +82,25 @@ static void test_typechecker_snippet_ok(const std::string &S) { " return 0"; auto result = mc.compile(xa, true, "dummy.yaka", "../libs", &cg); REQUIRE(result.failed_ == false); - REQUIRE(yaksha::errors::error_capture.empty()); + REQUIRE(mc.error_printer_.has_no_errors()); } static void test_typechecker_snippet_full(const std::string &S, const std::string &E) { - 
yaksha::errors::error_capture.clear(); multifile_compiler mc{}; codegen_c cg{}; const std::string &xa = S; auto result = mc.compile(xa, true, "dummy.yaka", "../libs", &cg); REQUIRE(result.failed_ == true); - REQUIRE(!yaksha::errors::error_capture.empty()); - REQUIRE(yaksha::errors::has_error(E)); + REQUIRE(mc.error_printer_.has_any_error()); + REQUIRE(mc.error_printer_.has_error(E)); } static void test_typechecker_snippet_full_ok(const std::string &S) { - yaksha::errors::error_capture.clear(); multifile_compiler mc{}; codegen_c cg{}; const std::string &xa = S; auto result = mc.compile(xa, true, "dummy.yaka", "../libs", &cg); REQUIRE(result.failed_ == false); - REQUIRE(yaksha::errors::error_capture.empty()); + REQUIRE(mc.error_printer_.has_no_errors()); } TEST_CASE("type checker: Bad function for qsort") { test_typechecker_yaka_file(