From: Brendan Hansen
Date: Sun, 13 Dec 2020 01:28:12 +0000 (-0600)
Subject: added stats printing with '--verbose' flag
X-Git-Url: https://git.brendanfh.com/?a=commitdiff_plain;h=bf8ca2d1aa76ec28d81c9150d98e70a4837fdaa3;p=onyx.git

added stats printing with '--verbose' flag
---

diff --git a/docs/todo b/docs/todo
index 2d1a728e..b30dff4c 100644
--- a/docs/todo
+++ b/docs/todo
@@ -10,8 +10,8 @@ Command Line Interface:
 
         - Running the binary right away using Wasmtime, Wasmer or NodeJS
     [X] Remove old code from CLI logic
+    [X] Add statistic printing
     [ ] Fix documentation generation (broken since compiler architecture change)
-    [ ] Add statistic printing
     [ ] Fix AST printing (broken for a long time)
     [ ] Add automated running component
         - Store to temporary file (OS independent)
diff --git a/include/onyxlex.h b/include/onyxlex.h
index c4cbebf2..3e4cf559 100644
--- a/include/onyxlex.h
+++ b/include/onyxlex.h
@@ -3,6 +3,10 @@
 
 #include "bh.h"
 
+// NOTE: Used for global statistics
+extern u64 lexer_lines_processed;
+extern u64 lexer_tokens_processed;
+
 typedef enum TokenType {
     Token_Type_Ascii_End = 256,
     Token_Type_Unknown = 256,
diff --git a/onyx b/onyx
index ade42121..de21dca2 100755
Binary files a/onyx and b/onyx differ
diff --git a/src/onyx.c b/src/onyx.c
index a0496c97..e65d9f5d 100644
--- a/src/onyx.c
+++ b/src/onyx.c
@@ -217,16 +217,9 @@ static char* lookup_included_file(CompilerState* cs, char* filename) {
 }
 
 static ParseResults parse_source_file(CompilerState* compiler_state, bh_file_contents* file_contents) {
-    // NOTE: Maybe don't want to recreate the tokenizer and parser for every file
-    if (compiler_state->options->verbose_output)
-        bh_printf("[Lexing] %s\n", file_contents->filename);
-
     OnyxTokenizer tokenizer = onyx_tokenizer_create(compiler_state->token_alloc, file_contents);
     onyx_lex_tokens(&tokenizer);
 
-    if (compiler_state->options->verbose_output)
-        bh_printf("[Parsing] %s\n", file_contents->filename);
-
     OnyxParser parser = onyx_parser_create(compiler_state->ast_alloc, &tokenizer, &compiler_state->prog_info);
     return onyx_parse(&parser);
 }
@@ -381,9 +374,6 @@ static CompilerProgress process_source_file(CompilerState* compiler_state, char*
         return ONYX_COMPILER_PROGRESS_FAILED_READ;
     }
 
-    if (compiler_state->options->verbose_output)
-        bh_printf("[Reading] %s\n", file.filename);
-
     bh_file_contents fc = bh_file_read_contents(compiler_state->token_alloc, &file);
     bh_file_close(&file);
 
@@ -401,6 +391,9 @@ static CompilerProgress process_source_file(CompilerState* compiler_state, char*
     bh_table_put(bh_file_contents, compiler_state->loaded_files, (char *) filename, fc);
     fc = bh_table_get(bh_file_contents, compiler_state->loaded_files, (char *) filename);
 
+    if (compiler_state->options->verbose_output)
+        bh_printf("Processing source file: %s\n", file.filename);
+
     ParseResults results = parse_source_file(compiler_state, &fc);
     merge_parse_results(compiler_state, &results);
 
@@ -451,6 +444,8 @@ static b32 process_entity(CompilerState* compiler_state, Entity* ent) {
 
 static i32 onyx_compile(CompilerState* compiler_state) {
 
+    u64 start_time = bh_time_curr();
+
     {
         entity_heap_insert(&compiler_state->prog_info.entities, ((Entity) {
             .state = Entity_State_Resolve_Symbols,
@@ -494,10 +489,25 @@ static i32 onyx_compile(CompilerState* compiler_state) {
     }
 
     if (compiler_state->options->verbose_output)
-        bh_printf("[Writing WASM] %s\n", output_file.filename);
+        bh_printf("Outputting to WASM file: %s\n", output_file.filename);
 
     onyx_wasm_module_write_to_file(&global_wasm_module, output_file);
 
+    u64 duration = bh_time_duration(start_time);
+
+    if (compiler_state->options->verbose_output) {
+        bh_printf("\nStatistics:\n");
+        bh_printf("    Time taken: %l.%l seconds\n",
+                duration / 1000, duration % 1000);
+        bh_printf("    Processed %l lines (%f lines/second).\n",
+                lexer_lines_processed,
+                ((f32) 1000 * lexer_lines_processed) / (duration));
+        bh_printf("    Processed %l tokens (%f tokens/second).\n",
+                lexer_tokens_processed,
+                ((f32) 1000 * lexer_tokens_processed) / (duration));
+        bh_printf("\n");
+    }
+
     return ONYX_COMPILER_PROGRESS_SUCCESS;
 }
 
@@ -549,8 +559,6 @@ int main(int argc, char *argv[]) {
             break;
 
         case ONYX_COMPILER_PROGRESS_SUCCESS:
-            if (compile_opts.verbose_output)
-                bh_printf("Successfully compiled to '%s'\n", compile_opts.target_file);
             break;
     }
 
diff --git a/src/onyxlex.c b/src/onyxlex.c
index 386fb508..9a678ad9 100644
--- a/src/onyxlex.c
+++ b/src/onyxlex.c
@@ -2,6 +2,9 @@
 #include "onyxlex.h"
 #include "onyxutils.h"
 
+u64 lexer_lines_processed = 0;
+u64 lexer_tokens_processed = 0;
+
 static const char* token_type_names[] = {
     "TOKEN_TYPE_UNKNOWN",
     "TOKEN_TYPE_END_STREAM",
@@ -430,4 +433,7 @@ void onyx_lex_tokens(OnyxTokenizer* tokenizer) {
     do {
        tk = onyx_get_token(tokenizer);
    } while (tk->type != Token_Type_End_Stream);
+
+    lexer_lines_processed += tokenizer->line_number - 1;
+    lexer_tokens_processed += bh_arr_length(tokenizer->tokens);
 }
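
For reference, below is a minimal, self-contained C sketch of the statistics report this commit adds behind the '--verbose' flag. It is an illustration, not the compiler's actual code: the helpers time_curr_ms and print_statistics are hypothetical names introduced here, standard C timing and printf stand in for the bh_time_curr, bh_time_duration and bh_printf helpers from bh.h (whose signatures are not shown in this diff), and a toy line/token count plays the role of the lexer_lines_processed / lexer_tokens_processed globals.

/* Hypothetical standalone sketch; not part of the commit above. */
#define _POSIX_C_SOURCE 199309L
#include <stdio.h>
#include <stdint.h>
#include <time.h>

/* Millisecond monotonic clock, standing in for bh_time_curr()/bh_time_duration(). */
static uint64_t time_curr_ms(void) {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return (uint64_t) ts.tv_sec * 1000u + (uint64_t) ts.tv_nsec / 1000000u;
}

/* Prints a report shaped like the one onyx_compile() now emits under --verbose. */
static void print_statistics(uint64_t duration_ms, uint64_t lines, uint64_t tokens) {
    if (duration_ms == 0) duration_ms = 1;   /* guard the divisions for tiny inputs */

    printf("\nStatistics:\n");
    printf("    Time taken: %llu.%03llu seconds\n",
           (unsigned long long) (duration_ms / 1000),
           (unsigned long long) (duration_ms % 1000));
    printf("    Processed %llu lines (%f lines/second).\n",
           (unsigned long long) lines,
           (1000.0f * (float) lines) / (float) duration_ms);
    printf("    Processed %llu tokens (%f tokens/second).\n",
           (unsigned long long) tokens,
           (1000.0f * (float) tokens) / (float) duration_ms);
}

int main(void) {
    const char *source = "x := 10\ny := x + 2\nprint(y)\n";   /* toy input */
    uint64_t lines = 0, tokens = 0;
    uint64_t start_time = time_curr_ms();

    /* Toy "lexer": count lines and whitespace-separated tokens, analogous to the
     * counters the real compiler accumulates while lexing each source file. */
    for (const char *c = source; *c; c++)
        if (*c == '\n') lines++;
    for (const char *c = source; *c; ) {
        while (*c == ' ' || *c == '\n') c++;
        if (*c) {
            tokens++;
            while (*c && *c != ' ' && *c != '\n') c++;
        }
    }

    print_statistics(time_curr_ms() - start_time, lines, tokens);
    return 0;
}

Built with any C99/POSIX compiler, this prints a report of the same shape as the one added to onyx_compile(); since the toy input lexes in well under a millisecond, the duration is clamped to 1 ms and the printed rates are only illustrative.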