added stats printing with '--verbose' flag
author: Brendan Hansen <brendan.f.hansen@gmail.com>
Sun, 13 Dec 2020 01:28:12 +0000 (19:28 -0600)
committer: Brendan Hansen <brendan.f.hansen@gmail.com>
Sun, 13 Dec 2020 01:28:12 +0000 (19:28 -0600)
docs/todo
include/onyxlex.h
onyx
src/onyx.c
src/onyxlex.c

index 2d1a728ed0e91bcee07b4f9fc61218362b81cdf3..b30dff4cfdc58b2d4fd9838a7e7e07a169d19062 100644 (file)
--- a/docs/todo
+++ b/docs/todo
@@ -10,8 +10,8 @@ Command Line Interface:
         - Running the binary right away using Wasmtime, Wasmer or NodeJS
 
     [X] Remove old code from CLI logic
+    [X] Add statistic printing
     [ ] Fix documentation generation (broken since compiler architecture change) 
-    [ ] Add statistic printing
     [ ] Fix AST printing (broken for a long time)
     [ ] Add automated running component
         - Store to temporary file (OS independent)
index c4cbebf237e45641524eca88948150085ceb130e..3e4cf559fb1183ae911ae3653614024af6580f81 100644 (file)
@@ -3,6 +3,10 @@
 
 #include "bh.h"
 
+// NOTE: Used for global statistics
+extern u64 lexer_lines_processed;
+extern u64 lexer_tokens_processed;
+
 typedef enum TokenType {
     Token_Type_Ascii_End            = 256,
     Token_Type_Unknown              = 256,
diff --git a/onyx b/onyx
index ade4212141e9f33641791876032393253fe8c7a2..de21dca2cce922ab2830d5a0d010175921835528 100755 (executable)
Binary files a/onyx and b/onyx differ
index a0496c9798b9a5a82d2718242d65a13bf3071431..e65d9f5d260909daef59e84febc5ea45e7ee890a 100644 (file)
@@ -217,16 +217,9 @@ static char* lookup_included_file(CompilerState* cs, char* filename) {
 }
 
 static ParseResults parse_source_file(CompilerState* compiler_state, bh_file_contents* file_contents) {
-    // NOTE: Maybe don't want to recreate the tokenizer and parser for every file
-    if (compiler_state->options->verbose_output)
-        bh_printf("[Lexing]       %s\n", file_contents->filename);
-
     OnyxTokenizer tokenizer = onyx_tokenizer_create(compiler_state->token_alloc, file_contents);
     onyx_lex_tokens(&tokenizer);
 
-    if (compiler_state->options->verbose_output)
-        bh_printf("[Parsing]      %s\n", file_contents->filename);
-
     OnyxParser parser = onyx_parser_create(compiler_state->ast_alloc, &tokenizer, &compiler_state->prog_info);
     return onyx_parse(&parser);
 }
@@ -381,9 +374,6 @@ static CompilerProgress process_source_file(CompilerState* compiler_state, char*
         return ONYX_COMPILER_PROGRESS_FAILED_READ;
     }
 
-    if (compiler_state->options->verbose_output)
-        bh_printf("[Reading]      %s\n", file.filename);
-
     bh_file_contents fc = bh_file_read_contents(compiler_state->token_alloc, &file);
     bh_file_close(&file);
 
@@ -401,6 +391,9 @@ static CompilerProgress process_source_file(CompilerState* compiler_state, char*
     bh_table_put(bh_file_contents, compiler_state->loaded_files, (char *) filename, fc);
     fc = bh_table_get(bh_file_contents, compiler_state->loaded_files, (char *) filename);
 
+    if (compiler_state->options->verbose_output)
+        bh_printf("Processing source file:    %s\n", file.filename);
+
     ParseResults results = parse_source_file(compiler_state, &fc);
     merge_parse_results(compiler_state, &results);
 
@@ -451,6 +444,8 @@ static b32 process_entity(CompilerState* compiler_state, Entity* ent) {
 
 
 static i32 onyx_compile(CompilerState* compiler_state) {
+    u64 start_time = bh_time_curr();
+
     {
         entity_heap_insert(&compiler_state->prog_info.entities, ((Entity) {
             .state = Entity_State_Resolve_Symbols,
@@ -494,10 +489,25 @@ static i32 onyx_compile(CompilerState* compiler_state) {
     }
 
     if (compiler_state->options->verbose_output)
-        bh_printf("[Writing WASM] %s\n", output_file.filename);
+        bh_printf("Outputting to WASM file:   %s\n", output_file.filename);
 
     onyx_wasm_module_write_to_file(&global_wasm_module, output_file);
 
+    u64 duration = bh_time_duration(start_time);
+    
+    if (compiler_state->options->verbose_output) {
+        bh_printf("\nStatistics:\n");
+        bh_printf("    Time taken: %l.%l seconds\n",
+                duration / 1000, duration % 1000);
+        bh_printf("    Processed %l lines (%f lines/second).\n",
+                lexer_lines_processed,
+                ((f32) 1000 * lexer_lines_processed) / (duration));
+        bh_printf("    Processed %l tokens (%f tokens/second).\n",
+                lexer_tokens_processed,
+                ((f32) 1000 * lexer_tokens_processed) / (duration));
+        bh_printf("\n");
+    }
+
     return ONYX_COMPILER_PROGRESS_SUCCESS;
 }
 
@@ -549,8 +559,6 @@ int main(int argc, char *argv[]) {
             break;
 
         case ONYX_COMPILER_PROGRESS_SUCCESS:
-            if (compile_opts.verbose_output)
-                bh_printf("Successfully compiled to '%s'\n", compile_opts.target_file);
             break;
     }
 
index 386fb508a2c513493196d69de60efe8745cd6e3e..9a678ad9210890bf00c873b6d06295d72df3c249 100644 (file)
@@ -2,6 +2,9 @@
 #include "onyxlex.h"
 #include "onyxutils.h"
 
+u64 lexer_lines_processed = 0;
+u64 lexer_tokens_processed = 0;
+
 static const char* token_type_names[] = {
     "TOKEN_TYPE_UNKNOWN",
     "TOKEN_TYPE_END_STREAM",
@@ -430,4 +433,7 @@ void onyx_lex_tokens(OnyxTokenizer* tokenizer) {
     do {
         tk = onyx_get_token(tokenizer);
     } while (tk->type != Token_Type_End_Stream);
+
+    lexer_lines_processed += tokenizer->line_number - 1;
+    lexer_tokens_processed += bh_arr_length(tokenizer->tokens);
 }