|
7 | 7 | #include "../type/clash_types.h" |
8 | 8 | #include "../type/types.h" |
9 | 9 |
|
// Search roots for `import` resolution. Paths are tried in insertion order
// by keyword_import; the first path containing the requested module wins.
CStringVector global_library_paths = { 0 };
|
12 | 12 | // TODO: (organizational) move some of these functions out of this file |
13 | 13 |
|
14 | 14 | Node* keyword_import(const Token token, Parser* parser) { |
15 | | - String import_path = strf(0, global_library_path); |
| 15 | + String sub_path = { 0 }; |
16 | 16 | Trace full_trace = token.trace; |
17 | 17 |
|
18 | 18 | do { |
19 | 19 | const Trace section = expect(parser->tokenizer, TokenIdentifier).trace; |
20 | | - strf(&import_path, "/%.*s", PRINT(section.source)); |
| 20 | + strf(&sub_path, "/%.*s", PRINT(section.source)); |
21 | 21 | full_trace = stretch(full_trace, section); |
22 | 22 | } while(try(parser->tokenizer, TokenDoubleColon, NULL)); |
23 | 23 | expect(parser->tokenizer, ';'); |
24 | 24 |
|
25 | | - strf(&import_path, ".qk"); |
26 | | - push(&import_path, '\0'); |
| 25 | + strf(&sub_path, ".qk"); |
27 | 26 |
|
28 | | - char* input_content = fs_readfile(import_path.data); |
29 | | - if(!input_content) { |
30 | | - push(parser->tokenizer->messages, |
31 | | - REPORT_ERR(full_trace, strf(0, "unable to open or read '%.*s'", PRINT(import_path)))); |
32 | | - return new_node((Node) { NodeNone }); |
33 | | - } |
| 27 | + String import_path = { 0 }; |
| 28 | + for(size_t i = 0; i < global_library_paths.size; i++) { |
| 29 | + strf(&import_path, "%s%.*s%c", global_library_paths.data[i], PRINT(sub_path), 0); |
| 30 | + char* input_content = fs_readfile(import_path.data); |
| 31 | + |
| 32 | + if(!input_content) { |
| 33 | + import_path.size = 0; |
| 34 | + continue; |
| 35 | + } |
34 | 36 |
|
35 | | - Tokenizer import_tokenizer = new_tokenizer(import_path.data, input_content, parser->tokenizer->messages); |
36 | | - Tokenizer* const tokenizer = parser->tokenizer; |
37 | | - parser->tokenizer = &import_tokenizer; |
| 37 | + Tokenizer import_tokenizer = new_tokenizer(import_path.data, input_content, parser->tokenizer->messages); |
| 38 | + Tokenizer* const tokenizer = parser->tokenizer; |
| 39 | + parser->tokenizer = &import_tokenizer; |
38 | 40 |
|
39 | | - Scope* scope = new_scope(NULL); |
40 | | - scope->children = collect_until(parser, &statement, 0, 0); |
41 | | - parser->tokenizer = tokenizer; |
42 | | - return (void*) scope; |
| 41 | + Scope* scope = new_scope(NULL); |
| 42 | + scope->children = collect_until(parser, &statement, 0, 0); |
| 43 | + parser->tokenizer = tokenizer; |
| 44 | + return (void*) scope; |
| 45 | + } |
| 46 | + |
| 47 | + push(parser->tokenizer->messages, |
| 48 | + REPORT_ERR(full_trace, strf(0, "unable to open or read '%.*s'", PRINT(import_path)))); |
| 49 | + return new_node((Node) { NodeNone }); |
43 | 50 | } |
44 | 51 |
|
45 | 52 | Node* keyword_return(const Token token, Parser* parser) { |
|
0 commit comments