From 5b5ca6da4d48c880d96cf266e8e65a7063bd0541 Mon Sep 17 00:00:00 2001
From: Noah Groleau <98351050+NoahGJAC@users.noreply.github.com>
Date: Fri, 5 Jul 2024 15:53:06 -0400
Subject: [PATCH] Add columns to error messages

---
 src/parser.hpp       |  5 +++--
 src/tokenization.hpp | 39 ++++++++++++++++++++++-----------------
 2 files changed, 25 insertions(+), 19 deletions(-)

diff --git a/src/parser.hpp b/src/parser.hpp
index c6c9280..ee0a76b 100644
--- a/src/parser.hpp
+++ b/src/parser.hpp
@@ -112,7 +112,8 @@ class Parser {
 
     void error_expected(const std::string& msg) const
     {
-        std::cerr << "[Parse Error] Expected " << msg << " on line " << peek(-1).value().line << std::endl;
+        const Token tok = peek(-1).value();
+        std::cerr << "[Parse Error] Expected " << msg << " on line " << tok.line << " at column " << tok.col << std::endl;
         exit(EXIT_FAILURE);
     }
 
@@ -161,7 +162,7 @@ class Parser {
             else {
                 break;
             }
-            const auto [type, line, value] = consume();
+            const auto [type, line, col, value] = consume();
             const int next_min_prec = prec.value() + 1;
             auto expr_rhs = parse_expr(next_min_prec);
             if (!expr_rhs.has_value()) {
diff --git a/src/tokenization.hpp b/src/tokenization.hpp
index 9fa7d6f..741175c 100644
--- a/src/tokenization.hpp
+++ b/src/tokenization.hpp
@@ -81,6 +81,7 @@ inline std::optional<int> bin_prec(const TokenType type)
 
 struct Token {
     TokenType type;
     int line;
+    int col;
     std::optional<std::string> value {};
 };
@@ -103,27 +104,27 @@ class Tokenizer {
                     buf.push_back(consume());
                 }
                 if (buf == "exit") {
-                    tokens.push_back({ TokenType::exit, line_count });
+                    tokens.push_back({ TokenType::exit, line_count, column_count });
                     buf.clear();
                 }
                 else if (buf == "let") {
-                    tokens.push_back({ TokenType::let, line_count });
+                    tokens.push_back({ TokenType::let, line_count, column_count });
                     buf.clear();
                 }
                 else if (buf == "if") {
-                    tokens.push_back({ TokenType::if_, line_count });
+                    tokens.push_back({ TokenType::if_, line_count, column_count });
                     buf.clear();
                 }
                 else if (buf == "elif") {
-                    tokens.push_back({ TokenType::elif, line_count });
+                    tokens.push_back({ TokenType::elif, line_count, column_count });
                     buf.clear();
                 }
                 else if (buf == "else") {
-                    tokens.push_back({ TokenType::else_, line_count });
+                    tokens.push_back({ TokenType::else_, line_count, column_count });
                     buf.clear();
                 }
                 else {
-                    tokens.push_back({ TokenType::ident, line_count, buf });
+                    tokens.push_back({ TokenType::ident, line_count, column_count, buf });
                     buf.clear();
                 }
             }
@@ -132,7 +133,7 @@ class Tokenizer {
                 while (peek().has_value() && std::isdigit(peek().value())) {
                     buf.push_back(consume());
                 }
-                tokens.push_back({ TokenType::int_lit, line_count, buf });
+                tokens.push_back({ TokenType::int_lit, line_count, column_count, buf });
                 buf.clear();
             }
             else if (peek().value() == '/' && peek(1).has_value() && peek(1).value() == '/') {
@@ -160,47 +161,48 @@ class Tokenizer {
             }
             else if (peek().value() == '(') {
                 consume();
-                tokens.push_back({ TokenType::open_paren, line_count });
+                tokens.push_back({ TokenType::open_paren, line_count, column_count });
             }
             else if (peek().value() == ')') {
                 consume();
-                tokens.push_back({ TokenType::close_paren, line_count });
+                tokens.push_back({ TokenType::close_paren, line_count, column_count });
             }
             else if (peek().value() == ';') {
                 consume();
-                tokens.push_back({ TokenType::semi, line_count });
+                tokens.push_back({ TokenType::semi, line_count, column_count });
             }
             else if (peek().value() == '=') {
                 consume();
-                tokens.push_back({ TokenType::eq, line_count });
+                tokens.push_back({ TokenType::eq, line_count, column_count });
             }
             else if (peek().value() == '+') {
                 consume();
-                tokens.push_back({ TokenType::plus, line_count });
+                tokens.push_back({ TokenType::plus, line_count, column_count });
             }
             else if (peek().value() == '*') {
                 consume();
-                tokens.push_back({ TokenType::star, line_count });
+                tokens.push_back({ TokenType::star, line_count, column_count });
             }
             else if (peek().value() == '-') {
                 consume();
-                tokens.push_back({ TokenType::minus, line_count });
+                tokens.push_back({ TokenType::minus, line_count, column_count });
             }
             else if (peek().value() == '/') {
                 consume();
-                tokens.push_back({ TokenType::fslash, line_count });
+                tokens.push_back({ TokenType::fslash, line_count, column_count });
             }
             else if (peek().value() == '{') {
                 consume();
-                tokens.push_back({ TokenType::open_curly, line_count });
+                tokens.push_back({ TokenType::open_curly, line_count, column_count });
             }
             else if (peek().value() == '}') {
                 consume();
-                tokens.push_back({ TokenType::close_curly, line_count });
+                tokens.push_back({ TokenType::close_curly, line_count, column_count });
             }
             else if (peek().value() == '\n') {
                 consume();
                 line_count++;
+                column_count = 1;
             }
             else if (std::isspace(peek().value())) {
                 consume();
@@ -223,8 +225,11 @@ class Tokenizer {
         return m_src.at(m_index + offset);
    }
 
+    int column_count = 1;
+
     char consume()
     {
+        column_count++;
         return m_src.at(m_index++);
     }
 
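
For reference, the bookkeeping this patch adds boils down to one rule: count every consumed character, and reset the counter when a newline is consumed. Below is a minimal standalone C++ sketch of that rule; the Cursor type, the sample input, and the driver are hypothetical illustrations, not code from src/tokenization.hpp.

// Illustrative sketch only: Cursor is a hypothetical stand-in for the
// column bookkeeping this commit adds, not code from the patch itself.
#include <cstddef>
#include <iostream>
#include <string>

struct Cursor {
    std::string src;
    size_t index = 0;
    int line = 1;
    int col = 1; // column of the next character to be consumed

    bool done() const { return index >= src.size(); }

    char consume()
    {
        const char c = src.at(index++);
        if (c == '\n') {
            line++;
            col = 1; // a newline restarts the column count
        }
        else {
            col++;
        }
        return c;
    }
};

int main()
{
    Cursor cur { "let x = ;" };
    while (!cur.done()) {
        // Capture the position before consuming so the report points at
        // the offending character itself rather than one past it.
        const int line = cur.line;
        const int col = cur.col;
        if (cur.consume() == ';') {
            std::cerr << "[Parse Error] Expected expression on line " << line
                      << " at column " << col << std::endl;
        }
    }
    return 0;
}

Running the sketch prints "[Parse Error] Expected expression on line 1 at column 9". One design note: the sketch reads (line, col) before calling consume(), so the reported column lands on the character itself; the patch instead increments column_count inside consume() and reads it after a token's characters are consumed, which places the reported column just past the token.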