diff --git a/source/main.cc b/source/main.cc index eb92195..5a95b25 100644 --- a/source/main.cc +++ b/source/main.cc @@ -45,6 +45,13 @@ int main() { // print the preprocessed tokens print_tokens(tokens); + for (auto &tok_ref : tokens) { + if (tok_ref->token_kind() == tokens::KEYWORD_FUNCTION) { + tok_ref.peek(); + std::cout << tok_ref.peek()->get().value() << "\n"; + } + } + // Print the time taken in nanoseconds and milliseconds print("time taken: ", diff.count() * 1e+9, " ns"); print(" ", diff.count() * 1000, " ms"); diff --git a/source/parser/ast/include/context.hh b/source/parser/ast/include/context.hh index fc260f5..d69571c 100644 --- a/source/parser/ast/include/context.hh +++ b/source/parser/ast/include/context.hh @@ -14,6 +14,9 @@ #ifndef __CONTEXT_HH__ #define __CONTEXT_HH__ +#include +#include "parser/ast/include/nodes.hh" + namespace ast { class ASTContext { diff --git a/source/parser/ast/include/nodes.hh b/source/parser/ast/include/nodes.hh index a098d56..f1d4fc0 100644 --- a/source/parser/ast/include/nodes.hh +++ b/source/parser/ast/include/nodes.hh @@ -72,7 +72,7 @@ struct Variable : AstNode { // a: int<8> | a: i32 = 5 | a: i32? = nul AstNodePtr value; // 5 | null bool is_nullable; Token reference; // & | && - Token pointer; // * | * + Token pointer; // * explicit Variable(AstNodeRef name, AstNodeRef type, AstNodePtr value, bool is_nullable = false, Token reference = Token(), Token pointer = Token()); diff --git a/source/parser/ast/include/parser.hh b/source/parser/ast/include/parser.hh index 12a2e52..05c6ef7 100644 --- a/source/parser/ast/include/parser.hh +++ b/source/parser/ast/include/parser.hh @@ -123,7 +123,7 @@ class Parser { std::unique_ptr parse() { token::tokens current_token_type{}; - for (auto it = tokens.begin(); it != tokens.end();) { + for (auto it = tokens.cbegin(); it != tokens.cend();) { const token::Token ¤t_token = *it; current_token_type = current_token.token_kind(); diff --git a/source/parser/preprocessor/include/preprocessor.hh b/source/parser/preprocessor/include/preprocessor.hh index 893b7f0..336100c 100644 --- a/source/parser/preprocessor/include/preprocessor.hh +++ b/source/parser/preprocessor/include/preprocessor.hh @@ -128,8 +128,8 @@ struct import_helix { std::string get_module() { std::string mod; - for (auto const &tok : module) { - mod += tok.value(); + for (auto &tok : module) { + mod += tok->value(); } return mod; @@ -138,8 +138,8 @@ struct import_helix { std::string get_namespace() { std::string name_space; - for (auto const &tok : relative) { - name_space += tok.value(); + for (auto &tok : relative) { + name_space += tok->value(); } return name_space; diff --git a/source/parser/preprocessor/source/preprocessor.cc b/source/parser/preprocessor/source/preprocessor.cc index 7f25bc2..a563fd3 100644 --- a/source/parser/preprocessor/source/preprocessor.cc +++ b/source/parser/preprocessor/source/preprocessor.cc @@ -125,7 +125,7 @@ bool is_circular_import(const std::shared_ptr &node) { std::shared_ptr current = node; while (current != nullptr) { - if (visited.find(current->module_name) != visited.end()) { + if (visited.find(current->module_name) != visited.cend()) { return true; // Circular import detected } visited.insert(current->module_name); @@ -222,16 +222,16 @@ Preprocessor::parse_import(std::unique_ptr &import_tre parsed_source = Preprocessor(parsed_source).parse(current_node); parsed_source.pop_back(); - parsed_source.insert(parsed_source.begin(), + parsed_source.insert(parsed_source.cbegin(), Token(tokens::PUNCTUATION_OPEN_BRACE, 
string_import_path)); - for (auto const &tok : std::ranges::reverse_view(namespace_name)) { - parsed_source.insert(parsed_source.begin(), tok); + for (auto it = namespace_name.rbegin(); it != namespace_name.rend(); ++it) { + parsed_source.insert(parsed_source.cbegin(), *it); } - parsed_source.insert(parsed_source.begin(), + parsed_source.insert(parsed_source.cbegin(), Token(tokens::KEYWORD_NAMESPACE, string_import_path)); - parsed_source.insert(parsed_source.end(), + parsed_source.insert(parsed_source.cend(), Token(tokens::PUNCTUATION_CLOSE_BRACE, string_import_path)); complete_import = { diff --git a/source/token/include/token.hh b/source/token/include/token.hh index d2f17ca..48650f0 100644 --- a/source/token/include/token.hh +++ b/source/token/include/token.hh @@ -14,14 +14,17 @@ #ifndef __TOKEN_HH__ #define __TOKEN_HH__ -#include #include +#include +#include #include #include #include -#include #include +#include "include/inttypes.hh" +#include "token/include/generate.hh" + namespace token { /** @@ -39,145 +42,34 @@ struct Token { mutable std::shared_mutex mtx; ///< Mutex for thread safety public: - /** - * @brief Constructs a Token with the specified attributes. - * - * @param line Line number where the token is located. - * @param column Column number where the token starts. - * @param length Length of the token. - * @param offset Offset from the beginning of the file. - * @param value String value of the token. - * @param token_kind Optional kind of the token as a string view. - */ Token(u64 line, u64 column, u64 length, u64 offset, std::string_view value, const std::string &filename, std::string_view token_kind = ""); - - /** - * @brief Copy constructor. - * - * @param other Token to copy from. - */ Token(const Token &other); - - /** - * @brief Copy assignment operator. - * - * @param other Token to copy from. - * @return Reference to the current object. - */ Token &operator=(const Token &other); - - /** - * @brief Move constructor. - * - * @param other Token to move from. - */ Token(Token &&other) noexcept; - - /** - * @brief Move assignment operator. - * - * @param other Token to move from. - * @return Reference to the current object. - */ Token &operator=(Token &&other) noexcept; - - /** - * @brief Default constructor that initializes the token to whitespace. - */ Token(); explicit Token(tokens token_type, const std::string &filename, std::string value = ""); - - /** - * @brief Destructor. - */ ~Token(); /* ====-------------------------- getters ---------------------------==== */ - - /** - * @brief Gets the line number where the token is located. - * - * @return Line number. - */ u64 line_number() const; - - /** - * @brief Gets the column number where the token starts. - * - * @return Column number. - */ u64 column_number() const; - - /** - * @brief Gets the length of the token. - * - * @return Length of the token. - */ u64 length() const; - - /** - * @brief Gets the offset from the beginning of the file. - * - * @return Offset from the beginning of the file. - */ u64 offset() const; - - /** - * @brief Gets the kind of the token. - * - * @return Kind of the token. - */ tokens token_kind() const; - - /** - * @brief Gets the string value of the token. - * - * @return String value of the token. - */ std::string value() const; - - /** - * @brief Gets the kind of the token as a string view. - * - * @return Kind of the token as a string view. - */ std::string_view token_kind_repr() const; - - /** - * @brief Gets the filename of the token. - * - * @return Filename. 
- */ std::string_view file_name() const; - // as string and ostream - - std::string to_string() const { - return std::string("Token(") + std::string("line: ") + std::to_string(line) + - std::string(", column: ") + std::to_string(column) + std::string(", len: ") + - std::to_string(len) + std::string(", offset: ") + std::to_string(_offset) + - std::string(", kind: ") + std::string(token_kind_repr()) + std::string(", val: ") + - std::string(val) + ")"; - } - - std::ostream &operator<<(std::ostream &os) const { return os << to_string(); } + std::string to_string() const; + bool operator==(const Token &rhs) const; + std::ostream &operator<<(std::ostream &os) const; /* ====-------------------------- setters ---------------------------==== */ - /** - * @brief Sets the filename of where the token belongs. - * - * @param value String file name. - */ void set_file_name(const std::string &file_name); - - bool operator==(const Token &rhs) const { - return (line == rhs.line && column == rhs.column && len == rhs.len && - _offset == rhs._offset && kind == rhs.kind && val == rhs.val && - filename == rhs.filename); - } }; /** @@ -189,129 +81,70 @@ class TokenList : public std::vector { TokenList(std::string filename, std::vector::const_iterator start, std::vector::const_iterator end); + class TokenListIter { + private: + std::reference_wrapper tokens; + u64 cursor_position; + u64 end; + + public: + explicit TokenListIter(TokenList &token_list, u64 pos = 0) + : tokens(token_list) + , cursor_position(pos) + , end(token_list.size() - 1) {} + + bool operator!=(const TokenListIter &other) const; + bool operator==(const TokenListIter &other) const; + std::unique_ptr operator->(); // TODO: change if a shared ptr is needed + TokenListIter &operator*(); + std::reference_wrapper operator--(); + std::reference_wrapper operator++(); + std::reference_wrapper advance(int n = 1); + std::reference_wrapper reverse(int n = 1); + std::optional> peek(int n = 1); + std::optional> peek_back(int n = 1); + std::reference_wrapper current(); + }; + public: using std::vector::vector; // Inherit constructors using const_iterator = std::vector::const_iterator; mutable const_iterator it; - + std::optional yes(); TokenList() = default; - - /** - * @brief Constructs a TokenList with the specified filename. - * - * @param filename Name of the file. - */ explicit TokenList(std::string filename); - - /** - * @brief Gets the next token in the list. - * - * @return Next token. - */ Token next(u32 n = 1) const; - - /** - * @brief Peeks at the current token in the list without advancing the iterator. - * - * @return Current token. - */ [[nodiscard]] Token peek(u32 n = 1) const; - - /** - * @brief Gets the current token in the list. - * - * @return Current token. - */ [[nodiscard]] Token current() const; + TokenListIter begin() { return TokenListIter(*this); } + TokenListIter end() { return TokenListIter(*this, this->size() - 1); } - /** - * @brief Gets a constant iterator to the beginning of the token list. - * - * @return Constant iterator to the beginning. - */ - [[nodiscard]] std::vector::const_iterator begin() const { + [[nodiscard]] std::vector::const_iterator cbegin() const { return std::vector::begin(); } + [[nodiscard]] std::vector::const_iterator cend() const { + return std::vector::end(); + } - /** - * @brief Gets a constant iterator to the end of the token list. - * - * @return Constant iterator to the end. 
- */ + [[nodiscard]] std::vector::const_iterator begin() const { + return std::vector::begin(); + } [[nodiscard]] std::vector::const_iterator end() const { return std::vector::end(); } - /** - * @brief Gets the previous token in the list. - * - * @return Previous token. - */ [[nodiscard]] Token previous(u32 n = 1) const; - - /** - * @brief Removes tokens from the beginning of the list up to the current iterator position. - */ void remove_left(); - - /** - * @brief Resets the iterator to the beginning of the token list. - */ void reset(); - - /** - * @brief Appends a token to the end of the token list. - * - * @param token Token to append. - */ void append(const Token &token); - - /** - * @brief Slices the token list from the start index to the end index. - * - * @param start Start index. - * @param end End index. - * @return Sliced token list. - */ TokenList slice(u64 start, u64 end); [[nodiscard]] bool reached_end() const; - - /** - * @brief Gets the filename of the token list. - * - * @return Filename. - */ [[nodiscard]] std::string file_name() const; - - /** - * @brief Replaces tokens in the list from start to end with the provided tokens. - * - * This function removes tokens from the specified start index up to, but not including, - * the end index, and then inserts the tokens from the provided TokenList at the start index. - * - * @param tokens TokenList to insert. - * @param start Start index of the range to remove. - * @param end End index of the range to remove. - */ void insert_remove(TokenList &tokens, u64 start, u64 end); - bool operator==(const TokenList &rhs) const { - // First, compare sizes - if (size() != rhs.size()) { - return false; - } - - // Then, compare each element - for (size_t i = 0; i < size(); ++i) { - if (at(i) != rhs.at(i)) { // Assuming Token has operator== - return false; - } - } - - return true; - } + bool operator==(const TokenList &rhs) const; }; /** diff --git a/source/token/source/token.cc b/source/token/source/token.cc index 1a3040a..bad617b 100644 --- a/source/token/source/token.cc +++ b/source/token/source/token.cc @@ -12,11 +12,10 @@ * https://helix-lang.com/ for more information. 
*/ -#include -#include +#include "token/include/token.hh" + #include #include -#include #include "token/include/generate.hh" @@ -54,7 +53,7 @@ Token::Token() Token::Token(tokens token_type, const std::string &filename, std::string value) : kind(token_type) , filename(filename) { - + if (value.empty()) { value = std::string(tokens_map.at(token_type).value()); } @@ -64,15 +63,14 @@ Token::Token(tokens token_type, const std::string &filename, std::string value) } // Copy Constructor -Token::Token(const Token &other) { - line = other.line; - column = other.column; - len = other.len; - _offset = other._offset; - kind = other.kind; - val = other.val; - filename = other.filename; -} +Token::Token(const Token &other) + : line(other.line) + , column(other.column) + , len(other.len) + , _offset(other._offset) + , kind(other.kind) + , val(other.val) + , filename(other.filename) {} // Copy Assignment Operator Token &Token::operator=(const Token &other) { @@ -102,7 +100,7 @@ Token::Token(Token &&other) noexcept , _offset(other._offset) , kind(other.kind) , val(std::move(other.val)) - , filename(other.filename) {} + , filename(std::move(other.filename)) {} // Move Assignment Operator Token &Token::operator=(Token &&other) noexcept { @@ -140,140 +138,18 @@ std::string_view Token::file_name() const { return filename; } void Token::set_file_name(const std::string &file_name) { this->filename = std::string(file_name); } -TokenList::TokenList(std::string filename) - : filename(std::move(filename)) - , it(this->begin()) {} - -TokenList::TokenList(std::string filename, std::vector::const_iterator start, - std::vector::const_iterator end) - : std::vector(start, end) - , filename(std::move(filename)) {} - -Token TokenList::next(u32 n) const { - if (it == this->end()) { - return {}; - } - - if (it + n >= this->end()) { - return *(this->end() - 1); - } - - return *(it + n++); -} - -Token TokenList::peek(u32 n) const { - if (it == this->end()) { - return {}; - } - - if (it + n >= this->end()) { - return *(this->end() - 1); - } - - return *(it + n); -} - -Token TokenList::current() const { - if (it == this->begin()) { - return {}; - } - return *(it - 1); -} - -Token TokenList::previous(u32 n) const { - if (it == this->begin()) { - return {}; - } - - if (it - n < this->begin()) { - return *(this->begin()); - } - - return *(it - n); -} - -void TokenList::remove_left() { - this->erase(this->begin(), it); - it = this->begin(); -} - -void TokenList::reset() { it = this->begin(); } - -void TokenList::append(const Token &token) { this->push_back(token); } - -std::string TokenList::file_name() const { return filename; } - -TokenList TokenList::slice(u64 start, u64 end) { - if (end > this->size()) { - end = this->size(); - } - - auto start_index = static_cast::difference_type>(start); - auto end_index = static_cast::difference_type>(end); - - return {this->filename, this->begin() + start_index, this->begin() + end_index}; +std::string Token::to_string() const { + return std::string("Token(") + std::string("line: ") + std::to_string(line) + + std::string(", column: ") + std::to_string(column) + std::string(", len: ") + + std::to_string(len) + std::string(", offset: ") + std::to_string(_offset) + + std::string(", kind: ") + std::string(token_kind_repr()) + std::string(", val: ") + + std::string(val) + ")"; } -/** - * @brief Replaces tokens in the list from start to end with the provided tokens. 
- * - * This function removes tokens from the specified start index up to, but not including, - * the end index, and then inserts the tokens from the provided TokenList at the start index. - * - * @param tokens TokenList to insert. - * @param start Start index of the range to remove. - * @param end End index of the range to remove. - */ -void TokenList::insert_remove(TokenList &tokens, u64 start, u64 end) { - if (start > end || end > this->size()) { - throw std::out_of_range("Invalid start or end index"); - } - - auto start_it = this->begin() + static_cast::difference_type>(start); - auto end_it = this->begin() + static_cast::difference_type>(end); - - this->erase(start_it, end_it); - this->insert(start_it, tokens.begin(), tokens.end()); -} - -bool TokenList::reached_end() const { return it == this->end(); } - -void print_tokens(token::TokenList &tokens) { - u16 indent = 0; - - for (const auto &tok : tokens) { - if (tok.value() == "{") { - indent++; - } else if (tok.value() == "}") { - indent--; - } - if (tok.token_kind() == token::tokens::PUNCTUATION_SEMICOLON || - tok.token_kind() == token::tokens::PUNCTUATION_OPEN_BRACE || - tok.token_kind() == token::tokens::PUNCTUATION_CLOSE_BRACE || - tok.token_kind() == token::tokens::PUNCTUATION_SINGLE_LINE_COMMENT) { - if (tok.token_kind() != token::tokens::PUNCTUATION_CLOSE_BRACE) { - std::cout << "(" << colors::fg16::red - << token::tokens_map.at(tok.token_kind()).value() << colors::reset << ", " - << colors::fg16::green << tok.value() << colors::reset << ") "; - } - - std::cout << "\n"; - std::cout << std::string(static_cast(indent * 4), ' '); - - if (tok.token_kind() == token::tokens::PUNCTUATION_CLOSE_BRACE) { - std::cout << "(" << colors::fg16::red - << token::tokens_map.at(tok.token_kind()).value() << colors::reset << ", " - << colors::fg16::green << tok.value() << colors::reset << ") "; - } - - continue; - } - std::cout << "(" << colors::fg16::red << token::tokens_map.at(tok.token_kind()).value() - << colors::reset << ", " << colors::fg16::green << tok.value() << colors::reset - << ") "; - } - - std::cout << "\n"; +bool Token::operator==(const Token &rhs) const { + return (line == rhs.line && column == rhs.column && len == rhs.len && _offset == rhs._offset && + kind == rhs.kind && val == rhs.val && filename == rhs.filename); } +std::ostream &Token::operator<<(std::ostream &os) const { return os << to_string(); } } // namespace token diff --git a/source/token/source/token_list.cc b/source/token/source/token_list.cc new file mode 100644 index 0000000..e309d94 --- /dev/null +++ b/source/token/source/token_list.cc @@ -0,0 +1,251 @@ +/** + * @author Dhruvan Kartik + * @copyright Copyright (c) 2024 (CC BY 4.0) + * + * @note This code is part of the Helix Language Project and is licensed under the Attribution 4.0 + * International license (CC BY 4.0). You are allowed to use, modify, redistribute, and create + * derivative works, even for commercial purposes, provided that you give appropriate credit, + * provide a link to the license, and indicate if changes were made. For more information, please + * visit: https://creativecommons.org/licenses/by/4.0/ SPDX-License-Identifier: CC-BY-4.0 + * + * @note This code is provided by the creators of Helix. Visit our website at: + * https://helix-lang.com/ for more information. 
+ */ + +#include +#include + +#include "include/colors_ansi.hh" +#include "token/include/generate.hh" +#include "token/include/token.hh" + +namespace token { +TokenList::TokenList(std::string filename) + : filename(std::move(filename)) + , it(this->cbegin()) {} + +TokenList::TokenList(std::string filename, std::vector::const_iterator start, + std::vector::const_iterator end) + : std::vector(start, end) + , filename(std::move(filename)) {} + +Token TokenList::next(u32 n) const { + if (it == this->cend()) { + return {}; + } + + if (it + n >= this->cend()) { + return *(this->cend() - 1); + } + + return *(it + n++); +} + +Token TokenList::peek(u32 n) const { + if (it == this->cend()) { + return {}; + } + + if (it + n >= this->cend()) { + return *(this->cend() - 1); + } + + return *(it + n); +} + +Token TokenList::current() const { + if (it == this->cbegin()) { + return {}; + } + return *(it - 1); +} + +Token TokenList::previous(u32 n) const { + if (it == this->cbegin()) { + return {}; + } + + if (it - n < this->cbegin()) { + return *(this->cbegin()); + } + + return *(it - n); +} + +void TokenList::remove_left() { + this->erase(this->cbegin(), it); + it = this->cbegin(); +} + +void TokenList::reset() { it = this->cbegin(); } + +void TokenList::append(const Token &token) { this->push_back(token); } + +std::string TokenList::file_name() const { return filename; } + +TokenList TokenList::slice(u64 start, u64 end) { + if (end > this->size()) { + end = this->size(); + } + + auto start_index = static_cast::difference_type>(start); + auto end_index = static_cast::difference_type>(end); + + return {this->filename, this->cbegin() + start_index, this->cbegin() + end_index}; +} + +/** + * @brief Replaces tokens in the list from start to end with the provided tokens. + * + * This function removes tokens from the specified start index up to, but not including, + * the end index, and then inserts the tokens from the provided TokenList at the start index. + * + * @param tokens TokenList to insert. + * @param start Start index of the range to remove. + * @param end End index of the range to remove. 
+ */ +void TokenList::insert_remove(TokenList &tokens, u64 start, u64 end) { + if (start > end || end > this->size()) { + throw std::out_of_range("Invalid start or end index"); + } + + auto start_it = this->cbegin() + static_cast::difference_type>(start); + auto end_it = this->cbegin() + static_cast::difference_type>(end); + + this->erase(start_it, end_it); + this->insert(start_it, tokens.cbegin(), tokens.cend()); +} + +bool TokenList::reached_end() const { return it == this->cend(); } + +void print_tokens(token::TokenList &tokens) { + u16 indent = 0; + + for (auto &tok : tokens) { + if (tok->value() == "{") { + indent++; + } else if (tok->value() == "}") { + indent--; + } + if (tok->token_kind() == token::tokens::PUNCTUATION_SEMICOLON || + tok->token_kind() == token::tokens::PUNCTUATION_OPEN_BRACE || + tok->token_kind() == token::tokens::PUNCTUATION_CLOSE_BRACE || + tok->token_kind() == token::tokens::PUNCTUATION_SINGLE_LINE_COMMENT) { + if (tok->token_kind() != token::tokens::PUNCTUATION_CLOSE_BRACE) { + std::cout << "(" << colors::fg16::red + << token::tokens_map.at(tok->token_kind()).value() << colors::reset + << ", " << colors::fg16::green << tok->value() << colors::reset << ") "; + } + + std::cout << "\n"; + std::cout << std::string(static_cast(indent * 4), ' '); + + if (tok->token_kind() == token::tokens::PUNCTUATION_CLOSE_BRACE) { + std::cout << "(" << colors::fg16::red + << token::tokens_map.at(tok->token_kind()).value() << colors::reset + << ", " << colors::fg16::green << tok->value() << colors::reset << ") "; + } + + continue; + } + std::cout << "(" << colors::fg16::red << token::tokens_map.at(tok->token_kind()).value() + << colors::reset << ", " << colors::fg16::green << tok->value() << colors::reset + << ") "; + } + + std::cout << "\n"; +} + +bool TokenList::operator==(const TokenList &rhs) const { + // First, compare sizes + if (size() != rhs.size()) { + return false; + } + + // Then, compare each element + for (size_t i = 0; i < size(); ++i) { + if (at(i) != rhs.at(i)) { // Assuming Token has operator== + return false; + } + } + + return true; +} + +bool TokenList::TokenListIter::operator!=(const TokenListIter &other) const { + return cursor_position != other.cursor_position; +} + +bool TokenList::TokenListIter::operator==(const TokenListIter &other) const { + return cursor_position == other.cursor_position; +} + +std::unique_ptr TokenList::TokenListIter::operator->() { + return std::make_unique(tokens.get()[cursor_position]); +} // TODO: change if a shared ptr is needed + +TokenList::TokenListIter &TokenList::TokenListIter::operator*() { return *this; } + +std::reference_wrapper TokenList::TokenListIter::operator--() { + if (cursor_position > 0) { // cursor_position is unsigned, so compare against 0 directly + --cursor_position; + return *this; + } + + throw std::out_of_range("access to token in token list is out of bounds"); +} + +std::reference_wrapper TokenList::TokenListIter::operator++() { + if ((cursor_position + 1) <= end) { + ++cursor_position; + return *this; + } + + throw std::out_of_range("access to token in token list is out of bounds"); +} + +std::reference_wrapper TokenList::TokenListIter::advance(int n) { + if ((cursor_position + n) <= end) { + ++cursor_position; + } + + if (n > 1) { + return advance(n - 1); + } + + return tokens.get()[cursor_position]; +} + +std::reference_wrapper TokenList::TokenListIter::reverse(int n) { + if (cursor_position >= static_cast<u64>(n)) { // avoid unsigned wrap-around + --cursor_position; + } + + if (n > 1) { + return reverse(n - 1); // recurse on reverse, not advance + } + + return tokens.get()[cursor_position]; +} + +std::optional>
TokenList::TokenListIter::peek(int n) { + if ((cursor_position + n) <= end) { + return tokens.get()[cursor_position + n]; + } + + return std::nullopt; +} + +std::optional> TokenList::TokenListIter::peek_back(int n) { + if (cursor_position >= static_cast<u64>(n)) { // avoid unsigned wrap-around + return tokens.get()[cursor_position - n]; + } + + return std::nullopt; +} + +std::reference_wrapper TokenList::TokenListIter::current() { + return tokens.get()[cursor_position]; +} + +} // namespace token \ No newline at end of file diff --git a/tests/main.hlx b/tests/main.hlx index b65c283..909f767 100644 --- a/tests/main.hlx +++ b/tests/main.hlx @@ -42,6 +42,10 @@ import test_imports::test; it allows for users to write clea and readable comments. ") ] -fn main() { +fn main() -> int? { // becomes std::optional + print("hey there", "hello"); +} + +fn main2() -> int { print("hey there", "hello"); } \ No newline at end of file diff --git a/xmake.lua b/xmake.lua index e060310..761e26e 100644 --- a/xmake.lua +++ b/xmake.lua @@ -9,7 +9,7 @@ target("helix") add_headerfiles("source/**.hh") -- add all headers in the source directory add_includedirs("source") - set_languages("c++latest") -- set the standard C++ version to C++23 + set_languages("c++2b") -- set the standard C++ version to C++23 if is_mode("debug") then set_symbols("debug") -- Generate debug symbols
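
For reviewers, below is a minimal sketch of how the cursor-style TokenList::TokenListIter introduced in this change is meant to be consumed, mirroring the new loop in source/main.cc. It relies only on interfaces added in this diff: begin()/end() yielding TokenListIter, operator-> handing back a freshly allocated copy of the current Token, and peek() returning an optional reference to the next token (taken here to be std::optional<std::reference_wrapper<Token>>, std::nullopt at the end of the list). The function name inspect_functions is illustrative and not part of the change.

#include <iostream>

#include "token/include/token.hh"  // project header providing token::TokenList and TokenListIter

// Print the token that follows each `fn` keyword (typically the function name).
void inspect_functions(token::TokenList &tokens) {
    for (auto &tok : tokens) {  // tok is a TokenListIter, not a Token
        if (tok->token_kind() == token::tokens::KEYWORD_FUNCTION) {
            // peek() looks ahead without moving the cursor; it returns
            // std::nullopt instead of throwing once the list is exhausted.
            if (auto next = tok.peek()) {
                std::cout << next->get().value() << "\n";
            }
        }
    }
}

Note that operator-> copies the current token into a std::unique_ptr on every call, so tok->token_kind() is convenient but not free; current() returns a reference wrapper when no copy is needed.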