Skip to content

Commit

Permalink
Update build_rev.txt and Scope.py, and refactor test.py and globals.py
Browse files Browse the repository at this point in the history
  • Loading branch information
Ze7111 committed Feb 19, 2024
1 parent 45feb6d commit 6c8642b
Show file tree
Hide file tree
Showing 17 changed files with 241 additions and 75 deletions.
Binary file modified __pycache__/globals.cpython-312.pyc
Binary file not shown.
2 changes: 1 addition & 1 deletion build_rev.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
0.0.2-alpha.p
1059
1212
2 changes: 2 additions & 0 deletions classes/Scope.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from dataclasses import dataclass
from threading import Thread
from time import time
from core.cache_store import cache
from core.panic import panic
from classes.Token import Token_List, Token
import globals
Expand Down Expand Up @@ -171,5 +172,6 @@ def process_from_lines(cls, lines: tuple[Token_List]) -> 'Scope':
root_scope.children = [cls.process_scope_from_scope(child) if isinstance(child, cls) else child for child in root_scope.children]
return root_scope

@cache
def get_keyword(self, internal_name: str) -> str:
return next((keyword for keyword in globals.KEYWORDS if globals.KEYWORDS[keyword]["internal_name"] == internal_name), None)
31 changes: 14 additions & 17 deletions classes/Transpiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,25 @@
from classes.Scope import Scope
from classes.Token import Processed_Line, Token_List
from core.config import load_config
from functions._unmarked import _unmarked
import globals


INDENT_CHAR = load_config().Formatter["indent_char"]


class Transpiler:
copied_scope: Scope

root_scope: Scope
current_scope: Scope
parent_scope: Scope
scope_stack: list[Scope] = []

transpiled: list[Processed_Line] = []
add_to_transpiled_queue: list[tuple[str, Scope]] = []

@classmethod
def parse_non_keyword(cls, line: Token_List, *_: Scope) -> str:
return f"{INDENT_CHAR*line.indent_level}{line.full_line()}".replace("::", ".")
return _unmarked(line, cls.scope_stack[-1] if len(cls.scope_stack) >= 1 else cls.root_scope, cls.scope_stack[-2] if len(cls.scope_stack) >= 2 else cls.root_scope, cls.root_scope)

@classmethod
def get_match_function(cls, child: Scope) -> Callable[..., str]:
Expand All @@ -30,20 +33,15 @@ def get_match_function(cls, child: Scope) -> Callable[..., str]:
return cls.parse_non_keyword

@classmethod
def __transpile(cls, child: Scope = None) -> None:
def __transpile(cls, child: Scope | Token_List = None) -> None:
if isinstance(child, Scope):
if child.indent_level == 0:
cls.parent_scope = cls.root_scope
else:
cls.parent_scope = cls.current_scope
cls.current_scope = child

[cls.__transpile(child) for child in child.children]

cls.__add_from_queue(child)
cls.scope_stack.append(child) # push
[cls.__transpile(child) for child in child.children] # recursive call
cls.scope_stack.pop() # pop
cls.__add_from_queue(child) # add any lines that were added to the queue (so at the end of the current scope)

elif isinstance(child, list):
cls.transpiled.append(cls.get_match_function(child)(child, cls.current_scope, cls.parent_scope, cls.root_scope))
elif isinstance(child, Token_List):
cls.transpiled.append(cls.get_match_function(child)(child, cls.scope_stack[-1] if len(cls.scope_stack) >= 1 else cls.root_scope, cls.scope_stack[-2] if len(cls.scope_stack) >= 2 else cls.root_scope, cls.root_scope))

@classmethod
def append_at_end(cls, line: str, current_scope: Scope):
Expand All @@ -60,8 +58,7 @@ def __add_from_queue(cls, child: Scope):
@classmethod
def transpile(cls, root_scope: Scope):
cls.root_scope = root_scope
cls.current_scope = root_scope
cls.parent_scope = root_scope
cls.scope_stack = []

#globals.POOL.map(cls.__transpile, root_scope.children, chunksize=10)
[cls.__transpile(child) for child in root_scope.children]
Expand Down
Binary file modified classes/__pycache__/Scope.cpython-312.pyc
Binary file not shown.
Binary file modified classes/__pycache__/Transpiler.cpython-312.pyc
Binary file not shown.
Binary file modified core/token/__pycache__/tokenize_line.cpython-312.pyc
Binary file not shown.
18 changes: 10 additions & 8 deletions core/token/tokenize_line.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
from functools import cache

import re ##### Keep
from classes.Token import Token

from globals import BLOCK_COMMENT, COMMENT, DOUBLE_CHARACTER, EARLY_REPLACEMENTS

import globals

@cache
def tokenize_line(code) -> list[str]:
def tokenize_line(code: Token | str) -> list[str]:
"""
Tokenize a line of code.
Expand All @@ -16,17 +16,19 @@ def tokenize_line(code) -> list[str]:
Returns:
list[str]: The tokenized code
"""
if isinstance(code, str):
code = Token(None, code, 0, 0)

back_slash = "\\"

pattern: str = rf"""
("[^"\\]*(?:\\.[^"\\]*)*") | # Double quotes strings
('[^'\\]*(?:\\.[^'\\]*)*') | # Single quotes strings
({back_slash.join(COMMENT.split())}[^\n]*) | # Single line comments (~~)
({back_slash.join(BLOCK_COMMENT.split())}[\s\S]*?{back_slash.join(BLOCK_COMMENT.split())}) | # Multi line comments (~*~ ... ~*~)
({back_slash.join(globals.COMMENT.split())}[^\n]*) | # Single line comments (~~)
({back_slash.join(globals.BLOCK_COMMENT.split())}[\s\S]*?{back_slash.join(globals.BLOCK_COMMENT.split())}) | # Multi line comments (~*~ ... ~*~)
(\b\d+\.\d+\b) | # Decimal numbers
(\b\w+\b) | # Words (identifiers, keywords)
({'|'.join(DOUBLE_CHARACTER)}) | # Double character operators
({'|'.join(globals.DOUBLE_CHARACTER)}) | # Double character operators
([\(\){{}};,]) | # Single character delimiters
(\S) | # Catch other characters
"""
Expand All @@ -36,7 +38,7 @@ def tokenize_line(code) -> list[str]:
token
for group in tokens
for token in group
if token and not token.startswith(COMMENT) and not token.startswith(BLOCK_COMMENT) and not token.endswith(BLOCK_COMMENT)
if token and not token.startswith(globals.COMMENT) and not token.startswith(globals.BLOCK_COMMENT) and not token.endswith(globals.BLOCK_COMMENT)
]

code.line = [EARLY_REPLACEMENTS[token] if token in EARLY_REPLACEMENTS else token for token in flattened_tokens] if flattened_tokens else []
code.line = [globals.EARLY_REPLACEMENTS[token] if token in globals.EARLY_REPLACEMENTS else token for token in flattened_tokens] if flattened_tokens else []
Binary file modified functions/__pycache__/_functions.cpython-312.pyc
Binary file not shown.
90 changes: 74 additions & 16 deletions functions/_class.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,26 +9,82 @@
CLASS_EXTENSION = "::"
SEPARATOR_FOR_CLASSES = "+"

def _interface(ast_list: Token_List, current_scope, parent_scope, root_scope, modifiers=None) -> str:
pass

def _struct(ast_list: Token_List, current_scope, parent_scope, root_scope, modifiers=None) -> str:
pass

def _union(ast_list: Token_List, current_scope, parent_scope, root_scope, modifiers=None) -> str:
pass

def _enum(ast_list: Token_List, current_scope, parent_scope, root_scope, modifiers=None) -> str:
pass

def _abstract(ast_list: Token_List, current_scope, parent_scope, root_scope, modifiers=None) -> str:
pass

def _class(ast_list: Token_List, current_scope, parent_scope, root_scope, modifiers=None) -> str:
allowed_modifiers = (
root_scope.get_keyword("ASYNC"),
root_scope.get_keyword("PRIVATE"),
root_scope.get_keyword("PROTECTED"),
root_scope.get_keyword("FINAL"),
root_scope.get_keyword("UNSAFE"),
root_scope.get_keyword("STATIC")
data_structure_types = (
parent_scope.get_keyword("INTERFACE"),
parent_scope.get_keyword("STRUCT"),
parent_scope.get_keyword("UNION"),
parent_scope.get_keyword("ENUM"),
parent_scope.get_keyword("ABSTRACT")
)

class_name: str = ast_list[1].token
class_extends: tuple[Token] = [i for i in ast_list.get_all_after(CLASS_EXTENSION) if i.token != SEPARATOR_FOR_CLASSES and i.token != ":" and i.token != "(" and i.token != ")"] if ast_list[2].token == CLASS_EXTENSION else []
if CLASS_EXTENSION in ast_list:
class_name: str = ast_list.splice(0, len(ast_list.get_all_before(CLASS_EXTENSION))).splice(1).full_line().replace("<", "[").replace(">", "]")
else:
class_name: str = ast_list.splice(0, len(ast_list.get_all_before(":"))).splice(1).full_line().replace("<", "[").replace(">", "]")

class_extends: list[Token] = [i for i in ast_list.get_all_after(CLASS_EXTENSION) if i.token != SEPARATOR_FOR_CLASSES and i.token != ":" and i.token != "(" and i.token != ")"] if ast_list[2].token == CLASS_EXTENSION else []
class_decorators: list[str] = []

if any([i in ast_list for i in data_structure_types]):
if ast_list[0] == parent_scope.get_keyword("INTERFACE"):
class_extends.insert(0, Token(None, "ABC", None, None))
parent_scope.classes["ABC"] = {
"extends": [],
"modifiers": [],
"unsafe": False,
"static": False,
"private": False,
} if "ABC" not in parent_scope.classes else parent_scope.classes["ABC"]
elif ast_list[0] == parent_scope.get_keyword("STRUCT") and (modifiers and parent_scope.get_keyword("FINAL") in modifiers):
class_decorators.append("@dataclass(frozen=True)")
elif ast_list[0] == parent_scope.get_keyword("STRUCT"):
class_decorators.append("@dataclass")
elif ast_list[0] == parent_scope.get_keyword("ENUM"):
class_extends.insert(0, Token(None, "Enum", None, None)) # add generic enum
parent_scope.classes["Enum"] = {
"extends": [],
"modifiers": [],
"unsafe": False,
"static": False,
"private": False,
} if "Enum" not in parent_scope.classes else parent_scope.classes["Enum"]
elif ast_list[0] == parent_scope.get_keyword("ABSTRACT"):
class_extends.append(Token(None, "ABC", None, None))
parent_scope.classes["ABC"] = {
"extends": [],
"modifiers": [],
"unsafe": False,
"static": False,
"private": False,
} if "ABC" not in parent_scope.classes else parent_scope.classes["ABC"]
else:
# if the class has a final modifier, then it cannot be extended
if modifiers and (parent_scope.get_keyword("FINAL") in modifiers and class_extends):
panic(SyntaxError(f"Class '{class_name}' cannot be extended because it is marked as final"), "::", file=ast_list.file, line_no=ast_list[1].line_number)

[panic(SyntaxError(f"Unexpected token '{i.token}' in class extension"), i.token, file=ast_list.file, line_no=ast_list.find_line_number(i.token)) for i in class_extends if i.token in (CLASS_EXTENSION, SEPARATOR_FOR_CLASSES, "(", ")", ",")] if class_extends else None

for i in class_extends:
if i.token not in parent_scope.classes.keys():
panic(NameError(f"Class '{i.token}' not found"), i.token, file=ast_list.file, line_no=ast_list.find_line_number(i.token))

output = f"class {class_name}"
output = f"{INDENT_CHAR*ast_list.indent_level}class {class_name}"
if class_extends:
output += f"({', '.join([i.token for i in class_extends])}, metaclass=multimeta)"
else:
Expand All @@ -38,18 +94,17 @@ def _class(ast_list: Token_List, current_scope, parent_scope, root_scope, modifi
parent_scope.classes[class_name] = {
"extends": class_extends,
"modifiers": modifiers,
"unsafe": False,
"static": False,
"unsafe": False,
"static": False,
"private": False,
"protected": False,
"final": False,
}

if modifiers:
if root_scope.get_keyword("ASYNC") in modifiers:
panic(SyntaxError(f"Classes cannot be async"), file=ast_list.file, line_no=ast_list.find_line_number(root_scope.get_keyword("ASYNC")))
if root_scope.get_keyword("STATIC") in modifiers:
panic(SyntaxError(f"Classes cannot be static"), file=ast_list.file, line_no=ast_list.find_line_number(root_scope.get_keyword("STATIC")))
class_decorators.append("@singleton")

parent_scope.classes[class_name] = {
"extends": class_extends,
root_scope.get_keyword("STATIC"): True if root_scope.get_keyword("STATIC") in modifiers else False,
Expand All @@ -61,7 +116,10 @@ def _class(ast_list: Token_List, current_scope, parent_scope, root_scope, modifi
"extends": class_extends,
"unsafe": False,
"static": False,
"final": False,
"final": False,
}

if class_decorators:
output = "\n" + "\n".join([f"{INDENT_CHAR*ast_list.indent_level}{i}" for i in class_decorators]) + "\n" + output

return Processed_Line(output, ast_list)
20 changes: 19 additions & 1 deletion functions/_for.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,17 @@ def process_init_statement(init_statement: Token_List) -> dict:
current_var_unsafe: bool = False
current_var_discard: bool = True

#TODO: add support for generics in loop declarations IF needed

expecting_type: bool = False
expecting_value: bool = False
excepting_name: bool = False
in_generic: bool = False
generic_count: int = 0

@cache
def extract_variables(index: int, token: Token) -> None:
nonlocal current_var_name, current_var_type, current_var_value, current_var_unsafe, current_var_discard, expecting_type, expecting_value, excepting_name
nonlocal current_var_name, in_generic, generic_count, current_var_type, current_var_value, current_var_unsafe, current_var_discard, expecting_type, expecting_value, excepting_name

if token in [globals.find_keyword("LET"), globals.find_keyword("VAR"), globals.find_keyword("UNSAFE")]:
if expecting_value:
Expand Down Expand Up @@ -96,6 +100,15 @@ def extract_variables(index: int, token: Token) -> None:
return

if expecting_type:
if in_generic:
if token == "<":
generic_count += 1
if token == ">":
generic_count -= 1
if generic_count == 0:
in_generic = False
return

if token == "=":
expecting_value = True
expecting_type = False
Expand All @@ -119,6 +132,11 @@ def extract_variables(index: int, token: Token) -> None:
}
current_var_type = ""
return

if token == "<":
in_generic = True
generic_count += 1
return

current_var_type += token.token + " "
return
Expand Down
34 changes: 19 additions & 15 deletions functions/_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ def contains(line: Token_List, compare: tuple):
# static async fn factorial(n: int) -> int {
def function(ast_list: Token_List, current_scope: Scope, parent_scope: Scope, root_scope: Scope) -> str:
decorators = []
print(parent_scope.name)

if ast_list.line[0].token == "#":
for _ in range(ast_list.count("#")):
decorator = ast_list.get_between("[", "]")
Expand All @@ -147,13 +147,20 @@ def function(ast_list: Token_List, current_scope: Scope, parent_scope: Scope, ro

modifiers = process_modifiers(ast_list, root_scope)


not_allowed_classes = (
parent_scope.get_keyword("CLASS"),
parent_scope.get_keyword("INTERFACE"),
parent_scope.get_keyword("STRUCT"),
parent_scope.get_keyword("UNION"),
parent_scope.get_keyword("ENUM"),
parent_scope.get_keyword("ABSTRACT")
)

if ast_list.line[0].token != root_scope.get_keyword('FUNCTION'):
# TODO: link with classes and other namespaces
if ast_list.line[0].token == root_scope.get_keyword("CLASS"):
return _class(ast_list, current_scope, parent_scope, root_scope, modifiers)
if ast_list.splice(len(modifiers))[0] in not_allowed_classes:
return _class(ast_list.splice(len(modifiers)), current_scope, parent_scope, root_scope, modifiers)

panic(SyntaxError(f"<Hex(02.E3)>: Expected the {root_scope.get_keyword('FUNCTION')} keyword"), file=ast_list.file, line_no=ast_list.find_line_number(root_scope.get_keyword('FUNCTION')))
panic(SyntaxError(f"<Hex(02.E3)>: Expected the {root_scope.get_keyword('FUNCTION')} keyword"), ast_list[1].token, file=ast_list.file, line_no=ast_list[0].line_number)

variables = extract_variables(ast_list, root_scope)
name = ast_list.line[1].token
Expand All @@ -180,17 +187,14 @@ def function(ast_list: Token_List, current_scope: Scope, parent_scope: Scope, ro

output = f"\n{INDENT_CHAR*ast_list.indent_level}{output}"

not_allowed_classes = (
parent_scope.get_keyword("CLASS"),
parent_scope.get_keyword("INTERFACE"),
parent_scope.get_keyword("STRUCT"),
parent_scope.get_keyword("UNION"),
parent_scope.get_keyword("ENUM"),
parent_scope.get_keyword("ABSTRACT")
)

if not any([i in not_allowed_classes for i in parent_scope.name]):
output = f"\n{INDENT_CHAR*ast_list.indent_level}@__internal__multi_method" + output
# if the type of parent_scope is an abstract class
if any([i == root_scope.get_keyword("ABSTRACT") for i in parent_scope.name]):
output = f"\n{INDENT_CHAR*ast_list.indent_level}@__internal__abstract_method" + output
# if the type of parent_scope is an interface
if any([i == root_scope.get_keyword("INTERFACE") for i in parent_scope.name]):
output = f"\n{INDENT_CHAR*ast_list.indent_level}@__internal__abstract_method" + output

static: bool = False
async_: bool = False
Expand Down
Empty file added functions/_let.py
Empty file.
Loading

0 comments on commit 6c8642b

Please sign in to comment.