Commit 71c89e2

Ze7111 committed Feb 22, 2024
1 parent 07d5788 commit 71c89e2
Showing 27 changed files with 1,369 additions and 420 deletions.
Binary file modified __pycache__/globals.cpython-312.pyc
2 changes: 1 addition & 1 deletion build_rev.txt
@@ -1,2 +1,2 @@
 0.0.2-alpha.p
-1515
+2199
1 change: 1 addition & 0 deletions classes/Scope.py
@@ -26,6 +26,7 @@ def __init__(self, name: str, namespace_type: str, children: list, indent_level:
         self.namespace_type = namespace_type
         self.separate_file_namespace = False
         self.children = [] if not children else children
+        Scope.internal_name = "Variable Assignment Namespace"
         self.indent_level = indent_level

         self.variables = {}
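Note on the added line: assigning `Scope.internal_name` inside `__init__` sets a class attribute, not an instance attribute, so one shared string is re-assigned on every construction. A minimal sketch of the difference (stub class, for illustration only):

    class Scope:
        def __init__(self) -> None:
            Scope.internal_name = "Variable Assignment Namespace"  # class-level: shared
            self.indent_level = 0                                  # instance-level: per object

    a, b = Scope(), Scope()
    print(a.internal_name is b.internal_name)  # True -- one shared string
    Scope.internal_name = "changed"
    print(a.internal_name)                     # "changed" -- all instances see it

The same pattern appears in the Token.py hunks below (`Token.internal`, `Token_List.name`).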
3 changes: 3 additions & 0 deletions classes/Token.py
@@ -8,6 +8,7 @@ def __init__(self, original_line: str, processed_line: str, line_number: int, in
         self.__processed_line: list[str] | str = processed_line
         self.__line_number: int = line_number
         self.__indent_level: int = indent_level
+        Token.internal: str = "Helix Token"

     # ------------------------------- Getters -------------------------------- #

@@ -30,6 +31,7 @@ def line_number(self) -> int:
     @property
     def indent_level(self) -> int:
         return self.__indent_level
+

     # ------------------------------- Setters -------------------------------- #

@@ -95,6 +97,7 @@ def __init__(self, tokens: list[Token], indent_level: int, file: str):
         self.line = tokens
         self.indent_level = indent_level
         self.file = file
+        Token_List.name = "Raw Token List; Using Token_List"

     def __str__(self):
         return json.dumps({"line_indent_level": self.indent_level, "joined_line": ' '.join([_.token for _ in self.line])})
Binary file modified classes/__pycache__/Scope.cpython-312.pyc
Binary file modified classes/__pycache__/Token.cpython-312.pyc
Binary file modified core/__pycache__/panic.cpython-312.pyc
4 changes: 2 additions & 2 deletions core/panic.py
@@ -131,7 +131,7 @@ def highlight_code(code: str) -> str:
 lock = Lock()

 def panic(__error: ref[Exception], *mark: tuple[Any], file: str = "", line_no: int = 0) -> NoReturn:
-    lock.acquire()
+    lock.acquire(blocking=True)

     lines_to_print: int = 5
     mark = [str(item) for item in mark]

@@ -325,7 +325,7 @@ def process_message(message: str) -> list[str]:

     final_line: str = chars['b-left'] + f" {file}:{line_no} ".center(terminal_width-2, chars['dash']) + chars['b-right']
     final_line = f"{red}{final_line}{reset}"
-    final_line = final_line.split(file)[0] + green + file + reset + gray + ":" + green + str(line_no) + red + final_line.split(":")[1][len(str(line_no)):] + reset
+    final_line = final_line.split(file)[0] + green + file + reset + gray + ":" + green + str(line_no) + red + final_line.split(":")[(1 if ":\\" != ":" + final_line.split(":")[1][0] else 2)][len(str(line_no)):] + reset

     # check if terminal width is even
     print(final_line)
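The `lock.acquire(blocking=True)` change is purely cosmetic: `blocking=True` is already the default for `threading.Lock.acquire`, so both spellings block until the lock is free. A quick sketch:

    from threading import Lock

    lock = Lock()
    lock.acquire()                           # blocks; blocking=True is the default
    lock.release()
    got_it = lock.acquire(blocking=False)    # non-blocking variant returns a bool
    print(got_it)                            # True here, since the lock was free
    lock.release()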
Binary file modified core/token/__pycache__/tokenize_file.cpython-312.pyc
53 changes: 28 additions & 25 deletions core/token/tokenize_file.py
@@ -5,30 +5,33 @@
 from classes.Token import Token, Token_List
 from globals import CACHE, POOL

-def tokenize_file(path: str) -> tuple[Token_List, ...]:
-    """
-    Tokenize a file.
-
-    Args:
-        path (str): The path to the file to tokenize
-
-    Returns:
-        list[list[str]]: The normalized and tokenized file
-    """
-    lines: list[Token] = []
-    if path in CACHE:
-        return CACHE[path]
-
-    lines = tuple(
-        Token(line, "", index+1, 0)
-        for index, line in enumerate(open(path, "r").readlines())
-    )
-
-    #[POOL.append(remove_comment, line) for line in lines]
-    #POOL.execute()
-
-    frozenset(map(remove_comment, lines))
-    frozenset(map(tokenize_line, lines))
-
-    CACHE[path] = normalize_tokens(lines, path)
-    return CACHE[path]
+class Tokenizer:
+    def tokenize_file(path: str) -> tuple[Token_List, ...]:
+        """
+        Tokenize a file.
+
+        Args:
+            path (str): The path to the file to tokenize
+
+        Returns:
+            list[list[str]]: The normalized and tokenized file
+        """
+        Tokenizer._ = "Do not change; License: CC0 1.0 Universal; Changing this line is a violation of the license and the authors terms."
+
+        lines: list[Token] = []
+        if path in CACHE:
+            return CACHE[path]
+
+        lines = tuple(
+            Token(line, "", index+1, 0)
+            for index, line in enumerate(open(path, "r").readlines())
+        )
+        #[POOL.append(remove_comment, line) for line in lines]
+        #POOL.execute()
+
+
+        frozenset(map(remove_comment, lines))
+        frozenset(map(tokenize_line, lines))
+
+        CACHE[path] = normalize_tokens(lines, path)
+        return CACHE[path]
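One non-obvious detail in this function: `map` is lazy in Python 3, so `frozenset(map(remove_comment, lines))` exists only to force the side-effecting calls to run; the resulting set is thrown away. A sketch of why the wrapper is needed (a plain `for` loop would be the more idiomatic spelling):

    def shout(x: int) -> int:
        print("processing", x)
        return x

    m = map(shout, [1, 2, 3])   # nothing printed yet -- map returns a lazy iterator
    frozenset(m)                # consuming the iterator triggers the calls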
Binary file modified functions/__pycache__/_functions.cpython-312.pyc
Binary file modified functions/__pycache__/_include.cpython-312.pyc
2 changes: 1 addition & 1 deletion functions/_functions.py
@@ -231,7 +231,7 @@ def function(ast_list: Token_List, current_scope: Scope, parent_scope: Scope, ro
     output = f"\n{INDENT_CHAR*ast_list.indent_level}{output}"

     if not any([i in not_allowed_classes for i in parent_scope.name]):
-        output = f"\n{INDENT_CHAR*ast_list.indent_level}@hx__multi_method" + output
+        output = (f"\n{INDENT_CHAR*ast_list.indent_level}@hx__multi_method" if not root_scope.get_keyword('ASYNC') in modifiers and not root_scope.get_keyword('UNSAFE') in modifiers else "") + output
     # if the type of parent_sope is an abstract class
     if any([i == root_scope.get_keyword("ABSTRACT") for i in parent_scope.name]):
         output = f"\n{INDENT_CHAR*ast_list.indent_level}@hx__abstract_method" + output
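The rewritten line suppresses `@hx__multi_method` when the function carries the ASYNC or UNSAFE modifier. The diff does not say why, but a plausible reason (an assumption, shown with a stand-in decorator, not the real one) is that a dispatch wrapper returning a plain function would swallow the coroutine:

    def hx__multi_method(fn):            # stand-in, not the real decorator
        def dispatcher(*args, **kwargs):
            return fn(*args, **kwargs)   # real version would pick an overload
        return dispatcher

    async def fetch(url: str) -> str:
        return url

    wrapped = hx__multi_method(fetch)
    print(wrapped("x"))   # a coroutine object, not a result -- the await is lost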
20 changes: 10 additions & 10 deletions functions/_include.py
@@ -62,17 +62,17 @@ def include(ast_list: Token_List, current_scope, parent_scope, root_scope) -> st
     import_statement = ""
     if type == "C":
         if not alias and not modules:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __c_cpp_import__(\"{path}\")\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __import_c__(\"{path}\")\n", ast_list)
         elif alias and modules and len(modules) == 1:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __c_cpp_import__(\"{path}\").{modules[0]}\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __import_c__(\"{path}\").{modules[0]}\n", ast_list)
         elif alias and modules and len(modules) > 1:
             panic(SyntaxError(f"Invalid include statement: {combined_line} cannot have multiple modules and an alias"), file=ast_list.file, line_no=ast_list.line[0].line_number)
         elif alias and not modules:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __c_cpp_import__(\"{path}\")\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __import_c__(\"{path}\")\n", ast_list)
         elif not alias and modules and len(modules) == 1:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __c_cpp_import__(\"{path}\").{modules[0]}\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __import_c__(\"{path}\").{modules[0]}\n", ast_list)
         elif not alias and modules and len(modules) > 1:
-            import_statement = f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __c_cpp_import__(\"{path}\")\n"
+            import_statement = f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __import_c__(\"{path}\")\n"
             for module in modules:
                 import_statement += f"{INDENT_CHAR*ast_list.line[0].indent_level}{module.strip()} = {os_path.splitext(path)[0].strip()}.{module}\n"
             import_statement += f"{INDENT_CHAR*ast_list.line[0].indent_level}del {os_path.splitext(path)[0].strip()}\n"
@@ -81,17 +81,17 @@ def include(ast_list: Token_List, current_scope, parent_scope, root_scope) -> st
             panic(SyntaxError(f"Invalid include statement: {combined_line}"), file=ast_list.file, line_no=ast_list.line[0].line_number)
     elif type == "CPP":
         if not alias and not modules:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __c_cpp_import__(\"{path}\")\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __import_c__(\"{path}\")\n", ast_list)
         elif alias and modules and len(modules) == 1:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __c_cpp_import__(\"{path}\").{modules[0]}\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __import_c__(\"{path}\").{modules[0]}\n", ast_list)
         elif alias and modules and len(modules) > 1:
             panic(SyntaxError(f"Invalid include statement: {combined_line} cannot have multiple modules and an alias"), file=ast_list.file, line_no=ast_list.line[0].line_number)
         elif alias and not modules:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __c_cpp_import__(\"{path}\")\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{alias} = __import_c__(\"{path}\")\n", ast_list)
         elif not alias and modules and len(modules) == 1:
-            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __c_cpp_import__(\"{path}\").{modules[0]}\n", ast_list)
+            return Processed_Line(f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __import_c__(\"{path}\").{modules[0]}\n", ast_list)
         elif not alias and modules and len(modules) > 1:
-            import_statement = f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __c_cpp_import__(\"{path}\")\n"
+            import_statement = f"{INDENT_CHAR*ast_list.line[0].indent_level}{os_path.splitext(path)[0].strip()} = __import_c__(\"{path}\")\n"
             for module in modules:
                 import_statement += f"{INDENT_CHAR*ast_list.line[0].indent_level}{module} = {os_path.splitext(path)[0].strip()}.{module}\n"
             import_statement += f"{INDENT_CHAR*ast_list.line[0].indent_level}del {os_path.splitext(path)[0].strip()}\n"
47 changes: 39 additions & 8 deletions functions/_let.py
@@ -1,6 +1,7 @@
 from classes.Token import Token, Token_List, Processed_Line
 from core.config import load_config
 from core.token.tokenize_line import tokenize_line
+import globals

 INDENT_CHAR = load_config().Formatter["indent_char"]
 re = __import__(load_config().Transpiler["regex_module"])

@@ -69,7 +70,10 @@ def _let(ast_list: Token_List, current_scope, parent_scope, root_scope) -> str:
             panic(SyntaxError("You cannot use the `::` operator in a variable assignment"), token, file=ast_list.file, line_no=ast_list[0].line_number)
         elif token in ("(", "[", "{"):
             in_brackets = True
-            bracket_count += 1
+            try:
+                bracket_count += 1
+            except UnboundLocalError:
+                panic(SyntaxError(f"Unknown Keyword '{ast_list[0].token}'"), ast_list[0].token, file=ast_list.file, line_no=ast_list[0].line_number)
         elif token in (")", "]", "}"):
             bracket_count -= 1
             if bracket_count == 0:
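The new `try/except` works because augmented assignment to a local name that was never bound raises `UnboundLocalError`; the handler repurposes that as an "unknown keyword" diagnostic. Minimal sketch:

    def f() -> None:
        try:
            count += 1               # `count` was never assigned in f
        except UnboundLocalError:
            print("count was never initialized")

    f()   # prints the message instead of crashing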
@@ -146,17 +150,44 @@ def mass_replace(string: str, replacements: dict[str, str]) -> str:
         " ]": "]"
     }

+    override_dispatch_error = True
+    broken_type = False
+
     for name, value in variables.items():
+        null_value = "None" if "?" in value["type"] else ""
+        if null_value:
+            value["type"].remove("?")
+
         current_scope.variables[name] = value["type"].full_line().strip() if not isinstance(value["type"], str) else value["type"]

         value["type"] = (type if type.full_line().strip() else panic(SyntaxError("You must specify the type of the variable"), "=", file=ast_list.file, line_no=ast_list[0].line_number)) if not value["type"] else value["type"]
         value["value"] = (
-            (' '.join([_.token for _ in value["type"].get_all_before("[")]) if "[" in value["type"] else value["type"].full_line())
-            + ("(" + value["value"].full_line() if value["value"].full_line() else "None"
-            if "?" == value["type"][-1] else "DEFAULT_VALUE")
-            + ")" + ((".__set_generic__(\"[" + mass_replace(extract_generics_from_type(value["type"]).full_line().strip(), cleaning) + ']")') if "[" in value["type"] else "")
-        )
+            (
+                ' '.join([_.token for _ in value["type"].get_all_before("[")]) if "[" in value["type"]
+                else value["type"].full_line()
+            )
+            + "(" + (
+                value["value"].full_line() if value["value"]
+                else "None"
+
+                if null_value
+                else null_value
+            )
+            + ")" + (
+                (".__set_generic__(\"[" + mass_replace(extract_generics_from_type(value["type"]).full_line().strip(), cleaning) + ']")')
+                if "[" in value["type"]
+                else ""
+            )
+        ) if value["type"] not in globals.IGNORE_TYPES_MAP else value["value"]
+
+        for values in globals.IGNORE_TYPES_MAP:
+            if values in value["type"]:
+                value["value"] = "Any"
+                broken_type = True

-        value["type"] = mass_replace(value["type"].replace("?", "").full_line().strip(), cleaning)
-        output += f"{INDENT_CHAR*ast_list.indent_level}{name}: {value['type']} = {value['value']}\n"
+        value["type"] = mass_replace(value["type"].full_line().strip(), cleaning) if not broken_type else "Any"
+        output += f"{INDENT_CHAR*(ast_list.indent_level + (1 if override_dispatch_error else 0))}{name}: {value['type']} = {value['value']}\n"
+        if override_dispatch_error:
+            output = f"{INDENT_CHAR*ast_list.indent_level}try:\n{output}{INDENT_CHAR*ast_list.indent_level}except DispatchError:\n{INDENT_CHAR*((ast_list.indent_level+1))}panic(TypeError(f\"Method '{value["type"]}' expects, '{{str(tuple({value["type"]}.__annotations__.values())[-1]).replace('|', 'or')}}', got something else.\"), ':', file=inspect.stack()[0].filename, line_no=inspect.stack()[0].lineno-8)\n"

     return Processed_Line(output, ast_list)
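A portability note on the last added line: it nests `value["type"]` (double quotes) inside an f-string that is itself delimited by double quotes. That is only valid on Python 3.12+ (PEP 701); earlier versions reject it as a SyntaxError. Given the cpython-312 .pyc files elsewhere in this commit that is fine here, but a portable spelling exists:

    value = {"type": "hx_int"}
    # Python 3.12+ only: f"Method '{value["type"]}' expects ..."
    s = f"Method '{value['type']}' expects ..."   # works on any supported version
    print(s)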
10 changes: 8 additions & 2 deletions functions/_unmarked.py
@@ -18,6 +18,9 @@ def _unmarked(ast_list: Token_List, current_scope, parent_scope, root_scope) ->

     output = ""

+    if ast_list.indent_level == 0:
+        panic(SyntaxError("You cannot have code outside of a function"), ast_list[0], file=ast_list.file, line_no=ast_list[0].line_number)
+
     if "=" in ast_list:
         # so something like a = 5 would become a.set(5)
         # and something like a, b = 5, 6 would become a.set(5); b.set(6)

@@ -101,10 +104,13 @@ def _unmarked(ast_list: Token_List, current_scope, parent_scope, root_scope) ->
     if "::" in value:
         static_call = value.get_all_after("::")[0]
         # TODO: add support for static calls
+
     if "self" not in name:
         output += f"{INDENT_CHAR*ast_list.indent_level}try:\n"
         output += f"{INDENT_CHAR*(ast_list.indent_level+1)}{name}.__set__({value.full_line()})\n"
+        try:
+            output += f"{INDENT_CHAR*(ast_list.indent_level+1)}{name}: {current_scope.variables[name.strip()]} = {current_scope.variables[name.strip()]}({value.full_line()})\n"
+        except KeyError:
+            panic(NameError(f"Variable '{name.strip()}' is not defined"), name.strip(), file=ast_list.file, line_no=ast_list[0].line_number)
         output += f"{INDENT_CHAR*ast_list.indent_level}except AttributeError:\n"
         output += f"{INDENT_CHAR*(ast_list.indent_level+1)}{name} = {value.full_line()}\n"
         output += f"{INDENT_CHAR*(ast_list.indent_level+1)}print(\"WARN: \\\"{name}\\\" does not contain the attribute '__set__' falling back to default assignment.\")\n"
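For reference, the Python this now emits for a bare reassignment like `a = 5` (a sketch; `hx_int` is a stand-in for the real runtime type recorded in current_scope.variables):

    class hx_int(int):                  # minimal stand-in for Helix's int wrapper
        def __set__(self, value) -> None:
            pass                        # real version would mutate in place

    a: hx_int = hx_int(0)               # what an earlier `let a: int = 0` produced

    try:
        a.__set__(5)
        a: hx_int = hx_int(5)
    except AttributeError:
        a = 5
        print("WARN: \"a\" does not contain the attribute '__set__' falling back to default assignment.")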
27 changes: 21 additions & 6 deletions globals.py
@@ -27,6 +27,8 @@ def _no_change(line: Token_List, *args) -> str:
 SUB_LINE_BREAK: str = '\x04'

 DOUBLE_CHARACTER: list[str] = [
+    r"===" ,
+    r"!==" ,
     r"==" ,
     r"!=" ,
     r"->" ,

@@ -50,6 +52,7 @@ def _no_change(line: Token_List, *args) -> str:
     r"\.\.\.",
     r"=\=\=",
     r"!\=\=",
+    r"!\=\=",
 ]

 # All Primitive Types

@@ -79,10 +82,9 @@ def _no_change(line: Token_List, *args) -> str:
 # f64;
 # f128;

-DEFAULT_TYPE_MAP: map[str, str] = map({
-    # helix type | python type
-
-})
+IGNORE_TYPES_MAP: tuple[str, ...] = (
+    "Callable",
+)

 EARLY_REPLACEMENTS: map[str, str] = map({ # These are replaced as soon as the tokenization is done (before normalization and transpilation)
     "true" : "True" ,

@@ -92,8 +94,8 @@ def _no_change(line: Token_List, *args) -> str:
     "&&" : "and" ,
     "||" : "or" ,
     "!" : "not" ,
-    "===" : "==" ,
-    "!==" : "!=" ,
+    "===" : "is" ,
+    "!==" : "is not" ,
     "stop" : "break" ,

     "int" : "hx_int" ,

@@ -109,6 +111,19 @@ def _no_change(line: Token_List, *args) -> str:
     "set" : "hx_set" ,
     "unknown" : "hx_unknown" ,

+    "Int" : "hx_int" ,
+    "String" : "hx_string" ,
+    "Float" : "hx_float" ,
+    "Map" : "hx_map" ,
+    "List" : "hx_list" ,
+    "Bool" : "hx_bool" ,
+    "Char" : "hx_char" ,
+    "Void" : "hx_void" ,
+    "Tuple" : "hx_tuple" ,
+    "Array" : "hx_array" ,
+    "Set" : "hx_set" ,
+    "Unknown" : "hx_unknown" ,
+
     "u8" : "hx_u8" ,
     "u16" : "hx_u16" ,
     "u32" : "hx_u32" ,
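The remapping of `===` from `==` to `is` (and `!==` to `is not`) is a semantic change, not a cleanup: `==` compares values while `is` compares object identity. Quick sketch of the difference:

    a = [1, 2, 3]
    b = [1, 2, 3]
    print(a == b)   # True  -- equal values
    print(a is b)   # False -- two distinct objects
    print(a is a)   # True  -- same object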
