more work, frontend compiler is kinda hooked-up
aljazerzen committed May 8, 2024
1 parent 7552383 commit 46e44e5
Showing 6 changed files with 299 additions and 153 deletions.
8 changes: 8 additions & 0 deletions edb/errors/base.py
@@ -181,6 +181,14 @@ def line(self):
def col(self):
return int(self._attrs.get(FIELD_COLUMN_START, -1))

@property
def line_end(self):
return int(self._attrs.get(FIELD_LINE_END, -1))

@property
def col_end(self):
return int(self._attrs.get(FIELD_COLUMN_END, -1))

@property
def position(self):
return int(self._attrs.get(FIELD_POSITION_START, -1))
2 changes: 1 addition & 1 deletion edb/tools/edb.py
@@ -76,6 +76,6 @@ def server(version=False, **kwargs):
from . import gen_rust_ast # noqa
from . import ast_inheritance_graph # noqa
from . import parser_demo # noqa
-from . import lsp # noqa
+from . import language_server # noqa
from .profiling import cli as prof_cli # noqa
from .experimental_interpreter import edb_entry # noqa
287 changes: 287 additions & 0 deletions edb/tools/language_server.py
@@ -0,0 +1,287 @@
from typing import Any, List, Tuple, Optional
import dataclasses
import pathlib
import os

import click
from pygls.server import LanguageServer
from pygls.workspace import TextDocument
from pygls import uris as pygls_uris
from lsprotocol import types as lsp_types


from edb.tools.edb import edbcommands

from edb import errors
from edb.edgeql import ast as qlast
from edb.edgeql import tokenizer
from edb.edgeql import parser as qlparser
from edb.edgeql import compiler as qlcompiler
from edb.edgeql.parser.grammar import tokens as qltokens
from edb.schema import schema as s_schema
from edb.schema import std as s_std
from edb.schema import ddl as s_ddl
import edb._edgeql_parser as rust_parser


@dataclasses.dataclass(kw_only=True, slots=True)
class State:
schema: Optional[s_schema.Schema]


class EdgeDBLanguageServer(LanguageServer):
state: State

def __init__(self):
super().__init__('EdgeDB Language Server', 'v0.1')
self.state = State(schema=None)


@edbcommands.command("language-server")
@click.option('--stdio', default=False, is_flag=True)
def main(stdio: bool):
ls = EdgeDBLanguageServer()

@ls.feature(
lsp_types.INITIALIZE,
)
def init(_params: lsp_types.InitializeParams):
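        # Preload the EdgeQL parser spec during initialization so later
        # parses don't pay the startup cost.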
ls.show_message_log('Starting')
qlparser.preload_spec()
ls.show_message_log('Started')

@ls.feature(lsp_types.TEXT_DOCUMENT_DID_OPEN)
def text_document_did_open(params: lsp_types.DidOpenTextDocumentParams):
ls.show_message_log(f'did open: {params.text_document.uri}')

document = ls.workspace.get_text_document(params.text_document.uri)
ql_ast = parse_and_report_diagnostics(document, ls)

schema = get_schema(ls)
ls.show_message_log(f'schema: {schema}')

if isinstance(ql_ast, list):
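            # Compile each top-level statement to IR against the workspace
            # schema; compiler errors are reported back as diagnostics.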
for ql_stmt in ql_ast:

try:
ir_stmt = qlcompiler.compile_ast_to_ir(ql_stmt, schema)
ls.show_message_log(f'IR: {ir_stmt.dump()}')
except errors.EdgeDBError as error:
diagnostics = []
diagnostics.append(
lsp_types.Diagnostic(
range=lsp_types.Range(
start=lsp_types.Position(
line=error.line - 1,
character=error.col - 1,
),
end=lsp_types.Position(
line=error.line_end - 1,
character=error.col_end - 1,
),
),
severity=lsp_types.DiagnosticSeverity.Error,
message=error.args[0],
)
)
ls.publish_diagnostics(
document.uri, diagnostics, document.version
)

@ls.feature(lsp_types.TEXT_DOCUMENT_DID_CHANGE)
def text_document_did_change(params: lsp_types.DidChangeTextDocumentParams):
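        # Re-parse on every edit and refresh the document's diagnostics.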
ls.show_message_log(f'did change: {params.text_document.uri}')

document = ls.workspace.get_text_document(params.text_document.uri)
parse_and_report_diagnostics(document, ls)

@ls.feature(
lsp_types.TEXT_DOCUMENT_COMPLETION,
lsp_types.CompletionOptions(trigger_characters=[',']),
)
def completions(params: lsp_types.CompletionParams):
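        # Completion is currently keyword-only, driven by parser error
        # recovery (see parse_and_suggest_keyword below).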
items = []

document = ls.workspace.get_text_document(params.text_document.uri)

if item := parse_and_suggest_keyword(document, params.position):
items.append(item)

return lsp_types.CompletionList(is_incomplete=False, items=items)

if stdio:
ls.start_io()


def position_in_span(pos: lsp_types.Position, span: Tuple[Any, Any]) -> bool:
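    # Parser spans are 1-based (line, column); LSP positions are 0-based.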
start, end = span

if pos.line < start.line - 1:
return False
if pos.line > end.line - 1:
return False
if pos.line == start.line - 1 and pos.character < start.column - 1:
return False
if pos.line == end.line - 1 and pos.character > end.column - 1:
return False
return True


def parse(
source_str: str, sdl: bool
) -> Tuple[tokenizer.Source, rust_parser.ParserResult, Any]:
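    # Tokenize the source and run the Rust parser, starting from the SDL
    # document or block grammar depending on the document kind.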
try:
source = tokenizer.Source.from_string(source_str)
except Exception as e:
# TODO
print(e)
raise AssertionError(e)

start_t = qltokens.T_STARTSDLDOCUMENT if sdl else qltokens.T_STARTBLOCK
start_t_name = start_t.__name__[2:]
tokens = source.tokens()

result, productions = rust_parser.parse(start_t_name, tokens)
return source, result, productions


def parse_and_report_diagnostics(
doc: TextDocument, ls: LanguageServer
) -> Optional[List[qlast.Base] | qlast.Schema]:
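    # Parse the document; on syntax errors, publish them as diagnostics and
    # return None, otherwise clear diagnostics and return the AST.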
sdl = doc.filename.endswith('.esdl') if doc.filename else False

source, result, productions = parse(doc.source, sdl)

if result.errors:
diagnostics = []
for error in result.errors:
message, span, hint, details = error

if details:
message += f"\n{details}"
if hint:
message += f"\nHint: {hint}"
(start, end) = tokenizer.inflate_span(source.text(), span)
assert end

diagnostics.append(
lsp_types.Diagnostic(
range=lsp_types.Range(
start=lsp_types.Position(
line=start.line - 1,
character=start.column - 1,
),
end=lsp_types.Position(
line=end.line - 1,
character=end.column - 1,
),
),
severity=lsp_types.DiagnosticSeverity.Error,
message=message,
)
)

ls.publish_diagnostics(doc.uri, diagnostics, doc.version)
return None

ls.publish_diagnostics(doc.uri, [], doc.version)
# parsing successful

assert isinstance(result.out, rust_parser.CSTNode)

ast = qlparser._cst_to_ast(
result.out, productions, source, doc.filename
).val
if sdl:
assert isinstance(ast, qlast.Schema), ast
else:
assert isinstance(ast, list), ast
return ast


def parse_and_suggest_keyword(
doc: TextDocument, position: lsp_types.Position
) -> Optional[lsp_types.CompletionItem]:
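    # Turn "Missing keyword" parser-recovery errors at the cursor position
    # into completion suggestions.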
sdl = doc.filename.endswith('.esdl') if doc.filename else False

source, result, _productions = parse(doc.source, sdl)
for error in result.errors:
message: str
message, span, hint, details = error
if not message.startswith('Missing keyword '):
continue
(start, end) = tokenizer.inflate_span(source.text(), span)

if not position_in_span(position, (start, end)):
continue

keyword = message.removeprefix('Missing keyword \'')[:-1]

return lsp_types.CompletionItem(
label=keyword,
kind=lsp_types.CompletionItemKind.Keyword,
)
return None


def get_schema(ls: EdgeDBLanguageServer) -> Optional[s_schema.Schema]:
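    # Lazily build the schema from the workspace's dbschema/*.esdl files and
    # cache it on the server state.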

if ls.state.schema:
return ls.state.schema

# discover dbschema/ folders
if len(ls.workspace.folders) != 1:

if len(ls.workspace.folders) > 1:
ls.show_message_log(
"WARNING: workspaces with multiple root folders "
"are not supported"
)
return None

workspace: lsp_types.WorkspaceFolder = next(
iter(ls.workspace.folders.values())
)
workspace_path = pathlib.Path(pygls_uris.to_fs_path(workspace.uri))

dbschema = workspace_path / 'dbschema'

# read and parse .esdl files
sdl = qlast.Schema(declarations=[])
for entry in os.listdir(dbschema):
if not entry.endswith('.esdl'):
continue
doc = ls.workspace.get_text_document(f'dbschema/{entry}')

doc_ast = parse_and_report_diagnostics(doc, ls)
assert isinstance(doc_ast, qlast.Schema)
sdl.declarations.extend(doc_ast.declarations)

# apply SDL to std schema
std_schema = _load_std_schema()
schema = s_ddl.apply_sdl(
sdl,
base_schema=std_schema,
current_schema=std_schema,
)

ls.state.schema = schema
return ls.state.schema


_std_schema: Optional[s_schema.Schema] = None


def _load_std_schema():
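    # Load the standard library schema once and cache it module-wide.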
global _std_schema
if _std_schema is not None:
return _std_schema

schema = s_schema.EMPTY_SCHEMA
for modname in [*s_schema.STD_SOURCES, *s_schema.TESTMODE_SOURCES]:
schema = s_std.load_std_module(schema, modname)
schema, _ = s_std.make_schema_version(schema)
schema, _ = s_std.make_global_schema_version(schema)

_std_schema = schema
return _std_schema
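
Since the command above is registered on edbcommands, it should be reachable as edb language-server --stdio. As a rough standalone sketch (the launcher below is hypothetical and not part of this commit), the click command can also be invoked directly with an argument list:

# Hypothetical launcher: starts the language server over stdio using the
# command registered in edb/tools/language_server.py.
from edb.tools.language_server import main

if __name__ == '__main__':
    # click commands accept an argv-style list when called directly
    main(['--stdio'])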
