diff --git a/build.sbt b/build.sbt index 32a94468..f7dadde0 100644 --- a/build.sbt +++ b/build.sbt @@ -77,6 +77,8 @@ libraryDependencies ++= { } } +libraryDependencies += "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.12.0" + //////////////////////////////////////////////////////////////////////////////// // Testing dependencies //////////////////////////////////////////////////////////////////////////////// diff --git a/src/main/antlr4/AlogicLexer.g4 b/src/main/antlr4/AlogicLexer.g4 index 01dbeaed..9b31b66e 100644 --- a/src/main/antlr4/AlogicLexer.g4 +++ b/src/main/antlr4/AlogicLexer.g4 @@ -9,9 +9,13 @@ lexer grammar AlogicLexer; -fragment LCMT: '//' ~[\n]* ; // Line comment -fragment BCMT: '/*' .*? '*/'; // Block comment -CMT: (LCMT | BCMT) -> channel(HIDDEN) ; // Any comment +channels { + COMMENT +} + +fragment LCMT: '//' ~[\n]* ; // Line comment +fragment BCMT: '/*' .*? '*/'; // Block comment +CMT: (LCMT | BCMT) -> channel(COMMENT) ; // Any comment UINTTYPE: 'u' [0-9]+; diff --git a/src/main/scala/com/argondesign/alogic/antlr/AlogicTokenFactory.scala b/src/main/scala/com/argondesign/alogic/antlr/AlogicTokenFactory.scala index 786e8fa1..7a6feacf 100644 --- a/src/main/scala/com/argondesign/alogic/antlr/AlogicTokenFactory.scala +++ b/src/main/scala/com/argondesign/alogic/antlr/AlogicTokenFactory.scala @@ -55,7 +55,9 @@ class AlogicTokenFactory(val alogicSource: Source, mb: MessageBuffer) extends To line: Int, charPositionInLine: Int ): Token = { - require(channel == Token.DEFAULT_CHANNEL || channel == Token.HIDDEN_CHANNEL) + require( + channel == Token.DEFAULT_CHANNEL || channel == Token.HIDDEN_CHANNEL || channel == AlogicLexer.COMMENT + ) require(source.getItem1.isInstanceOf[AlogicLexer]) def mkToken(channel: Int): AlogicToken = { @@ -69,7 +71,7 @@ class AlogicTokenFactory(val alogicSource: Source, mb: MessageBuffer) extends To token } // Creates normal token passed to the parser - def normalToken: AlogicToken = mkToken(Token.DEFAULT_CHANNEL) + def normalToken: AlogicToken = mkToken(channel) // Creates hidden token not passed to the parser def hiddenToken: AlogicToken = mkToken(Token.HIDDEN_CHANNEL) diff --git a/src/main/scala/com/argondesign/alogic/lsp/AlogicLanguageServer.scala b/src/main/scala/com/argondesign/alogic/lsp/AlogicLanguageServer.scala new file mode 100644 index 00000000..0fc6d4ac --- /dev/null +++ b/src/main/scala/com/argondesign/alogic/lsp/AlogicLanguageServer.scala @@ -0,0 +1,207 @@ +//////////////////////////////////////////////////////////////////////////////// +// Copyright (c) 2017-2021 Argon Design Ltd. All rights reserved. +// This file is covered by the BSD (with attribution) license. +// See the LICENSE file for the precise wording of the license. 
+//
+// DESCRIPTION:
+// Language server implementation
+////////////////////////////////////////////////////////////////////////////////
+
+package com.argondesign.alogic.lsp
+
+import com.argondesign.alogic.Compiler
+import com.argondesign.alogic.core.CompilerContext
+import com.argondesign.alogic.core.Loc
+import com.argondesign.alogic.core.MessageBuffer
+import com.argondesign.alogic.core.Messages._
+import com.argondesign.alogic.core.Source
+import com.argondesign.alogic.frontend.Frontend
+import com.google.gson.JsonObject
+import org.eclipse.lsp4j._
+import org.eclipse.lsp4j.services._
+
+import java.io.File
+import java.io.PrintWriter
+import java.util.concurrent.CompletableFuture
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.jdk.CollectionConverters._
+import scala.jdk.FutureConverters._
+import scala.util.Failure
+import scala.util.Success
+
+class AlogicLanguageServer extends LanguageServer with LanguageClientAware {
+
+  private var client: LanguageClient = null
+
+  var workspaceFolders: Seq[WorkspaceFolder] = Nil
+  var extraCommandLineOpts: Seq[String] = Nil
+
+  // Advertise full-document sync and full (non-delta) semantic token support
+  def initialize(x: InitializeParams): CompletableFuture[InitializeResult] = {
+    workspaceFolders = x.getWorkspaceFolders().asScala.toList
+    val capabilities = new ServerCapabilities()
+    capabilities.setTextDocumentSync(TextDocumentSyncKind.Full)
+    val semTokenLegend = new SemanticTokensLegend(
+      SemanticTokenType.values.toList.map(_.toString).asJava,
+      SemanticTokenModifier.values.toList.map(_.toString).asJava
+    )
+    capabilities.setSemanticTokensProvider(
+      new SemanticTokensWithRegistrationOptions(
+        semTokenLegend,
+        new SemanticTokensServerFull(false),
+        false,
+        List(new DocumentFilter("alogic", "", "")).asJava
+      )
+    )
+    CompletableFuture.completedFuture(new InitializeResult(capabilities))
+  }
+
+  def shutdown(): CompletableFuture[Object] = {
+    CompletableFuture.completedFuture(null)
+  }
+
+  def exit() = {}
+
+  val fullTextDocumentService = new FullTextDocumentService() {
+
+    override def didChange(params: DidChangeTextDocumentParams) = {
+      super.didChange(params)
+      validateDocument(documents(params.getTextDocument().getUri()))
+    }
+
+    override def didOpen(params: DidOpenTextDocumentParams) = {
+      super.didOpen(params)
+      validateDocument(documents(params.getTextDocument().getUri()))
+    }
+
+  }
+
+  def getTextDocumentService(): TextDocumentService = {
+    fullTextDocumentService
+  }
+
+  def getWorkspaceService(): WorkspaceService = {
+    new WorkspaceService() {
+      override def symbol(params: WorkspaceSymbolParams) = {
+        null
+      }
+
+      // Re-read the extra compiler options from the client settings, expanding
+      // ${workspaceFolder} for each workspace folder, then re-validate open documents
+      def didChangeConfiguration(params: DidChangeConfigurationParams) = {
+        val globalSettings = params.getSettings.asInstanceOf[JsonObject]
+        extraCommandLineOpts = globalSettings
+          .getAsJsonObject("alogic-lang")
+          .getAsJsonArray("extraCommandLineOpts")
+          .asScala
+          .map(_.getAsString)
+          .toSeq
+        client.workspaceFolders.asScala andThen {
+          case Success(folders) => {
+            val perWSOpts = globalSettings
+              .getAsJsonObject("alogic-lang")
+              .getAsJsonArray("perWorkspaceCmdOpts")
+              .asScala
+              .map(_.getAsString)
+              .toSeq
+            val prefix = "file://"
+            extraCommandLineOpts =
+              extraCommandLineOpts ++ folders.asScala.filter(_.getUri.startsWith(prefix)).flatMap {
+                folder =>
+                  perWSOpts map {
+                    _.replaceAll("\\$\\{workspaceFolder}", folder.getUri.substring(prefix.length))
+                  }
+              }
+            fullTextDocumentService.documents.values.foreach(validateDocument)
+          }
+          case Failure(e) => e.printStackTrace()
+        }
+      }
+
+      def didChangeWatchedFiles(params: DidChangeWatchedFilesParams) = {}
+    }
+  }
+
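+  // Remember the connected client; diagnostics are pushed back to the editor through it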
override def connect(client: LanguageClient) = { + this.client = client + } + + private def validateDocument(document: TextDocumentItem) = { + val prefix = "file://" + var tempFile: File = null + val path = if (document.getUri.startsWith(prefix)) { + document.getUri.substring(prefix.length) + } else { + tempFile = File.createTempFile("alogic-lang-", ".alogic") + new PrintWriter(tempFile) { + try { + write(document.getText) + } finally { + close() + } + } + tempFile.getAbsolutePath + } + + val source = Source(path, document.getText) + + val mb = new MessageBuffer + + Compiler.parseArgs( + mb, + extraCommandLineOpts :++ List("-o", System.getProperty("java.io.tmpdir"), path), + None + ) match { + case Some((settings, _, params)) => { + + implicit val cc = new CompilerContext(mb, settings) + val fe = new Frontend + fe(source, Loc(document.getUri, 1, source, 0, 0, 0), Nil) + + val sources = mb.messages.map {_.loc.source.path}.toSet + sources.foreach(src => { + val uri = if (src == path) document.getUri else "file://" + src + client.publishDiagnostics( + new PublishDiagnosticsParams( + uri, + mb.messages + .filter(_.loc.source.path == src) + .map(msg => { + val startLineOffset = + msg.loc.source.offsetFor(msg.loc.source.lineFor(msg.loc.start)) + new Diagnostic( + new Range( + new Position(msg.loc.line - 1, msg.loc.start - startLineOffset), + new Position(msg.loc.line - 1, msg.loc.end - startLineOffset) + ), + msg.msg.mkString("\n"), + msg.category match { + case WarningCategory => DiagnosticSeverity.Warning + case ErrorCategory => DiagnosticSeverity.Error + case NoteCategory => DiagnosticSeverity.Information + case FatalCategory => DiagnosticSeverity.Error + case IceCategory => DiagnosticSeverity.Error + }, + "alogic-lang" + ) + }) + .asJava + ) + ) + } + ) + // Push empty diagnostics for current file if not in sources + if (!sources.contains(path)) { + client.publishDiagnostics(new PublishDiagnosticsParams(document.getUri, Nil.asJava)) + } + + } + case None => { + client.showMessage(new MessageParams(MessageType.Error, "Command line parsing failed")) + println((extraCommandLineOpts :+ path).mkString("\n")) + mb.messages.foreach(msg => println(msg.msg.mkString("\n"))) + } + } + if (tempFile != null) { + tempFile.delete + } + } + +} diff --git a/src/main/scala/com/argondesign/alogic/lsp/FullTextDocumentService.scala b/src/main/scala/com/argondesign/alogic/lsp/FullTextDocumentService.scala new file mode 100644 index 00000000..4c4fa302 --- /dev/null +++ b/src/main/scala/com/argondesign/alogic/lsp/FullTextDocumentService.scala @@ -0,0 +1,151 @@ +//////////////////////////////////////////////////////////////////////////////// +// Copyright (c) 2017-2021 Argon Design Ltd. All rights reserved. +// This file is covered by the BSD (with attribution) license. +// See the LICENSE file for the precise wording of the license. 
+// +// DESCRIPTION: +// TextDocumentService implementation which doesn't handle deltas +//////////////////////////////////////////////////////////////////////////////// + +package com.argondesign.alogic.lsp + +import com.argondesign.alogic.antlr.AlogicLexer +import com.argondesign.alogic.antlr.AlogicParser +import com.argondesign.alogic.antlr.AlogicTokenFactory +import com.argondesign.alogic.antlr.AntlrConverters._ +import com.argondesign.alogic.core.Loc +import com.argondesign.alogic.core.MessageBuffer +import com.argondesign.alogic.core.Source +import org.antlr.v4.runtime.CharStreams +import org.antlr.v4.runtime.CommonTokenStream +import org.eclipse.lsp4j._ +import org.eclipse.lsp4j.services.TextDocumentService + +import java.util.concurrent.CompletableFuture +import scala.jdk.CollectionConverters._ +import scala.util.chaining._ + +class FullTextDocumentService extends TextDocumentService { + + var documents: scala.collection.mutable.Map[String, TextDocumentItem] = + scala.collection.mutable.Map() + + override def didOpen(params: DidOpenTextDocumentParams) = { + documents += (params.getTextDocument().getUri() -> params.getTextDocument()) + } + + override def didChange(params: DidChangeTextDocumentParams) = { + val uri = params.getTextDocument().getUri() + params.getContentChanges().forEach(change => documents(uri).setText(change.getText())); + } + + override def didClose(params: DidCloseTextDocumentParams) = { + documents -= params.getTextDocument().getUri() + } + + override def didSave(params: DidSaveTextDocumentParams) = {} + + override def semanticTokensFull( + params: SemanticTokensParams + ): CompletableFuture[SemanticTokens] = { + val document = documents(params.getTextDocument().getUri()) + + // Create Antlr4 parser + val prefix = "file:///" + val path = if (document.getUri().startsWith(prefix)) { + document.getUri().substring(prefix.length()) + } else { + document.getUri() + } + + val source = Source(path, document.getText()) + + val tokens = { + val mb = new MessageBuffer + val tokenFactory = new AlogicTokenFactory(source, mb) + val stream = CharStreams.fromString(document.getText()) + val lexer = new AlogicLexer(stream) + lexer.setTokenFactory(tokenFactory) + val tokenStream = new CommonTokenStream(lexer) + val parser = new AlogicParser(tokenStream) tap { parser => + parser.removeErrorListeners() + } + val visitor = new SemanticTokenVisitor() + val parseResult = parser.file // Needs to be called for tokenStream to be filled + val semTokens = visitor(parseResult) + (semTokens ++ tokenStream.getTokens.asScala + .filter(_.getChannel == AlogicLexer.COMMENT) + .map(tk => SemanticToken(tk.loc, SemanticTokenType.Comment))).sortBy(_.loc.start) + } + + val lineOffsets = source.lines.foldLeft(Array(0))((a, l) => a :+ a.last + l.length) + val lineLens = source.lines.map(_.length()).toArray + + // Binary search to find correct line for given offset + def getLineForOffset(offset: Int): Int = { + def search(start: Int = 0, end: Int = lineOffsets.length - 1): Int = { + val mid = start + (end - start) / 2 + if ( + offset >= lineOffsets(end) && (end == lineOffsets.length - 1 || offset < lineOffsets( + end + 1 + )) + ) end + else if (offset >= lineOffsets(start) && offset < lineOffsets(start + 1)) start + else if (offset >= lineOffsets(mid) && offset < lineOffsets(mid + 1)) mid + else if (offset >= lineOffsets(mid)) search(mid + 1, end) + else search(start, mid - 1) + } + search() + } + + // Split loc covering multiple lines into 1 per line + def splitLoc(loc: Loc): Seq[Loc] = { + val startLine 
= getLineForOffset(loc.start) + LazyList.from(0) takeWhile (line => { + lineOffsets(startLine + line) <= loc.end && startLine + line < lineLens.size + }) map { line => + val realLine = startLine + line + val startPos = loc.start.max(lineOffsets(realLine)) + val endPos = loc.end.min(lineOffsets(realLine) + lineLens(realLine)) + Loc(loc.file, loc.line + line, loc.source, startPos, endPos, startPos) + } + } + + // Generate list of 5 integers per semantic token as per: + // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens + // [line (zero indexed), start char on line, length, type, modifiers] + val tkData = tokens filter(_.loc.start >= 0) flatMap { tk => + splitLoc(tk.loc) map { loc => + List[Integer]( + loc.line - 1, + loc.start - lineOffsets(getLineForOffset(loc.start)), + loc.end - loc.start, + tk.typ.id, + tk.getEncodedModifiers() + ) + } + } + + + // Delta encode the line and start character + val deltaTkData = tkData match { + case head :: next => + head ++ tkData.zip(next).flatMap { + case (prev, cur) => { + List[Integer]( + cur(0) - prev(0), + if (cur(0) == prev(0)) cur(1) - prev(1) + else cur(1), + cur(2), + cur(3), + cur(4) + ) + } + } + case Nil => Nil + } + + CompletableFuture.completedFuture(new SemanticTokens(deltaTkData.asJava)) + } + +} diff --git a/src/main/scala/com/argondesign/alogic/lsp/SemanticToken.scala b/src/main/scala/com/argondesign/alogic/lsp/SemanticToken.scala new file mode 100644 index 00000000..62859162 --- /dev/null +++ b/src/main/scala/com/argondesign/alogic/lsp/SemanticToken.scala @@ -0,0 +1,67 @@ +//////////////////////////////////////////////////////////////////////////////// +// Copyright (c) 2017-2021 Argon Design Ltd. All rights reserved. +// This file is covered by the BSD (with attribution) license. +// See the LICENSE file for the precise wording of the license. 
+// +// DESCRIPTION: +// Semantic token as defined by +// https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens +//////////////////////////////////////////////////////////////////////////////// + +package com.argondesign.alogic.lsp + +import com.argondesign.alogic.core.Loc + +object SemanticTokenType extends Enumeration { + type Type = Value + val Namespace = Value("namespace") + val Type = Value("type") + val Class = Value("class") + val Enum = Value("enum") + val Interface = Value("interface") + val Struct = Value("struct") + val TypeParameter = Value("typeParameter") + val Parameter = Value("parameter") + val Variable = Value("variable") + val Property = Value("property") + val EnumMember = Value("enumMember") + val Event = Value("event") + val Function = Value("function") + val Method = Value("method") + val Macro = Value("macro") + val Keyword = Value("keyword") + val Modifier = Value("modifier") + val Comment = Value("comment") + val String = Value("string") + val Number = Value("number") + val Regexp = Value("regexp") + val Operator = Value("operator") +} + +object SemanticTokenModifier extends Enumeration { + type Type = Value + val Declaration = Value("declaration") + val Definition = Value("definition") + val Readonly = Value("readonly") + val Static = Value("static") + val Deprecated = Value("deprecated") + val Abstract = Value("abstract") + val Async = Value("async") + val Modification = Value("modification") + val Documentation = Value("documentation") + val DefaultLibrary = Value("defaultLibrary") +} + +case class SemanticToken( + loc: Loc, + typ: SemanticTokenType.Type, + typeModifiers: Seq[SemanticTokenModifier.Type] = Nil) { + + def getEncodedModifiers(): Int = typeModifiers.foldLeft(0)((a, i) => a + (1 << i.id)) + + def startPosInLine(): Int = { + val startLineOffset = loc.source.offsetFor(loc.source.lineFor(loc.start)) + loc.start - startLineOffset + } + +} diff --git a/src/main/scala/com/argondesign/alogic/lsp/SemanticTokenVisitor.scala b/src/main/scala/com/argondesign/alogic/lsp/SemanticTokenVisitor.scala new file mode 100644 index 00000000..cf847102 --- /dev/null +++ b/src/main/scala/com/argondesign/alogic/lsp/SemanticTokenVisitor.scala @@ -0,0 +1,506 @@ +//////////////////////////////////////////////////////////////////////////////// +// Copyright (c) 2017-2021 Argon Design Ltd. All rights reserved. +// This file is covered by the BSD (with attribution) license. +// See the LICENSE file for the precise wording of the license. 
+// +// DESCRIPTION: +// Visitor which generates list of SemanticTokens from the parse tree +//////////////////////////////////////////////////////////////////////////////// + +package com.argondesign.alogic.lsp + +import com.argondesign.alogic.antlr.AlogicParser._ +import com.argondesign.alogic.antlr.AlogicParserRuleContext +import com.argondesign.alogic.antlr.AlogicScalarVisitor +import com.argondesign.alogic.antlr.AlogicToken +import com.argondesign.alogic.antlr.AntlrConverters._ +import com.argondesign.alogic.core.Loc +import com.argondesign.alogic.lsp.SemanticToken +import org.antlr.v4.runtime.tree.TerminalNode + +import scala.jdk.CollectionConverters._ + +class BaseTokenVisitor extends AlogicScalarVisitor[List[SemanticToken]] { + override def defaultResult(): List[SemanticToken] = Nil + + def getTokens(tk: AlogicToken, typ: SemanticTokenType.Type): List[SemanticToken] = + tk match { + case null => Nil + case _ => List(SemanticToken(tk.loc, typ)) + } + + def getTokens(ctx: AlogicParserRuleContext, typ: SemanticTokenType.Type): List[SemanticToken] = + ctx match { + case null => Nil + case _ => List(SemanticToken(ctx.loc, typ)) + } + + def getTokens(nd: TerminalNode, typ: SemanticTokenType.Type): List[SemanticToken] = + nd match { + case null => Nil + case _ => List(SemanticToken(nd.loc, typ)) + } + + override def aggregateResult( + aggregate: List[SemanticToken], + nextResult: List[SemanticToken] + ): List[SemanticToken] = aggregate ++ nextResult + +} + +class SemanticTokenVisitor extends BaseTokenVisitor { + private val parentVisitor = this + + val FunctionTokenVisitor = new BaseTokenVisitor { + override def visitExprIndex(ctx: ExprIndexContext): List[SemanticToken] = + visit(ctx.expr(0)) ++ parentVisitor(ctx.expr(1)) + + override def visitExprAtid(ctx: ExprAtidContext): List[SemanticToken] = + getTokens(ctx.ATID, SemanticTokenType.Function) + + override def visitExprDollarid(ctx: ExprDollaridContext): List[SemanticToken] = + getTokens(ctx.DOLLARID, SemanticTokenType.Function) + + override def visitExprDot(ctx: ExprDotContext): List[SemanticToken] = + parentVisitor(ctx.expr) ++ visit(ctx.ident) + + override def visitIdent(ctx: IdentContext): List[SemanticToken] = + parentVisitor(ctx.expr).flatten ++ + getTokens(ctx.IDENTIFIER, SemanticTokenType.Function) ++ + getTokens(ctx.HASH, SemanticTokenType.Operator) + + } + + val TypeTokenVisitor = new BaseTokenVisitor { + override def visitExprTypeBool(ctx: ExprTypeBoolContext): List[SemanticToken] = + getTokens(ctx.BOOL, SemanticTokenType.Type) + + override def visitExprTypeSInt(ctx: ExprTypeSIntContext): List[SemanticToken] = + getTokens(ctx.INTTYPE, SemanticTokenType.Type) + + override def visitExprTypeUInt(ctx: ExprTypeUIntContext): List[SemanticToken] = + getTokens(ctx.UINTTYPE, SemanticTokenType.Type) + + override def visitExprTypeSNum(ctx: ExprTypeSNumContext): List[SemanticToken] = + getTokens(ctx.INT, SemanticTokenType.Type) + + override def visitExprTypeUNum(ctx: ExprTypeUNumContext): List[SemanticToken] = + getTokens(ctx.UINT, SemanticTokenType.Type) + + override def visitExprTypeVoid(ctx: ExprTypeVoidContext): List[SemanticToken] = + getTokens(ctx.VOID, SemanticTokenType.Type) + + override def visitExprCall(ctx: ExprCallContext): List[SemanticToken] = + visit(ctx.expr) ++ parentVisitor(ctx.args) + + override def visitExprIndex(ctx: ExprIndexContext): List[SemanticToken] = + visit(ctx.expr(0)) ++ parentVisitor(ctx.expr(1)) + + override def visitIdent(ctx: IdentContext): List[SemanticToken] = + parentVisitor(ctx.expr).flatten ++ + 
getTokens(ctx.IDENTIFIER, SemanticTokenType.Type) ++
+        getTokens(ctx.HASH, SemanticTokenType.Operator)
+
+  }
+
+  private def identTokens(ctx: IdentContext, typ: SemanticTokenType.Type): List[SemanticToken] =
+    ctx match {
+      case null => Nil
+      case _ =>
+        getTokens(ctx.IDENTIFIER, typ) ++
+          getTokens(ctx.HASH, SemanticTokenType.Operator) ++
+          visit(ctx.expr).flatten
+    }
+
+  override def visitIdent(ctx: IdentContext): List[SemanticToken] =
+    identTokens(ctx, SemanticTokenType.Variable)
+
+  override def visitExprTypeBool(ctx: ExprTypeBoolContext): List[SemanticToken] =
+    getTokens(ctx.BOOL, SemanticTokenType.Type)
+
+  override def visitExprTypeSInt(ctx: ExprTypeSIntContext): List[SemanticToken] =
+    getTokens(ctx.INTTYPE, SemanticTokenType.Type)
+
+  override def visitExprTypeUInt(ctx: ExprTypeUIntContext): List[SemanticToken] =
+    getTokens(ctx.UINTTYPE, SemanticTokenType.Type)
+
+  override def visitExprTypeSNum(ctx: ExprTypeSNumContext): List[SemanticToken] =
+    getTokens(ctx.INT, SemanticTokenType.Type)
+
+  override def visitExprTypeUNum(ctx: ExprTypeUNumContext): List[SemanticToken] =
+    getTokens(ctx.UINT, SemanticTokenType.Type)
+
+  override def visitExprTypeVoid(ctx: ExprTypeVoidContext): List[SemanticToken] =
+    getTokens(ctx.VOID, SemanticTokenType.Type)
+
+  override def visitExprLitString(ctx: ExprLitStringContext): List[SemanticToken] =
+    visitChildren(ctx) ++ getTokens(ctx, SemanticTokenType.String)
+
+  override def visitExprLitTrue(ctx: ExprLitTrueContext): List[SemanticToken] =
+    visitChildren(ctx) ++ getTokens(ctx, SemanticTokenType.Number)
+
+  override def visitExprLitFalse(ctx: ExprLitFalseContext): List[SemanticToken] =
+    visitChildren(ctx) ++ getTokens(ctx, SemanticTokenType.Number)
+
+  override def visitExprLitSizedInt(ctx: ExprLitSizedIntContext): List[SemanticToken] =
+    visitChildren(ctx) ++ getTokens(ctx, SemanticTokenType.Number)
+
+  override def visitExprLitUnsizedInt(ctx: ExprLitUnsizedIntContext): List[SemanticToken] =
+    visitChildren(ctx) ++ getTokens(ctx, SemanticTokenType.Number)
+
+  override def visitImportOne(ctx: ImportOneContext): List[SemanticToken] =
+    getTokens(ctx.IMPORT, SemanticTokenType.Keyword) ++
+      getTokens(ctx.STRING, SemanticTokenType.String) ++
+      getTokens(ctx.AS, SemanticTokenType.Keyword) ++
+      getTokens(ctx.ident, SemanticTokenType.Variable)
+
+  override def visitFromOne(ctx: FromOneContext): List[SemanticToken] =
+    getTokens(ctx.FROM, SemanticTokenType.Keyword) ++
+      getTokens(ctx.STRING, SemanticTokenType.String) ++
+      getTokens(ctx.IMPORT, SemanticTokenType.Keyword) ++
+      getTokens(ctx.expr, SemanticTokenType.Variable) ++
+      getTokens(ctx.AS, SemanticTokenType.Keyword) ++
+      getTokens(ctx.ident, SemanticTokenType.Variable)
+
+  override def visitFromAll(ctx: FromAllContext): List[SemanticToken] =
+    getTokens(ctx.FROM, SemanticTokenType.Keyword) ++
+      getTokens(ctx.STRING, SemanticTokenType.String) ++
+      getTokens(ctx.IMPORT, SemanticTokenType.Keyword) ++
+      getTokens(ctx.MUL, SemanticTokenType.Operator)
+
+  override def visitUsingOne(ctx: UsingOneContext): List[SemanticToken] =
+    visitChildren(ctx) ++
+      getTokens(ctx.USING, SemanticTokenType.Keyword) ++
+      getTokens(ctx.AS, SemanticTokenType.Keyword)
+
+  override def visitUsingAll(ctx: UsingAllContext): List[SemanticToken] =
+    visitChildren(ctx) ++
+      getTokens(ctx.USING, SemanticTokenType.Keyword) ++
+      getTokens(ctx.MUL, SemanticTokenType.Operator)
+
+  override def visitAssertionAssert(ctx: AssertionAssertContext): List[SemanticToken] =
+    visitChildren(ctx) ++
+      getTokens(ctx.ASSERT,
SemanticTokenType.Keyword) ++ + getTokens(ctx.STRING, SemanticTokenType.String) + + override def visitAssertionStatic(ctx: AssertionStaticContext): List[SemanticToken] = + visitChildren(ctx) ++ + getTokens(ctx.STATIC, SemanticTokenType.Keyword) ++ + getTokens(ctx.ASSERT, SemanticTokenType.Keyword) ++ + getTokens(ctx.STRING, SemanticTokenType.String) + + override def visitAssertionUnreachable(ctx: AssertionUnreachableContext): List[SemanticToken] = + getTokens(ctx.UNREACHABLE, SemanticTokenType.Keyword) ++ + getTokens(ctx.STRING, SemanticTokenType.String) + + override def visitExprBinary(ctx: ExprBinaryContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.op, SemanticTokenType.Operator) + + override def visitExprUnary(ctx: ExprUnaryContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.op, SemanticTokenType.Operator) + + override def visitStmtIf(ctx: StmtIfContext): List[SemanticToken] = + visitChildren(ctx) ++ + getTokens(ctx.IF, SemanticTokenType.Keyword) ++ + getTokens(ctx.ELSE, SemanticTokenType.Keyword) + + override def visitStmtCase(ctx: StmtCaseContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.CASE, SemanticTokenType.Keyword) + + override def visitStmtLoop(ctx: StmtLoopContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.LOOP, SemanticTokenType.Keyword) + + override def visitStmtDo(ctx: StmtDoContext): List[SemanticToken] = + visitChildren(ctx) ++ + getTokens(ctx.DO, SemanticTokenType.Keyword) ++ + getTokens(ctx.WHILE, SemanticTokenType.Keyword) + + override def visitStmtWhile(ctx: StmtWhileContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.WHILE, SemanticTokenType.Keyword) + + override def visitStmtFor(ctx: StmtForContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.FOR, SemanticTokenType.Keyword) + + override def visitStmtLet(ctx: StmtLetContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.LET, SemanticTokenType.Keyword) + + override def visitStmtFence(ctx: StmtFenceContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.FENCE, SemanticTokenType.Keyword) + + override def visitStmtBreak(ctx: StmtBreakContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.BREAK, SemanticTokenType.Keyword) + + override def visitStmtContinue(ctx: StmtContinueContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.CONTINUE, SemanticTokenType.Keyword) + + override def visitStmtGoto(ctx: StmtGotoContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.GOTO, SemanticTokenType.Keyword) + + override def visitStmtReturn(ctx: StmtReturnContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.RETURN, SemanticTokenType.Keyword) + + override def visitStmtPost(ctx: StmtPostContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.op, SemanticTokenType.Operator) + + override def visitStmtWait(ctx: StmtWaitContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.WAIT, SemanticTokenType.Keyword) + + override def visitExprKeyword(ctx: ExprKeywordContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx, SemanticTokenType.Keyword) + + override def visitExprThis(ctx: ExprThisContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx, SemanticTokenType.Keyword) + + override def visitExprCall(ctx: ExprCallContext): List[SemanticToken] = + FunctionTokenVisitor(ctx.expr) ++ visit(ctx.args) + + override def visitExprDot(ctx: ExprDotContext): List[SemanticToken] = + visitChildren(ctx) ++ 
getTokens(ctx.inout, SemanticTokenType.Keyword) + + override def visitExprSlice(ctx: ExprSliceContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.op, SemanticTokenType.Operator) + + override def visitDescVar(ctx: DescVarContext): List[SemanticToken] = + visit(ctx.init) ++ + TypeTokenVisitor(ctx.expr(0)) ++ + getTokens(ctx.STATIC, SemanticTokenType.Keyword) ++ + getTokens(ctx.EQUALS, SemanticTokenType.Keyword) ++ + identTokens(ctx.ident, SemanticTokenType.Variable) + + override def visitDescIn(ctx: DescInContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + TypeTokenVisitor(ctx.spec) ++ + getTokens(ctx.IN, SemanticTokenType.Keyword) ++ + getTokens(ctx.PIPELINE, SemanticTokenType.Keyword) ++ + visit(ctx.fct) + + override def visitDescOut(ctx: DescOutContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + TypeTokenVisitor(ctx.spec) ++ + getTokens(ctx.OUT, SemanticTokenType.Keyword) ++ + getTokens(ctx.PIPELINE, SemanticTokenType.Keyword) ++ + getTokens(ctx.EQUALS, SemanticTokenType.Operator) ++ + visit(ctx.fct) ++ + visit(ctx.stt) ++ + visit(ctx.init) + + override def visitDescSnoop(ctx: DescSnoopContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + TypeTokenVisitor(ctx.spec) ++ + getTokens(ctx.SNOOP, SemanticTokenType.Keyword) ++ + visit(ctx.fct) + + override def visitDescPipeVar(ctx: DescPipeVarContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + TypeTokenVisitor(ctx.expr) ++ + getTokens(ctx.PIPELINE, SemanticTokenType.Keyword) + + override def visitDescParam(ctx: DescParamContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + TypeTokenVisitor(ctx.expr(0)) ++ + getTokens(ctx.PARAM, SemanticTokenType.Keyword) ++ + getTokens(ctx.EQUALS, SemanticTokenType.Operator) ++ + visit(ctx.init) + + override def visitDescParamType(ctx: DescParamTypeContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + getTokens(ctx.PARAM, SemanticTokenType.Keyword) ++ + getTokens(ctx.TYPE, SemanticTokenType.Keyword) ++ + getTokens(ctx.EQUALS, SemanticTokenType.Operator) ++ + visit(ctx.init) + + override def visitDescConst(ctx: DescConstContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + getTokens(ctx.CONST, SemanticTokenType.Keyword) ++ + getTokens(ctx.EQUALS, SemanticTokenType.Operator) ++ + TypeTokenVisitor(ctx.expr(0)) ++ + visit(ctx.expr(1)) + + override def visitDescArr(ctx: DescArrContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + TypeTokenVisitor(ctx.expr(0)) ++ + visit(ctx.expr(1)) + + override def visitDescSram(ctx: DescSramContext): List[SemanticToken] = + TypeTokenVisitor(ctx.expr(0)) ++ + visit(ctx.expr(1)) ++ + identTokens(ctx.ident, SemanticTokenType.Variable) ++ + getTokens(ctx.SRAM, SemanticTokenType.Keyword) ++ + getTokens(ctx.WIRE, SemanticTokenType.Keyword) + + override def visitDescType(ctx: DescTypeContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Type) ++ + TypeTokenVisitor(ctx.expr) ++ + getTokens(ctx.TYPEDEF, SemanticTokenType.Keyword) + + override def visitDescEntity(ctx: DescEntityContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Class) ++ + getTokens(ctx.entity_keyword, SemanticTokenType.Keyword) ++ + visit(ctx.ent).flatten + + override def visitDescRecord(ctx: DescRecordContext): List[SemanticToken] = + identTokens(ctx.ident, 
SemanticTokenType.Struct) ++ + getTokens(ctx.STRUCT, SemanticTokenType.Keyword) ++ + visit(ctx.rec).flatten + + override def visitDescInstance(ctx: DescInstanceContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Class) ++ + getTokens(ctx.EQUALS, SemanticTokenType.Operator) ++ + getTokens(ctx.keyword, SemanticTokenType.Keyword) ++ + visit(ctx.expr) + + override def visitDescSingleton(ctx: DescSingletonContext): List[SemanticToken] = + getTokens(ctx.NEW, SemanticTokenType.Keyword) ++ + getTokens(ctx.entity_keyword, SemanticTokenType.Keyword) ++ + identTokens(ctx.ident, SemanticTokenType.Class) ++ + visit(ctx.ent).flatten + + override def visitDescFuncAlogic(ctx: DescFuncAlogicContext): List[SemanticToken] = + FunctionTokenVisitor(ctx.ident) ++ + TypeTokenVisitor(ctx.expr) ++ + getTokens(ctx.STATIC, SemanticTokenType.Keyword) ++ + visit(ctx.formal_arguments) ++ + visit(ctx.stmt).flatten + + override def visitDescFuncImport(ctx: DescFuncImportContext): List[SemanticToken] = + TypeTokenVisitor(ctx.expr) ++ + getTokens(ctx.IMPORT, SemanticTokenType.Keyword) ++ + getTokens(ctx.IDENTIFIER, SemanticTokenType.Function) ++ + visit(ctx.formal_arguments) + + override def visitDescGenIf(ctx: DescGenIfContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Namespace) ++ + ctx.IF.asScala.flatMap(getTokens(_, SemanticTokenType.Keyword)) ++ + ctx.ELSE.asScala.flatMap(getTokens(_, SemanticTokenType.Keyword)) ++ + getTokens(ctx.GEN, SemanticTokenType.Keyword) ++ + visit(ctx.conds).flatten ++ + visit(ctx.thenItemss).flatten ++ + visit(ctx.elseItems) + + override def visitDescGenFor(ctx: DescGenForContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Namespace) ++ + getTokens(ctx.GEN, SemanticTokenType.Keyword) ++ + getTokens(ctx.FOR, SemanticTokenType.Keyword) ++ + visit(ctx.ginits) ++ + visit(ctx.expr) ++ + visit(ctx.lsteps) ++ + visit(ctx.genitems) + + override def visitDescGenRange(ctx: DescGenRangeContext): List[SemanticToken] = + identTokens(ctx.ident, SemanticTokenType.Namespace) ++ + getTokens(ctx.GEN, SemanticTokenType.Keyword) ++ + getTokens(ctx.FOR, SemanticTokenType.Keyword) ++ + getTokens(ctx.IDENTIFIER, SemanticTokenType.Variable) ++ + getTokens(ctx.op, SemanticTokenType.Operator) ++ + TypeTokenVisitor(ctx.expr(0)) ++ + visit(ctx.expr(1)) ++ + visit(ctx.genitems) + + override def visitFCTSync(ctx: FCTSyncContext): List[SemanticToken] = + getTokens(ctx.SYNC, SemanticTokenType.Keyword) + + override def visitFCTSyncReady(ctx: FCTSyncReadyContext): List[SemanticToken] = { + val srLoc = ctx.SYNC_READY.loc + val endLineOffset = srLoc.source.lineFor(srLoc.end) - srLoc.source.lineFor(srLoc.start) + List( + SemanticToken( + Loc( + srLoc.file, + srLoc.line, + srLoc.source, + srLoc.start, + srLoc.start + 4, + srLoc.point, + srLoc.trueFileOpt + ), + SemanticTokenType.Keyword + ), // 'sync' + SemanticToken( + Loc( + srLoc.file, + srLoc.line + endLineOffset, + srLoc.source, + srLoc.end - 5, + srLoc.end, + srLoc.end - 5, + srLoc.trueFileOpt + ), + SemanticTokenType.Keyword + ) // 'ready' + ) + } + + override def visitSTTWire(ctx: STTWireContext): List[SemanticToken] = + visitChildren(ctx) ++ + getTokens(ctx.WIRE, SemanticTokenType.Keyword) + + override def visitSlices(ctx: SlicesContext): List[SemanticToken] = + visitChildren(ctx) ++ + ctx.BSLICE.asScala.flatMap(getTokens(_, SemanticTokenType.Keyword)) ++ + ctx.FSLICE.asScala.flatMap(getTokens(_, SemanticTokenType.Keyword)) ++ + ctx.BUBBLE.asScala.flatMap(getTokens(_, SemanticTokenType.Keyword)) + + 
override def visitFormal_arguments(ctx: Formal_argumentsContext): List[SemanticToken] = + ctx.expr.asScala.flatMap(TypeTokenVisitor(_)).toList ++ + ctx.IDENTIFIER.asScala.flatMap(getTokens(_, SemanticTokenType.Variable)) + + override def visitLoopInitDesc(ctx: LoopInitDescContext): List[SemanticToken] = + TypeTokenVisitor(ctx.expr(0)) ++ + getTokens(ctx.IDENTIFIER, SemanticTokenType.Variable) ++ + getTokens(ctx.EQUALS, SemanticTokenType.Operator) ++ + visit(ctx.expr(1)) + + override def visitGinit(ctx: GinitContext): List[SemanticToken] = + TypeTokenVisitor(ctx.expr(0)) ++ + getTokens(ctx.IDENTIFIER, SemanticTokenType.Variable) ++ + visit(ctx.expr(1)) + + override def visitCaseDefault(ctx: CaseDefaultContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.DEFAULT, SemanticTokenType.Keyword) + + override def visitEntFenceBlock(ctx: EntFenceBlockContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.FENCE, SemanticTokenType.Keyword) + + override def visitEntVerbatimBlock(ctx: EntVerbatimBlockContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.VERBATIM, SemanticTokenType.Keyword) + + override def visitAttributes(ctx: AttributesContext): List[SemanticToken] = + getTokens(ctx, SemanticTokenType.Comment) + + override def visitEntConnect(ctx: EntConnectContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.point, SemanticTokenType.Operator) + + override def visitEntConnectInputs(ctx: EntConnectInputsContext): List[SemanticToken] = + visitChildren(ctx) ++ + getTokens(ctx.point, SemanticTokenType.Operator) ++ + ctx.MUL.asScala.flatMap(getTokens(_, SemanticTokenType.Operator)) + + override def visitStmtAssign(ctx: StmtAssignContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.EQUALS, SemanticTokenType.Operator) + + override def visitStmtUpdate(ctx: StmtUpdateContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.ASSIGNOP, SemanticTokenType.Operator) + + override def visitExprTernary(ctx: ExprTernaryContext): List[SemanticToken] = + visitChildren(ctx) ++ + getTokens(ctx.QUESTIONMARK, SemanticTokenType.Operator) ++ + getTokens(ctx.COLON, SemanticTokenType.Operator) + + override def visitLoopInitAssign(ctx: LoopInitAssignContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.EQUALS, SemanticTokenType.Operator) + + override def visitLoopStepAssign(ctx: LoopStepAssignContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.EQUALS, SemanticTokenType.Operator) + + override def visitLoopStepUpdate(ctx: LoopStepUpdateContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.ASSIGNOP, SemanticTokenType.Operator) + + override def visitLoopStepPost(ctx: LoopStepPostContext): List[SemanticToken] = + visitChildren(ctx) ++ getTokens(ctx.op, SemanticTokenType.Operator) + + override def visitPkgCompile(ctx: PkgCompileContext): List[SemanticToken] = + visitChildren(ctx) ++ + getTokens(ctx.COMPILE, SemanticTokenType.Keyword) ++ + getTokens(ctx.AS, SemanticTokenType.Keyword) + +} diff --git a/src/main/scala/com/argondesign/alogic/lsp/ServerApp.scala b/src/main/scala/com/argondesign/alogic/lsp/ServerApp.scala new file mode 100644 index 00000000..0ac170d9 --- /dev/null +++ b/src/main/scala/com/argondesign/alogic/lsp/ServerApp.scala @@ -0,0 +1,35 @@ +//////////////////////////////////////////////////////////////////////////////// +// Copyright (c) 2017-2021 Argon Design Ltd. All rights reserved. +// This file is covered by the BSD (with attribution) license. 
+// See the LICENSE file for the precise wording of the license.
+//
+// DESCRIPTION:
+// Language server entry point
+////////////////////////////////////////////////////////////////////////////////
+
+package com.argondesign.alogic.lsp
+
+import org.eclipse.lsp4j.launch.LSPLauncher
+
+import java.net.ServerSocket
+
+object ServerApp {
+
+  def main(args: Array[String]): Unit = {
+    // Listen on an ephemeral port and print it so the client knows where to connect
+    val serverSocket = new ServerSocket(0)
+    println(s"${serverSocket.getLocalPort()}")
+    val socket = serverSocket.accept()
+
+    val inputStream = socket.getInputStream()
+    val outputStream = socket.getOutputStream()
+
+    // Wire the server to the connected client and start serving LSP requests
+    val server = new AlogicLanguageServer()
+    val launcher = LSPLauncher.createServerLauncher(server, inputStream, outputStream)
+
+    val client = launcher.getRemoteProxy()
+    server.connect(client)
+
+    launcher.startListening()
+  }
+
+}
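For reference, a minimal standalone sketch of the five-integer-per-token delta encoding that semanticTokensFull above produces, as defined by the LSP semantic tokens specification. The positions, type ids and the DeltaEncodingExample name are made up for illustration only; in the patch the real rows come from SemanticTokenVisitor and are flattened into a single integer array.

    object DeltaEncodingExample extends App {
      // Absolute token data: [line (0-based), start char, length, token type id, modifier bits]
      val abs = List(
        List(2, 4, 3, 15, 0), // e.g. a keyword on line 2, column 4
        List(2, 8, 5, 8, 0), //  a variable later on the same line
        List(4, 0, 6, 17, 0) //  a comment two lines further down
      )
      // Delta encode: the line is relative to the previous token's line; the start char is
      // relative to the previous token only when both tokens sit on the same line
      val deltas = abs match {
        case head :: tail =>
          head :: abs.zip(tail).map {
            case (prev, cur) =>
              List(
                cur(0) - prev(0),
                if (cur(0) == prev(0)) cur(1) - prev(1) else cur(1),
                cur(2),
                cur(3),
                cur(4)
              )
          }
        case Nil => Nil
      }
      // Prints List(List(2, 4, 3, 15, 0), List(0, 4, 5, 8, 0), List(2, 0, 6, 17, 0))
      println(deltas)
    }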