diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..c9162b9
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,12 @@
+root = true
+
+[*]
+charset = utf-8
+indent_style = tab
+indent_size = 4
+insert_final_newline = true
+end_of_line = lf
+
+[*.{yml,yaml}]
+indent_style = space
+indent_size = 2
diff --git a/.github/.templateMarker b/.github/.templateMarker
new file mode 100644
index 0000000..5e3a3e0
--- /dev/null
+++ b/.github/.templateMarker
@@ -0,0 +1 @@
+KOLANICH/python_project_boilerplate.py
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..89ff339
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,8 @@
+version: 2
+updates:
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "daily"
+    allow:
+      - dependency-type: "all"
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
new file mode 100644
index 0000000..805a383
--- /dev/null
+++ b/.github/workflows/CI.yml
@@ -0,0 +1,15 @@
+name: CI
+on:
+  push:
+    branches: [ "master" ]
+  pull_request:
+    branches: [ "master" ]
+
+jobs:
+  build:
+    runs-on: ubuntu-20.04
+    steps:
+      - name: typical python workflow
+        uses: KOLANICH-GHActions/typical-python-workflow@master
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a7e9840
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,11 @@
+__pycache__
+*.pyc
+*.pyo
+/*.egg-info
+/build
+/dist
+/.eggs
+/monkeytype.sqlite3
+/*.srctrldb
+/*.srctrlbm
+/*.srctrlprj
diff --git a/Code_Of_Conduct.md b/Code_Of_Conduct.md
new file mode 100644
index 0000000..2b781c7
--- /dev/null
+++ b/Code_Of_Conduct.md
@@ -0,0 +1 @@
+No codes of conduct!
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..7d1f365
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,9 @@
+include UNLICENSE
+include *.md
+exclude tests
+include .editorconfig
+global-include *.json
+global-include *.pglr
+global-include *.pgt
+global-include *.parsimonious
+global-include *.ebnf
diff --git a/ReadMe.md b/ReadMe.md
new file mode 100644
index 0000000..399c139
--- /dev/null
+++ b/ReadMe.md
@@ -0,0 +1,47 @@
+pyMetakitDefinitionString [![Unlicensed work](https://raw.githubusercontent.com/unlicense/unlicense.org/master/static/favicon.png)](https://unlicense.org/)
+=========================
+~~[wheel (GHA via `nightly.link`)](https://nightly.link/prebuilder/pyMetakitDefinitionString/workflows/CI/master/pyMetakitDefinitionString-0.CI-py3-none-any.whl)~~
+~~[![GitHub Actions](https://github.com/prebuilder/pyMetakitDefinitionString/workflows/CI/badge.svg)](https://github.com/prebuilder/pyMetakitDefinitionString/actions/)~~
+[![Libraries.io Status](https://img.shields.io/librariesio/github/prebuilder/pyMetakitDefinitionString.svg)](https://libraries.io/github/prebuilder/pyMetakitDefinitionString)
+[![Code style: antiflash](https://img.shields.io/badge/code%20style-antiflash-FFF.svg)](https://codeberg.org/KOLANICH-tools/antiflash.py)
+
+Parses Metakit 4 definition strings: the strings a Metakit database embeds to describe the columns of its views.
+
+Usage
+-----
+
+```python
+import pyMetakitDefinitionString
+
+d = pyMetakitDefinitionString.parse("people[first:S,last:S,shoesize:I],text[line:S]")
+print(d)
+```
+
+```
+[
+	view<people: [scalar<first: S>, scalar<last: S>, scalar<shoesize: I>]>,
+	view<text: [scalar<line: S>]>
+]
+```
+
+Interpretation of `typeF` is up to you, since adding an enum would not spare you from a look-up table anyway. [See the docs.](https://codeberg.org/prebuilder/metakit/blob/master/doc/format.html#L155-L161)
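+
+For orientation, such a look-up table might look like the sketch below (illustrative only: the `TYPE_CODES` name is not part of this package, and the docs linked above are the authoritative source for what each letter means):
+
+```python
+# Rough meanings of the Metakit 4 type letters; verify against doc/format.html.
+TYPE_CODES = {
+	"I": "integer",
+	"F": "32-bit float",
+	"D": "64-bit double",
+	"S": "string",
+	"B": "untyped bytes",
+	"M": "memo (binary, fetched on demand)",
+}
+```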
+
+
+Requirements
+------------
+* [UniGrammarRuntime](https://codeberg.org/UniGrammar/UniGrammarRuntime.py)
+* Any of the backends for which parsers have been generated. [`parsimonious`](https://github.com/erikrose/parsimonious) is recommended, as it was benchmarked as the fastest one.
diff --git a/UNLICENSE b/UNLICENSE
new file mode 100644
index 0000000..efb9808
--- /dev/null
+++ b/UNLICENSE
@@ -0,0 +1,24 @@
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <https://unlicense.org/>
diff --git a/parserBundle b/parserBundle
new file mode 100644
index 0000000..98b296d
--- /dev/null
+++ b/parserBundle
@@ -0,0 +1 @@
+./pyMetakitDefinitionString/parserBundle/
\ No newline at end of file
diff --git a/pyMetakitDefinitionString/__init__.py b/pyMetakitDefinitionString/__init__.py
new file mode 100644
index 0000000..af62064
--- /dev/null
+++ b/pyMetakitDefinitionString/__init__.py
@@ -0,0 +1,17 @@
+__all__ = ("parse",)
+
+from pathlib import Path
+
+from UniGrammarRuntime.ParserBundle import ParserBundle
+
+thisFile = Path(__file__).absolute()
+thisDir = thisFile.parent
+bundleDir = thisDir / "parserBundle"
+
+bundle = ParserBundle(bundleDir)
+
+grammar = bundle.grammars["metakit4_definition_string"]
+wrapper = grammar.getWrapper()  # pass a backend name here to pin a specific backend; by default the fastest benchmarked one is selected
+
+
+parse = wrapper
diff --git a/pyMetakitDefinitionString/__main__.py b/pyMetakitDefinitionString/__main__.py
new file mode 100644
index 0000000..9412367
--- /dev/null
+++ b/pyMetakitDefinitionString/__main__.py
@@ -0,0 +1,9 @@
+import sys
+
+if __name__ == "__main__":
+	from pprint import pprint
+
+	from . import parse
+
+	for el in sys.argv[1:]:
+		pprint(parse(el))
diff --git a/pyMetakitDefinitionString/parserBundle/compiled/TatSu/metakit4_definition_string.py b/pyMetakitDefinitionString/parserBundle/compiled/TatSu/metakit4_definition_string.py
new file mode 100644
index 0000000..418e531
--- /dev/null
+++ b/pyMetakitDefinitionString/parserBundle/compiled/TatSu/metakit4_definition_string.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+
+# CAVEAT UTILITOR
+#
+# This file was automatically generated by TatSu.
+#
+#    https://pypi.python.org/pypi/tatsu/
+#
+# Any changes you make to it will be overwritten the next time
+# the file is generated.
+ +from __future__ import annotations + +import sys + +from tatsu.buffering import Buffer +from tatsu.parsing import Parser +from tatsu.parsing import tatsumasu +from tatsu.parsing import leftrec, nomemo, isname # noqa +from tatsu.infos import ParserConfig +from tatsu.util import re, generic_main # noqa + + +KEYWORDS = {} # type: ignore + + +class metakit4_definition_stringBuffer(Buffer): + def __init__(self, text, /, config: ParserConfig = None, **settings): + config = ParserConfig.new( + config, + owner=self, + whitespace=None, + nameguard=None, + comments_re=None, + eol_comments_re=None, + ignorecase=False, + namechars="", + parseinfo=False, + ) + config = config.replace(**settings) + super().__init__(text, config=config) + + +class metakit4_definition_stringParser(Parser): + def __init__(self, /, config: ParserConfig = None, **settings): + config = ParserConfig.new( + config, + owner=self, + whitespace=None, + nameguard=None, + comments_re=None, + eol_comments_re=None, + ignorecase=False, + namechars="", + parseinfo=False, + keywords=KEYWORDS, + ) + config = config.replace(**settings) + super().__init__(config=config) + + @tatsumasu() + def _subFields_(self): # noqa + self._scalarOrView_() + self.name_last_node("first_subField") + self._rest_subFields_with_delF_() + self.name_last_node("rest_subFields_with_del") + self._define(["first_subField", "rest_subFields_with_del"], []) + + @tatsumasu() + def _scalarOrView_(self): # noqa + with self._choice(): + with self._option(): + self._scalar_() + self.name_last_node("scalarF") + self._define(["scalarF"], []) + with self._option(): + self._view_() + self.name_last_node("viewF") + self._define(["viewF"], []) + self._error("expecting one of: " " ") + self._define(["scalarF", "viewF"], []) + + @tatsumasu() + def _view_(self): # noqa + self._word_() + self.name_last_node("name") + self._SubFieldsStart_() + self._body_() + self.name_last_node("bodyF") + self._SubFieldsEnd_() + self._define(["bodyF", "name"], []) + + @tatsumasu() + def _scalar_(self): # noqa + self._word_() + self.name_last_node("name") + self._Colon_() + self._TypeSpecifier_() + self.name_last_node("typeF") + self._define(["name", "typeF"], []) + + @tatsumasu() + def _body_(self): # noqa + with self._choice(): + with self._option(): + self._subFields_() + self.name_last_node("subFieldsF") + self._define(["subFieldsF"], []) + with self._option(): + self._IndirectMarker_() + self.name_last_node("selfF") + self._define(["selfF"], []) + self._error("expecting one of: " "'^' " " ") + self._define(["selfF", "subFieldsF"], []) + + @tatsumasu() + def _rest_subFields_with_delF_(self): # noqa + def block0(): + self._rest_subField_with_delF_() + + self._closure(block0) + + @tatsumasu() + def _rest_subField_with_delF_(self): # noqa + self._OptionsSeparator_() + self._scalarOrView_() + self.name_last_node("rest_subField") + self._define(["rest_subField"], []) + + @tatsumasu() + def _word_(self): # noqa + def block0(): + self._wordPiece_() + + self._positive_closure(block0) + + @tatsumasu() + def _wordPiece_(self): # noqa + with self._choice(): + with self._option(): + self._TypeSpecifier_() + with self._option(): + + def block1(): + self._OtherWordChars_() + + self._positive_closure(block1) + self._error("expecting one of: " " " "[0-9ACEGHJ-LN-RT-Z_aceghj-ln-rt-z]" "[BDFIMSbdfims]") + + @tatsumasu() + def _SubFieldsStart_(self): # noqa + self._token("[") + + @tatsumasu() + def _SubFieldsEnd_(self): # noqa + self._token("]") + + @tatsumasu() + def _Colon_(self): # noqa + self._token(":") + + 
@tatsumasu() + def _OptionsSeparator_(self): # noqa + self._token(",") + + @tatsumasu() + def _IndirectMarker_(self): # noqa + self._token("^") + + @tatsumasu() + def _OtherWordChars_(self): # noqa + self._pattern("[0-9ACEGHJ-LN-RT-Z_aceghj-ln-rt-z]") + + @tatsumasu() + def _OtherWordCharsOther_(self): # noqa + self._pattern("[0-9_]") + + @tatsumasu() + def _OtherWordCharsUpper_(self): # noqa + self._pattern("[ACEGHJ-LN-RT-Z]") + + @tatsumasu() + def _OtherWordCharsLower_(self): # noqa + self._pattern("[aceghj-ln-rt-z]") + + @tatsumasu() + def _TypeSpecifier_(self): # noqa + self._pattern("[BDFIMSbdfims]") + + @tatsumasu() + def _TypeSpecifierUpper_(self): # noqa + self._pattern("[BDFIMS]") + + @tatsumasu() + def _TypeSpecifierLower_(self): # noqa + self._pattern("[bdfims]") + + +class metakit4_definition_stringSemantics: + def subFields(self, ast): # noqa + return ast + + def scalarOrView(self, ast): # noqa + return ast + + def view(self, ast): # noqa + return ast + + def scalar(self, ast): # noqa + return ast + + def body(self, ast): # noqa + return ast + + def rest_subFields_with_delF(self, ast): # noqa + return ast + + def rest_subField_with_delF(self, ast): # noqa + return ast + + def word(self, ast): # noqa + return ast + + def wordPiece(self, ast): # noqa + return ast + + def SubFieldsStart(self, ast): # noqa + return ast + + def SubFieldsEnd(self, ast): # noqa + return ast + + def Colon(self, ast): # noqa + return ast + + def OptionsSeparator(self, ast): # noqa + return ast + + def IndirectMarker(self, ast): # noqa + return ast + + def OtherWordChars(self, ast): # noqa + return ast + + def OtherWordCharsOther(self, ast): # noqa + return ast + + def OtherWordCharsUpper(self, ast): # noqa + return ast + + def OtherWordCharsLower(self, ast): # noqa + return ast + + def TypeSpecifier(self, ast): # noqa + return ast + + def TypeSpecifierUpper(self, ast): # noqa + return ast + + def TypeSpecifierLower(self, ast): # noqa + return ast + + +def main(filename, start=None, **kwargs): + if start is None: + start = "subFields" + if not filename or filename == "-": + text = sys.stdin.read() + else: + with open(filename) as f: + text = f.read() + parser = metakit4_definition_stringParser() + return parser.parse(text, rule_name=start, filename=filename, **kwargs) + + +if __name__ == "__main__": + import json + from tatsu.util import asjson + + ast = generic_main(main, metakit4_definition_stringParser, name="metakit4_definition_string") + data = asjson(ast) + print(json.dumps(data, indent=2)) diff --git a/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.interp b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.interp new file mode 100644 index 0000000..dbe2801 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.interp @@ -0,0 +1,53 @@ +token literal names: +null +'[' +']' +':' +',' +'^' +null +null +null +null +null +null +null + +token symbolic names: +null +SubFieldsStart +SubFieldsEnd +Colon +OptionsSeparator +IndirectMarker +OtherWordChars +OtherWordCharsOther +OtherWordCharsUpper +OtherWordCharsLower +TypeSpecifier +TypeSpecifierUpper +TypeSpecifierLower + +rule names: +SubFieldsStart +SubFieldsEnd +Colon +OptionsSeparator +IndirectMarker +OtherWordChars +OtherWordCharsOther +OtherWordCharsUpper +OtherWordCharsLower +TypeSpecifier +TypeSpecifierUpper +TypeSpecifierLower + +channel names: +DEFAULT_TOKEN_CHANNEL +HIDDEN + +mode names: +DEFAULT_MODE + +atn: +[4, 0, 
12, 54, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 39, 8, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 3, 9, 49, 8, 9, 1, 10, 1, 10, 1, 11, 1, 11, 0, 0, 12, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 1, 0, 5, 2, 0, 48, 57, 95, 95, 7, 0, 65, 65, 67, 67, 69, 69, 71, 72, 74, 76, 78, 82, 84, 90, 7, 0, 97, 97, 99, 99, 101, 101, 103, 104, 106, 108, 110, 114, 116, 122, 6, 0, 66, 66, 68, 68, 70, 70, 73, 73, 77, 77, 83, 83, 6, 0, 98, 98, 100, 100, 102, 102, 105, 105, 109, 109, 115, 115, 56, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 1, 25, 1, 0, 0, 0, 3, 27, 1, 0, 0, 0, 5, 29, 1, 0, 0, 0, 7, 31, 1, 0, 0, 0, 9, 33, 1, 0, 0, 0, 11, 38, 1, 0, 0, 0, 13, 40, 1, 0, 0, 0, 15, 42, 1, 0, 0, 0, 17, 44, 1, 0, 0, 0, 19, 48, 1, 0, 0, 0, 21, 50, 1, 0, 0, 0, 23, 52, 1, 0, 0, 0, 25, 26, 5, 91, 0, 0, 26, 2, 1, 0, 0, 0, 27, 28, 5, 93, 0, 0, 28, 4, 1, 0, 0, 0, 29, 30, 5, 58, 0, 0, 30, 6, 1, 0, 0, 0, 31, 32, 5, 44, 0, 0, 32, 8, 1, 0, 0, 0, 33, 34, 5, 94, 0, 0, 34, 10, 1, 0, 0, 0, 35, 39, 3, 13, 6, 0, 36, 39, 3, 15, 7, 0, 37, 39, 3, 17, 8, 0, 38, 35, 1, 0, 0, 0, 38, 36, 1, 0, 0, 0, 38, 37, 1, 0, 0, 0, 39, 12, 1, 0, 0, 0, 40, 41, 7, 0, 0, 0, 41, 14, 1, 0, 0, 0, 42, 43, 7, 1, 0, 0, 43, 16, 1, 0, 0, 0, 44, 45, 7, 2, 0, 0, 45, 18, 1, 0, 0, 0, 46, 49, 3, 21, 10, 0, 47, 49, 3, 23, 11, 0, 48, 46, 1, 0, 0, 0, 48, 47, 1, 0, 0, 0, 49, 20, 1, 0, 0, 0, 50, 51, 7, 3, 0, 0, 51, 22, 1, 0, 0, 0, 52, 53, 7, 4, 0, 0, 53, 24, 1, 0, 0, 0, 3, 0, 38, 48, 0] \ No newline at end of file diff --git a/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.py b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.py new file mode 100644 index 0000000..3e78cc2 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.py @@ -0,0 +1,82 @@ +# Generated from grammar.g4 by ANTLR 4.11.1 +from antlr4 import * +from io import StringIO +import sys +if sys.version_info[1] > 5: + from typing import TextIO +else: + from typing.io import TextIO + + +def serializedATN(): + return [ + 4,0,12,54,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2, + 6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,1,0,1,0,1,1,1, + 1,1,2,1,2,1,3,1,3,1,4,1,4,1,5,1,5,1,5,3,5,39,8,5,1,6,1,6,1,7,1,7, + 1,8,1,8,1,9,1,9,3,9,49,8,9,1,10,1,10,1,11,1,11,0,0,12,1,1,3,2,5, + 3,7,4,9,5,11,6,13,7,15,8,17,9,19,10,21,11,23,12,1,0,5,2,0,48,57, + 95,95,7,0,65,65,67,67,69,69,71,72,74,76,78,82,84,90,7,0,97,97,99, + 99,101,101,103,104,106,108,110,114,116,122,6,0,66,66,68,68,70,70, + 73,73,77,77,83,83,6,0,98,98,100,100,102,102,105,105,109,109,115, + 115,56,0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0, + 0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0, + 0,0,21,1,0,0,0,0,23,1,0,0,0,1,25,1,0,0,0,3,27,1,0,0,0,5,29,1,0,0, + 0,7,31,1,0,0,0,9,33,1,0,0,0,11,38,1,0,0,0,13,40,1,0,0,0,15,42,1, + 0,0,0,17,44,1,0,0,0,19,48,1,0,0,0,21,50,1,0,0,0,23,52,1,0,0,0,25, + 26,5,91,0,0,26,2,1,0,0,0,27,28,5,93,0,0,28,4,1,0,0,0,29,30,5,58, + 0,0,30,6,1,0,0,0,31,32,5,44,0,0,32,8,1,0,0,0,33,34,5,94,0,0,34,10, + 
1,0,0,0,35,39,3,13,6,0,36,39,3,15,7,0,37,39,3,17,8,0,38,35,1,0,0, + 0,38,36,1,0,0,0,38,37,1,0,0,0,39,12,1,0,0,0,40,41,7,0,0,0,41,14, + 1,0,0,0,42,43,7,1,0,0,43,16,1,0,0,0,44,45,7,2,0,0,45,18,1,0,0,0, + 46,49,3,21,10,0,47,49,3,23,11,0,48,46,1,0,0,0,48,47,1,0,0,0,49,20, + 1,0,0,0,50,51,7,3,0,0,51,22,1,0,0,0,52,53,7,4,0,0,53,24,1,0,0,0, + 3,0,38,48,0 + ] + +class metakit4_definition_stringLexer(Lexer): + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + SubFieldsStart = 1 + SubFieldsEnd = 2 + Colon = 3 + OptionsSeparator = 4 + IndirectMarker = 5 + OtherWordChars = 6 + OtherWordCharsOther = 7 + OtherWordCharsUpper = 8 + OtherWordCharsLower = 9 + TypeSpecifier = 10 + TypeSpecifierUpper = 11 + TypeSpecifierLower = 12 + + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + + modeNames = [ "DEFAULT_MODE" ] + + literalNames = [ "", + "'['", "']'", "':'", "','", "'^'" ] + + symbolicNames = [ "", + "SubFieldsStart", "SubFieldsEnd", "Colon", "OptionsSeparator", + "IndirectMarker", "OtherWordChars", "OtherWordCharsOther", "OtherWordCharsUpper", + "OtherWordCharsLower", "TypeSpecifier", "TypeSpecifierUpper", + "TypeSpecifierLower" ] + + ruleNames = [ "SubFieldsStart", "SubFieldsEnd", "Colon", "OptionsSeparator", + "IndirectMarker", "OtherWordChars", "OtherWordCharsOther", + "OtherWordCharsUpper", "OtherWordCharsLower", "TypeSpecifier", + "TypeSpecifierUpper", "TypeSpecifierLower" ] + + grammarFileName = "grammar.g4" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.11.1") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = None + + diff --git a/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.tokens b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.tokens new file mode 100644 index 0000000..d014e2e --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringLexer.tokens @@ -0,0 +1,17 @@ +SubFieldsStart=1 +SubFieldsEnd=2 +Colon=3 +OptionsSeparator=4 +IndirectMarker=5 +OtherWordChars=6 +OtherWordCharsOther=7 +OtherWordCharsUpper=8 +OtherWordCharsLower=9 +TypeSpecifier=10 +TypeSpecifierUpper=11 +TypeSpecifierLower=12 +'['=1 +']'=2 +':'=3 +','=4 +'^'=5 diff --git a/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringListener.py b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringListener.py new file mode 100644 index 0000000..0e1c4e1 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringListener.py @@ -0,0 +1,93 @@ +# Generated from grammar.g4 by ANTLR 4.11.1 +from antlr4 import * +if __name__ is not None and "." in __name__: + from .metakit4_definition_stringParser import metakit4_definition_stringParser +else: + from metakit4_definition_stringParser import metakit4_definition_stringParser + +# This class defines a complete listener for a parse tree produced by metakit4_definition_stringParser. +class metakit4_definition_stringListener(ParseTreeListener): + + # Enter a parse tree produced by metakit4_definition_stringParser#subFields. + def enterSubFields(self, ctx:metakit4_definition_stringParser.SubFieldsContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#subFields. 
+ def exitSubFields(self, ctx:metakit4_definition_stringParser.SubFieldsContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#scalarOrView. + def enterScalarOrView(self, ctx:metakit4_definition_stringParser.ScalarOrViewContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#scalarOrView. + def exitScalarOrView(self, ctx:metakit4_definition_stringParser.ScalarOrViewContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#view. + def enterView(self, ctx:metakit4_definition_stringParser.ViewContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#view. + def exitView(self, ctx:metakit4_definition_stringParser.ViewContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#scalar. + def enterScalar(self, ctx:metakit4_definition_stringParser.ScalarContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#scalar. + def exitScalar(self, ctx:metakit4_definition_stringParser.ScalarContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#body. + def enterBody(self, ctx:metakit4_definition_stringParser.BodyContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#body. + def exitBody(self, ctx:metakit4_definition_stringParser.BodyContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#rest_subFields_with_delF. + def enterRest_subFields_with_delF(self, ctx:metakit4_definition_stringParser.Rest_subFields_with_delFContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#rest_subFields_with_delF. + def exitRest_subFields_with_delF(self, ctx:metakit4_definition_stringParser.Rest_subFields_with_delFContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#rest_subField_with_delF. + def enterRest_subField_with_delF(self, ctx:metakit4_definition_stringParser.Rest_subField_with_delFContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#rest_subField_with_delF. + def exitRest_subField_with_delF(self, ctx:metakit4_definition_stringParser.Rest_subField_with_delFContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#word. + def enterWord(self, ctx:metakit4_definition_stringParser.WordContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#word. + def exitWord(self, ctx:metakit4_definition_stringParser.WordContext): + pass + + + # Enter a parse tree produced by metakit4_definition_stringParser#wordPiece. + def enterWordPiece(self, ctx:metakit4_definition_stringParser.WordPieceContext): + pass + + # Exit a parse tree produced by metakit4_definition_stringParser#wordPiece. 
+ def exitWordPiece(self, ctx:metakit4_definition_stringParser.WordPieceContext): + pass + + + +del metakit4_definition_stringParser \ No newline at end of file diff --git a/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringParser.py b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringParser.py new file mode 100644 index 0000000..2cab9d1 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/antlr4/metakit4_definition_stringParser.py @@ -0,0 +1,604 @@ +# Generated from grammar.g4 by ANTLR 4.11.1 +# encoding: utf-8 +from antlr4 import * +from io import StringIO +import sys +if sys.version_info[1] > 5: + from typing import TextIO +else: + from typing.io import TextIO + +def serializedATN(): + return [ + 4,1,12,61,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 6,2,7,7,7,2,8,7,8,1,0,1,0,1,0,1,1,1,1,3,1,24,8,1,1,2,1,2,1,2,1,2, + 1,2,1,3,1,3,1,3,1,3,1,4,1,4,3,4,37,8,4,1,5,5,5,40,8,5,10,5,12,5, + 43,9,5,1,6,1,6,1,6,1,7,4,7,49,8,7,11,7,12,7,50,1,8,1,8,4,8,55,8, + 8,11,8,12,8,56,3,8,59,8,8,1,8,0,0,9,0,2,4,6,8,10,12,14,16,0,0,57, + 0,18,1,0,0,0,2,23,1,0,0,0,4,25,1,0,0,0,6,30,1,0,0,0,8,36,1,0,0,0, + 10,41,1,0,0,0,12,44,1,0,0,0,14,48,1,0,0,0,16,58,1,0,0,0,18,19,3, + 2,1,0,19,20,3,10,5,0,20,1,1,0,0,0,21,24,3,6,3,0,22,24,3,4,2,0,23, + 21,1,0,0,0,23,22,1,0,0,0,24,3,1,0,0,0,25,26,3,14,7,0,26,27,5,1,0, + 0,27,28,3,8,4,0,28,29,5,2,0,0,29,5,1,0,0,0,30,31,3,14,7,0,31,32, + 5,3,0,0,32,33,5,10,0,0,33,7,1,0,0,0,34,37,3,0,0,0,35,37,5,5,0,0, + 36,34,1,0,0,0,36,35,1,0,0,0,37,9,1,0,0,0,38,40,3,12,6,0,39,38,1, + 0,0,0,40,43,1,0,0,0,41,39,1,0,0,0,41,42,1,0,0,0,42,11,1,0,0,0,43, + 41,1,0,0,0,44,45,5,4,0,0,45,46,3,2,1,0,46,13,1,0,0,0,47,49,3,16, + 8,0,48,47,1,0,0,0,49,50,1,0,0,0,50,48,1,0,0,0,50,51,1,0,0,0,51,15, + 1,0,0,0,52,59,5,10,0,0,53,55,5,6,0,0,54,53,1,0,0,0,55,56,1,0,0,0, + 56,54,1,0,0,0,56,57,1,0,0,0,57,59,1,0,0,0,58,52,1,0,0,0,58,54,1, + 0,0,0,59,17,1,0,0,0,6,23,36,41,50,56,58 + ] + +class metakit4_definition_stringParser ( Parser ): + + grammarFileName = "grammar.g4" + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + sharedContextCache = PredictionContextCache() + + literalNames = [ "", "'['", "']'", "':'", "','", "'^'" ] + + symbolicNames = [ "", "SubFieldsStart", "SubFieldsEnd", "Colon", + "OptionsSeparator", "IndirectMarker", "OtherWordChars", + "OtherWordCharsOther", "OtherWordCharsUpper", "OtherWordCharsLower", + "TypeSpecifier", "TypeSpecifierUpper", "TypeSpecifierLower" ] + + RULE_subFields = 0 + RULE_scalarOrView = 1 + RULE_view = 2 + RULE_scalar = 3 + RULE_body = 4 + RULE_rest_subFields_with_delF = 5 + RULE_rest_subField_with_delF = 6 + RULE_word = 7 + RULE_wordPiece = 8 + + ruleNames = [ "subFields", "scalarOrView", "view", "scalar", "body", + "rest_subFields_with_delF", "rest_subField_with_delF", + "word", "wordPiece" ] + + EOF = Token.EOF + SubFieldsStart=1 + SubFieldsEnd=2 + Colon=3 + OptionsSeparator=4 + IndirectMarker=5 + OtherWordChars=6 + OtherWordCharsOther=7 + OtherWordCharsUpper=8 + OtherWordCharsLower=9 + TypeSpecifier=10 + TypeSpecifierUpper=11 + TypeSpecifierLower=12 + + def __init__(self, input:TokenStream, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.11.1") + self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) + self._predicates = None + + + + + class SubFieldsContext(ParserRuleContext): + __slots__ = 'parser' + + def 
__init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.first_subField = None # ScalarOrViewContext + self.rest_subFields_with_del = None # Rest_subFields_with_delFContext + + def scalarOrView(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.ScalarOrViewContext,0) + + + def rest_subFields_with_delF(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.Rest_subFields_with_delFContext,0) + + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_subFields + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterSubFields" ): + listener.enterSubFields(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitSubFields" ): + listener.exitSubFields(self) + + + + + def subFields(self): + + localctx = metakit4_definition_stringParser.SubFieldsContext(self, self._ctx, self.state) + self.enterRule(localctx, 0, self.RULE_subFields) + try: + self.enterOuterAlt(localctx, 1) + self.state = 18 + localctx.first_subField = self.scalarOrView() + self.state = 19 + localctx.rest_subFields_with_del = self.rest_subFields_with_delF() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ScalarOrViewContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.scalarF = None # ScalarContext + self.viewF = None # ViewContext + + def scalar(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.ScalarContext,0) + + + def view(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.ViewContext,0) + + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_scalarOrView + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterScalarOrView" ): + listener.enterScalarOrView(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitScalarOrView" ): + listener.exitScalarOrView(self) + + + + + def scalarOrView(self): + + localctx = metakit4_definition_stringParser.ScalarOrViewContext(self, self._ctx, self.state) + self.enterRule(localctx, 2, self.RULE_scalarOrView) + try: + self.state = 23 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,0,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 21 + localctx.scalarF = self.scalar() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 22 + localctx.viewF = self.view() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ViewContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.name = None # WordContext + self.bodyF = None # BodyContext + + def SubFieldsStart(self): + return self.getToken(metakit4_definition_stringParser.SubFieldsStart, 0) + + def SubFieldsEnd(self): + return self.getToken(metakit4_definition_stringParser.SubFieldsEnd, 0) + + def word(self): + return 
self.getTypedRuleContext(metakit4_definition_stringParser.WordContext,0) + + + def body(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.BodyContext,0) + + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_view + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterView" ): + listener.enterView(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitView" ): + listener.exitView(self) + + + + + def view(self): + + localctx = metakit4_definition_stringParser.ViewContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_view) + try: + self.enterOuterAlt(localctx, 1) + self.state = 25 + localctx.name = self.word() + self.state = 26 + self.match(metakit4_definition_stringParser.SubFieldsStart) + self.state = 27 + localctx.bodyF = self.body() + self.state = 28 + self.match(metakit4_definition_stringParser.SubFieldsEnd) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ScalarContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.name = None # WordContext + self.typeF = None # Token + + def Colon(self): + return self.getToken(metakit4_definition_stringParser.Colon, 0) + + def word(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.WordContext,0) + + + def TypeSpecifier(self): + return self.getToken(metakit4_definition_stringParser.TypeSpecifier, 0) + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_scalar + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterScalar" ): + listener.enterScalar(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitScalar" ): + listener.exitScalar(self) + + + + + def scalar(self): + + localctx = metakit4_definition_stringParser.ScalarContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_scalar) + try: + self.enterOuterAlt(localctx, 1) + self.state = 30 + localctx.name = self.word() + self.state = 31 + self.match(metakit4_definition_stringParser.Colon) + self.state = 32 + localctx.typeF = self.match(metakit4_definition_stringParser.TypeSpecifier) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class BodyContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.subFieldsF = None # SubFieldsContext + self.selfF = None # Token + + def subFields(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.SubFieldsContext,0) + + + def IndirectMarker(self): + return self.getToken(metakit4_definition_stringParser.IndirectMarker, 0) + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_body + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterBody" ): + listener.enterBody(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitBody" ): + listener.exitBody(self) + + + + + def body(self): + + localctx = 
metakit4_definition_stringParser.BodyContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_body) + try: + self.state = 36 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [6, 10]: + self.enterOuterAlt(localctx, 1) + self.state = 34 + localctx.subFieldsF = self.subFields() + pass + elif token in [5]: + self.enterOuterAlt(localctx, 2) + self.state = 35 + localctx.selfF = self.match(metakit4_definition_stringParser.IndirectMarker) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Rest_subFields_with_delFContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def rest_subField_with_delF(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(metakit4_definition_stringParser.Rest_subField_with_delFContext) + else: + return self.getTypedRuleContext(metakit4_definition_stringParser.Rest_subField_with_delFContext,i) + + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_rest_subFields_with_delF + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterRest_subFields_with_delF" ): + listener.enterRest_subFields_with_delF(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitRest_subFields_with_delF" ): + listener.exitRest_subFields_with_delF(self) + + + + + def rest_subFields_with_delF(self): + + localctx = metakit4_definition_stringParser.Rest_subFields_with_delFContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_rest_subFields_with_delF) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 41 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==4: + self.state = 38 + self.rest_subField_with_delF() + self.state = 43 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Rest_subField_with_delFContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.rest_subField = None # ScalarOrViewContext + + def OptionsSeparator(self): + return self.getToken(metakit4_definition_stringParser.OptionsSeparator, 0) + + def scalarOrView(self): + return self.getTypedRuleContext(metakit4_definition_stringParser.ScalarOrViewContext,0) + + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_rest_subField_with_delF + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterRest_subField_with_delF" ): + listener.enterRest_subField_with_delF(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitRest_subField_with_delF" ): + listener.exitRest_subField_with_delF(self) + + + + + def rest_subField_with_delF(self): + + localctx = metakit4_definition_stringParser.Rest_subField_with_delFContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_rest_subField_with_delF) + try: + self.enterOuterAlt(localctx, 1) + self.state = 44 + 
self.match(metakit4_definition_stringParser.OptionsSeparator) + self.state = 45 + localctx.rest_subField = self.scalarOrView() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class WordContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def wordPiece(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(metakit4_definition_stringParser.WordPieceContext) + else: + return self.getTypedRuleContext(metakit4_definition_stringParser.WordPieceContext,i) + + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_word + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterWord" ): + listener.enterWord(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitWord" ): + listener.exitWord(self) + + + + + def word(self): + + localctx = metakit4_definition_stringParser.WordContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, self.RULE_word) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 48 + self._errHandler.sync(self) + _la = self._input.LA(1) + while True: + self.state = 47 + self.wordPiece() + self.state = 50 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not (_la==6 or _la==10): + break + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class WordPieceContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def TypeSpecifier(self): + return self.getToken(metakit4_definition_stringParser.TypeSpecifier, 0) + + def OtherWordChars(self, i:int=None): + if i is None: + return self.getTokens(metakit4_definition_stringParser.OtherWordChars) + else: + return self.getToken(metakit4_definition_stringParser.OtherWordChars, i) + + def getRuleIndex(self): + return metakit4_definition_stringParser.RULE_wordPiece + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterWordPiece" ): + listener.enterWordPiece(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitWordPiece" ): + listener.exitWordPiece(self) + + + + + def wordPiece(self): + + localctx = metakit4_definition_stringParser.WordPieceContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_wordPiece) + try: + self.state = 58 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [10]: + self.enterOuterAlt(localctx, 1) + self.state = 52 + self.match(metakit4_definition_stringParser.TypeSpecifier) + pass + elif token in [6]: + self.enterOuterAlt(localctx, 2) + self.state = 54 + self._errHandler.sync(self) + _alt = 1 + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt == 1: + self.state = 53 + self.match(metakit4_definition_stringParser.OtherWordChars) + + else: + raise NoViableAltException(self) + self.state = 56 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,4,self._ctx) + + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + 
self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + + + diff --git a/pyMetakitDefinitionString/parserBundle/compiled/parglare/metakit4_definition_string.pg b/pyMetakitDefinitionString/parserBundle/compiled/parglare/metakit4_definition_string.pg new file mode 100644 index 0000000..974877e --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/parglare/metakit4_definition_string.pg @@ -0,0 +1,41 @@ +//Generated by UniGrammar (https://github.com/UniGrammar/UniGrammar.py) +//for parglare (https://github.com/igordejanovic/parglare) DSL + +//Metakit 4 is an embeddable database. It embeds strings known as `definition string`s that store the types of columns of a table. + + +//References: +// format.html + + +//productions +subFields: first_subField=scalarOrView rest_subFields_with_del=rest_subFields_with_delF; +scalarOrView: scalarF=scalar | viewF=view; +view: name=word SubFieldsStart bodyF=body SubFieldsEnd; +scalar: name=word Colon typeF=TypeSpecifier; +body: subFieldsF=subFields | selfF=IndirectMarker; +rest_subFields_with_delF: rest_subField_with_delF*; +rest_subField_with_delF: OptionsSeparator rest_subField=scalarOrView; + + +//fragmented +word: wordPiece+; +wordPiece: TypeSpecifier | OtherWordChars+; + + +LAYOUT: EMPTY; +terminals +//characters +SubFieldsStart: '['; +SubFieldsEnd: ']'; +Colon: ':'; +OptionsSeparator: ','; +IndirectMarker: '^'; +OtherWordChars: /[0-9ACEGHJ-LN-RT-Z_aceghj-ln-rt-z]/; +OtherWordCharsOther: /[0-9_]/; +OtherWordCharsUpper: /[ACEGHJ-LN-RT-Z]/; +OtherWordCharsLower: /[aceghj-ln-rt-z]/; +TypeSpecifier: /[BDFIMSbdfims]/; +TypeSpecifierUpper: /[BDFIMS]/; +TypeSpecifierLower: /[bdfims]/; + diff --git a/pyMetakitDefinitionString/parserBundle/compiled/parsimonious/metakit4_definition_string.ppeg b/pyMetakitDefinitionString/parserBundle/compiled/parsimonious/metakit4_definition_string.ppeg new file mode 100644 index 0000000..579fda1 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/parsimonious/metakit4_definition_string.ppeg @@ -0,0 +1,39 @@ +#Generated by UniGrammar (https://github.com/UniGrammar/UniGrammar.py) +#for parsimonious (https://github.com/erikrose/parsimonious) DSL + +#Metakit 4 is an embeddable database. It embeds strings known as `definition string`s that store the types of columns of a table. 
+ + +#References: +# format.html + + +#productions +subFields = scalarOrView rest_subFields_with_delF +scalarOrView = scalar / view +view = word SubFieldsStart body SubFieldsEnd +scalar = word Colon TypeSpecifier +body = subFields / IndirectMarker +rest_subFields_with_delF = rest_subField_with_delF* +rest_subField_with_delF = OptionsSeparator scalarOrView + + +#fragmented +word = wordPiece+ +wordPiece = TypeSpecifier / OtherWordChars+ + + +#characters +SubFieldsStart = '[' +SubFieldsEnd = ']' +Colon = ':' +OptionsSeparator = ',' +IndirectMarker = '^' +OtherWordChars = ~r"[0-9ACEGHJ-LN-RT-Z_aceghj-ln-rt-z]" +OtherWordCharsOther = ~r"[0-9_]" +OtherWordCharsUpper = ~r"[ACEGHJ-LN-RT-Z]" +OtherWordCharsLower = ~r"[aceghj-ln-rt-z]" +TypeSpecifier = ~r"[BDFIMSbdfims]" +TypeSpecifierUpper = ~r"[BDFIMS]" +TypeSpecifierLower = ~r"[bdfims]" + diff --git a/pyMetakitDefinitionString/parserBundle/compiled/waxeye/metakit4_definition_string_parser.py b/pyMetakitDefinitionString/parserBundle/compiled/waxeye/metakit4_definition_string_parser.py new file mode 100644 index 0000000..7304e26 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/compiled/waxeye/metakit4_definition_string_parser.py @@ -0,0 +1,64 @@ +# Generated by the Waxeye Parser Generator - version 0.8.1 +# www.waxeye.org + +from waxeye import Edge, State, FA, WaxeyeParser + +class Metakit4_definition_stringParser (WaxeyeParser): + start = 0 + eof_check = True + automata = [FA("subFields", [State([Edge(1, 1, False)], False), + State([Edge(5, 2, False)], False), + State([], True)], FA.LEFT), + FA("scalarOrView", [State([Edge(3, 1, False), + Edge(2, 1, False)], False), + State([], True)], FA.LEFT), + FA("view", [State([Edge(7, 1, False)], False), + State([Edge(9, 2, False)], False), + State([Edge(4, 3, False)], False), + State([Edge(10, 4, False)], False), + State([], True)], FA.LEFT), + FA("scalar", [State([Edge(7, 1, False)], False), + State([Edge(11, 2, False)], False), + State([Edge(18, 3, False)], False), + State([], True)], FA.LEFT), + FA("body", [State([Edge(0, 1, False), + Edge(13, 1, False)], False), + State([], True)], FA.LEFT), + FA("rest_subFields_with_delF", [State([Edge(6, 0, False)], True)], FA.LEFT), + FA("rest_subField_with_delF", [State([Edge(12, 1, False)], False), + State([Edge(1, 2, False)], False), + State([], True)], FA.LEFT), + FA("word", [State([Edge(8, 1, False)], False), + State([Edge(8, 1, False)], True)], FA.LEFT), + FA("wordPiece", [State([Edge(18, 1, False), + Edge(14, 2, False)], False), + State([], True), + State([Edge(14, 2, False)], True)], FA.LEFT), + FA("subFieldsStart", [State([Edge("[", 1, False)], False), + State([], True)], FA.LEFT), + FA("subFieldsEnd", [State([Edge("]", 1, False)], False), + State([], True)], FA.LEFT), + FA("colon", [State([Edge(":", 1, False)], False), + State([], True)], FA.LEFT), + FA("optionsSeparator", [State([Edge(",", 1, False)], False), + State([], True)], FA.LEFT), + FA("indirectMarker", [State([Edge("^", 1, False)], False), + State([], True)], FA.LEFT), + FA("otherWordChars", [State([Edge([(48, 57), "A", "C", "E", (71, 72), (74, 76), (78, 82), (84, 90), "_", "a", "c", "e", (103, 104), (106, 108), (110, 114), (116, 122)], 1, False)], False), + State([], True)], FA.LEFT), + FA("otherWordCharsOther", [State([Edge([(48, 57), "_"], 1, False)], False), + State([], True)], FA.LEFT), + FA("otherWordCharsUpper", [State([Edge(["A", "C", "E", (71, 72), (74, 76), (78, 82), (84, 90)], 1, False)], False), + State([], True)], FA.LEFT), + FA("otherWordCharsLower", [State([Edge(["a", "c", 
"e", (103, 104), (106, 108), (110, 114), (116, 122)], 1, False)], False), + State([], True)], FA.LEFT), + FA("typeSpecifier", [State([Edge(["B", "D", "F", "I", "M", "S", "b", "d", "f", "i", "m", "s"], 1, False)], False), + State([], True)], FA.LEFT), + FA("typeSpecifierUpper", [State([Edge(["B", "D", "F", "I", "M", "S"], 1, False)], False), + State([], True)], FA.LEFT), + FA("typeSpecifierLower", [State([Edge(["b", "d", "f", "i", "m", "s"], 1, False)], False), + State([], True)], FA.LEFT)] + + def __init__(self): + WaxeyeParser.__init__(self, Metakit4_definition_stringParser.start, Metakit4_definition_stringParser.eof_check, Metakit4_definition_stringParser.automata) + diff --git a/pyMetakitDefinitionString/parserBundle/metrics/metakit4_definition_string.json b/pyMetakitDefinitionString/parserBundle/metrics/metakit4_definition_string.json new file mode 100644 index 0000000..4769993 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/metrics/metakit4_definition_string.json @@ -0,0 +1,151 @@ +{ + "criteria": [ + "parseRaw", + "preprocess", + "wrapper" + ], + "backends": [ + "parsimonious", + "waxeye", + "TatSu", + "parglare", + "antlr4" + ], + "testData": [ + "v1[_B[p1:S,sv[p2:I],sub[^]],sub[^]],aaa[_H:I,_R:I,sub[^]],sub[^]" + ], + "matrix": [ + [ + [ + [ + 6.156014046953813e-06, + 7.917142528787511e-06, + 6.586245733049862e-06, + 2.997368832590726e-07, + 108, + 108 + ], + [ + 5.1159522771796474e-06, + 5.635503446379727e-06, + 5.370804321052945e-06, + 9.476723927243474e-08, + 124, + 123 + ], + [ + 6.421419288715972e-07, + 7.621846133458518e-07, + 6.845584660083475e-07, + 1.731814495368088e-08, + 243, + 242 + ] + ], + [ + [ + 3.219985430053743e-05, + 3.3775290017038306e-05, + 3.299776466471866e-05, + 4.2575405949849066e-07, + 68, + 67 + ], + [ + 1.4619291297940276e-05, + 1.642858414765725e-05, + 1.552136538645048e-05, + 2.9839222463527146e-07, + 85, + 85 + ], + [ + 1.8728383361025058e-06, + 2.215034371994911e-06, + 2.0301750220179455e-06, + 4.45141432184092e-08, + 171, + 171 + ] + ], + [ + [ + 0.0011360467148552882, + 0.0011691797487115654, + 0.0011522644331390674, + 9.362217847724497e-06, + 19, + 18 + ], + [ + 3.5779692502226937e-11, + 7.983110757586984e-11, + 4.277573485148963e-11, + 5.236484585673234e-12, + 4253, + 4252 + ], + [ + 1.861287077683707e-06, + 2.1783370892993585e-06, + 2.002081648254403e-06, + 6.158644301472048e-08, + 171, + 171 + ] + ], + [ + [ + 0.00012976317851214105, + 0.00013422311686822665, + 0.00013109352119720973, + 8.759828729028771e-07, + 41, + 41 + ], + [ + 1.9857224190954457e-11, + 5.7388605506202486e-11, + 2.3992007367476507e-11, + 2.543396181070757e-12, + 7570, + 7570 + ], + [ + 2.2549914229640183e-07, + 2.549602650227178e-07, + 2.3949945131792785e-07, + 5.0522746175959715e-09, + 339, + 339 + ] + ], + [ + [ + 0.0002573842887775363, + 0.00027032986709795825, + 0.0002610847819596529, + 2.6270036623538492e-06, + 33, + 32 + ], + [ + 2.0436710233411047e-11, + 5.7788782502231436e-11, + 2.4725997631435534e-11, + 2.746031746516031e-12, + 7356, + 7355 + ], + [ + 9.477808230090748e-07, + 1.1971141083618623e-06, + 1.0496537074558248e-06, + 2.996435959722221e-08, + 209, + 209 + ] + ] + ] + ] +} \ No newline at end of file diff --git a/pyMetakitDefinitionString/parserBundle/schemas/capless/metakit4_definition_string.json b/pyMetakitDefinitionString/parserBundle/schemas/capless/metakit4_definition_string.json new file mode 100644 index 0000000..bf08b23 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/schemas/capless/metakit4_definition_string.json @@ -0,0 +1,25 
@@ +{ + "subFields": { + "scalarOrView": "first_subField", + "rest_subFields_with_delF": "rest_subFields_with_del" + }, + "rest_subField_with_delF": { + "scalarOrView": "rest_subField" + }, + "scalarOrView": { + "scalar": "scalarF", + "view": "viewF" + }, + "view": { + "word": "name", + "body": "bodyF" + }, + "scalar": { + "word": "name", + "TypeSpecifier": "typeF" + }, + "body": { + "subFields": "subFieldsF", + "IndirectMarker": "selfF" + } +} \ No newline at end of file diff --git a/pyMetakitDefinitionString/parserBundle/schemas/iterless/metakit4_definition_string.json b/pyMetakitDefinitionString/parserBundle/schemas/iterless/metakit4_definition_string.json new file mode 100644 index 0000000..73b54cf --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/schemas/iterless/metakit4_definition_string.json @@ -0,0 +1,3 @@ +[ + "rest_subFields_with_delF" +] \ No newline at end of file diff --git a/pyMetakitDefinitionString/parserBundle/wrappers/metakit4_definition_string.py b/pyMetakitDefinitionString/parserBundle/wrappers/metakit4_definition_string.py new file mode 100644 index 0000000..16c00b2 --- /dev/null +++ b/pyMetakitDefinitionString/parserBundle/wrappers/metakit4_definition_string.py @@ -0,0 +1,65 @@ +import typing +from UniGrammarRuntime.IWrapper import IWrapper, IParseResult + + +class view(IParseResult): + __slots__ = "name", "bodyF" + + def __init__(self): + self.name = None + self.bodyF = None + + +class scalar(IParseResult): + __slots__ = "name", "typeF" + + def __init__(self): + self.name = None + self.typeF = None + + +class subFieldsParser(IWrapper): + __slots__ = () + + def process_delimited_scalarOrView_(self, parsed) -> typing.Iterable[typing.Union[scalar, view]]: + yield parsed.first_subField + for f in self.backend.wstr.iterateCollection(parsed.rest_subFields_with_del): + yield f.rest_subField + + def process_delimited_scalarOrView(self, parsed) -> typing.Iterable[typing.Union[scalar, view]]: + return [self.process_scalarOrView(f) for f in self.process_delimited_scalarOrView_(parsed)] + + def process_scalarOrView(self, parsed) -> typing.Union[scalar, view]: + scalarF = getattr(parsed, "scalarF", None) + if scalarF is not None: + return self.process_scalar(scalarF) + viewF = getattr(parsed, "viewF", None) + if viewF is not None: + return self.process_view(viewF) + raise TypeError(dir(parsed)) + + def process_view(self, parsed) -> view: + rec = view() + rec.name = self.backend.getSubTreeText(parsed.name) + rec.bodyF = self.process_body(parsed.bodyF) + return rec + + def process_scalar(self, parsed) -> scalar: + rec = scalar() + rec.name = self.backend.getSubTreeText(parsed.name) + rec.typeF = self.backend.terminalNodeToStr(parsed.typeF) + return rec + + def process_body(self, parsed) -> typing.Union[typing.Iterable[typing.Union[scalar, view]], str]: + subFieldsF = getattr(parsed, "subFieldsF", None) + if subFieldsF is not None: + return self.process_delimited_scalarOrView(subFieldsF) + selfF = getattr(parsed, "selfF", None) + if selfF is not None: + return self.backend.terminalNodeToStr(selfF) + raise TypeError(dir(parsed)) + + __MAIN_PRODUCTION__ = process_delimited_scalarOrView + + +__MAIN_PARSER__ = subFieldsParser diff --git a/pyMetakitDefinitionString/py.typed b/pyMetakitDefinitionString/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..c51540d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,41 @@ +[build-system] +requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4.3"] 
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "pyMetakitDefinitionString"
+# version = 0.1
+authors = [{name = "KOLANICH"}]
+description = "Parses Metakit 4 definition strings"
+readme = "ReadMe.md"
+keywords = ["metakit", "metakit4", "database", "parser"]
+license = {text = "Unlicense"}
+classifiers = [
+	"Programming Language :: Python",
+	"Programming Language :: Python :: 3",
+	"Development Status :: 4 - Beta",
+	"Environment :: Other Environment",
+	"Intended Audience :: Developers",
+	"License :: Public Domain",
+	"Operating System :: OS Independent",
+	"Topic :: Software Development :: Libraries :: Python Modules",
+]
+requires-python = ">=3.4"
+dependencies = [
+	"UniGrammarRuntime" # @ https://codeberg.org/UniGrammar/UniGrammarRuntime.py
+]
+dynamic = ["version"]

+[project.urls]
+Homepage = "https://codeberg.org/prebuilder/pyMetakitDefinitionString"
+
+[tool.setuptools]
+zip-safe = true
+include-package-data = true
+
+[tool.setuptools.packages.find]
+include = [
+	"pyMetakitDefinitionString",
+	"pyMetakitDefinitionString.*",
+]
+namespaces = false