Merge pull request #2 from PyPDX/dev
Dev
MichaelKim0407 authored Dec 29, 2019
2 parents 14912cd + 8335a57 commit 0b7481a
Showing 9 changed files with 102 additions and 22 deletions.
10 changes: 9 additions & 1 deletion clausewitz/datastructure.py
@@ -4,6 +4,7 @@
 class Dict(dict):
     DUPKEYS = '__dupkeys__'
     OPS = '__ops__'
+    MODIFIERS = '__modifiers__'
 
     def __init__(self, iterable=()):
         super().__init__()
@@ -23,8 +24,12 @@ def dupkeys(self) -> _typing.Dict[str, _typing.List[str]]:
     def ops(self) -> _typing.Dict[str, str]:
         return self._get_meta(self.OPS)
 
+    @property
+    def modifiers(self) -> _typing.Dict[str, _typing.List[str]]:
+        return self._get_meta(self.MODIFIERS)
+
     def __setitem__(self, key, value):
-        op, v = value
+        op, *mod, v = value
 
         if key in self:
             if key not in self.dupkeys:
@@ -36,4 +41,7 @@ def __setitem__(self, key, value):
         if op != '=':
             self.ops[key] = op
 
+        if mod:
+            self.modifiers[key] = mod
+
         return super().__setitem__(key, v)
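
For context, each value assigned into this Dict is a tuple of the shape (operator, *modifiers, value). A minimal runnable sketch of the new unpacking (illustrative only: MetaDict and the setdefault calls stand in for the real class's _get_meta machinery, and duplicate-key handling is omitted):

    # Illustrative stand-in for clausewitz.datastructure.Dict.
    # dict.setdefault bypasses the overridden __setitem__, so metadata
    # entries are stored directly without being unpacked themselves.
    class MetaDict(dict):
        def __setitem__(self, key, value):
            op, *mod, v = value  # e.g. ('=', 'rgb', [100, 200, 50])
            if op != '=':
                self.setdefault('__ops__', {})[key] = op
            if mod:
                self.setdefault('__modifiers__', {})[key] = mod
            super().__setitem__(key, v)

    d = MetaDict()
    d['color'] = ('=', 'rgb', [100, 200, 50])
    assert d['color'] == [100, 200, 50]
    assert d['__modifiers__'] == {'color': ['rgb']}
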
2 changes: 2 additions & 0 deletions clausewitz/parse.py
@@ -24,6 +24,8 @@ def filter_tokens(tokens: _typing.Iterable[_TokenInfo]) -> _typing.Iterator[_TokenInfo]:
             _tokenize.NEWLINE,
             _tokenize.NL,
             _tokenize.COMMENT,
+            _tokenize.INDENT,
+            _tokenize.DEDENT,
             _tokenize.ENDMARKER,
         ):
             continue
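
These tokens come from Python's own tokenize module, which the parser reuses; leading whitespace produces INDENT/DEDENT pairs that carry Python block semantics but mean nothing in Clausewitz files, so they are now skipped alongside newlines and comments. A quick standalone demonstration (not from the PR) of where they come from:

    import io
    import tokenize

    # An indented line yields INDENT/DEDENT tokens even though
    # indentation is insignificant in Clausewitz data files.
    src = 'a = 1\n    b = 2\n'
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
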
16 changes: 15 additions & 1 deletion clausewitz/syntax/element.py
@@ -62,6 +62,20 @@ def __init__(self, exact_type: int, raw: str):
         self._exact_type = exact_type
         self._raw = raw
 
-    @cached_property
+    @property
     def value(self):
         return self._raw
+
+
+class Modifier(Element):
+    MODIFIERS = (
+        'rgb',
+        'hsv',
+    )
+
+    def __init__(self, raw: str):
+        self._raw = raw
+
+    @property
+    def value(self):
+        return self._raw
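
Only these two color-space names are recognized; any other bare NAME continues to parse as an ordinary Name element. Assuming the package layout stays as in this diff, usage is simply:

    from clausewitz.syntax.element import Modifier

    assert 'rgb' in Modifier.MODIFIERS and 'hsv' in Modifier.MODIFIERS
    assert Modifier('rgb').value == 'rgb'
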
25 changes: 15 additions & 10 deletions clausewitz/syntax/scope.py
@@ -9,15 +9,16 @@
 )
 
 from .element import (
-    Element,
-    Operator,
+    Element as _Element,
+    Operator as _Operator,
+    Modifier as _Modifier,
 )
 from ..datastructure import (
     Dict as _Dict,
 )
 
 
-class Scope(Element):
+class Scope(_Element):
     class SerializationError(Exception):
         def __init__(self, data=None):
             from pprint import pformat
@@ -50,20 +51,24 @@ def _as_list(self):
     @_returns(_Dict)
     def _as_dict(self):
         for statement in self._statements:
-            if len(statement) != 3:
+            if len(statement) < 3:
                 raise self.SerializationError
             op = statement[1]
-            if not isinstance(op, Operator):
+            if not isinstance(op, _Operator):
                 raise self.SerializationError
-            yield statement[0].value, (statement[1].value, statement[2].value)
+            if not all(
+                isinstance(element, _Modifier)
+                for element in statement[2:-1]
+            ):
+                raise self.SerializationError
+
+            values = statement.values
+            yield values[0], values[1:]
 
     @_returns(tuple)
     def _raw(self):
         for statement in self._statements:
-            yield tuple(
-                element.value
-                for element in statement
-            )
+            yield statement.values
 
     @cached_property
     def value(self):
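
Concretely, for the new sample statement color = rgb { 100 200 50 } the elements are a Name, an Operator, a Modifier, and a Scope, so _as_dict now yields the whole tail of statement.values instead of a fixed (op, value) pair. A worked illustration (plain data, no parser required):

    values = ['color', '=', 'rgb', [100, 200, 50]]   # statement.values
    key, rest = values[0], values[1:]                # what _as_dict yields
    assert (key, rest) == ('color', ['=', 'rgb', [100, 200, 50]])
    # `rest` has the (op, *modifiers, value) shape that Dict.__setitem__ unpacks.
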
14 changes: 12 additions & 2 deletions clausewitz/syntax/statement.py
@@ -7,6 +7,7 @@
     Element as _Element,
     String as _String,
     Operator as _Operator,
+    Modifier as _Modifier,
 )
 
 
@@ -38,8 +39,10 @@ def _end(self, *, reject_last: bool):
         raise self.End(reject_last)
 
     @property
-    def last_op(self) -> bool:
-        return len(self) == 0 or isinstance(self[-1], _Operator)
+    def accepts_value(self) -> bool:
+        return len(self) == 0 or \
+            isinstance(self[-1], _Operator) or \
+            isinstance(self[-1], _Modifier)
 
     def push(self, token: _TokenInfo) -> None:
         try:
@@ -63,6 +66,13 @@ def push(self, token: _TokenInfo) -> None:
         else:
             return self._end(reject_last=True)
 
+    @property
+    def values(self) -> _typing.List:
+        return [
+            element.value
+            for element in self
+        ]
+
 
 from .token import (  # noqa: E402
     Tokens as _Tokens,
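
The rename from last_op to accepts_value captures the new rule: a statement can take a value at its start, after an operator, or now also after a modifier, which is what lets the { ... } scope follow rgb. A standalone restatement of the predicate (illustrative stub classes, not the real element types):

    class Operator: ...
    class Modifier: ...

    def accepts_value(statement: list) -> bool:
        # A value may appear at the start of a statement, right after an
        # operator, or right after a modifier such as 'rgb'.
        return len(statement) == 0 or isinstance(statement[-1], (Operator, Modifier))

    assert accepts_value([])                                  # start of statement
    assert accepts_value(['color', Operator()])               # after '='
    assert accepts_value(['color', Operator(), Modifier()])   # after 'rgb'
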
34 changes: 29 additions & 5 deletions clausewitz/syntax/token.py
@@ -8,6 +8,7 @@
     Element as _Element,
     Number as _Number,
     Name as _Name,
+    Modifier as _Modifier,
 )
 
 
@@ -31,6 +32,23 @@ class Tokens(_typing.List[_TokenInfo]):
         _tokenize.STRING,
     )
 
+    @property
+    def modifier(self) -> _typing.Optional['_Modifier']:
+        if not self:
+            return None
+
+        if len(self) != 1:
+            return None
+
+        token = self[0]
+        if token.type != _tokenize.NAME:
+            return None
+
+        if token.string not in _Modifier.MODIFIERS:
+            return None
+
+        return _Modifier(token.string)
+
     @property
     def number(self) -> _typing.Optional['_Number']:
         if not self:
@@ -67,6 +85,12 @@ def __init__(self, parent: '_Statement'):
         super().__init__()
         self._parent = parent
 
+    def _modifier_or_end_statement(self):
+        if self.modifier is not None:
+            raise self.ShouldNotAppend
+        else:
+            raise self.EndStatement
+
     def append(self, token: _TokenInfo):
         if token.exact_type in self.OPERATORS:
             raise self.ShouldNotAppend
@@ -79,27 +103,27 @@ def append(self, token: _TokenInfo):
             *self.STRING_TYPES,
         ):
             if self:
-                raise self.EndStatement
+                return self._modifier_or_end_statement()
 
         else:
-            if self._parent.last_op:
+            if self._parent.accepts_value:
                 raise self.ShouldNotAppend
             else:
                 raise self.EndStatement
 
         if self:
             if self[-1].end != token.start:
-                raise self.EndStatement
+                return self._modifier_or_end_statement()
 
         else:
-            if not self._parent.last_op:
+            if not self._parent.accepts_value:
                 raise self.EndStatement
 
         super().append(token)
 
     @property
     def value(self) -> '_Element':
-        return self.number or self.name
+        return self.modifier or self.number or self.name
 
 
 from .statement import (  # noqa: E402
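
Two details matter here: Tokens.modifier only matches a lone NAME token whose string is whitelisted in Modifier.MODIFIERS, and value now tries modifier before number and name. Where append used to end the statement at a name boundary, it now raises ShouldNotAppend when the buffered token is a modifier, so rgb stays in the current statement and the following { ... } scope becomes its value. A condensed sketch of the whitelist check itself (stand-in logic, not the real Tokens class):

    MODIFIERS = ('rgb', 'hsv')

    def classify(name_token: str) -> str:
        # A whitelisted NAME becomes a Modifier; anything else stays a Name.
        return 'Modifier' if name_token in MODIFIERS else 'Name'

    assert classify('rgb') == 'Modifier'
    assert classify('color') == 'Name'
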
1 change: 1 addition & 0 deletions tests/data/sample.txt
@@ -7,3 +7,4 @@ worl\"d\""
 f = {}
 d < 100
 # this is a comment
+color = rgb { 100 200 50 }
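
This line exercises the new syntax end to end: a modifier NAME (rgb) sitting between the operator and a { ... } scope.
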
4 changes: 4 additions & 0 deletions tests/test_parse.py
@@ -15,13 +15,17 @@ def test_parse(data):
             'd': '>=',
             'd+1': '<',
         },
+        '__modifiers__': {
+            'color': ['rgb'],
+        },
         'a': ['x.000', '"y"', '10z'],
         'b': 0,
         'c': 'true',
         'd': -2.1,
         'e.xyz': '"hello\nworl"d""',
         'f': {},
         'd+1': 100,
+        'color': [100, 200, 50],
     }
 
     with data('sample.txt') as readline:
18 changes: 15 additions & 3 deletions tests/util/test_tokenize.py
@@ -14,6 +14,7 @@ def test_prepare(data):
 f = {}
 d < 100
 # this is a comment
+color = rgb { 100 200 50 }
 '''
 
     with data('sample.txt') as readline:
@@ -75,14 +76,25 @@ def test_tokenize(data):
         (tokenize.COMMENT, '# this is a comment'),
         (tokenize.NL, '\n'),
 
+        (tokenize.INDENT, ' '),
+        (tokenize.NAME, 'color'),
+        (tokenize.OP, '='),
+        (tokenize.NAME, 'rgb'),
+        (tokenize.OP, '{'),
+        (tokenize.NUMBER, '100'),
+        (tokenize.NUMBER, '200'),
+        (tokenize.NUMBER, '50'),
+        (tokenize.OP, '}'),
+        (tokenize.NEWLINE, '\n'),
+        (tokenize.DEDENT, ''),
+
         (tokenize.ENDMARKER, ''),
     )
 
     with data('sample.txt') as readline:
         tokens = tokenize.tokenize(prepare(readline))
-        for t, s in expected:
+        for val in expected:
             token = next(tokens)
-            assert token.type == t
-            assert token.string == s
+            assert (token.type, token.string) == val
 
         assert tuple(tokens) == ()
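
Comparing the (type, string) pair in a single assert also improves diagnostics: a failing comparison now shows the full expected and actual token together, instead of failing on the type or string alone.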
