diff --git a/syntax/arrai.wbnf b/syntax/arrai.wbnf
index 271e9bbe..2e7a3947 100644
--- a/syntax/arrai.wbnf
+++ b/syntax/arrai.wbnf
@@ -11,7 +11,7 @@ expr -> C* amp="&"* @ C* arrow=(
     > C* @:binop=("without" | "with") C*
     > C* @:binop="||" C*
     > C* @:binop="&&" C*
-    > C* @:binop="+>" C*
+    > C* @:mergeop="+>" C*
     > C* @:compare=/{!?(?:<:|=|<=?|>=?|\((?:<=?|>=?|<>=?)\))} C*
     > C* @ if=("if" t=expr ("else" f=expr)?)* C*
     > C* @:binop=/{\+\+|[+|]|-%?} C*
@@ -83,13 +83,14 @@ SEQ_COMMENT -> "," C*;
 
 .wrapRE -> /{\s*()\s*};
 
+nested_op -> "+>" | "|" | "++";
 .macro patternterms(top) {
         C* odelim="{" C* rel=(names C* tuple=("(" v=top:SEQ_COMMENT, ")"):SEQ_COMMENT,?) C* cdelim="}" C*
     |   C* odelim="{" C* set=(elt=top:SEQ_COMMENT,?) C* cdelim="}" C*
-    |   C* odelim="{" C* dict=(pairs=((extra|key=(expr tail=("?")?) ":" value=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim="}" C*
+    |   C* odelim="{" C* dict=(pairs=((extra|key=(expr tail=("?")?) nested_op? ":" value=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim="}" C*
     |   C* odelim="[" C* array=(%!sparse_sequence(tail=("?")? top fall=(":" expr)?)?) C* cdelim="]" C*
     |   C* odelim="<<" C* bytes=(item=(STR|NUM|CHAR|IDENT|"("top")"):SEQ_COMMENT,?) C* cdelim=">>" C*
-    |   C* odelim="(" C* tuple=(pairs=(extra | (((name? tail="?") | rec=/{\brec\b}? name | name?) ":" v=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim=")" C*
+    |   C* odelim="(" C* tuple=(pairs=(extra | (((name? tail="?") | rec=/{\brec\b}? name | name?) nested_op? ":" v=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim=")" C*
 };
 
 .macro sparse_sequence(top) {
diff --git a/syntax/compile.go b/syntax/compile.go
index 5419c8dd..a835e43b 100644
--- a/syntax/compile.go
+++ b/syntax/compile.go
@@ -74,7 +74,7 @@ func MustCompile(ctx context.Context, filePath, source string) rel.Expr {
 func (pc ParseContext) CompileExpr(ctx context.Context, b ast.Branch) (rel.Expr, error) {
     // Note: please make sure if it is necessary to add new syntax name before `expr`.
     name, c := which(b,
-        "amp", "arrow", "let", "unop", "binop", "compare", "rbinop", "if", "get",
+        "amp", "arrow", "let", "unop", "binop", "compare", "mergeop", "rbinop", "if", "get",
         "tail_op", "postfix", "touch", "get", "rel", "set", "dict", "array", "bytes", "embed", "op",
         "fn", "pkg", "tuple", "xstr", "IDENT", "STR", "NUM", "CHAR", "cond", exprTag,
@@ -95,6 +95,8 @@ func (pc ParseContext) CompileExpr(ctx context.Context, b ast.Branch) (rel.Expr,
         return pc.compileCompare(ctx, b, c)
     case "rbinop":
         return pc.compileRbinop(ctx, b, c)
+    case "mergeop":
+        return pc.compileMergeop(ctx, b, c)
     case "if":
         return pc.compileIf(ctx, b, c)
     case "cond":
@@ -485,6 +487,189 @@ func (pc ParseContext) compileBinop(ctx context.Context, b ast.Branch, c ast.Chi
     return result, nil
 }
 
+type keyOpType int
+
+// keyOpNode represents either a get or a call operation that the syntactic
+// sugar applies as it descends one level into the nested structure.
+type keyOpNode struct {
+    op   keyOpType
+    attr string
+    key  rel.Expr
+}
+
+type mergeOpKeyType int
+
+const (
+    fallbackIdent = "((fallback))"
+    parentIdent   = "((parent))"
+
+    mergeOpKey mergeOpKeyType = iota
+    desugarKey
+    keysPathKey
+
+    keyGet keyOpType = iota
+    keyCall
+)
+
+func getKeyPaths(ctx context.Context) []keyOpNode {
+    if n := ctx.Value(keysPathKey); n != nil {
+        return n.([]keyOpNode)
+    }
+    return []keyOpNode{}
+}
+
+func appendKeyPath(ctx context.Context, ops ...keyOpNode) context.Context {
+    return context.WithValue(ctx, keysPathKey, append(getKeyPaths(ctx), ops...))
+}
+
+func emptyKeyPaths(ctx context.Context) context.Context {
+    return context.WithValue(ctx, keysPathKey, nil)
+}
+
+func popKeyPaths(ctx context.Context) context.Context {
+    arr := getKeyPaths(ctx)
+    if len(arr) > 0 {
+        arr = arr[:len(arr)-1]
+    }
+    return context.WithValue(ctx, keysPathKey, arr)
+}
+
+// buildKeyPatterns wraps base in a tuple or dict pattern for each node of the
+// accumulated key path, innermost node first.
+func buildKeyPatterns(arr []keyOpNode, base rel.Pattern) rel.Pattern {
+    pattern := base
+    extra := rel.NewFallbackPattern(rel.NewExtraElementPattern(""), nil)
+    for i := len(arr) - 1; i >= 0; i-- {
+        e := arr[i]
+        switch e.op {
+        case keyGet:
+            pattern = rel.NewTuplePattern(
+                rel.NewTuplePatternAttr(e.attr, rel.NewFallbackPattern(pattern, nil)),
+                rel.NewTuplePatternAttr("", extra),
+            )
+        case keyCall:
+            pattern = rel.NewDictPattern(
+                rel.NewDictPatternEntry(e.key, rel.NewFallbackPattern(pattern, nil)),
+                rel.NewDictPatternEntry(nil, extra),
+            )
+        default:
+            panic(errors.New("buildKeyPatterns: unexpected type"))
+        }
+    }
+    return pattern
+}
+
+// withMerging adds the merging-operation flag to the context.
+func withMerging(ctx context.Context, status bool) context.Context {
+    return context.WithValue(ctx, mergeOpKey, status)
+}
+
+// a compilation process isMerging when it is on the right-hand side of a `+>` operation. This is meant to restrict
+// the expression so that users cannot just write `(x+>: a)` anywhere.
+func isMerging(ctx context.Context) bool {
+    if b := ctx.Value(mergeOpKey); b != nil {
+        return b.(bool)
+    }
+    return false
+}
+
+// withDesugaring adds the context to desugar tuples and dictionaries for merging operations.
+func withDesugaring(ctx context.Context, status bool) context.Context {
+    return context.WithValue(ctx, desugarKey, status)
+}
+
+// a compilation process isDesugaring at certain locations.
+// Currently it is used for the RHS of a `+>` operation, whose syntactic sugar also requires the RHS to be compiled
+// as normal code. This means compiling `(x+>: a)` as `(x: a)`.
+// isDesugaring toggles between compiling that RHS as a normal expression and desugaring it.
+func isDesugaring(ctx context.Context) bool {
+    if b := ctx.Value(desugarKey); b != nil {
+        return b.(bool)
+    }
+    return false
+}
+
+func (pc ParseContext) compileMergeop(ctx context.Context, b ast.Branch, c ast.Children) (rel.Expr, error) {
+    // There's only one merge op, so the first one is enough.
+    op := c.(ast.Many)[0].One("").Scanner()
+    f := binops[op.String()]
+    args := b.Many(exprTag)
+    result, err := pc.CompileExpr(ctx, args[0].(ast.Branch))
+    if err != nil {
+        return nil, err
+    }
+    // withMerging allows the RHS to be compiled as a normal value even though it contains nested ops.
+    ctx = withMerging(ctx, true)
+    for _, arg := range args[1:] {
+        source, err := parser.MergeScanners(op, result.Source(), arg.Scanner())
+        if err != nil {
+            return nil, err
+        }
+        fallback, err := pc.CompileExpr(ctx, arg.(ast.Branch))
+        if err != nil {
+            return nil, err
+        }
+        // withDesugaring allows the RHS to be desugared.
+        transformedRHS, err := pc.CompileExpr(withDesugaring(ctx, true), arg.(ast.Branch))
+        if err != nil {
+            return nil, err
+        }
+        result = f(source, result, transformNestedRHS(arg.Scanner(), result, fallback, transformedRHS))
+    }
+    return result, nil
+}
+
+func errMergeSyntacticSugar(scanner parser.Scanner) error {
+    return fmt.Errorf(
+        "attr/key operation only allowed in rhs of a merge operation: %v",
+        scanner.String(),
+    )
+}
+
+// transformNestedRHS builds the equivalent of:
+//
+//	let parent = ...;
+//	let fallback = ...;
+//	transformedRHS
+func transformNestedRHS(scanner parser.Scanner, parent, fallback, transformedRHS rel.Expr) rel.Expr {
+    bind := binops["->"]
+    return bind(
+        scanner, parent,
+        rel.NewFunction(scanner, rel.NewIdentPattern(parentIdent),
+            bind(
+                scanner, fallback,
+                rel.NewFunction(scanner, rel.NewIdentPattern(fallbackIdent), transformedRHS),
+            ),
+        ),
+    )
+}
+
+// desugarNestedRHS assumes that fallback and parent are already bound, and transforms into:
+//
+//	let pattern = fallback;
+//	cond parent {
+//	    pattern: parent `op` rhs,
+//	    _: fallback,
+//	}
+func desugarNestedRHS(
+    arr []keyOpNode,
+    scanner parser.Scanner,
+    op string,
+    rhs rel.Expr,
+) rel.Expr {
+    parentPattern := buildKeyPatterns(arr, rel.NewIdentPattern(parentIdent))
+    parentIdentExpr := rel.NewIdentExpr(scanner, parentIdent)
+    fallbackPattern := buildKeyPatterns(arr, rel.NewIdentPattern(fallbackIdent))
+    fallbackIdentExpr := rel.NewIdentExpr(scanner, fallbackIdent)
+    return binops["->"](
+        scanner,
+        fallbackIdentExpr,
+        rel.NewFunction(scanner, fallbackPattern,
+            rel.NewCondPatternControlVarExpr(
+                scanner, parentIdentExpr,
+                rel.NewPatternExprPair(parentPattern, binops[op](scanner, parentIdentExpr, rhs)),
+                rel.NewPatternExprPair(rel.NewIdentPattern("_"), fallbackIdentExpr),
+            ),
+        ),
+    )
+}
+
 func (pc ParseContext) compileCompare(ctx context.Context, b ast.Branch, c ast.Children) (rel.Expr, error) {
     args := b.Many(exprTag)
     argExprs := make([]rel.Expr, 0, len(args))
@@ -921,16 +1106,39 @@ func (pc ParseContext) compileDictEntryExprs(ctx context.Context, b ast.Branch)
     if pairs := b.Many("pairs"); pairs != nil {
         entryExprs := make([]rel.DictEntryTupleExpr, 0, len(pairs))
         for _, pair := range pairs {
+            nestedOp := pair.One("nested_op")
+            if nestedOp != nil && !isMerging(ctx) {
+                return nil, errMergeSyntacticSugar(pair.Scanner())
+            }
             key := pair.One("key")
             value := pair.One("value")
             keyExpr, err := pc.CompileExpr(ctx, key.(ast.Branch))
             if err != nil {
                 return nil, err
            }
+            keyPaths := []keyOpNode{}
+            // this is done here so that value compilation either gets the next pattern or starts a new pattern
+            if isMerging(ctx) && isDesugaring(ctx) {
+                keyNode := keyOpNode{op: keyCall, key: keyExpr}
+                if nestedOp != nil {
+                    keyPaths = append(getKeyPaths(ctx), keyNode)
+                    ctx = emptyKeyPaths(ctx)
+                } else {
+                    ctx = appendKeyPath(ctx, keyNode)
+                }
+            }
             valueExpr, err := pc.CompileExpr(ctx, value.(ast.Branch))
             if err != nil {
                 return nil, err
             }
+            if isMerging(ctx) && isDesugaring(ctx) {
+                if nestedOp != nil {
+                    op := nestedOp.One("").(ast.Leaf).Scanner().String()
+                    valueExpr = desugarNestedRHS(keyPaths, pair.Scanner(), op, valueExpr)
+                }
+                // remove the current attr name for the next pair
+                ctx = popKeyPaths(ctx)
+            }
             entryExprs = append(entryExprs, rel.NewDictEntryTupleExpr(pair.Scanner(), keyExpr, valueExpr))
         }
         return entryExprs, nil
@@ -1130,14 +1338,49 @@ func (pc ParseContext) compileTuple(ctx context.Context, b ast.Branch, c ast.Chi
     if pairs := c.(ast.One).Node.Many("pairs"); pairs != nil {
         attrs := make([]rel.AttrExpr, 0, len(pairs))
         for _, pair := range pairs {
+            nestedOp := pair.One("nested_op")
+            if nestedOp != nil && !isMerging(ctx) {
+                return nil, errMergeSyntacticSugar(pair.Scanner())
+            }
+            var k string
+            name := pair.One("name")
+            if name != nil {
+                k = parseName(name.(ast.Branch))
+            }
+
+            keyPaths := []keyOpNode{}
+            if isMerging(ctx) && isDesugaring(ctx) {
+                keyNode := keyOpNode{op: keyGet, attr: k}
+                if nestedOp != nil {
+                    if k == "" {
+                        return nil, fmt.Errorf(
+                            "attr name must be explicitly defined for attr operation: %v",
+                            pair.Scanner().Context(-1),
+                        )
+                    }
+                    // get the key paths at this point because there is a nestedOp, then empty them for the next pair
+                    keyPaths = append(getKeyPaths(ctx), keyNode)
+                    ctx = emptyKeyPaths(ctx)
+                } else {
+                    ctx = appendKeyPath(ctx, keyNode)
+                }
+            }
+
             v, err := pc.CompileExpr(ctx, pair.One("v").(ast.Branch))
             if err != nil {
                 return nil, err
             }
-            if name := pair.One("name"); name != nil {
-                k = parseName(name.(ast.Branch))
-            } else {
+
+            if isMerging(ctx) && isDesugaring(ctx) {
+                if nestedOp != nil {
+                    v = desugarNestedRHS(keyPaths, pair.Scanner(), nestedOp.One("").(ast.Leaf).Scanner().String(), v)
+                }
+                // remove the current attr name for the next pair
+                ctx = popKeyPaths(ctx)
+            }
+
+            if name == nil {
                 switch v := v.(type) {
                 case *rel.DotExpr:
                     k = v.Attr()
@@ -1152,6 +1395,7 @@ func (pc ParseContext) compileTuple(ctx context.Context, b ast.Branch, c ast.Chi
                 fix, fixt := FixFuncs()
                 v = rel.NewRecursionExpr(scanner, k, v, fix, fixt)
             }
+
             attr, err := rel.NewAttrExpr(scanner, k, v)
             if err != nil {
                 return nil, err
diff --git a/syntax/expr_binary_test.go b/syntax/expr_binary_test.go
index bc75ee93..e3901236 100644
--- a/syntax/expr_binary_test.go
+++ b/syntax/expr_binary_test.go
@@ -48,8 +48,219 @@ func TestOpsAddArrowForDicts(t *testing.T) {
         `{'a': {'b': 'ABC1'}} +> {'b': {'c': 'ABC2'}}`)
 }
 
+func TestOpsNestedAddArrowMixTupleDict(t *testing.T) {
+    t.Parallel()
+    // |
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {1, 2})))`,
+        `(a: (b: (c: {1}))) +> (a+>: (b+>: (c|: {2})))`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': {1, 2}}}}`,
+        `{'a': {'b': {'c': {1}}}} +> {'a'+>: {'b'+>: {'c'|: {2}}}}`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `(a: {'b': (c: {1, 2})})`,
+        `(a: {'b': (c: {1})}) +> (a+>: {'b'+>: (c|: {2})})`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': (b: {'c': {1, 2}})}`,
+        `{'a': (b: {'c': {1}})} +> {'a'+>: ('b'+>: {'c'|: {2}})}`,
+    )
+
+    // ++
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: [1, 2])))`,
+        `(a: (b: (c: [1]))) +> (a+>: (b+>: (c++: [2])))`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': [1, 2]}}}`,
+        `{'a': {'b': {'c': [1]}}} +> {'a'+>: {'b'+>: {'c'++: [2]}}}`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `(a: {'b': (c: [1, 2])})`,
+        `(a: {'b': (c: [1])}) +> (a+>: {'b'+>: (c++: [2])})`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': (b: {'c': [1, 2]})}`,
+        `{'a': (b: {'c': [1]})} +> {'a'+>: ('b'+>: {'c'++: [2]})}`,
+    )
+
+    // very deep
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: {'c': {'d': (e: {'f': {'g': (h: (i: (j: 2)))}})}}))`,
+        `
+            let x = (a: (b: {'c': {'d': (e: {'f': {'g': (h: (i: (j: 1)))}})}}));
+            x +> (a+>: (b+>: {'c'+>: {'d'+>: (e+>: {'f'+>: {'g'+>: (h+>: (i+>: (j: 2)))}})}}))
+        `,
+    )
+
+    // multiple nested ops at the same level
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {1, 2}, d: {1}), e: (f: 1)), g: (h: 1))`,
+        `(a: (b: (c: {1}))) +> (a+>: (b+>: (c|: {2}, d|: {1}), e+>: (f: 1)), g+>: (h: 1))`,
+    )
+
+    // chain of +>
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {1, 2})), e: (f: 1))`,
+        `(a: (b: (c: {1}))) +> (a+>: (b+>: (c|: {2}))) +> (e+>: (f: (g: 1))) +> (e+>: (f: 1))`,
+    )
+}
+
+func TestOpsNestedAddArrowMixNestedOp(t *testing.T) {
+    t.Parallel()
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {1, 2}, d: {2})), e: 2)`,
+        `(a: (b: (c: {1}, d: {1})), e: 1) +> (a+>: (b+>: (c|: {2}, d: {2})), e: 2)`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': {1, 2}, 'd': {2}}}, 'e': 2}`,
+        `{'a': {'b': {'c': {1}, 'd': {1}}}, 'e': 1} +> {'a'+>: {'b'+>: {'c'|: {2}, 'd': {2}}}, 'e': 2}`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {2}, d: {2})), e: 2)`,
+        `(a: (b: (c: {1}, d: {1})), e: 1) +> (a+>: (b: (c: {2}, d: {2})), e: 2)`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': {2}, 'd': {2}}}, 'e': 2}`,
+        `{'a': {'b': {'c': {1}, 'd': {1}}}, 'e': 1} +> {'a'+>: {'b': {'c': {2}, 'd': {2}}}, 'e': 2}`,
+    )
+}
+
+func TestOpsNestedAddArrowNestedOpsWithGap(t *testing.T) {
+    t.Parallel()
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {1, 2})))`,
+        `(a: (b: (c: {1}, d: {1}))) +> (a+>: (b: (c|: {2})))`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': {1, 2}}}}`,
+        `{'a': {'b': {'c': {1}, 'd': {1}}}} +> {'a'+>: {'b': {'c'|: {2}}}}`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: [1, 2])))`,
+        `(a: (b: (c: [1], d: [1]))) +> (a+>: (b: (c++: [2])))`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': [1, 2]}}}`,
+        `{'a': {'b': {'c': [1], 'd': [1]}}} +> {'a'+>: {'b': {'c'++: [2]}}}`,
+    )
+}
+
+func TestOpsNestedAddArrowWithMissingKeys(t *testing.T) {
+    t.Parallel()
+    // missing at the start
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: 1)))`,
+        `() +> (a+>: (b+>: (c+>: 1)))`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: 1, d: 1), e: 1), f: 1)`,
+        `() +> (a+>: (b+>: (c+>: 1, d: 1), e: 1), f: 1)`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': 1}}}`,
+        `{} +> {'a'+>: {'b'+>: {'c'+>: 1}}}`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': 1, 'd': 1}, 'e': 1}, 'f': 1}`,
+        `{} +> {'a'+>: {'b'+>: {'c'+>: 1, 'd': 1}, 'e': 1}, 'f': 1}`,
+    )
+    // missing at the end
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: 1, d: {1})))`,
+        `(a: (b: (c: 1))) +> (a+>: (b+>: (d|: {1})))`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': 1, 'd': {1}}}}`,
+        `{'a': {'b': {'c': 1}}} +> {'a'+>: {'b'+>: {'d'|: {1}}}}`,
+    )
+    // missing everywhere
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {1, 2}, d: {1}), e: {1}), f: {1})`,
+        `(a: (b: (c: {1}))) +> (a+>: (b+>: (c|: {2}, d+>: {1}), e|: {1}), f|: {1})`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': {1, 2}, 'd': 1}, 'e': {1}}, 'f': {1}}`,
+        `{'a': {'b': {'c': {1}}}} +> {'a'+>: {'b'+>: {'c'|: {2}, 'd'+>: 1}, 'e'|: {1}}, 'f'|: {1}}`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `(a: {'b': (c: {1, 2}, d: {1}), 'e': {1}}, f: {1})`,
+        `(a: {'b': (c: {1})}) +> (a+>: {'b'+>: (c|: {2}, d|: {1}), 'e'|: {1}}, f|: {1})`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': (b: {'c': {1, 2}, 'd': {1}}, e: {1}), 'f': {1}}`,
+        `{'a': (b: {'c': {1}})} +> {'a'+>: (b+>: {'c'|: {2}, 'd'|: {1}}, e|: {1}), 'f'|: {1}}`,
+    )
+    // missing everywhere with nested ops gaps
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: {1, 2}, d: {1}), e: {1}), f: {1})`,
+        `(a: (b: (c: {1}, d: 2))) +> (a+>: (b: (c|: {2}, d: {1}), e|: {1}), f|: {1})`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': {'b': {'c': {1, 2}, 'd': {1}}, 'e': {1}}, 'f': {1}}`,
+        `{'a': {'b': {'c': {1}, 'd': 2}}} +> {'a'+>: {'b': {'c'|: {2}, 'd': {1}}, 'e'|: {1}}, 'f'|: {1}}`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `(a: {'b': (c: {1, 2}, d: {1}), 'e': {1}}, f: {1})`,
+        `(a: {'b': (c: {1}, d: 2)}) +> (a+>: {'b': (c|: {2}, d: {1}), 'e'|: {1}}, f|: {1})`,
+    )
+    AssertCodesEvalToSameValue(t,
+        `{'a': (b: {'c': {1, 2}, 'd': {1}}, e: {1}), 'f': {1}}`,
+        `{'a': (b: {'c': {1}, 'd': 2})} +> {'a'+>: (b: {'c'|: {2}, 'd': {1}}, e|: {1}), 'f'|: {1}}`,
+    )
+}
+
+func TestOpsNestedAddArrowMixedWithOtherExpr(t *testing.T) {
+    t.Parallel()
+    AssertCodesEvalToSameValue(t,
+        `(a: (b: (c: 2)), x: 2)`,
+        `
+        (a: (b: (c: 1))) +>
+            let x = 2;
+            (
+                a+>: (b+>: (c: x)),
+                :x,
+            )
+        `,
+    )
+}
+
 func TestOpsAddArrowForError(t *testing.T) {
     t.Parallel()
     AssertCodeErrors(t, "", `{1, 2, 3} +> {4, 5, 6}`)
     AssertCodeErrors(t, "", `1 +> 4`)
+    AssertCodeErrors(t,
+        "attr/key operation only allowed in rhs of a merge operation: a+>: (b: 1)",
+        `(a+>: (b: 1)) +> (a+>: (b: 2))`,
+    )
+    AssertCodeErrors(t,
+        "attr/key operation only allowed in rhs of a merge operation: 'a'+>: {'b': 1}",
+        `{'a'+>: {'b': 1}} +> {'a'+>: {'b': 2}}`,
+    )
+    AssertCodeErrors(t,
+        "| lhs must be a set, not number",
+        `(a: (b: 1)) +> (a+>: (b|: 2))`,
+    )
+    AssertCodeErrors(t,
+        "| lhs must be a set, not number",
+        `{'a': {'b': 1}} +> {'a'+>: {'b'|: 2}}`,
+    )
+    AssertCodeErrors(t,
+        "++ lhs must be a set, not number",
+        `(a: (b: 1)) +> (a+>: (b++: 2))`,
+    )
+    AssertCodeErrors(t,
+        "++ lhs must be a set, not number",
+        `{'a': {'b': 1}} +> {'a'+>: {'b'++: 2}}`,
+    )
+    AssertCodeErrors(t,
+        "attr name must be explicitly defined for attr operation: "+
+            "\n\x1b[1;37m:3:19:\x1b[0m\n\n\t\tlet x = (a: (b: 1));\n\t\t(a: (b: 2)) +> (\x1b[1;31m+>:x.a.b\x1b[0m)\n\t\t",
+        `
+        let x = (a: (b: 1));
+        (a: (b: 2)) +> (+>:x.a.b)
+        `,
+    )
 }
diff --git a/syntax/parser.go b/syntax/parser.go
index 48e9695f..d0f445d5 100644
--- a/syntax/parser.go
+++ b/syntax/parser.go
@@ -25,7 +25,7 @@ expr -> C* amp="&"* @ C* arrow=(
     > C* @:binop=("without" | "with") C*
     > C* @:binop="||" C*
     > C* @:binop="&&" C*
-    > C* @:binop="+>" C*
+    > C* @:mergeop="+>" C*
     > C* @:compare=/{!?(?:<:|=|<=?|>=?|\((?:<=?|>=?|<>=?)\))} C*
     > C* @ if=("if" t=expr ("else" f=expr)?)* C*
     > C* @:binop=/{\+\+|[+|]|-%?} C*
@@ -97,13 +97,14 @@ SEQ_COMMENT -> "," C*;
 
 .wrapRE -> /{\s*()\s*};
 
+nested_op -> "+>" | "|" | "++";
 .macro patternterms(top) {
         C* odelim="{" C* rel=(names C* tuple=("(" v=top:SEQ_COMMENT, ")"):SEQ_COMMENT,?) C* cdelim="}" C*
     |   C* odelim="{" C* set=(elt=top:SEQ_COMMENT,?) C* cdelim="}" C*
-    |   C* odelim="{" C* dict=(pairs=((extra|key=(expr tail=("?")?) ":" value=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim="}" C*
+    |   C* odelim="{" C* dict=(pairs=((extra|key=(expr tail=("?")?) nested_op? ":" value=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim="}" C*
     |   C* odelim="[" C* array=(%!sparse_sequence(tail=("?")? top fall=(":" expr)?)?) C* cdelim="]" C*
     |   C* odelim="<<" C* bytes=(item=(STR|NUM|CHAR|IDENT|"("top")"):SEQ_COMMENT,?) C* cdelim=">>" C*
-    |   C* odelim="(" C* tuple=(pairs=(extra | (((name? tail="?") | rec=/{\brec\b}? name | name?) ":" v=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim=")" C*
+    |   C* odelim="(" C* tuple=(pairs=(extra | (((name? tail="?") | rec=/{\brec\b}? name | name?) nested_op? ":" v=(top fall=(":" expr)?))):SEQ_COMMENT,?) C* cdelim=")" C*
 };
 
 .macro sparse_sequence(top) {
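Reviewer note (illustration only, not part of the patch): the sketch below shows, in the style of the tests above, the hand-written merge that the new nested-op sugar is expected to be equivalent to. The test name is hypothetical; AssertCodesEvalToSameValue is the helper already used in expr_binary_test.go.

// Hypothetical companion test (a sketch, assuming the sugar behaves as the tests above describe).
func TestOpsNestedAddArrowHandWrittenEquivalent(t *testing.T) {
    t.Parallel()
    // Sugared form: `b|:` unions {2} into the existing value of a.b.
    AssertCodesEvalToSameValue(t,
        `(a: (b: {1, 2}))`,
        `(a: (b: {1})) +> (a+>: (b|: {2}))`,
    )
    // The same merge written out by hand, without the nested-op sugar.
    AssertCodesEvalToSameValue(t,
        `(a: (b: {1, 2}))`,
        `let x = (a: (b: {1})); x +> (a: x.a +> (b: x.a.b | {2}))`,
    )
}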