diff --git a/TOUR.md b/TOUR.md
new file mode 100644
index 0000000..dbf6806
--- /dev/null
+++ b/TOUR.md
@@ -0,0 +1,12 @@
+# Tour
+
+`biscuit-zig` is split into a number of modules. The following description should provide helpful orientation:
+
+- `biscuit-schema`
+  - Contains the `schema.proto` from the official biscuit repo
+  - Contains `schema.pb.zig`, which is generated from `schema.proto` using https://github.com/Arwalk/zig-protobuf. This powers biscuit deserialization.
+- `biscuit-format`
+  - Provides an intermediate `SerializedBiscuit` type that deserializes the protobuf format and verifies the biscuit.
+- `biscuit`
+  - Provides the runtime representation of a biscuit; this is the main interface a consumer of the library will use.
+  - A `Biscuit` can be initialized
diff --git a/biscuit-builder/README.md b/biscuit-builder/README.md
new file mode 100644
index 0000000..d22ff2a
--- /dev/null
+++ b/biscuit-builder/README.md
@@ -0,0 +1 @@
+# biscuit-builder
diff --git a/biscuit-builder/build.zig b/biscuit-builder/build.zig
new file mode 100644
index 0000000..6864d4c
--- /dev/null
+++ b/biscuit-builder/build.zig
@@ -0,0 +1,49 @@
+const std = @import("std");
+
+// Although this function looks imperative, note that its job is to
+// declaratively construct a build graph that will be executed by an external
+// runner.
+pub fn build(b: *std.Build) void {
+    // Standard target options allow the person running `zig build` to choose
+    // what target to build for. Here we do not override the defaults, which
+    // means any target is allowed, and the default is native. Other options
+    // for restricting supported target set are available.
+    const target = b.standardTargetOptions(.{});
+
+    // Standard optimization options allow the person running `zig build` to select
+    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do not
+    // set a preferred release mode, allowing the user to decide how to optimize.
+    const optimize = b.standardOptimizeOption(.{});
+
+    const schema = b.dependency("biscuit-schema", .{ .target = target, .optimize = optimize });
+    const format = b.dependency("biscuit-format", .{ .target = target, .optimize = optimize });
+    const datalog = b.dependency("biscuit-datalog", .{ .target = target, .optimize = optimize });
+
+    _ = b.addModule("biscuit-builder", .{
+        .root_source_file = .{ .path = "src/root.zig" },
+        .imports = &.{
+            .{ .name = "biscuit-schema", .module = schema.module("biscuit-schema") },
+            .{ .name = "biscuit-format", .module = format.module("biscuit-format") },
+            .{ .name = "biscuit-datalog", .module = datalog.module("biscuit-datalog") },
+        },
+    });
+
+    // Creates a step for unit testing. This only builds the test executable
+    // but does not run it.
+    const lib_unit_tests = b.addTest(.{
+        .root_source_file = .{ .path = "src/root.zig" },
+        .target = target,
+        .optimize = optimize,
+    });
+    lib_unit_tests.root_module.addImport("biscuit-schema", schema.module("biscuit-schema"));
+    lib_unit_tests.root_module.addImport("biscuit-format", format.module("biscuit-format"));
+    lib_unit_tests.root_module.addImport("biscuit-datalog", datalog.module("biscuit-datalog"));
+
+    const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);
+
+    // Similar to creating the run step earlier, this exposes a `test` step to
+    // the `zig build --help` menu, providing a way for the user to request
+    // running the unit tests.
+ const test_step = b.step("test", "Run unit tests"); + test_step.dependOn(&run_lib_unit_tests.step); +} diff --git a/biscuit-builder/build.zig.zon b/biscuit-builder/build.zig.zon new file mode 100644 index 0000000..c693ce8 --- /dev/null +++ b/biscuit-builder/build.zig.zon @@ -0,0 +1,65 @@ +.{ + .name = "biscuit-builder", + // This is a [Semantic Version](https://semver.org/). + // In a future version of Zig it will be used for package deduplication. + .version = "0.0.0", + + // This field is optional. + // This is currently advisory only; Zig does not yet do anything + // with this value. + //.minimum_zig_version = "0.11.0", + + // This field is optional. + // Each dependency must either provide a `url` and `hash`, or a `path`. + // `zig build --fetch` can be used to fetch all dependencies of a package, recursively. + // Once all dependencies are fetched, `zig build` no longer requires + // internet connectivity. + .dependencies = .{ + // See `zig fetch --save ` for a command-line interface for adding dependencies. + //.example = .{ + // // When updating this field to a new URL, be sure to delete the corresponding + // // `hash`, otherwise you are communicating that you expect to find the old hash at + // // the new URL. + // .url = "https://example.com/foo.tar.gz", + // + // // This is computed from the file contents of the directory of files that is + // // obtained after fetching `url` and applying the inclusion rules given by + // // `paths`. + // // + // // This field is the source of truth; packages do not come from a `url`; they + // // come from a `hash`. `url` is just one of many possible mirrors for how to + // // obtain a package matching this `hash`. + // // + // // Uses the [multihash](https://multiformats.io/multihash/) format. + // .hash = "...", + // + // // When this is provided, the package is found in a directory relative to the + // // build root. In this case the package's hash is irrelevant and therefore not + // // computed. This field and `url` are mutually exclusive. + // .path = "foo", + //}, + .@"biscuit-schema" = .{ .path = "../biscuit-schema" }, + .@"biscuit-format" = .{ .path = "../biscuit-format" }, + .@"biscuit-datalog" = .{ .path = "../biscuit-datalog" }, + }, + + // Specifies the set of files and directories that are included in this package. + // Only files and directories listed here are included in the `hash` that + // is computed for this package. + // Paths are relative to the build root. Use the empty string (`""`) to refer to + // the build root itself. + // A directory listed here means that all files within, recursively, are included. + .paths = .{ + // This makes *all* files, recursively, included in this package. It is generally + // better to explicitly list the files and directories instead, to insure that + // fetching from tarballs, file system paths, and version control all result + // in the same contents hash. + "", + // For example... 
+ //"build.zig", + //"build.zig.zon", + //"src", + //"LICENSE", + //"README.md", + }, +} diff --git a/biscuit-builder/src/check.zig b/biscuit-builder/src/check.zig new file mode 100644 index 0000000..6e3f2e9 --- /dev/null +++ b/biscuit-builder/src/check.zig @@ -0,0 +1,41 @@ +const std = @import("std"); +const datalog = @import("biscuit-datalog"); +const Predicate = @import("predicate.zig").Predicate; +const Term = @import("term.zig").Term; +const Rule = @import("rule.zig").Rule; + +pub const Check = struct { + kind: datalog.Check.Kind, + queries: std.ArrayList(Rule), + + pub fn deinit(check: Check) void { + for (check.queries.items) |query| { + query.deinit(); + } + + check.queries.deinit(); + } + + pub fn convert(check: Check, allocator: std.mem.Allocator, symbols: *datalog.SymbolTable) !datalog.Check { + var queries = std.ArrayList(datalog.Rule).init(allocator); + + for (check.queries.items) |query| { + try queries.append(try query.convert(allocator, symbols)); + } + + return .{ .kind = check.kind, .queries = queries }; + } + + pub fn format(check: Check, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try writer.print("check ", .{}); + + switch (check.kind) { + .one => try writer.print("if", .{}), + .all => try writer.print("all", .{}), + } + + for (check.queries.items) |query| { + try writer.print(" {any}", .{query}); + } + } +}; diff --git a/biscuit-builder/src/date.zig b/biscuit-builder/src/date.zig new file mode 100644 index 0000000..47db80c --- /dev/null +++ b/biscuit-builder/src/date.zig @@ -0,0 +1,171 @@ +const std = @import("std"); + +pub const Date = struct { + year: i32, + month: u8, + day: u8, + hour: u8, + minute: u8, + second: u8, + nanosecond: u32, + // Timezone offset in minutes from UTC; can be negative + utc_offset: i32, + + pub fn eql(left: Date, right: Date) bool { + return left.year == right.year and + left.month == right.month and + left.day == right.day and + left.hour == right.hour and + left.minute == right.minute and + left.second == right.second and + left.nanosecond == right.nanosecond; + } + + pub fn lt(left: Date, right: Date) bool { + if (left.year < right.year) return true; + if (left.year > right.year) return false; + + std.debug.assert(left.year == right.year); + + if (left.month < right.month) return true; + if (left.month > right.month) return false; + + std.debug.assert(left.month == right.month); + + if (left.day < right.day) return true; + if (left.day > right.day) return false; + + std.debug.assert(left.day == right.day); + + if (left.hour < right.hour) return true; + if (left.hour > right.hour) return false; + + std.debug.assert(left.hour == right.hour); + + if (left.minute < right.minute) return true; + if (left.minute > right.minute) return false; + + std.debug.assert(left.minute == right.minute); + + if (left.second < right.second) return true; + if (left.second > right.second) return false; + + std.debug.assert(left.second == right.second); + + if (left.nanosecond < right.nanosecond) return true; + if (left.nanosecond > right.nanosecond) return false; + + std.debug.assert(left.nanosecond == right.nanosecond); + + return false; + } + + pub fn gt(left: Date, right: Date) bool { + if (left.year > right.year) return true; + if (left.year < right.year) return false; + + std.debug.assert(left.year == right.year); + + if (left.month > right.month) return true; + if (left.month < right.month) return false; + + std.debug.assert(left.month == right.month); + + if (left.day > right.day) return true; + if (left.day < 
right.day) return false;
+
+        std.debug.assert(left.day == right.day);
+
+        if (left.hour > right.hour) return true;
+        if (left.hour < right.hour) return false;
+
+        std.debug.assert(left.hour == right.hour);
+
+        if (left.minute > right.minute) return true;
+        if (left.minute < right.minute) return false;
+
+        std.debug.assert(left.minute == right.minute);
+
+        if (left.second > right.second) return true;
+        if (left.second < right.second) return false;
+
+        std.debug.assert(left.second == right.second);
+
+        if (left.nanosecond > right.nanosecond) return true;
+        if (left.nanosecond < right.nanosecond) return false;
+
+        std.debug.assert(left.nanosecond == right.nanosecond);
+
+        return false;
+    }
+
+    pub fn lteq(left: Date, right: Date) bool {
+        return left.eql(right) or left.lt(right);
+    }
+
+    pub fn gteq(left: Date, right: Date) bool {
+        return left.eql(right) or left.gt(right);
+    }
+
+    pub fn format(date: Date, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+        return writer.print("{d:0>4}-{d:0>2}-{d:0>2}T{d:0>2}:{d:0>2}:{d:0>2}Z", .{ date.year, date.month, date.day, date.hour, date.minute, date.second });
+    }
+
+    // FIXME: leap seconds?
+    pub fn unixEpoch(date: Date) u64 {
+        var total_days: usize = 0;
+
+        const date_year = @as(usize, @intCast(date.year));
+
+        for (1970..date_year) |year| {
+            if (isLeapYear(usize, year)) {
+                total_days += 366;
+            } else {
+                total_days += 365;
+            }
+        }
+
+        for (1..date.month) |month| {
+            total_days += daysInMonth(usize, date_year, @as(u8, @intCast(month)));
+        }
+
+        for (1..date.day) |_| {
+            total_days += 1;
+        }
+
+        var total_seconds: u64 = total_days * 24 * 60 * 60;
+
+        total_seconds += @as(u64, date.hour) * 60 * 60;
+        total_seconds += @as(u64, date.minute) * 60;
+        total_seconds += date.second;
+
+        return total_seconds;
+    }
+
+    pub fn isDayMonthYearValid(comptime T: type, year: T, month: u8, day: u8) bool {
+        return switch (month) {
+            // 30 days hath September, April, June, and November
+            4, 6, 9, 11 => day <= 30,
+            1, 3, 5, 7, 8, 10, 12 => day <= 31,
+            2 => if (isLeapYear(T, year)) day <= 29 else day <= 28,
+            else => false,
+        };
+    }
+};
+
+pub fn daysInMonth(comptime T: type, year: T, month: u8) u8 {
+    return switch (month) {
+        4, 6, 9, 11 => 30,
+        1, 3, 5, 7, 8, 10, 12 => 31,
+        2 => if (isLeapYear(T, year)) 29 else 28,
+        else => unreachable,
+    };
+}
+
+fn isLeapYear(comptime T: type, year: T) bool {
+    if (@mod(year, 400) == 0) return true;
+    if (@mod(year, 100) == 0) return false;
+    if (@mod(year, 4) == 0) return true;
+
+    return false;
+}
diff --git a/biscuit-builder/src/expression.zig b/biscuit-builder/src/expression.zig
new file mode 100644
index 0000000..e324e26
--- /dev/null
+++ b/biscuit-builder/src/expression.zig
@@ -0,0 +1,194 @@
+const std = @import("std");
+const datalog = @import("biscuit-datalog");
+const Predicate = @import("predicate.zig").Predicate;
+const Term = @import("term.zig").Term;
+
+const ExpressionType = enum(u8) {
+    value,
+    unary,
+    binary,
+};
+
+pub const Expression = union(ExpressionType) {
+    value: Term,
+    unary: Unary,
+    binary: Binary,
+
+    const Unary = struct {
+        op: UnaryOp,
+        expression: *Expression,
+        allocator: std.mem.Allocator,
+    };
+
+    const Binary = struct {
+        op: BinaryOp,
+        left: *Expression,
+        right: *Expression,
+        allocator: std.mem.Allocator,
+    };
+
+    pub const UnaryOp = enum {
+        negate,
+        parens,
+        length,
+    };
+
+    pub const BinaryOp = enum {
+        less_than,
+        greater_than,
+        less_or_equal,
+        greater_or_equal,
+        equal,
+        contains,
+        prefix,
+        suffix,
+        regex,
+        add,
+        sub,
+        mul,
+        div,
+        @"and",
+        @"or",
+        intersection,
+
@"union", + bitwise_and, + bitwise_or, + bitwise_xor, + not_equal, + }; + + /// convert to datalog fact + pub fn convert(expression: Expression, allocator: std.mem.Allocator, symbols: *datalog.SymbolTable) !datalog.Expression { + var ops = std.ArrayList(datalog.Op).init(allocator); + + try expression.toOpcodes(allocator, &ops, symbols); + + return .{ .ops = ops }; + } + + pub fn toOpcodes(expression: Expression, allocator: std.mem.Allocator, ops: *std.ArrayList(datalog.Op), symbols: *datalog.SymbolTable) !void { + switch (expression) { + .value => |v| try ops.append(.{ .value = try v.convert(allocator, symbols) }), + .unary => |u| { + try u.expression.toOpcodes(allocator, ops, symbols); + + try ops.append(.{ + .unary = switch (u.op) { + .negate => .negate, + .parens => .parens, + .length => .length, + }, + }); + }, + .binary => |b| { + try b.left.toOpcodes(allocator, ops, symbols); + try b.right.toOpcodes(allocator, ops, symbols); + + try ops.append(.{ + .binary = switch (b.op) { + .less_than => .less_than, + .greater_than => .greater_than, + .less_or_equal => .less_or_equal, + .greater_or_equal => .greater_or_equal, + .equal => .equal, + .contains => .contains, + .prefix => .prefix, + .suffix => .suffix, + .regex => .regex, + .add => .add, + .sub => .sub, + .mul => .mul, + .div => .div, + .@"and" => .@"and", + .@"or" => .@"or", + .intersection => .intersection, + .@"union" => .@"union", + .bitwise_and => .bitwise_and, + .bitwise_or => .bitwise_or, + .bitwise_xor => .bitwise_xor, + .not_equal => .not_equal, + }, + }); + }, + } + } + + pub fn deinit(expression: *Expression) void { + switch (expression.*) { + .value => |v| v.deinit(), + .unary => |*u| { + u.expression.deinit(); + + u.allocator.destroy(u.expression); + }, + .binary => |*b| { + b.left.deinit(); + b.right.deinit(); + + b.allocator.destroy(b.left); + b.allocator.destroy(b.right); + }, + } + } + + pub fn value(term: Term) !Expression { + return .{ .value = term }; + } + + pub fn unary(allocator: std.mem.Allocator, op: UnaryOp, expr: Expression) !Expression { + const expr_ptr = try allocator.create(Expression); + + expr_ptr.* = expr; + + return .{ .unary = .{ .op = op, .expression = expr_ptr, .allocator = allocator } }; + } + + pub fn binary(allocator: std.mem.Allocator, op: BinaryOp, left: Expression, right: Expression) !Expression { + const left_ptr = try allocator.create(Expression); + errdefer allocator.destroy(left_ptr); + const right_ptr = try allocator.create(Expression); + + left_ptr.* = left; + right_ptr.* = right; + + return .{ .binary = .{ .op = op, .left = left_ptr, .right = right_ptr, .allocator = allocator } }; + } + + pub fn format(expression: Expression, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + switch (expression) { + .value => |v| try writer.print("{any}", .{v}), + .unary => |u| { + switch (u.op) { + .negate => try writer.print("-{any}", .{u.expression}), + .parens => try writer.print("({any})", .{u.expression}), + .length => try writer.print("{any}.length()", .{u.expression}), + } + }, + .binary => |b| { + switch (b.op) { + .less_than => try writer.print("{any} < {any}", .{ b.left, b.right }), + .greater_than => try writer.print("{any} > {any}", .{ b.left, b.right }), + .less_or_equal => try writer.print("{any} <= {any}", .{ b.left, b.right }), + .greater_or_equal => try writer.print("{any} >= {any}", .{ b.left, b.right }), + .equal => try writer.print("{any} == {any}", .{ b.left, b.right }), + .contains => try writer.print("{any}.contains({any})", .{ b.left, b.right }), + 
.prefix => try writer.print("{any}.starts_with({any})", .{ b.left, b.right }), + .suffix => try writer.print("{any}.ends_with({any})", .{ b.left, b.right }), + .regex => try writer.print("{any}.matches({any})", .{ b.left, b.right }), + .add => try writer.print("{any} + {any}", .{ b.left, b.right }), + .sub => try writer.print("{any} - {any}", .{ b.left, b.right }), + .mul => try writer.print("{any} * {any}", .{ b.left, b.right }), + .div => try writer.print("{any} / {any}", .{ b.left, b.right }), + .@"and" => try writer.print("{any} && {any}", .{ b.left, b.right }), + .@"or" => try writer.print("{any} || {any}", .{ b.left, b.right }), + .intersection => try writer.print("{any}.intersection({any})", .{ b.left, b.right }), + .@"union" => try writer.print("{any}.union({any})", .{ b.left, b.right }), + .bitwise_and => try writer.print("{any} & {any}", .{ b.left, b.right }), + .bitwise_or => try writer.print("{any} | {any}", .{ b.left, b.right }), + .bitwise_xor => try writer.print("{any} ^ {any}", .{ b.left, b.right }), + .not_equal => try writer.print("{any} != {any}", .{ b.left, b.right }), + } + }, + } + } +}; diff --git a/biscuit-builder/src/fact.zig b/biscuit-builder/src/fact.zig new file mode 100644 index 0000000..0b1cd19 --- /dev/null +++ b/biscuit-builder/src/fact.zig @@ -0,0 +1,22 @@ +const std = @import("std"); +const datalog = @import("biscuit-datalog"); +const Predicate = @import("predicate.zig").Predicate; +const Term = @import("term.zig").Term; + +pub const Fact = struct { + predicate: Predicate, + variables: ?std.StringHashMap(?Term), + + pub fn deinit(fact: Fact) void { + fact.predicate.deinit(); + } + + /// convert to datalog fact + pub fn convert(fact: Fact, allocator: std.mem.Allocator, symbols: *datalog.SymbolTable) !datalog.Fact { + return .{ .predicate = try fact.predicate.convert(allocator, symbols) }; + } + + pub fn format(fact: Fact, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + return writer.print("{any}", .{fact.predicate}); + } +}; diff --git a/biscuit-builder/src/policy.zig b/biscuit-builder/src/policy.zig new file mode 100644 index 0000000..637ac63 --- /dev/null +++ b/biscuit-builder/src/policy.zig @@ -0,0 +1,44 @@ +const std = @import("std"); +const datalog = @import("biscuit-datalog"); +const Rule = @import("rule.zig").Rule; + +pub const Policy = struct { + kind: Kind, + queries: std.ArrayList(Rule), + + pub const Kind = enum { + allow, + deny, + }; + + pub fn deinit(policy: Policy) void { + for (policy.queries.items) |query| { + query.deinit(); + } + + policy.queries.deinit(); + } + + // pub fn convert(policy: Policy, allocator: std.mem.Allocator, symbols: *datalog.SymbolTable) !Policy { + // var queries = std.ArrayList(Rule).init(allocator); + + // for (policy.queries.items) |query| { + // try queries.append(try query.convert(allocator, symbols)); + // } + + // return .{ .kind = policy.kind, .queries = queries }; + // } + + pub fn format(policy: Policy, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try writer.print("policy ", .{}); + + switch (policy.kind) { + .allow => try writer.print("allow if", .{}), + .deny => try writer.print("deny if", .{}), + } + + for (policy.queries.items) |query| { + try writer.print(" {any}", .{query}); + } + } +}; diff --git a/biscuit-builder/src/predicate.zig b/biscuit-builder/src/predicate.zig new file mode 100644 index 0000000..8eec66f --- /dev/null +++ b/biscuit-builder/src/predicate.zig @@ -0,0 +1,38 @@ +const std = @import("std"); +const datalog = 
@import("biscuit-datalog"); +const Term = @import("term.zig").Term; + +pub const Predicate = struct { + name: []const u8, + terms: std.ArrayList(Term), + + pub fn deinit(predicate: Predicate) void { + for (predicate.terms.items) |term| { + term.deinit(); + } + + predicate.terms.deinit(); + } + + /// convert to datalog predicate + pub fn convert(predicate: Predicate, allocator: std.mem.Allocator, symbols: *datalog.SymbolTable) !datalog.Predicate { + const name = try symbols.insert(predicate.name); + + var terms = std.ArrayList(datalog.Term).init(allocator); + + for (predicate.terms.items) |term| { + try terms.append(try term.convert(allocator, symbols)); + } + + return .{ .name = name, .terms = terms }; + } + + pub fn format(predicate: Predicate, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try writer.print("{s}(", .{predicate.name}); + for (predicate.terms.items, 0..) |*term, i| { + try writer.print("{any}", .{term.*}); + if (i < predicate.terms.items.len - 1) try writer.print(", ", .{}); + } + return writer.print(")", .{}); + } +}; diff --git a/biscuit-builder/src/root.zig b/biscuit-builder/src/root.zig new file mode 100644 index 0000000..89c79ff --- /dev/null +++ b/biscuit-builder/src/root.zig @@ -0,0 +1,9 @@ +pub const Fact = @import("fact.zig").Fact; +pub const Predicate = @import("predicate.zig").Predicate; +pub const Term = @import("term.zig").Term; +pub const Check = @import("check.zig").Check; +pub const Rule = @import("rule.zig").Rule; +pub const Expression = @import("expression.zig").Expression; +pub const Scope = @import("scope.zig").Scope; +pub const Date = @import("date.zig").Date; +pub const Policy = @import("policy.zig").Policy; diff --git a/biscuit-builder/src/rule.zig b/biscuit-builder/src/rule.zig new file mode 100644 index 0000000..ab4540a --- /dev/null +++ b/biscuit-builder/src/rule.zig @@ -0,0 +1,80 @@ +const std = @import("std"); +const datalog = @import("biscuit-datalog"); +const Predicate = @import("predicate.zig").Predicate; +const Term = @import("term.zig").Term; +const Expression = @import("expression.zig").Expression; +const Scope = @import("scope.zig").Scope; + +pub const Rule = struct { + head: Predicate, + body: std.ArrayList(Predicate), + expressions: std.ArrayList(Expression), + variables: ?std.StringHashMap(?Term), + scopes: std.ArrayList(Scope), + + pub fn deinit(rule: Rule) void { + rule.head.deinit(); + + for (rule.body.items) |predicate| { + predicate.deinit(); + } + + for (rule.expressions.items) |*expression| { + expression.deinit(); + } + + rule.body.deinit(); + rule.expressions.deinit(); + rule.scopes.deinit(); + } + + /// convert to datalog predicate + pub fn convert(rule: Rule, allocator: std.mem.Allocator, symbols: *datalog.SymbolTable) !datalog.Rule { + const head = try rule.head.convert(allocator, symbols); + + var body = std.ArrayList(datalog.Predicate).init(allocator); + var expressions = std.ArrayList(datalog.Expression).init(allocator); + var scopes = std.ArrayList(datalog.Scope).init(allocator); + + for (rule.body.items) |predicate| { + try body.append(try predicate.convert(allocator, symbols)); + } + + for (rule.expressions.items) |expression| { + try expressions.append(try expression.convert(allocator, symbols)); + } + + for (rule.scopes.items) |scope| { + try scopes.append(try scope.convert(allocator, symbols)); + } + + return .{ + .head = head, + .body = body, + .expressions = expressions, + .scopes = scopes, + }; + } + + pub fn format(rule: Rule, comptime _: []const u8, _: std.fmt.FormatOptions, 
writer: anytype) !void { + try writer.print("{any} <- ", .{rule.head}); + for (rule.body.items, 0..) |*predicate, i| { + try writer.print("{any}", .{predicate.*}); + if (i < rule.body.items.len - 1) try writer.print(", ", .{}); + } + + if (rule.expressions.items.len > 0) try writer.print(", ", .{}); + + for (rule.expressions.items, 0..) |*expression, i| { + try writer.print("{any}", .{expression.*}); + if (i < rule.expressions.items.len - 1) try writer.print(", ", .{}); + } + + if (rule.expressions.items.len > 0) try writer.print(", ", .{}); + + for (rule.scopes.items, 0..) |*scopes, i| { + try writer.print("{any}", .{scopes.*}); + if (i < rule.scopes.items.len - 1) try writer.print(", ", .{}); + } + } +}; diff --git a/biscuit-builder/src/scope.zig b/biscuit-builder/src/scope.zig new file mode 100644 index 0000000..eae862c --- /dev/null +++ b/biscuit-builder/src/scope.zig @@ -0,0 +1,30 @@ +const std = @import("std"); +const datalog = @import("biscuit-datalog"); +const Predicate = @import("predicate.zig").Predicate; +const Term = @import("term.zig").Term; + +const Ed25519 = std.crypto.sign.Ed25519; + +const ScopeKind = enum(u8) { + authority, + previous, + public_key, + parameter, +}; + +pub const Scope = union(ScopeKind) { + authority: void, + previous: void, + public_key: Ed25519.PublicKey, + parameter: []const u8, + + /// convert to datalog fact + pub fn convert(scope: Scope, _: std.mem.Allocator, symbols: *datalog.SymbolTable) !datalog.Scope { + return switch (scope) { + .authority => .{ .authority = {} }, + .previous => .{ .previous = {} }, + .public_key => |pk| .{ .public_key = try symbols.insertPublicKey(pk) }, + .parameter => unreachable, + }; + } +}; diff --git a/biscuit-builder/src/term.zig b/biscuit-builder/src/term.zig new file mode 100644 index 0000000..f3ffa45 --- /dev/null +++ b/biscuit-builder/src/term.zig @@ -0,0 +1,41 @@ +const std = @import("std"); +const datalog = @import("biscuit-datalog"); +const Date = @import("date.zig").Date; + +const TermTag = enum(u8) { + variable, + string, + integer, + bool, + date, +}; + +pub const Term = union(TermTag) { + variable: []const u8, + string: []const u8, + integer: i64, + bool: bool, + date: u64, + + pub fn deinit(_: Term) void {} + + pub fn convert(term: Term, _: std.mem.Allocator, symbols: *datalog.SymbolTable) !datalog.Term { + return switch (term) { + .variable => |s| .{ .variable = @truncate(try symbols.insert(s)) }, // FIXME: assert symbol fits in u32 + .string => |s| .{ .string = try symbols.insert(s) }, + .integer => |n| .{ .integer = n }, + .bool => |b| .{ .bool = b }, + .date => |d| .{ .date = d }, + }; + } + + pub fn format(term: Term, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + switch (term) { + .variable => |v| try writer.print("${s}", .{v}), + .string => |s| try writer.print("\"{s}\"", .{s}), + .integer => |n| try writer.print("{}", .{n}), + .bool => |b| if (b) try writer.print("true", .{}) else try writer.print("false", .{}), + .date => |n| try writer.print("{}", .{n}), + } + } +}; diff --git a/biscuit-datalog/build.zig b/biscuit-datalog/build.zig index 458a5d1..c6df4e7 100644 --- a/biscuit-datalog/build.zig +++ b/biscuit-datalog/build.zig @@ -17,12 +17,14 @@ pub fn build(b: *std.Build) void { const schema = b.dependency("biscuit_schema", .{ .target = target, .optimize = optimize }); const format = b.dependency("biscuit_format", .{ .target = target, .optimize = optimize }); + const regex = b.dependency("regex", .{ .target = target, .optimize = optimize }); _ = 
b.addModule("biscuit-datalog", .{ .root_source_file = .{ .path = "src/main.zig" }, .imports = &.{ .{ .name = "biscuit-schema", .module = schema.module("biscuit-schema") }, .{ .name = "biscuit-format", .module = format.module("biscuit-format") }, + .{ .name = "regex", .module = regex.module("regex") }, }, }); @@ -35,6 +37,7 @@ pub fn build(b: *std.Build) void { }); lib_unit_tests.root_module.addImport("biscuit-schema", schema.module("biscuit-schema")); lib_unit_tests.root_module.addImport("biscuit-format", format.module("biscuit-format")); + lib_unit_tests.root_module.addImport("regex", regex.module("regex")); const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests); diff --git a/biscuit-datalog/build.zig.zon b/biscuit-datalog/build.zig.zon index 05108cf..3736c09 100644 --- a/biscuit-datalog/build.zig.zon +++ b/biscuit-datalog/build.zig.zon @@ -40,6 +40,10 @@ //}, .biscuit_schema = .{ .path = "../biscuit-schema" }, .biscuit_format = .{ .path = "../biscuit-format" }, + .regex = .{ + .url = "https://github.com/tiehuis/zig-regex/archive/825181369f30bc0d0554d9d330d34db4162b91e5.tar.gz", + .hash = "122090c3c6b4a8745fb22e721ae4e2e55b73347769696fd84170c660fe16c1f062e6", + }, }, // Specifies the set of files and directories that are included in this package. diff --git a/biscuit-datalog/src/check.zig b/biscuit-datalog/src/check.zig index 6aa6c67..5a4564e 100644 --- a/biscuit-datalog/src/check.zig +++ b/biscuit-datalog/src/check.zig @@ -1,12 +1,13 @@ const std = @import("std"); const schema = @import("biscuit-schema"); const Rule = @import("rule.zig").Rule; +const SymbolTable = @import("symbol_table.zig").SymbolTable; pub const Check = struct { queries: std.ArrayList(Rule), kind: Kind, - const Kind = enum(u8) { one, all }; + pub const Kind = enum(u8) { one, all }; pub fn fromSchema(allocator: std.mem.Allocator, schema_check: schema.CheckV2) !Check { var rules = std.ArrayList(Rule).init(allocator); @@ -27,6 +28,7 @@ pub const Check = struct { for (check.queries.items) |*query| { query.deinit(); } + check.queries.deinit(); } @@ -38,4 +40,17 @@ pub const Check = struct { } return writer.print("", .{}); } + + pub fn convert(check: Check, old_symbols: *const SymbolTable, new_symbols: *SymbolTable) !Check { + var queries = try check.queries.clone(); + + for (queries.items, 0..) 
|query, i| { + queries.items[i] = try query.convert(old_symbols, new_symbols); + } + + return .{ + .queries = queries, + .kind = check.kind, + }; + } }; diff --git a/biscuit-datalog/src/combinator.zig b/biscuit-datalog/src/combinator.zig index a34c9ea..baf4ad5 100644 --- a/biscuit-datalog/src/combinator.zig +++ b/biscuit-datalog/src/combinator.zig @@ -4,7 +4,10 @@ const mem = std.mem; const Fact = @import("fact.zig").Fact; const Predicate = @import("predicate.zig").Predicate; const Term = @import("term.zig").Term; -const Set = @import("set.zig").Set; +const FactSet = @import("fact_set.zig").FactSet; +const Origin = @import("origin.zig").Origin; +const TrustedOrigins = @import("trusted_origins.zig").TrustedOrigins; +const Expression = @import("expression.zig").Expression; const MatchedVariables = @import("matched_variables.zig").MatchedVariables; const SymbolTable = @import("symbol_table.zig").SymbolTable; @@ -53,52 +56,60 @@ pub const Combinator = struct { variables: MatchedVariables, next_combinator: ?*Combinator, // Current combinator for the next predicate predicates: []Predicate, // List of the predicates so we can generate new Combinators + expressions: []Expression, current_bindings: ?std.AutoHashMap(u64, Term) = null, - facts: *const Set(Fact), - fact_iterator: Set(Fact).Iterator, - symbols: SymbolTable, + facts: *const FactSet, + trusted_fact_iterator: FactSet.TrustedIterator, + symbols: *SymbolTable, + trusted_origins: TrustedOrigins, - pub fn init(id: usize, allocator: mem.Allocator, variables: MatchedVariables, predicates: []Predicate, all_facts: *const Set(Fact), symbols: SymbolTable) !*Combinator { - std.debug.print("Init combinator[{}]: predicates = {any}\n", .{ id, predicates }); - const c = try allocator.create(Combinator); - - c.* = .{ + pub fn init(id: usize, allocator: mem.Allocator, variables: MatchedVariables, predicates: []Predicate, expressions: []Expression, all_facts: *const FactSet, symbols: *SymbolTable, trusted_origins: TrustedOrigins) Combinator { + return .{ .id = id, .allocator = allocator, .next_combinator = null, .facts = all_facts, .predicates = predicates, + .expressions = expressions, .variables = variables, .symbols = symbols, - .fact_iterator = all_facts.iterator(), + .trusted_fact_iterator = all_facts.trustedIterator(trusted_origins), + .trusted_origins = trusted_origins, }; - - return c; } pub fn deinit(combinator: *Combinator) void { combinator.variables.deinit(); - combinator.allocator.destroy(combinator); + // combinator.allocator.destroy(combinator); } + // QUESTION: is the return value guaranteed to be complete? I.e. each variable has been matched with some non-variable term? 
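+    // Example (hypothetical facts, added for illustration): for a rule body such as
+    //
+    //     right($file, "read"), resource($file)
+    //
+    // combinator 0 walks the trusted facts matching `right`, binding $file for
+    // each candidate fact; for every consistent binding it creates combinator 1
+    // over `resource`. Only bindings that satisfy every predicate in the body
+    // are yielded from `next`, together with an `Origin` used to track the
+    // provenance of the derived fact.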
/// next returns the next _valid_ combination of variable bindings - pub fn next(combinator: *Combinator) !?MatchedVariables { + pub fn next(combinator: *Combinator) !?struct { Origin, MatchedVariables } { blk: while (true) { // Return from next combinator until expended if (combinator.next_combinator) |c| { - if (try c.next()) |vars| { - return vars; + if (try c.next()) |origin_vars| { + return origin_vars; } else { + // Deinit the existing combinator and free its memory c.deinit(); + combinator.allocator.destroy(c); combinator.next_combinator = null; continue; } } - const fact = combinator.fact_iterator.next() orelse return null; + // Lookup the next (trusted) fact + const origin_fact = combinator.trusted_fact_iterator.next() orelse return null; + + std.debug.print("combinator next trusted fact: {any}\n", .{origin_fact.fact}); + + const origin = origin_fact.origin.*; + const fact = origin_fact.fact.*; + // Only consider facts that match the current predicate if (!fact.matchPredicate(combinator.predicates[0])) continue; - std.debug.print("combinator[{}]: fact = {any}\n", .{ combinator.id, fact }); var vars: MatchedVariables = try combinator.variables.clone(); @@ -109,6 +120,8 @@ pub const Combinator = struct { // Since we are pulling terms out of a fact, we know // ahead of time that none of the terms will be variables. const fact_term = fact.predicate.terms.items[i]; + std.debug.assert(fact_term != .variable); + if (!(try vars.insert(sym, fact_term))) { // We have already bound this variable to a different // term, the current fact does work with previous @@ -117,14 +130,28 @@ pub const Combinator = struct { } } - // std.debug.print("len = {}\n", .{combinator.predicates[1..].len}); const next_predicates = combinator.predicates[1..]; + if (next_predicates.len == 0) { - return vars; + return .{ origin, vars }; } else { + std.debug.assert(combinator.next_combinator == null); if (combinator.next_combinator) |c| c.deinit(); - combinator.next_combinator = try Combinator.init(combinator.id + 1, combinator.allocator, vars, next_predicates, combinator.facts, combinator.symbols); + const combinator_ptr = try combinator.allocator.create(Combinator); + + combinator_ptr.* = Combinator.init( + combinator.id + 1, + combinator.allocator, + vars, + next_predicates, + combinator.expressions, + combinator.facts, + combinator.symbols, + combinator.trusted_origins, + ); + + combinator.next_combinator = combinator_ptr; } } diff --git a/biscuit-datalog/src/expression.zig b/biscuit-datalog/src/expression.zig index ddf5d1a..74ee6ea 100644 --- a/biscuit-datalog/src/expression.zig +++ b/biscuit-datalog/src/expression.zig @@ -1,6 +1,7 @@ const std = @import("std"); const mem = std.mem; const schema = @import("biscuit-schema"); +const Regex = @import("regex").Regex; const Term = @import("term.zig").Term; const SymbolTable = @import("symbol_table.zig").SymbolTable; @@ -60,11 +61,11 @@ pub const Expression = struct { expression.ops.deinit(); } - pub fn evaluate(expr: Expression, allocator: mem.Allocator, values: std.AutoHashMap(u32, Term), symbols: SymbolTable) !Term { + pub fn evaluate(expr: Expression, allocator: mem.Allocator, values: std.AutoHashMap(u32, Term), symbols: *SymbolTable) !Term { var stack = std.ArrayList(Term).init(allocator); defer stack.deinit(); - for (expr.ops) |op| { + for (expr.ops.items) |op| { switch (op) { .value => |term| { switch (term) { @@ -86,7 +87,7 @@ pub const Expression = struct { const right = stack.popOrNull() orelse return error.StackUnderflow; const left = stack.popOrNull() 
orelse return error.StackUnderflow; - const result = try binary_op.evaluate(left, right, symbols); + const result = try binary_op.evaluate(allocator, left, right, symbols); try stack.append(result); }, @@ -97,6 +98,62 @@ pub const Expression = struct { return stack.items[0]; } + + pub fn convert(expression: Expression, old_symbols: *const SymbolTable, new_symbols: *SymbolTable) !Expression { + // std.debug.print("Converting expression\n", .{}); + const ops = try expression.ops.clone(); + + for (ops.items, 0..) |op, i| { + ops.items[i] = switch (op) { + .value => |trm| .{ .value = try trm.convert(old_symbols, new_symbols) }, + else => op, + }; + } + + return .{ .ops = ops }; + } + + pub fn format(expression: Expression, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + for (expression.ops.items) |op| { + switch (op) { + .value => |v| try writer.print("{any}", .{v}), + .unary => |u| { + switch (u) { + .negate => try writer.print("neg", .{}), + .parens => try writer.print("paren", .{}), + .length => try writer.print("length", .{}), + } + }, + .binary => |b| { + switch (b) { + .less_than => try writer.print("<", .{}), + .greater_than => try writer.print(">", .{}), + .less_or_equal => try writer.print("<=", .{}), + .greater_or_equal => try writer.print(">=", .{}), + .equal => try writer.print("==", .{}), + .contains => try writer.print("contains", .{}), + .prefix => try writer.print("starts_with", .{}), + .suffix => try writer.print("ends_with", .{}), + .regex => try writer.print("matches", .{}), + .add => try writer.print("+", .{}), + .sub => try writer.print("-", .{}), + .mul => try writer.print("*", .{}), + .div => try writer.print("/", .{}), + .@"and" => try writer.print("&&", .{}), + .@"or" => try writer.print("||", .{}), + .intersection => try writer.print("intersection", .{}), + .@"union" => try writer.print("union", .{}), + .bitwise_and => try writer.print("&", .{}), + .bitwise_or => try writer.print("|", .{}), + .bitwise_xor => try writer.print("^", .{}), + .not_equal => try writer.print("!=", .{}), + } + }, + } + + try writer.print(" ", .{}); + } + } }; const OpKind = enum(u8) { @@ -105,7 +162,7 @@ const OpKind = enum(u8) { binary, }; -const Op = union(OpKind) { +pub const Op = union(OpKind) { value: Term, unary: Unary, binary: Binary, @@ -116,13 +173,18 @@ const Unary = enum { parens, length, - pub fn evaluate(expr: Unary, value: Term, symbols: SymbolTable) !Term { - _ = symbols; // Different type instead of SymbolTable - // + pub fn evaluate(expr: Unary, value: Term, symbols: *SymbolTable) !Term { return switch (expr) { .negate => if (value == .bool) .{ .bool = !value.bool } else return error.UnexpectedTermInUnaryNegate, .parens => value, - else => error.UnexpectedUnaryTermCombination, + .length => .{ + .integer = switch (value) { + .string => |index| std.math.cast(i64, (try symbols.getString(index)).len) orelse return error.FailedToCaseInt, + .bytes => |b| std.math.cast(i64, b.len) orelse return error.FailedToCaseInt, + .set => |s| std.math.cast(i64, s.count()) orelse return error.FailedToCaseInt, + else => return error.LengthNotSupportedOnValue, + }, + }, }; } }; @@ -150,7 +212,7 @@ const Binary = enum { bitwise_xor, not_equal, - pub fn evaluate(expr: Binary, left: Term, right: Term, symbols: SymbolTable) !Term { + pub fn evaluate(expr: Binary, allocator: std.mem.Allocator, left: Term, right: Term, symbols: *SymbolTable) !Term { // Integer operands if (left == .integer and right == .integer) { const i = left.integer; @@ -163,9 +225,9 @@ const Binary = 
enum { .greater_or_equal => .{ .bool = i >= j }, .equal => .{ .bool = i == j }, .not_equal => .{ .bool = i != j }, - .add => .{ .integer = i + j }, - .sub => .{ .integer = i - j }, - .mul => .{ .integer = i * j }, + .add => .{ .integer = try std.math.add(i64, i, j) }, + .sub => .{ .integer = try std.math.sub(i64, i, j) }, + .mul => .{ .integer = try std.math.mul(i64, i, j) }, .div => .{ .integer = @divExact(i, j) }, .bitwise_and => .{ .integer = i & j }, .bitwise_or => .{ .integer = i | j }, @@ -179,9 +241,9 @@ const Binary = enum { return switch (expr) { .prefix => .{ .bool = mem.startsWith(u8, sl, sr) }, .suffix => .{ .bool = mem.endsWith(u8, sl, sr) }, - .regex => return error.RegexUnimplemented, + .regex => .{ .bool = try match(allocator, sr, sl) }, .contains => .{ .bool = mem.containsAtLeast(u8, sl, 1, sr) }, - .add => return error.StringConcatNotImplemented, + .add => .{ .string = try symbols.insert(try concat(allocator, sl, sr)) }, .equal => .{ .bool = mem.eql(u8, sl, sr) }, .not_equal => .{ .bool = !mem.eql(u8, sl, sr) }, else => return error.UnexpectedOperationForStringOperands, @@ -236,19 +298,29 @@ const Binary = enum { } }; +fn match(allocator: std.mem.Allocator, regex: []const u8, string: []const u8) !bool { + var re = try Regex.compile(allocator, regex); + + return re.partialMatch(string); +} + +fn concat(allocator: std.mem.Allocator, left: []const u8, right: []const u8) ![]const u8 { + return try std.mem.concat(allocator, u8, &[_][]const u8{ left, right }); +} + test { const testing = std.testing; const t1: Term = .{ .integer = 10 }; const t2: Term = .{ .integer = 22 }; - try testing.expectEqual(@as(Term, .{ .bool = false }), try Binary.equal.evaluate(t1, t2, SymbolTable.init(testing.allocator))); - try testing.expectEqual(@as(Term, .{ .bool = true }), try Binary.equal.evaluate(t1, t1, SymbolTable.init(testing.allocator))); + try testing.expectEqual(@as(Term, .{ .bool = false }), try Binary.equal.evaluate(t1, t2, SymbolTable.init("test", testing.allocator))); + try testing.expectEqual(@as(Term, .{ .bool = true }), try Binary.equal.evaluate(t1, t1, SymbolTable.init("test", testing.allocator))); - try testing.expectEqual(@as(Term, .{ .integer = 32 }), try Binary.add.evaluate(t1, t2, SymbolTable.init(testing.allocator))); - try testing.expectEqual(@as(Term, .{ .integer = 220 }), try Binary.mul.evaluate(t1, t2, SymbolTable.init(testing.allocator))); + try testing.expectEqual(@as(Term, .{ .integer = 32 }), try Binary.add.evaluate(t1, t2, SymbolTable.init("test", testing.allocator))); + try testing.expectEqual(@as(Term, .{ .integer = 220 }), try Binary.mul.evaluate(t1, t2, SymbolTable.init("test", testing.allocator))); - var symbols = SymbolTable.init(testing.allocator); + var symbols = SymbolTable.init("test", testing.allocator); defer symbols.deinit(); const s = .{ .string = try symbols.insert("prefix_middle_suffix") }; @@ -267,7 +339,7 @@ test { // test "negate" { // const testing = std.testing; -// var symbols = SymbolTable.init(testing.allocator); +// var symbols = SymbolTable.init("test", testing.allocator); // defer symbols.deinit(); // _ = try symbols.insert("test1"); diff --git a/biscuit-datalog/src/fact_set.zig b/biscuit-datalog/src/fact_set.zig new file mode 100644 index 0000000..a0fd9f4 --- /dev/null +++ b/biscuit-datalog/src/fact_set.zig @@ -0,0 +1,200 @@ +const std = @import("std"); +const Wyhash = std.hash.Wyhash; +const Fact = @import("fact.zig").Fact; +const Set = @import("set.zig").Set; +const Origin = @import("origin.zig").Origin; +const TrustedOrigins = 
@import("trusted_origins.zig").TrustedOrigins; + +pub const FactSet = struct { + sets: InnerMap, + allocator: std.mem.Allocator, + + const InnerMap = std.HashMap(Origin, Set(Fact), Context, 80); + + const Context = struct { + pub fn hash(ctx: Context, key: Origin) u64 { + _ = ctx; + + var hasher = Wyhash.init(0); + key.hash(&hasher); + return hasher.final(); + } + + pub fn eql(ctx: Context, a: Origin, b: Origin) bool { + _ = ctx; + return a.eql(b); + } + }; + + pub fn init(allocator: std.mem.Allocator) FactSet { + return .{ + .sets = InnerMap.init(allocator), + .allocator = allocator, + }; + } + + // FIXME: to free or not to free...that is the question (to free or not free the keys?) + // We have a similar situation as we came across else where if we use some complicated + // value as a key, and we try to insert into hashmap that already contains that value, + // we will leak the key if we don't detect the existing version and deallocate one of the + // keys. + pub fn deinit(fact_set: *FactSet) void { + var it = fact_set.sets.iterator(); + + while (it.next()) |origin_facts| { + origin_facts.key_ptr.deinit(); // Okay, in practice this is also giving us incorrect alignment issues + origin_facts.value_ptr.deinit(); + } + + fact_set.sets.deinit(); + } + + pub const Iterator = struct { + set_it: InnerMap.Iterator, + origin_fact_it: ?struct { origin: *Origin, fact_it: Set(Fact).Iterator } = null, + + pub fn next(it: *Iterator) ?struct { origin: *Origin, fact: *Fact } { + while (true) { + if (it.origin_fact_it) |*origin_fact_it| { + if (origin_fact_it.fact_it.next()) |fact| return .{ .origin = origin_fact_it.origin, .fact = fact }; + + it.origin_fact_it = null; + } else { + const origin_set = it.set_it.next() orelse return null; + + it.origin_fact_it = .{ .origin = origin_set.key_ptr, .fact_it = origin_set.value_ptr.iterator() }; + } + } + } + }; + + pub fn iterator(fact_set: *const FactSet) Iterator { + return .{ .set_it = fact_set.sets.iterator() }; + } + + pub const TrustedIterator = struct { + trusted_origins: TrustedOrigins, + set_it: InnerMap.Iterator, + origin_fact_it: ?struct { origin: *Origin, fact_it: Set(Fact).Iterator } = null, + + pub fn next(it: *TrustedIterator) ?struct { origin: *Origin, fact: *Fact } { + while (true) { + if (it.origin_fact_it) |*origin_fact_it| { + if (origin_fact_it.fact_it.next()) |fact| { + return .{ .origin = origin_fact_it.origin, .fact = fact }; + } + + it.origin_fact_it = null; + } else { + std.debug.assert(it.origin_fact_it == null); + + const origin_set = it.set_it.next() orelse return null; + + const set_ptr: *Set(Fact) = origin_set.value_ptr; + const origin: *Origin = origin_set.key_ptr; + + // If we don't trust the origin of this set, we start the loop again + if (!it.trusted_origins.containsAll(origin)) continue; + + defer std.debug.assert(it.origin_fact_it != null); + + it.origin_fact_it = .{ + .origin = origin, + .fact_it = set_ptr.iterator(), // Is this iterator taking + }; + } + } + } + }; + + /// Return an iterator over facts that match the trusted origin. + pub fn trustedIterator(fact_set: *const FactSet, trusted_origins: TrustedOrigins) TrustedIterator { + return .{ .set_it = fact_set.sets.iterator(), .trusted_origins = trusted_origins }; + } + + /// Return the total number of facts in the fact set + pub fn count(fact_set: *const FactSet) usize { + var n: usize = 0; + + var it = fact_set.sets.valueIterator(); + while (it.next()) |facts| { + n += facts.count(); + } + + return n; + } + + /// Add fact with origin to fact set. 
+ /// + /// Takes ownership of (i.e. will free) origin and fact + pub fn add(fact_set: *FactSet, origin: Origin, fact: Fact) !void { + if (fact_set.sets.getEntry(origin)) |entry| { + try entry.value_ptr.add(fact); + } else { + var set = Set(Fact).init(fact_set.allocator); + try set.add(fact); + + try fact_set.sets.put(origin, set); + } + } + + pub fn contains(fact_set: *const FactSet, origin: Origin, fact: Fact) bool { + const set = fact_set.sets.get(origin) orelse return false; + + return set.contains(fact); + } +}; + +test "FactSet trustedIterator" { + const testing = std.testing; + const Term = @import("term.zig").Term; + + var fs = FactSet.init(testing.allocator); + defer fs.deinit(); + + var origin = Origin.init(testing.allocator); + try origin.insert(0); + + var origin2 = Origin.init(testing.allocator); + try origin2.insert(1); + + const fact: Fact = .{ .predicate = .{ .name = 2123, .terms = std.ArrayList(Term).init(testing.allocator) } }; + const fact2: Fact = .{ .predicate = .{ .name = 2123, .terms = std.ArrayList(Term).init(testing.allocator) } }; + + try fs.add(origin, fact); + try fs.add(origin2, fact2); + + try testing.expect(fs.sets.contains(origin)); + try testing.expect(fs.sets.contains(origin2)); + + // With a non-trusted iterator we expect 2 facts + { + var count: usize = 0; + + var it = fs.iterator(); + while (it.next()) |origin_fact| { + defer count += 1; + + try testing.expectEqual(fact.predicate.name, origin_fact.fact.predicate.name); + } + + try testing.expectEqual(2, count); + } + + // With a trusted iterator only trusting [0] we only expect a single fact + { + var trusted_origins = try TrustedOrigins.defaultOrigins(testing.allocator); + defer trusted_origins.deinit(); + + var count: usize = 0; + + var it = fs.trustedIterator(trusted_origins); + while (it.next()) |origin_fact| { + defer count += 1; + + try testing.expectEqual(fact.predicate.name, origin_fact.fact.predicate.name); + } + + try testing.expectEqual(1, count); + } +} diff --git a/biscuit-datalog/src/main.zig b/biscuit-datalog/src/main.zig index ce3c5f1..bed063a 100644 --- a/biscuit-datalog/src/main.zig +++ b/biscuit-datalog/src/main.zig @@ -1,8 +1,19 @@ pub const fact = @import("fact.zig"); +pub const Fact = @import("fact.zig").Fact; pub const predicate = @import("predicate.zig"); +pub const Predicate = @import("predicate.zig").Predicate; +pub const Expression = @import("expression.zig").Expression; +pub const Op = @import("expression.zig").Op; +pub const Scope = @import("scope.zig").Scope; pub const rule = @import("rule.zig"); +pub const Rule = @import("rule.zig").Rule; pub const check = @import("check.zig"); pub const symbol_table = @import("symbol_table.zig"); +pub const SymbolTable = @import("symbol_table.zig").SymbolTable; +pub const Term = @import("term.zig").Term; +pub const Check = @import("check.zig").Check; +pub const Origin = @import("origin.zig").Origin; +pub const TrustedOrigins = @import("trusted_origins.zig").TrustedOrigins; pub const world = @import("world.zig"); test { diff --git a/biscuit-datalog/src/matched_variables.zig b/biscuit-datalog/src/matched_variables.zig index b8ef7b3..b22dee6 100644 --- a/biscuit-datalog/src/matched_variables.zig +++ b/biscuit-datalog/src/matched_variables.zig @@ -2,6 +2,8 @@ const std = @import("std"); const mem = std.mem; const Rule = @import("rule.zig").Rule; const Term = @import("term.zig").Term; +const Expression = @import("expression.zig").Expression; +const SymbolTable = @import("symbol_table.zig").SymbolTable; // Is a better name for this 
VariableBinding? @@ -29,10 +31,10 @@ const Term = @import("term.zig").Term; /// } /// ``` pub const MatchedVariables = struct { - variables: std.AutoHashMap(u64, ?Term), + variables: std.AutoHashMap(u32, ?Term), - pub fn init(allocator: mem.Allocator, rule: *Rule) !MatchedVariables { - var variables = std.AutoHashMap(u64, ?Term).init(allocator); + pub fn init(allocator: mem.Allocator, rule: *const Rule) !MatchedVariables { + var variables = std.AutoHashMap(u32, ?Term).init(allocator); // Add all variables in predicates in the rule's body to variable set for (rule.body.items) |predicate| { @@ -56,7 +58,7 @@ pub const MatchedVariables = struct { return .{ .variables = variables }; } - pub fn get(matched_variables: *const MatchedVariables, key: u64) ?Term { + pub fn get(matched_variables: *const MatchedVariables, key: u32) ?Term { return matched_variables.variables.get(key) orelse return null; } @@ -66,7 +68,7 @@ pub const MatchedVariables = struct { /// /// If the variable is unset we bind to the term unconditionally and /// return true. - pub fn insert(matched_variables: *MatchedVariables, variable: u64, term: Term) !bool { + pub fn insert(matched_variables: *MatchedVariables, variable: u32, term: Term) !bool { const entry = matched_variables.variables.getEntry(variable) orelse return false; if (entry.value_ptr.*) |existing_term| { @@ -94,15 +96,15 @@ pub const MatchedVariables = struct { /// If every variable in MatchedVariables has been assigned a term return a map /// from variable -> non-null term, otherwise return null. - pub fn complete(matched_variables: *const MatchedVariables, allocator: mem.Allocator) !?std.AutoHashMap(u64, Term) { + pub fn complete(matched_variables: *const MatchedVariables, allocator: mem.Allocator) !?std.AutoHashMap(u32, Term) { if (!matched_variables.isComplete()) return null; - var completed_variables = std.AutoHashMap(u64, Term).init(allocator); + var completed_variables = std.AutoHashMap(u32, Term).init(allocator); errdefer completed_variables.deinit(); var it = matched_variables.variables.iterator(); while (it.next()) |kv| { - const key: u64 = kv.key_ptr.*; + const key: u32 = kv.key_ptr.*; const value: ?Term = kv.value_ptr.*; try completed_variables.put(key, value.?); @@ -110,4 +112,24 @@ pub const MatchedVariables = struct { return completed_variables; } + + pub fn evaluateExpressions( + matchced_variables: *const MatchedVariables, + allocator: std.mem.Allocator, + expressions: []Expression, + symbols: *SymbolTable, + ) !bool { + const variables = try matchced_variables.complete(allocator) orelse return error.IncompleteVariables; + + for (expressions) |expression| { + const result = try expression.evaluate(allocator, variables, symbols); + + switch (result) { + .bool => |b| if (b) continue else return false, + else => return false, + } + } + + return true; + } }; diff --git a/biscuit-datalog/src/origin.zig b/biscuit-datalog/src/origin.zig index baddf95..1ed103d 100644 --- a/biscuit-datalog/src/origin.zig +++ b/biscuit-datalog/src/origin.zig @@ -1,17 +1,58 @@ const std = @import("std"); const mem = std.mem; +const Wyhash = std.hash.Wyhash; + const Set = @import("set.zig").Set; const Scope = @import("scope.zig").Scope; pub const Origin = struct { - block_ids: Set(usize), + block_ids: std.AutoHashMap(usize, void), + + // Authorizer id is maximum int storable in u64 + pub const AUTHORIZER_ID = std.math.maxInt(u64); pub fn init(allocator: mem.Allocator) Origin { - return .{ .block_ids = Set(usize).init(allocator) }; + return .{ .block_ids = 
std.AutoHashMap(usize, void).init(allocator) }; + } + + pub fn initWithId(allocator: mem.Allocator, block_id: usize) !Origin { + var block_ids = std.AutoHashMap(usize, void).init(allocator); + + try block_ids.put(block_id, {}); + + return .{ .block_ids = block_ids }; + } + + pub fn deinit(origin: *Origin) void { + origin.block_ids.deinit(); + } + + pub fn format(origin: Origin, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + var it = origin.block_ids.keyIterator(); + + try writer.print("[", .{}); + while (it.next()) |block_id_ptr| { + const block_id = block_id_ptr.*; + + if (block_id == Origin.AUTHORIZER_ID) { + try writer.print("{s},", .{"Authorizer"}); + } else { + try writer.print("{},", .{block_id}); + } + } + try writer.print("]", .{}); + } + + pub fn clone(origin: *const Origin) !Origin { + return .{ .block_ids = try origin.block_ids.clone() }; } - pub fn insert(origin: *Origin, block_id: usize) void { - origin.block_ids.add(block_id); + // pub fn authorizer(allocator: mem.Allocator) !Origin { + // return try Origin.initWithId(allocator, AUTHORIZER_ID); + // } + + pub fn insert(origin: *Origin, block_id: usize) !void { + try origin.block_ids.put(block_id, {}); } pub fn @"union"(origin: Origin, other: Origin) Origin { @@ -21,53 +62,40 @@ pub const Origin = struct { pub fn isSuperset(origin: Origin, other: Origin) Origin { return origin.block_ids.isSuperset(other.block_ids); } -}; -// TrustedOrigin represents the set of origins trusted by a particular rule -pub const TrustedOrigins = struct { - origin: Origin, + pub fn hash(origin: Origin, hasher: anytype) void { + var h: usize = 0; - pub fn init(allocator: mem.Allocator) TrustedOrigins { - return .{ .origin = Origin.init(allocator) }; - } + var it = origin.block_ids.keyIterator(); + while (it.next()) |block_id_ptr| { + const block_id = block_id_ptr.*; - pub fn initFromScopes( - allocator: mem.Allocator, - rule_scopes: []const Scope, - default_origins: TrustedOrigins, - current_block: usize, - public_key_to_block_id: std.AutoHashMap(usize, std.ArrayList(usize)), - ) TrustedOrigins { - const max_int = std.math.maxInt(usize); - _ = public_key_to_block_id; + h ^= block_id; + } - if (rule_scopes.len == 0) { - var origins = default_origins.clone(); + std.hash.autoHash(hasher, h); + } - origins.insert(current_block); - origins.insert(max_int); + pub fn eql(a: Origin, b: Origin) bool { + if (a.block_ids.count() != b.block_ids.count()) return false; - return origins; - } + var a_it = a.block_ids.keyIterator(); + while (a_it.next()) |a_block_id| { + if (b.block_ids.contains(a_block_id.*)) continue; - var origins = Origin.init(allocator); - origins.insert(max_int); - origins.insert(current_block); - - for (rule_scopes) |scope| { - switch (scope) { - .authority => origins.insert(0), - .previous => { - if (current_block == max_int) continue; - // TODO: extend - }, - .public_key => |public_key_id| { - _ = public_key_id; - // TODO: extend - }, - } + return false; } - return .{ .origin = origins }; + return true; } }; + +test "Origins" { + const testing = std.testing; + + var origins = Origin.init(testing.allocator); + defer origins.deinit(); + + try origins.insert(12); + try origins.insert(13); +} diff --git a/biscuit-datalog/src/rule.zig b/biscuit-datalog/src/rule.zig index 3400bd2..b1fb127 100644 --- a/biscuit-datalog/src/rule.zig +++ b/biscuit-datalog/src/rule.zig @@ -4,13 +4,16 @@ const meta = std.meta; const schema = @import("biscuit-schema"); const Set = @import("set.zig").Set; const Fact = 
@import("fact.zig").Fact; +const FactSet = @import("fact_set.zig").FactSet; const Predicate = @import("predicate.zig").Predicate; const Term = @import("term.zig").Term; +const Origin = @import("origin.zig").Origin; const SymbolTable = @import("symbol_table.zig").SymbolTable; const MatchedVariables = @import("matched_variables.zig").MatchedVariables; const Combinator = @import("combinator.zig").Combinator; const Scope = @import("scope.zig").Scope; const Expression = @import("expression.zig").Expression; +const TrustedOrigins = @import("trusted_origins.zig").TrustedOrigins; pub const Rule = struct { head: Predicate, @@ -18,20 +21,22 @@ pub const Rule = struct { expressions: std.ArrayList(Expression), scopes: std.ArrayList(Scope), + /// Make datalog rule from protobuf rule pub fn fromSchema(allocator: std.mem.Allocator, schema_rule: schema.RuleV2) !Rule { const head = try Predicate.fromSchema(allocator, schema_rule.head orelse return error.NoHeadInRuleSchema); var body = std.ArrayList(Predicate).init(allocator); + var expressions = std.ArrayList(Expression).init(allocator); + var scopes = std.ArrayList(Scope).init(allocator); + for (schema_rule.body.items) |predicate| { try body.append(try Predicate.fromSchema(allocator, predicate)); } - var expressions = std.ArrayList(Expression).init(allocator); for (schema_rule.expressions.items) |expression| { try expressions.append(try Expression.fromSchema(allocator, expression)); } - var scopes = std.ArrayList(Scope).init(allocator); for (schema_rule.scope.items) |scope| { try scopes.append(try Scope.fromSchema(scope)); } @@ -41,16 +46,17 @@ pub const Rule = struct { pub fn deinit(rule: *Rule) void { rule.head.deinit(); + for (rule.body.items) |*predicate| { predicate.deinit(); } - rule.body.deinit(); for (rule.expressions.items) |*expression| { expression.deinit(); } - rule.expressions.deinit(); + rule.body.deinit(); + rule.expressions.deinit(); rule.scopes.deinit(); } @@ -109,24 +115,31 @@ pub const Rule = struct { /// ``` /// /// ...and we add it to the set of facts (the set will take care of deduplication) - pub fn apply(rule: *Rule, allocator: mem.Allocator, facts: *const Set(Fact), new_facts: *Set(Fact), symbols: SymbolTable) !void { + pub fn apply(rule: *const Rule, allocator: mem.Allocator, origin_id: u64, facts: *const FactSet, new_facts: *FactSet, symbols: *SymbolTable, trusted_origins: TrustedOrigins) !void { var arena = std.heap.ArenaAllocator.init(allocator); defer arena.deinit(); - std.debug.print("\n\nrule = {any}\n", .{rule}); + std.debug.print("\napplying rule (from origin {}):\n {any}\n", .{ origin_id, rule }); const matched_variables = try MatchedVariables.init(arena.allocator(), rule); // TODO: if body is empty stuff - var it = try Combinator.init(0, allocator, matched_variables, rule.body.items, facts, symbols); + var it = Combinator.init(0, allocator, matched_variables, rule.body.items, rule.expressions.items, facts, symbols, trusted_origins); defer it.deinit(); - blk: while (try it.next()) |*bindings| { + blk: while (try it.next()) |*origin_bindings| { + const origin: Origin = origin_bindings[0]; + const bindings: MatchedVariables = origin_bindings[1]; + + if (!try bindings.evaluateExpressions(allocator, rule.expressions.items, symbols)) continue; + // TODO: Describe why clonedWithAllocator? More generally, describe in comment the overall // lifetimes / memory allocation approach during evaluation. var predicate = try rule.head.cloneWithAllocator(allocator); defer predicate.deinit(); + // Loop over terms in head predicate. 
Update all _variable_ terms with their value + // from the binding. for (predicate.terms.items, 0..) |head_term, i| { const sym = if (head_term == .variable) head_term.variable else continue; @@ -136,16 +149,22 @@ pub const Rule = struct { } const fact = Fact.init(predicate); - std.debug.print("adding new fact = {any}\n", .{fact}); + var new_origin = try origin.clone(); + try new_origin.insert(origin_id); + + std.debug.print("\nadding new fact:\n {any} with origin {any}\n", .{ fact, new_origin }); // Skip adding fact if we already have generated it. Because the // Set will clobber duplicate facts we'll lose a reference when // inserting a duplicate and then when we loop over the set to // deinit the facts we'll miss some. This ensures that the facts // can be freed purely from the Set. - if (new_facts.contains(fact)) continue; + if (new_facts.contains(new_origin, fact)) { + new_origin.deinit(); + continue; + } - try new_facts.add(try fact.clone()); + try new_facts.add(new_origin, try fact.clone()); } } @@ -154,16 +173,96 @@ pub const Rule = struct { /// /// Note: whilst the combinator may return multiple valid matches, `findMatch` only requires a single match /// so stopping on the first `it.next()` that returns not-null is enough. - pub fn findMatch(rule: *Rule, allocator: mem.Allocator, facts: *const Set(Fact), symbols: SymbolTable) !bool { + pub fn findMatch(rule: *Rule, allocator: mem.Allocator, facts: *const FactSet, symbols: *SymbolTable, trusted_origins: TrustedOrigins) !bool { + std.debug.print("\nrule.findMatch on {any} ({any})\n", .{ rule, trusted_origins }); var arena = std.heap.ArenaAllocator.init(allocator); defer arena.deinit(); - const matched_variables = try MatchedVariables.init(arena.allocator(), rule); + const arena_allocator = arena.allocator(); - var it = try Combinator.init(0, allocator, matched_variables, rule.body.items, facts, symbols); - defer it.deinit(); + if (rule.body.items.len == 0) { + const variables = std.AutoHashMap(u32, Term).init(allocator); + for (rule.expressions.items) |expr| { + const result = try expr.evaluate(arena_allocator, variables, symbols); - return try it.next() != null; + switch (result) { + .bool => |b| if (b) continue else return false, + else => return false, + } + } + + return true; + } else { + const matched_variables = try MatchedVariables.init(arena_allocator, rule); + + var it = Combinator.init(0, allocator, matched_variables, rule.body.items, rule.expressions.items, facts, symbols, trusted_origins); + defer it.deinit(); + + while (try it.next()) |*origin_bindings| { + const bindings: MatchedVariables = origin_bindings[1]; + + if (try bindings.evaluateExpressions(arena_allocator, rule.expressions.items, symbols)) return true; + } + + return false; + } + } + + pub fn checkMatchAll(rule: *Rule, allocator: mem.Allocator, facts: *const FactSet, symbols: *SymbolTable, trusted_origins: TrustedOrigins) !bool { + std.debug.print("\nrule.checkMatchAll on {any} ({any})\n", .{ rule, trusted_origins }); + var arena = std.heap.ArenaAllocator.init(allocator); + defer arena.deinit(); + + const arena_allocator = arena.allocator(); + + if (rule.body.items.len == 0) { + const variables = std.AutoHashMap(u32, Term).init(allocator); + for (rule.expressions.items) |expr| { + const result = try expr.evaluate(arena_allocator, variables, symbols); + + switch (result) { + .bool => |b| if (b) continue else return false, + else => return false, + } + } + + return true; + } else { + const matched_variables = try MatchedVariables.init(arena_allocator, rule); + 
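(Editorial aside, not part of the diff.) `findMatch` above is existential: it returns `true` as soon as a single binding produced by the combinator satisfies the rule's expressions. `checkMatchAll` below is universal: every binding must satisfy them. Through `World` (later in this diff) the pair surfaces as `queryMatch` and `queryMatchAll`. A minimal sketch, assuming a populated `world`, a `query_rule`, a `symbols` table and an `allocator` that are not shown here:

```zig
// Sketch only: `world`, `query_rule`, `symbols` and `allocator` are assumed to
// have been set up elsewhere (e.g. by an authorizer built on this library).
var trusted = try TrustedOrigins.defaultOrigins(allocator);
defer trusted.deinit();

// `check if` semantics: true if at least one binding satisfies the body and expressions.
const any_match = try world.queryMatch(&query_rule, &symbols, trusted);

// `check all` semantics: true only if every binding satisfies the expressions.
const all_match = try world.queryMatchAll(&query_rule, &symbols, trusted);

std.debug.print("any = {}, all = {}\n", .{ any_match, all_match });
```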
+ var it = Combinator.init(0, allocator, matched_variables, rule.body.items, rule.expressions.items, facts, symbols, trusted_origins); + defer it.deinit(); + + while (try it.next()) |*origin_bindings| { + const bindings: MatchedVariables = origin_bindings[1]; + + if (try bindings.evaluateExpressions(arena_allocator, rule.expressions.items, symbols)) continue; + + return false; + } + + return true; + } + } + + /// Checks that there are no unbound variables in the head (i.e. every head variable must also appear in the body) + pub fn validateVariables(rule: Rule) bool { + blk: for (rule.head.terms.items) |head_term| { + const head_variable = if (head_term == .variable) head_term.variable else continue; + + for (rule.body.items) |body_predicate| { + for (body_predicate.terms.items) |body_term| { + const body_variable = if (body_term == .variable) body_term.variable else continue; + + if (head_variable == body_variable) continue :blk; + } + } + + // We haven't found this loop's head variable anywhere in the body (i.e. the variable is unbound) + return false; + } + + return true; } pub fn format(rule: Rule, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { @@ -172,5 +271,49 @@ pub const Rule = struct { try writer.print("{any}", .{predicate.*}); if (i < rule.body.items.len - 1) try writer.print(", ", .{}); } + + if (rule.expressions.items.len > 0) { + try writer.print(", [", .{}); + + for (rule.expressions.items, 0..) |*expression, i| { + try writer.print("{any}", .{expression.*}); + if (i < rule.expressions.items.len - 1) try writer.print(", ", .{}); + } + + try writer.print("]", .{}); + } + + if (rule.scopes.items.len > 0) try writer.print(", ", .{}); + + for (rule.scopes.items, 0..) |*scope, i| { + try writer.print("{any}", .{scope.*}); + if (i < rule.scopes.items.len - 1) try writer.print(", ", .{}); + } + } + + // Convert datalog rule from old symbol space to new symbol space + pub fn convert(rule: Rule, old_symbols: *const SymbolTable, new_symbols: *SymbolTable) !Rule { + var body = try rule.body.clone(); + var expressions = try rule.expressions.clone(); + var scopes = try rule.scopes.clone(); + + for (body.items, 0..) |predicate, i| { + body.items[i] = try predicate.convert(old_symbols, new_symbols); + } + + for (expressions.items, 0..) |expression, i| { + expressions.items[i] = try expression.convert(old_symbols, new_symbols); + } + + for (scopes.items, 0..) 
|scope, i| { + scopes.items[i] = try scope.convert(old_symbols, new_symbols); + } + + return .{ + .head = try rule.head.convert(old_symbols, new_symbols), + .body = body, + .expressions = expressions, + .scopes = scopes, + }; } }; diff --git a/biscuit-datalog/src/rule_set.zig b/biscuit-datalog/src/rule_set.zig new file mode 100644 index 0000000..5db8adc --- /dev/null +++ b/biscuit-datalog/src/rule_set.zig @@ -0,0 +1,52 @@ +const std = @import("std"); +const Rule = @import("rule.zig").Rule; +const TrustedOrigins = @import("trusted_origins.zig").TrustedOrigins; + +pub const RuleSet = struct { + rules: std.AutoHashMap(TrustedOrigins, std.ArrayList(OriginRule)), + allocator: std.mem.Allocator, + + const OriginRule = struct { u64, Rule }; + + pub fn init(allocator: std.mem.Allocator) RuleSet { + return .{ + .rules = std.AutoHashMap(TrustedOrigins, std.ArrayList(OriginRule)).init(allocator), + .allocator = allocator, + }; + } + + pub fn deinit(rule_set: *RuleSet) void { + var it = rule_set.rules.iterator(); + + while (it.next()) |entry| { + entry.key_ptr.deinit(); + entry.value_ptr.deinit(); + } + + rule_set.rules.deinit(); + } + + pub fn add(rule_set: *RuleSet, origin: u64, scope: TrustedOrigins, rule: Rule) !void { + if (rule_set.rules.getEntry(scope)) |entry| { + try entry.value_ptr.append(.{ origin, rule }); + } else { + var list = std.ArrayList(OriginRule).init(rule_set.allocator); + try list.append(.{ origin, rule }); + + try rule_set.rules.put(scope, list); + } + } +}; + +test "RuleSet" { + const testing = std.testing; + + var rs = RuleSet.init(testing.allocator); + defer rs.deinit(); + + const default_origins = try TrustedOrigins.defaultOrigins(testing.allocator); + const rule: Rule = undefined; + + try rs.add(0, default_origins, rule); + std.debug.print("rs = {any}\n", .{rs}); +} diff --git a/biscuit-datalog/src/scope.zig b/biscuit-datalog/src/scope.zig index be08894..5eb6546 100644 --- a/biscuit-datalog/src/scope.zig +++ b/biscuit-datalog/src/scope.zig @@ -1,5 +1,6 @@ const std = @import("std"); const schema = @import("biscuit-schema"); +const SymbolTable = @import("symbol_table.zig").SymbolTable; pub const Scope = union(ScopeTag) { authority: void, @@ -18,6 +19,22 @@ pub const Scope = union(ScopeTag) { .publicKey => |key| .{ .public_key = @bitCast(key) }, // FIXME: should we check for negativity? 
}; } + + pub fn convert(scope: Scope, old_symbols: *const SymbolTable, new_symbols: *SymbolTable) !Scope { + return switch (scope) { + .authority => .authority, + .previous => .previous, + .public_key => |index| .{ .public_key = try new_symbols.insertPublicKey(try old_symbols.getPublicKey(index)) }, + }; + } + + pub fn format(scope: Scope, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + switch (scope) { + .authority => try writer.print("authority", .{}), + .previous => try writer.print("previous", .{}), + .public_key => |public_key| try writer.print("public key {}", .{public_key}), + } + } }; const ScopeTag = enum(u8) { diff --git a/biscuit-datalog/src/set.zig b/biscuit-datalog/src/set.zig index 02be348..6d13db5 100644 --- a/biscuit-datalog/src/set.zig +++ b/biscuit-datalog/src/set.zig @@ -49,10 +49,21 @@ pub fn Set(comptime K: type) type { set.inner.deinit(); } + pub fn clone(set: *const Self) !Self { + return .{ + .inner = try set.inner.clone(), + .alloc = set.alloc, + }; + } + pub fn iterator(set: Self) InnerSet.KeyIterator { return set.inner.keyIterator(); } + pub fn ptrIterator(set: *Self) InnerSet.KeyIterator { + return set.inner.keyIterator(); + } + pub fn add(set: *Self, value: K) !void { try set.inner.put(value, {}); } @@ -302,3 +313,22 @@ test "Intersection" { try testing.expect(!s3.contains(.{ .integer = 1 })); try testing.expect(!s3.contains(.{ .integer = 4 })); } + +test "Iterator" { + const Term = @import("term.zig").Term; + + const testing = std.testing; + const allocator = testing.allocator; + + var s1 = Set(Term).init(allocator); + defer s1.deinit(); + + try s1.add(.{ .integer = 1 }); + try s1.add(.{ .integer = 2 }); + + var it = s1.iterator(); + + try testing.expect(it.next() != null); + try testing.expect(it.next() != null); + try testing.expect(it.next() == null); +} diff --git a/biscuit-datalog/src/symbol_table.zig b/biscuit-datalog/src/symbol_table.zig index cfb1a05..d5243ab 100644 --- a/biscuit-datalog/src/symbol_table.zig +++ b/biscuit-datalog/src/symbol_table.zig @@ -1,14 +1,20 @@ const std = @import("std"); const mem = std.mem; +const Ed25519 = std.crypto.sign.Ed25519; + pub const SymbolTable = struct { + name: []const u8, allocator: mem.Allocator, symbols: std.ArrayList([]const u8), + public_keys: std.ArrayList(Ed25519.PublicKey), - pub fn init(allocator: mem.Allocator) SymbolTable { + pub fn init(name: []const u8, allocator: mem.Allocator) SymbolTable { return .{ + .name = name, .allocator = allocator, .symbols = std.ArrayList([]const u8).init(allocator), + .public_keys = std.ArrayList(Ed25519.PublicKey).init(allocator), }; } @@ -17,6 +23,8 @@ pub const SymbolTable = struct { symbol_table.allocator.free(symbol); } symbol_table.symbols.deinit(); + + symbol_table.public_keys.deinit(); } pub fn insert(symbol_table: *SymbolTable, symbol: []const u8) !u64 { @@ -31,7 +39,11 @@ pub const SymbolTable = struct { // Otherwise we need to insert the new symbol try symbol_table.symbols.append(string); - return symbol_table.symbols.items.len - 1 + NON_DEFAULT_SYMBOLS_OFFSET; + const index = symbol_table.symbols.items.len - 1 + NON_DEFAULT_SYMBOLS_OFFSET; + + // std.debug.print("{s}: Inserting \"{s}\" at {}\n", .{ symbol_table.name, symbol, index }); + + return index; } pub fn get(symbol_table: *SymbolTable, symbol: []const u8) ?u64 { @@ -48,15 +60,35 @@ pub const SymbolTable = struct { return null; } + pub fn insertPublicKey(symbol_table: *SymbolTable, public_key: Ed25519.PublicKey) !u64 { + for (symbol_table.public_keys.items, 0..) 
|k, i| { + if (std.mem.eql(u8, &k.bytes, &public_key.bytes)) return i; + } + + try symbol_table.public_keys.append(public_key); + return symbol_table.public_keys.items.len - 1; + } + + pub fn getPublicKey(symbol_table: *const SymbolTable, index: usize) !Ed25519.PublicKey { + if (index >= symbol_table.public_keys.items.len) return error.NoSuchPublicKey; + + return symbol_table.public_keys.items[index]; + } + pub fn getString(symbol_table: *const SymbolTable, sym_index: u64) ![]const u8 { - if (indexToDefault(sym_index)) |str| { - return str; + if (indexToDefault(sym_index)) |sym| { + // std.debug.print("Found \"{s}\" at {} (default)\n", .{ sym, sym_index }); + return sym; } if (sym_index >= NON_DEFAULT_SYMBOLS_OFFSET and sym_index < NON_DEFAULT_SYMBOLS_OFFSET + symbol_table.symbols.items.len) { - return symbol_table.symbols.items[sym_index - NON_DEFAULT_SYMBOLS_OFFSET]; + const sym = symbol_table.symbols.items[sym_index - NON_DEFAULT_SYMBOLS_OFFSET]; + // std.debug.print("Found \"{s}\" at {}\n", .{ sym, sym_index }); + return sym; } + // std.debug.print("Existing sym index {} not found\n", .{sym_index}); + return error.SymbolNotFound; } @@ -131,7 +163,7 @@ const default_symbols = std.ComptimeStringMap(u64, .{ test { const testing = std.testing; - var st = SymbolTable.init(testing.allocator); + var st = SymbolTable.init("test", testing.allocator); defer st.deinit(); try testing.expectEqual(@as(?u64, 0), st.get("read")); diff --git a/biscuit-datalog/src/term.zig b/biscuit-datalog/src/term.zig index 2804361..01e711a 100644 --- a/biscuit-datalog/src/term.zig +++ b/biscuit-datalog/src/term.zig @@ -33,7 +33,7 @@ pub const Term = union(TermKind) { .string => |v| .{ .string = v }, .bool => |v| .{ .bool = v }, .date => |v| .{ .date = v }, - .bytes => return error.FromSchemaNotImplementedForBytes, + .bytes => |v| .{ .bytes = v.getSlice() }, .set => |v| { var set = Set(Term).init(allocator); for (v.set.items) |term| { @@ -48,9 +48,17 @@ pub const Term = union(TermKind) { return switch (term) { .variable => |id| .{ .variable = std.math.cast(u32, try new_symbols.insert(try old_symbols.getString(id))) orelse return error.VariableIdTooLarge }, .string => |id| .{ .string = try new_symbols.insert(try old_symbols.getString(id)) }, - .integer, .bool, .date => term, - .bytes => return error.ConvertNotImplementedForBytes, - .set => |_| return error.ConvertNotImplementedForBytes, + .integer, .bool, .date, .bytes => term, + .set => |s| blk: { + var set = Set(Term).init(s.alloc); + + var it = s.iterator(); + while (it.next()) |term_ptr| { + try set.add(try term_ptr.convert(old_symbols, new_symbols)); + } + + break :blk .{ .set = set }; + }, }; } diff --git a/biscuit-datalog/src/trusted_origins.zig b/biscuit-datalog/src/trusted_origins.zig new file mode 100644 index 0000000..fea1f85 --- /dev/null +++ b/biscuit-datalog/src/trusted_origins.zig @@ -0,0 +1,135 @@ +const std = @import("std"); +const mem = std.mem; +const Set = @import("set.zig").Set; +const Scope = @import("scope.zig").Scope; +const Origin = @import("origin.zig").Origin; + +/// TrustedOrigin represents the set of origins trusted by a particular rule +pub const TrustedOrigins = struct { + ids: InnerSet, + + const InnerSet = std.AutoHashMap(usize, void); + + pub fn init(allocator: mem.Allocator) TrustedOrigins { + return .{ .ids = InnerSet.init(allocator) }; + } + + pub fn deinit(trusted_origins: *TrustedOrigins) void { + trusted_origins.ids.deinit(); + } + + pub fn clone(trusted_origins: *const TrustedOrigins) !TrustedOrigins { + return .{ .ids = try 
trusted_origins.ids.clone() }; + } + + /// Return the default TrustedOrigins: trusting the authority block (0) + /// and the authorizer (max int). + pub fn defaultOrigins(allocator: mem.Allocator) !TrustedOrigins { + var trusted_origins = TrustedOrigins.init(allocator); + + try trusted_origins.insert(0); + try trusted_origins.insert(Origin.AUTHORIZER_ID); + + return trusted_origins; + } + + /// Given a rule (i.e. its rule scopes), generate the trusted origins. + /// + /// The trusted origins always include the current block id and the authorizer id. + /// + /// Additional origins depend on rule scopes. If there are no rule scopes, the + /// origins from `default_origins` are added. Otherwise we convert the rule scopes + /// to block ids and add those. + pub fn fromScopes( + allocator: mem.Allocator, + rule_scopes: []const Scope, + default_origins: TrustedOrigins, + current_block: usize, + public_key_to_block_id: std.AutoHashMap(usize, std.ArrayList(usize)), + ) !TrustedOrigins { + var trusted_origins = TrustedOrigins.init(allocator); + try trusted_origins.insert(current_block); + try trusted_origins.insert(Origin.AUTHORIZER_ID); + + if (rule_scopes.len == 0) { + var it = default_origins.ids.keyIterator(); + + while (it.next()) |block_id| { + try trusted_origins.insert(block_id.*); + } + } else { + for (rule_scopes) |scope| { + switch (scope) { + .authority => try trusted_origins.insert(0), + .previous => { + if (current_block == Origin.AUTHORIZER_ID) continue; + + for (0..current_block + 1) |i| { + try trusted_origins.insert(i); + } + }, + .public_key => |public_key_id| { + const block_id_list = public_key_to_block_id.get(public_key_id) orelse continue; + + for (block_id_list.items) |block_id| { + try trusted_origins.insert(block_id); + } + }, + } + } + } + + return trusted_origins; + } + + // FIXME: this could have a way better name like `fn trustsFact` + /// Check that TrustedOrigins contain (at least) _all_ origin ids in fact_origin. In + /// other words, check that the fact's origins are a subset of the trusted origins. 
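(Editorial aside, not part of the diff.) The subset check described by this comment is worth a concrete example. The following illustrative test uses only APIs introduced in this file and in `origin.zig`, and could sit next to the existing `Trusted origin` test at the bottom of the file:

```zig
test "a fact is visible only if its origin is a subset of the trusted origins" {
    const allocator = std.testing.allocator;

    // Default trust: the authority block (0) and the authorizer.
    var trusted = try TrustedOrigins.defaultOrigins(allocator);
    defer trusted.deinit();

    // A fact derived purely from the authority block is trusted...
    var authority_origin = try Origin.initWithId(allocator, 0);
    defer authority_origin.deinit();
    try std.testing.expect(trusted.containsAll(&authority_origin));

    // ...but a fact that also depends on block 2 is not.
    var extended_origin = try Origin.initWithId(allocator, 0);
    defer extended_origin.deinit();
    try extended_origin.insert(2);
    try std.testing.expect(!trusted.containsAll(&extended_origin));
}
```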
+ pub fn containsAll(trusted_origins: *TrustedOrigins, fact_origin: *Origin) bool { + var it = fact_origin.block_ids.keyIterator(); + + while (it.next()) |origin_id_ptr| { + const origin_id = origin_id_ptr.*; + + if (trusted_origins.ids.contains(origin_id)) continue; + + return false; + } + + return true; + } + + pub fn format(trusted_origins: TrustedOrigins, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + var it = trusted_origins.ids.keyIterator(); + + try writer.print("trusting [", .{}); + while (it.next()) |id_ptr| { + const id = id_ptr.*; + + if (id == Origin.AUTHORIZER_ID) { + try writer.print("{s},", .{"Authorizer"}); + } else { + try writer.print("{},", .{id}); + } + } + try writer.print("]", .{}); + } + + pub fn insert(trusted_origins: *TrustedOrigins, block_id: usize) !void { + try trusted_origins.ids.put(block_id, {}); + } +}; + +test "Trusted origin" { + const testing = std.testing; + + var to = try TrustedOrigins.defaultOrigins(testing.allocator); + defer to.deinit(); + + var o = Origin.init(testing.allocator); + defer o.deinit(); + + try o.insert(22); + + _ = to.containsAll(&o); +} diff --git a/biscuit-datalog/src/world.zig b/biscuit-datalog/src/world.zig index 01bbb96..1e70b39 100644 --- a/biscuit-datalog/src/world.zig +++ b/biscuit-datalog/src/world.zig @@ -1,16 +1,19 @@ const std = @import("std"); const mem = std.mem; const Fact = @import("fact.zig").Fact; +const Origin = @import("origin.zig").Origin; const Rule = @import("rule.zig").Rule; -const Set = @import("set.zig").Set; +const FactSet = @import("fact_set.zig").FactSet; +const RuleSet = @import("rule_set.zig").RuleSet; +const TrustedOrigins = @import("trusted_origins.zig").TrustedOrigins; const RunLimits = @import("run_limits.zig").RunLimits; const SymbolTable = @import("symbol_table.zig").SymbolTable; pub const World = struct { allocator: mem.Allocator, - facts: Set(Fact), - rules: std.ArrayList(Rule), + fact_set: FactSet, + rule_set: RuleSet, symbols: std.ArrayList([]const u8), /// init world @@ -26,72 +29,91 @@ pub const World = struct { pub fn init(allocator: mem.Allocator) World { return .{ .allocator = allocator, - .facts = Set(Fact).init(allocator), - .rules = std.ArrayList(Rule).init(allocator), + .fact_set = FactSet.init(allocator), + .rule_set = RuleSet.init(allocator), .symbols = std.ArrayList([]const u8).init(allocator), }; } pub fn deinit(world: *World) void { - var it = world.facts.iterator(); - while (it.next()) |fact| { - fact.deinit(); - } world.symbols.deinit(); - world.rules.deinit(); - world.facts.deinit(); + world.rule_set.deinit(); + world.fact_set.deinit(); } - pub fn run(world: *World, symbols: SymbolTable) !void { + pub fn run(world: *World, symbols: *SymbolTable) !void { try world.runWithLimits(symbols, .{}); } - pub fn runWithLimits(world: *World, symbols: SymbolTable, limits: RunLimits) !void { - std.debug.print("runWithLimits\n", .{}); - for (0..limits.max_iterations) |_| { - const starting_fact_count = world.facts.count(); + pub fn runWithLimits(world: *World, symbols: *SymbolTable, limits: RunLimits) !void { + for (0..limits.max_iterations) |iteration| { + std.debug.print("\nrunWithLimits[{}]\n", .{iteration}); + const starting_fact_count = world.fact_set.count(); - var new_facts = Set(Fact).init(world.allocator); - defer { - var it = new_facts.iterator(); - while (it.next()) |fact| fact.deinit(); - new_facts.deinit(); - } + var new_fact_sets = FactSet.init(world.allocator); + defer new_fact_sets.deinit(); + + // Iterate over rules to generate new facts + { + 
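(Editorial aside, not part of the diff.) With the origin-aware API, a consumer supplies an `Origin` when adding a fact and a `TrustedOrigins` when adding a rule; `run`/`runWithLimits` then applies rules until no new facts appear or a limit is exceeded. A minimal sketch, assuming `authority_fact`, `authority_rule`, `symbols` and `allocator` were built elsewhere (e.g. from a parsed authority block):

```zig
// Sketch only; fact/rule construction is not shown here.
var world = World.init(allocator);
defer world.deinit();

try world.addFact(try Origin.initWithId(allocator, 0), authority_fact);
try world.addRule(0, try TrustedOrigins.defaultOrigins(allocator), authority_rule);

// Iterates until a fixpoint is reached (or RunLimits are exceeded).
try world.run(&symbols);
```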
var it = world.rule_set.rules.iterator(); + + while (it.next()) |origin_set| { + const trusted_origins = origin_set.key_ptr.*; + const set = origin_set.value_ptr; + + for (set.items) |*origin_rule| { + const origin_id: u64 = origin_rule[0]; + const rule: Rule = origin_rule[1]; - for (world.rules.items) |*rule| { - try rule.apply(world.allocator, &world.facts, &new_facts, symbols); + try rule.apply(world.allocator, origin_id, &world.fact_set, &new_fact_sets, symbols, trusted_origins); + } + } } - var it = new_facts.iterator(); - while (it.next()) |fact| { - if (world.facts.contains(fact.*)) continue; - try world.facts.add(try fact.cloneWithAllocator(world.allocator)); + var it = new_fact_sets.iterator(); + while (it.next()) |origin_fact| { + const existing_origin = origin_fact.origin.*; + const fact = origin_fact.fact.*; + + var origin = try existing_origin.clone(); + + if (world.fact_set.contains(origin, fact)) { + origin.deinit(); + continue; + } + + try world.fact_set.add(origin, try fact.cloneWithAllocator(world.allocator)); } - std.debug.print("starting_fact_count = {}, world.facts.count() = {}\n", .{ starting_fact_count, world.facts.count() }); + std.debug.print("starting_fact_count = {}, world.facts.count() = {}\n", .{ starting_fact_count, world.fact_set.count() }); // If we haven't generated any new facts, we're done. - if (starting_fact_count == world.facts.count()) { + if (starting_fact_count == world.fact_set.count()) { std.debug.print("No new facts!\n", .{}); return; } - if (world.facts.count() > limits.max_facts) return error.TooManyFacts; + if (world.fact_set.count() > limits.max_facts) return error.TooManyFacts; } return error.TooManyIterations; } - pub fn addFact(world: *World, fact: Fact) !void { - std.debug.print("world: adding fact = {any}\n", .{fact}); - try world.facts.add(fact); + /// Add fact with origin to world + pub fn addFact(world: *World, origin: Origin, fact: Fact) !void { + std.debug.print("\nworld: adding fact = {any} ({any}) \n", .{ fact, origin }); + try world.fact_set.add(origin, fact); + } + + pub fn addRule(world: *World, origin_id: usize, scope: TrustedOrigins, rule: Rule) !void { + std.debug.print("\nworld: adding rule (origin {}) = {any} (trusts {any})\n", .{ origin_id, rule, scope }); + try world.rule_set.add(origin_id, scope, rule); } - pub fn addRule(world: *World, rule: Rule) !void { - std.debug.print("world: adding rule = {any}\n", .{rule}); - try world.rules.append(rule); + pub fn queryMatch(world: *World, rule: *Rule, symbols: *SymbolTable, trusted_origins: TrustedOrigins) !bool { + return rule.findMatch(world.allocator, &world.fact_set, symbols, trusted_origins); } - pub fn queryMatch(world: *World, rule: *Rule, symbols: SymbolTable) !bool { - return rule.findMatch(world.allocator, &world.facts, symbols); + pub fn queryMatchAll(world: *World, rule: *Rule, symbols: *SymbolTable, trusted_origins: TrustedOrigins) !bool { + return rule.checkMatchAll(world.allocator, &world.fact_set, symbols, trusted_origins); } }; diff --git a/biscuit-format/src/main.zig b/biscuit-format/src/main.zig index eaef9c4..87ff1ea 100644 --- a/biscuit-format/src/main.zig +++ b/biscuit-format/src/main.zig @@ -1,5 +1,8 @@ pub const decode = @import("decode.zig"); -pub const serialized_biscuit = @import("serialized_biscuit.zig"); +pub const MIN_SCHEMA_VERSION = @import("serialized_biscuit.zig").MIN_SCHEMA_VERSION; +pub const MAX_SCHEMA_VERSION = @import("serialized_biscuit.zig").MAX_SCHEMA_VERSION; +pub const SignedBlock = @import("signed_block.zig").SignedBlock; +pub const 
SerializedBiscuit = @import("serialized_biscuit.zig").SerializedBiscuit; test { _ = @import("serialized_biscuit.zig"); diff --git a/biscuit-format/src/serialized_biscuit.zig b/biscuit-format/src/serialized_biscuit.zig index 7bff467..c6f2a33 100644 --- a/biscuit-format/src/serialized_biscuit.zig +++ b/biscuit-format/src/serialized_biscuit.zig @@ -6,7 +6,7 @@ const SignedBlock = @import("signed_block.zig").SignedBlock; const Proof = @import("proof.zig").Proof; pub const MIN_SCHEMA_VERSION = 3; -pub const MAX_SCHEMA_VERSION = 3; +pub const MAX_SCHEMA_VERSION = 4; pub const SerializedBiscuit = struct { decoded_biscuit: schema.Biscuit, @@ -15,14 +15,17 @@ pub const SerializedBiscuit = struct { proof: Proof, // root_key_id: ?u64, + // FIXME: should this take a SymbolTable? /// Initialise a SerializedBiscuit from the token's bytes and root public key. /// /// This decodes the toplevel-level biscuit format from protobuf and verifies /// the token. - pub fn initFromBytes(allocator: mem.Allocator, bytes: []const u8, public_key: Ed25519.PublicKey) !SerializedBiscuit { + pub fn fromBytes(allocator: mem.Allocator, bytes: []const u8, public_key: Ed25519.PublicKey) !SerializedBiscuit { const b = try schema.decodeBiscuit(allocator, bytes); errdefer b.deinit(); + // FIXME: Add textual public keys to symbols + const authority = try SignedBlock.fromDecodedBlock(b.authority orelse return error.ExpectedAuthorityBlock); const proof = try Proof.fromDecodedProof(b.proof orelse return error.ExpectedProof); @@ -66,44 +69,61 @@ pub const SerializedBiscuit = struct { /// b) If the token is not sealed we check the last block's /// public key is the public key of the private key in the /// the proof. - fn verify(serialized_block: *SerializedBiscuit, root_public_key: Ed25519.PublicKey) !void { + fn verify(serialized_biscuit: *SerializedBiscuit, root_public_key: Ed25519.PublicKey) !void { var pk = root_public_key; // Verify the authority block's signature { - var verifier = try serialized_block.authority.signature.verifier(pk); + if (serialized_biscuit.authority.external_signature != null) return error.AuthorityBlockMustNotHaveExternalSignature; + + var verifier = try serialized_biscuit.authority.signature.verifier(pk); - verifier.update(serialized_block.authority.block); - verifier.update(&serialized_block.authority.algorithmBuf()); - verifier.update(&serialized_block.authority.public_key.bytes); + verifier.update(serialized_biscuit.authority.block); + verifier.update(&serialized_biscuit.authority.algorithmBuf()); + verifier.update(&serialized_biscuit.authority.public_key.bytes); try verifier.verify(); - pk = serialized_block.authority.public_key; + pk = serialized_biscuit.authority.public_key; } // Verify the other blocks' signatures - for (serialized_block.blocks.items) |*block| { - var verifier = try block.signature.verifier(pk); + for (serialized_biscuit.blocks.items) |*block| { + // Verify the block signature + { + var verifier = try block.signature.verifier(pk); + + verifier.update(block.block); + if (block.external_signature) |external_signature| { + verifier.update(&external_signature.signature.toBytes()); + } + verifier.update(&block.algorithmBuf()); + verifier.update(&block.public_key.bytes); - verifier.update(block.block); - verifier.update(&block.algorithmBuf()); - verifier.update(&block.public_key.bytes); + try verifier.verify(); + } - try verifier.verify(); + // Verify the external signature (where one exists) + if (block.external_signature) |external_signature| { + var external_verifier = try 
external_signature.signature.verifier(external_signature.public_key); + external_verifier.update(block.block); + external_verifier.update(&block.algorithm2Buf()); + external_verifier.update(&pk.bytes); + try external_verifier.verify(); + } pk = block.public_key; } // Check the proof - switch (serialized_block.proof) { + switch (serialized_biscuit.proof) { .next_secret => |next_secret| { if (!std.mem.eql(u8, &pk.bytes, &next_secret.publicKeyBytes())) { return error.SecretKeyProofFailedMismatchedPublicKeys; } }, .final_signature => |final_signature| { - var last_block = if (serialized_block.blocks.items.len == 0) serialized_block.authority else serialized_block.blocks.items[serialized_block.blocks.items.len - 1]; + var last_block = if (serialized_biscuit.blocks.items.len == 0) serialized_biscuit.authority else serialized_biscuit.blocks.items[serialized_biscuit.blocks.items.len - 1]; var verifier = try final_signature.verifier(pk); verifier.update(last_block.block); @@ -139,7 +159,7 @@ test { const bytes = try decode.urlSafeBase64ToBytes(allocator, token); defer allocator.free(bytes); - var b = try SerializedBiscuit.initFromBytes(allocator, bytes, public_key); + var b = try SerializedBiscuit.fromBytes(allocator, bytes, public_key); defer b.deinit(); } } diff --git a/biscuit-format/src/signed_block.zig b/biscuit-format/src/signed_block.zig index 1e5825e..7d6344e 100644 --- a/biscuit-format/src/signed_block.zig +++ b/biscuit-format/src/signed_block.zig @@ -7,6 +7,7 @@ pub const SignedBlock = struct { algorithm: schema.PublicKey.Algorithm, signature: Ed25519.Signature, public_key: Ed25519.PublicKey, + external_signature: ?ExternalSignature, pub fn fromDecodedBlock(schema_signed_block: schema.SignedBlock) !SignedBlock { const block_signature = schema_signed_block.signature.getSlice(); @@ -26,11 +27,35 @@ pub const SignedBlock = struct { @memcpy(&pubkey_buf, block_public_key); const public_key = try Ed25519.PublicKey.fromBytes(pubkey_buf); + const external_signature: ?ExternalSignature = if (schema_signed_block.externalSignature) |ext_sig| blk: { + const block_external_signature = ext_sig.signature.getSlice(); + const required_block_external_key = ext_sig.publicKey orelse return error.ExpectedExternalPublicKey; + const block_external_public_key = required_block_external_key.key.getSlice(); + + const algo = required_block_external_key.algorithm; + + std.debug.print("ALGORITHM = {}\n", .{algo}); + + if (block_external_signature.len != Ed25519.Signature.encoded_length) return error.IncorrectBlockExternalSignatureLength; + if (block_external_public_key.len != Ed25519.PublicKey.encoded_length) return error.IncorrectBlockExternalPublicKeyLength; + + var ext_sign_buf: [Ed25519.Signature.encoded_length]u8 = undefined; + @memcpy(&ext_sign_buf, block_external_signature); + const extenral_signature = Ed25519.Signature.fromBytes(ext_sign_buf); + + var ext_pubkey_buf: [Ed25519.PublicKey.encoded_length]u8 = undefined; + @memcpy(&ext_pubkey_buf, block_external_public_key); + const external_public_key = try Ed25519.PublicKey.fromBytes(ext_pubkey_buf); + + break :blk .{ .signature = extenral_signature, .public_key = external_public_key }; + } else null; + return .{ .block = schema_signed_block.block.getSlice(), .algorithm = algorithm, .signature = signature, .public_key = public_key, + .external_signature = external_signature, }; } @@ -39,4 +64,16 @@ pub const SignedBlock = struct { std.mem.writeInt(u32, buf[0..], @as(u32, @bitCast(@intFromEnum(signed_block.algorithm))), @import("builtin").cpu.arch.endian()); 
return buf; } + + // FIXME: we should take the algorithm from the appropriate key + pub fn algorithm2Buf(_: *SignedBlock) [4]u8 { + var buf: [4]u8 = undefined; + std.mem.writeInt(u32, buf[0..], @as(u32, 0), @import("builtin").cpu.arch.endian()); + return buf; + } +}; + +const ExternalSignature = struct { + signature: Ed25519.Signature, + public_key: Ed25519.PublicKey, }; diff --git a/biscuit-parser/README.md b/biscuit-parser/README.md new file mode 100644 index 0000000..d22ff2a --- /dev/null +++ b/biscuit-parser/README.md @@ -0,0 +1 @@ +# biscuit-parser diff --git a/biscuit-parser/build.zig b/biscuit-parser/build.zig new file mode 100644 index 0000000..e5e89f2 --- /dev/null +++ b/biscuit-parser/build.zig @@ -0,0 +1,55 @@ +const std = @import("std"); + +// Although this function looks imperative, note that its job is to +// declaratively construct a build graph that will be executed by an external +// runner. +pub fn build(b: *std.Build) void { + // Standard target options allows the person running `zig build` to choose + // what target to build for. Here we do not override the defaults, which + // means any target is allowed, and the default is native. Other options + // for restricting supported target set are available. + const target = b.standardTargetOptions(.{}); + + // Standard optimization options allow the person running `zig build` to select + // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do not + // set a preferred release mode, allowing the user to decide how to optimize. + const optimize = b.standardOptimizeOption(.{}); + + const ziglyph = b.dependency("ziglyph", .{ .optimize = optimize, .target = target }); + const schema = b.dependency("biscuit-schema", .{ .target = target, .optimize = optimize }); + const format = b.dependency("biscuit-format", .{ .target = target, .optimize = optimize }); + const builder = b.dependency("biscuit-builder", .{ .target = target, .optimize = optimize }); + const datalog = b.dependency("biscuit-datalog", .{ .target = target, .optimize = optimize }); + + _ = b.addModule("biscuit-parser", .{ + .root_source_file = .{ .path = "src/parser.zig" }, + .imports = &.{ + .{ .name = "biscuit-schema", .module = schema.module("biscuit-schema") }, + .{ .name = "biscuit-format", .module = format.module("biscuit-format") }, + .{ .name = "biscuit-builder", .module = builder.module("biscuit-builder") }, + .{ .name = "biscuit-datalog", .module = datalog.module("biscuit-datalog") }, + .{ .name = "ziglyph", .module = ziglyph.module("ziglyph") }, + }, + }); + + // Creates a step for unit testing. This only builds the test executable + // but does not run it. + const lib_unit_tests = b.addTest(.{ + .root_source_file = .{ .path = "src/parser.zig" }, + .target = target, + .optimize = optimize, + }); + lib_unit_tests.root_module.addImport("biscuit-schema", schema.module("biscuit-schema")); + lib_unit_tests.root_module.addImport("biscuit-format", format.module("biscuit-format")); + lib_unit_tests.root_module.addImport("biscuit-builder", builder.module("biscuit-builder")); + lib_unit_tests.root_module.addImport("biscuit-datalog", datalog.module("biscuit-datalog")); + lib_unit_tests.root_module.addImport("ziglyph", ziglyph.module("ziglyph")); + + const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests); + + // Similar to creating the run step earlier, this exposes a `test` step to + // the `zig build --help` menu, providing a way for the user to request + // running the unit tests. 
+ const test_step = b.step("test", "Run unit tests"); + test_step.dependOn(&run_lib_unit_tests.step); +} diff --git a/biscuit-parser/build.zig.zon b/biscuit-parser/build.zig.zon new file mode 100644 index 0000000..54f2748 --- /dev/null +++ b/biscuit-parser/build.zig.zon @@ -0,0 +1,70 @@ +.{ + .name = "biscuit-parser", + // This is a [Semantic Version](https://semver.org/). + // In a future version of Zig it will be used for package deduplication. + .version = "0.0.0", + + // This field is optional. + // This is currently advisory only; Zig does not yet do anything + // with this value. + //.minimum_zig_version = "0.11.0", + + // This field is optional. + // Each dependency must either provide a `url` and `hash`, or a `path`. + // `zig build --fetch` can be used to fetch all dependencies of a package, recursively. + // Once all dependencies are fetched, `zig build` no longer requires + // internet connectivity. + .dependencies = .{ + // See `zig fetch --save ` for a command-line interface for adding dependencies. + //.example = .{ + // // When updating this field to a new URL, be sure to delete the corresponding + // // `hash`, otherwise you are communicating that you expect to find the old hash at + // // the new URL. + // .url = "https://example.com/foo.tar.gz", + // + // // This is computed from the file contents of the directory of files that is + // // obtained after fetching `url` and applying the inclusion rules given by + // // `paths`. + // // + // // This field is the source of truth; packages do not come from a `url`; they + // // come from a `hash`. `url` is just one of many possible mirrors for how to + // // obtain a package matching this `hash`. + // // + // // Uses the [multihash](https://multiformats.io/multihash/) format. + // .hash = "...", + // + // // When this is provided, the package is found in a directory relative to the + // // build root. In this case the package's hash is irrelevant and therefore not + // // computed. This field and `url` are mutually exclusive. + // .path = "foo", + //}, + .@"biscuit-schema" = .{ .path = "../biscuit-schema" }, + .@"biscuit-format" = .{ .path = "../biscuit-format" }, + .@"biscuit-builder" = .{ .path = "../biscuit-builder" }, + .@"biscuit-datalog" = .{ .path = "../biscuit-datalog" }, + .ziglyph = .{ + .url = "https://codeberg.org/dude_the_builder/ziglyph/archive/947ed39203bf90412e3d16cbcf936518b6f23af0.tar.gz", + .hash = "12208b23d1eb6dcb929e85346524db8f8b8aa1401bdf8a97dee1e0cfb55da8d5fb42", + }, + }, + + // Specifies the set of files and directories that are included in this package. + // Only files and directories listed here are included in the `hash` that + // is computed for this package. + // Paths are relative to the build root. Use the empty string (`""`) to refer to + // the build root itself. + // A directory listed here means that all files within, recursively, are included. + .paths = .{ + // This makes *all* files, recursively, included in this package. It is generally + // better to explicitly list the files and directories instead, to insure that + // fetching from tarballs, file system paths, and version control all result + // in the same contents hash. + "", + // For example... 
+ //"build.zig", + //"build.zig.zon", + //"src", + //"LICENSE", + //"README.md", + }, +} diff --git a/biscuit-parser/src/parser.zig b/biscuit-parser/src/parser.zig new file mode 100644 index 0000000..7e8e032 --- /dev/null +++ b/biscuit-parser/src/parser.zig @@ -0,0 +1,1113 @@ +const std = @import("std"); +const ziglyph = @import("ziglyph"); +const datalog = @import("biscuit-datalog"); +const Term = @import("biscuit-builder").Term; +const Fact = @import("biscuit-builder").Fact; +const Check = @import("biscuit-builder").Check; +const Rule = @import("biscuit-builder").Rule; +const Predicate = @import("biscuit-builder").Predicate; +const Expression = @import("biscuit-builder").Expression; +const Scope = @import("biscuit-builder").Scope; +const Date = @import("biscuit-builder").Date; +const Policy = @import("biscuit-builder").Policy; +const Ed25519 = std.crypto.sign.Ed25519; + +pub const Parser = struct { + input: []const u8, + offset: usize = 0, + allocator: std.mem.Allocator, + + pub fn init(allocator: std.mem.Allocator, input: []const u8) Parser { + return .{ .input = input, .allocator = allocator }; + } + + pub fn fact(parser: *Parser) !Fact { + return .{ .predicate = try parser.factPredicate(), .variables = null }; + } + + pub fn factPredicate(parser: *Parser) !Predicate { + const name = parser.readName(); + + std.debug.print("name = {s}\n", .{name}); + + parser.skipWhiteSpace(); + + // Consume left paren + try parser.expect('('); + + // Parse terms + var terms = std.ArrayList(Term).init(parser.allocator); + + var it = parser.factTermsIterator(); + while (try it.next()) |trm| { + try terms.append(trm); + + if (parser.peek()) |peeked| { + if (peeked != ',') break; + } else { + break; + } + } + + try parser.expect(')'); + + return .{ .name = name, .terms = terms }; + } + + const FactTermIterator = struct { + parser: *Parser, + + pub fn next(it: *FactTermIterator) !?Term { + it.parser.skipWhiteSpace(); + + return try it.parser.factTerm(); + } + }; + + pub fn factTermsIterator(parser: *Parser) FactTermIterator { + return .{ .parser = parser }; + } + + const TermIterator = struct { + parser: *Parser, + + pub fn next(it: *TermIterator) !?Term { + it.parser.skipWhiteSpace(); + + return try it.parser.term(); + } + }; + + pub fn termsIterator(parser: *Parser) TermIterator { + return .{ .parser = parser }; + } + + pub fn term(parser: *Parser) !Term { + const rst = parser.rest(); + + variable_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.variable() catch break :variable_blk; + + parser.offset += term_parser.offset; + + return .{ .variable = value }; + } + + string_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.string() catch break :string_blk; + + parser.offset += term_parser.offset; + + return .{ .string = value }; + } + + date_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.date() catch break :date_blk; + + parser.offset += term_parser.offset; + + return .{ .date = value }; + } + + number_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.number(i64) catch break :number_blk; + + parser.offset += term_parser.offset; + + return .{ .integer = value }; + } + + bool_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.boolean() catch break :bool_blk; + + parser.offset += term_parser.offset; + + return .{ .bool = value }; + } + + return error.NoFactTermFound; + } + + pub fn 
factTerm(parser: *Parser) !Term { + const rst = parser.rest(); + + try parser.reject('$'); // Variables are disallowed in a fact term + + string_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.string() catch break :string_blk; + + parser.offset += term_parser.offset; + + return .{ .string = value }; + } + + date_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.date() catch break :date_blk; + + parser.offset += term_parser.offset; + + return .{ .date = value }; + } + + number_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.number(i64) catch break :number_blk; + + parser.offset += term_parser.offset; + + return .{ .integer = value }; + } + + bool_blk: { + var term_parser = Parser.init(parser.allocator, rst); + + const value = term_parser.boolean() catch break :bool_blk; + + parser.offset += term_parser.offset; + + return .{ .bool = value }; + } + + return error.NoFactTermFound; + } + + pub fn predicate(parser: *Parser) !Predicate { + const name = parser.readName(); + + parser.skipWhiteSpace(); + + // Consume left paren + try parser.expect('('); + + // Parse terms + var terms = std.ArrayList(Term).init(parser.allocator); + + var it = parser.termsIterator(); + while (try it.next()) |trm| { + try terms.append(trm); + + if (parser.peek()) |peeked| { + if (peeked == ',') { + parser.offset += 1; + continue; + } + } + + break; + } + + try parser.expect(')'); + + return .{ .name = name, .terms = terms }; + } + + fn variable(parser: *Parser) ![]const u8 { + try parser.expect('$'); + + const start = parser.offset; + + for (parser.rest()) |c| { + if (ziglyph.isAlphaNum(c) or c == '_') { + parser.offset += 1; + continue; + } + + break; + } + + return parser.input[start..parser.offset]; + } + + // FIXME: properly implement string parsing + fn string(parser: *Parser) ![]const u8 { + try parser.expect('"'); + + const start = parser.offset; + + while (parser.peek()) |peeked| { + defer parser.offset += 1; + if (peeked == '"') { + return parser.input[start..parser.offset]; + } + } + + return error.ExpectedStringTerm; + } + + fn date(parser: *Parser) !u64 { + const year = try parser.number(i32); + + try parser.expect('-'); + + const month = try parser.number(u8); + if (month < 1 or month > 12) return error.MonthOutOfRange; + + try parser.expect('-'); + + const day = try parser.number(u8); + if (!Date.isDayMonthYearValid(i32, year, month, day)) return error.InvalidDayMonthYearCombination; + + try parser.expect('T'); + + const hour = try parser.number(u8); + if (hour > 23) return error.HoyrOutOfRange; + + try parser.expect(':'); + + const minute = try parser.number(u8); + if (minute > 59) return error.MinuteOutOfRange; + + try parser.expect(':'); + + const second = try parser.number(u8); + if (second > 59) return error.SecondOutOfRange; + + try parser.expect('Z'); + + const d: Date = .{ + .year = year, + .month = month, + .day = day, + .hour = hour, + .minute = minute, + .second = second, + .nanosecond = 0, + .utc_offset = 0, + }; + + return d.unixEpoch(); + } + + fn number(parser: *Parser, comptime T: type) !T { + const start = parser.offset; + + for (parser.rest()) |c| { + if (ziglyph.isAsciiDigit(c)) { + parser.offset += 1; + continue; + } + + break; + } + + const text = parser.input[start..parser.offset]; + + return try std.fmt.parseInt(T, text, 10); + } + + fn boolean(parser: *Parser) !bool { + if (std.mem.startsWith(u8, parser.rest(), "true")) { + parser.offset += 
"term".len; + return true; + } + + if (std.mem.startsWith(u8, parser.rest(), "false")) { + parser.offset += "false".len; + return false; + } + + return error.ExpectedBooleanTerm; + } + + pub fn policy(parser: *Parser) !Policy { + var kind: Policy.Kind = undefined; + + if (std.mem.startsWith(u8, parser.rest(), "allow if")) { + parser.offset += "allow if".len; + kind = .allow; + } else if (std.mem.startsWith(u8, parser.rest(), "deny if")) { + parser.offset += "deny if".len; + kind = .deny; + } else { + return error.UnexpectedPolicyKind; + } + + const queries = try parser.checkBody(); + + return .{ .kind = kind, .queries = queries }; + } + + pub fn check(parser: *Parser) !Check { + var kind: datalog.Check.Kind = undefined; + + if (std.mem.startsWith(u8, parser.rest(), "check if")) { + parser.offset += "check if".len; + kind = .one; + } else if (std.mem.startsWith(u8, parser.rest(), "check all")) { + parser.offset += "check all".len; + kind = .all; + } else { + return error.UnexpectedCheckKind; + } + + const queries = try parser.checkBody(); + + return .{ .kind = kind, .queries = queries }; + } + + fn checkBody(parser: *Parser) !std.ArrayList(Rule) { + var queries = std.ArrayList(Rule).init(parser.allocator); + + const required_body = try parser.ruleBody(); + + try queries.append(.{ + .head = .{ .name = "query", .terms = std.ArrayList(Term).init(parser.allocator) }, + .body = required_body.predicates, + .expressions = required_body.expressions, + .scopes = required_body.scopes, + .variables = null, + }); + + while (true) { + parser.skipWhiteSpace(); + + if (!std.mem.startsWith(u8, parser.rest(), "or")) break; + + parser.offset += "or".len; + + const body = try parser.ruleBody(); + + try queries.append(.{ + .head = .{ .name = "query", .terms = std.ArrayList(Term).init(parser.allocator) }, + .body = body.predicates, + .expressions = body.expressions, + .scopes = body.scopes, + .variables = null, + }); + } + + return queries; + } + + pub fn rule(parser: *Parser) !Rule { + const head = try parser.predicate(); + + parser.skipWhiteSpace(); + + if (!std.mem.startsWith(u8, parser.rest(), "<-")) return error.ExpectedArrow; + + parser.offset += "<-".len; + + const body = try parser.ruleBody(); + + return .{ + .head = head, + .body = body.predicates, + .expressions = body.expressions, + .scopes = body.scopes, + .variables = null, + }; + } + + pub fn ruleBody(parser: *Parser) !struct { predicates: std.ArrayList(Predicate), expressions: std.ArrayList(Expression), scopes: std.ArrayList(Scope) } { + var predicates = std.ArrayList(Predicate).init(parser.allocator); + var expressions = std.ArrayList(Expression).init(parser.allocator); + var scps = std.ArrayList(Scope).init(parser.allocator); + + while (true) { + parser.skipWhiteSpace(); + std.debug.print("{s}: \"{s}\"\n", .{ @src().fn_name, parser.rest() }); + + // Try parsing a predicate + predicate_blk: { + var predicate_parser = Parser.init(parser.allocator, parser.rest()); + + const p = predicate_parser.predicate() catch break :predicate_blk; + + parser.offset += predicate_parser.offset; + + try predicates.append(p); + + parser.skipWhiteSpace(); + + if (parser.peek()) |peeked| { + if (peeked == ',') { + parser.offset += 1; + continue; + } + } + } + + // Otherwise try parsing an expression + expression_blk: { + var expression_parser = Parser.init(parser.allocator, parser.rest()); + + const e = expression_parser.expression() catch break :expression_blk; + + parser.offset += expression_parser.offset; + + try expressions.append(e); + + parser.skipWhiteSpace(); 
+ + if (parser.peek()) |peeked| { + if (peeked == ',') { + parser.offset += 1; + continue; + } + } + } + + // We haven't found a predicate or expression so we're done, + // other than potentially parsing a scope + break; + } + + scopes_blk: { + var scope_parser = Parser.init(parser.allocator, parser.rest()); + + const s = scope_parser.scopes(parser.allocator) catch break :scopes_blk; + + parser.offset += scope_parser.offset; + + scps = s; + } + + return .{ .predicates = predicates, .expressions = expressions, .scopes = scps }; + } + + fn expression(parser: *Parser) ParserError!Expression { + std.debug.print("Attempting to parser {s}\n", .{parser.rest()}); + parser.skipWhiteSpace(); + const e = try parser.expr(); + + std.debug.print("parsed expression = {any}\n", .{e}); + + return e; + } + + fn expr(parser: *Parser) ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + var e = try parser.expr1(); + + std.debug.print("[{s}] e = {any}\n", .{ @src().fn_name, e }); + + while (true) { + parser.skipWhiteSpace(); + if (parser.rest().len == 0) break; + + const op = parser.binaryOp0() catch break; + + parser.skipWhiteSpace(); + + const e2 = try parser.expr1(); + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + e = try Expression.binary(parser.allocator, op, e, e2); + } + + return e; + } + + fn expr1(parser: *Parser) ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + var e = try parser.expr2(); + + std.debug.print("[{s}] e = {any}\n", .{ @src().fn_name, e }); + + while (true) { + parser.skipWhiteSpace(); + if (parser.rest().len == 0) break; + + const op = parser.binaryOp1() catch break; + + parser.skipWhiteSpace(); + + const e2 = try parser.expr2(); + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + e = try Expression.binary(parser.allocator, op, e, e2); + } + + return e; + } + + fn expr2(parser: *Parser) ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + var e = try parser.expr3(); + + std.debug.print("[{s}] e = {any}\n", .{ @src().fn_name, e }); + + while (true) { + parser.skipWhiteSpace(); + if (parser.rest().len == 0) break; + + const op = parser.binaryOp2() catch break; + + parser.skipWhiteSpace(); + + const e2 = try parser.expr3(); + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + e = try Expression.binary(parser.allocator, op, e, e2); + } + + return e; + } + + fn expr3(parser: *Parser) ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + var e = try parser.expr4(); + + std.debug.print("[{s}] e = {any}\n", .{ @src().fn_name, e }); + + while (true) { + parser.skipWhiteSpace(); + if (parser.rest().len == 0) break; + + const op = parser.binaryOp3() catch break; + + parser.skipWhiteSpace(); + + const e2 = try parser.expr4(); + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + e = try Expression.binary(parser.allocator, op, e, e2); + } + + return e; + } + + fn expr4(parser: *Parser) ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + var e = try parser.expr5(); + + std.debug.print("[{s}] e = {any}\n", .{ @src().fn_name, e }); + + while (true) { + parser.skipWhiteSpace(); + if (parser.rest().len == 0) break; + + const op = parser.binaryOp4() catch break; + + parser.skipWhiteSpace(); + + const e2 = try parser.expr5(); + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + e = try Expression.binary(parser.allocator, op, e, e2); + } + + return e; + } + + fn expr5(parser: *Parser) 
ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + var e = try parser.expr6(); + + std.debug.print("[{s}] e = {any}\n", .{ @src().fn_name, e }); + + while (true) { + parser.skipWhiteSpace(); + if (parser.rest().len == 0) break; + + const op = parser.binaryOp5() catch break; + + parser.skipWhiteSpace(); + + const e2 = try parser.expr6(); + + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + e = try Expression.binary(parser.allocator, op, e, e2); + } + + return e; + } + + fn expr6(parser: *Parser) ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + var e = try parser.expr7(); + + std.debug.print("[{s}] e = {any}\n", .{ @src().fn_name, e }); + + while (true) { + parser.skipWhiteSpace(); + if (parser.rest().len == 0) break; + + const op = parser.binaryOp6() catch break; + + parser.skipWhiteSpace(); + + const e2 = try parser.expr7(); + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + e = try Expression.binary(parser.allocator, op, e, e2); + } + + return e; + } + + fn expr7(parser: *Parser) ParserError!Expression { + std.debug.print("[{s}]\n", .{@src().fn_name}); + const e1 = try parser.exprTerm(); + + std.debug.print("[{s}] e1 = {any}\n", .{ @src().fn_name, e1 }); + + parser.skipWhiteSpace(); + + if (!parser.startsWith(".")) return e1; + try parser.expect('.'); + + const op = try parser.binaryOp7(); + parser.skipWhiteSpace(); + + std.debug.print("[{s}] op = {any}, rest = \"{s}\"\n", .{ @src().fn_name, op, parser.rest() }); + + // if (!parser.startsWith("(")) return error.MissingLeftParen; + try parser.expect('('); + + parser.skipWhiteSpace(); + + std.debug.print("here\n", .{}); + + const e2 = try parser.expr(); + + std.debug.print("[{s}] e2 = {any}\n", .{ @src().fn_name, e2 }); + + parser.skipWhiteSpace(); + + // if (!parser.startsWith(")")) return error.MissingRightParen; + try parser.expect(')'); + + parser.skipWhiteSpace(); + + return try Expression.binary(parser.allocator, op, e1, e2); + } + + fn binaryOp0(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("&&")) { + try parser.expectString("&&"); + return .@"and"; + } + + if (parser.startsWith("||")) { + try parser.expectString("||"); + return .@"or"; + } + + return error.UnexpectedOp; + } + + fn binaryOp1(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("<=")) { + try parser.expectString("<="); + return .less_or_equal; + } + + if (parser.startsWith(">=")) { + try parser.expectString(">="); + return .greater_or_equal; + } + + if (parser.startsWith("<")) { + try parser.expectString("<"); + return .less_than; + } + + if (parser.startsWith(">")) { + try parser.expectString(">"); + return .greater_than; + } + + if (parser.startsWith("==")) { + try parser.expectString("=="); + return .equal; + } + + if (parser.startsWith("!=")) { + try parser.expectString("!="); + return .not_equal; + } + + return error.UnexpectedOp; + } + + fn binaryOp2(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("+")) { + try parser.expectString("+"); + return .add; + } + + if (parser.startsWith("-")) { + try parser.expectString("-"); + return .sub; + } + + return error.UnexpectedOp; + } + + fn binaryOp3(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("^")) { + try parser.expectString("^"); + return .bitwise_xor; + } + + return error.UnexpectedOp; + } + + fn binaryOp4(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("|") and !parser.startsWith("||")) { + try 
parser.expectString("|"); + return .bitwise_or; + } + + return error.UnexpectedOp; + } + + fn binaryOp5(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("&") and !parser.startsWith("&&")) { + try parser.expectString("&"); + return .bitwise_and; + } + + return error.UnexpectedOp; + } + + fn binaryOp6(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("*")) { + try parser.expectString("*"); + return .mul; + } + + if (parser.startsWith("/")) { + try parser.expectString("/"); + return .div; + } + + return error.UnexpectedOp; + } + + fn binaryOp7(parser: *Parser) ParserError!Expression.BinaryOp { + if (parser.startsWith("contains")) { + try parser.expectString("contains"); + return .contains; + } + + if (parser.startsWith("starts_with")) { + try parser.expectString("starts_with"); + return .prefix; + } + + if (parser.startsWith("ends_with")) { + try parser.expectString("ends_with"); + return .suffix; + } + + if (parser.startsWith("matches")) { + try parser.expectString("matches"); + return .regex; + } + + return error.UnexpectedOp; + } + + fn exprTerm(parser: *Parser) ParserError!Expression { + // Try to parse unary + unary_blk: { + var unary_parser = Parser.init(parser.allocator, parser.rest()); + + const p = unary_parser.unary() catch break :unary_blk; + + parser.offset += unary_parser.offset; + + return p; + } + + // Otherwise we expect term + const term1 = try parser.term(); + + return try Expression.value(term1); + } + + fn unary(parser: *Parser) ParserError!Expression { + parser.skipWhiteSpace(); + + if (parser.peek()) |c| { + if (c == '!') { + try parser.expect('!'); + parser.skipWhiteSpace(); + + const e = try parser.expr(); + + return try Expression.unary(parser.allocator, .negate, e); + } + + if (c == '(') { + return try parser.unaryParens(); + } + } + + var e: Expression = undefined; + if (parser.term()) |t1| { + parser.skipWhiteSpace(); + e = try Expression.value(t1); + } else |_| { + e = try parser.unaryParens(); + parser.skipWhiteSpace(); + } + + if (parser.expectString(".length()")) |_| { + return try Expression.unary(parser.allocator, .length, e); + } else |_| { + return error.UnexpectedToken; + } + + return error.UnexpectedToken; + } + + fn unaryParens(parser: *Parser) ParserError!Expression { + try parser.expectString("("); + + parser.skipWhiteSpace(); + + const e = try parser.expr(); + + parser.skipWhiteSpace(); + + try parser.expectString(")"); + + return try Expression.unary(parser.allocator, .parens, e); + } + + fn scopes(parser: *Parser, allocator: std.mem.Allocator) !std.ArrayList(Scope) { + try parser.expectString("trusting"); + + parser.skipWhiteSpace(); + + var scps = std.ArrayList(Scope).init(allocator); + + while (true) { + parser.skipWhiteSpace(); + + const scp = try parser.scope(allocator); + + try scps.append(scp); + + parser.skipWhiteSpace(); + + if (!parser.startsWith(",")) break; + + try parser.expectString(","); + } + + return scps; + } + + fn scope(parser: *Parser, _: std.mem.Allocator) !Scope { + parser.skipWhiteSpace(); + + if (parser.startsWith("authority")) { + try parser.expectString("authority"); + + return .{ .authority = {} }; + } + + if (parser.startsWith("previous")) { + try parser.expectString("previous"); + + return .{ .previous = {} }; + } + + if (parser.startsWith("{")) { + try parser.expectString("{"); + + const parameter = parser.readName(); + + try parser.expectString("}"); + + return .{ .parameter = parameter }; + } + + return .{ .public_key = try parser.publicKey() }; + } + + fn 
publicKey(parser: *Parser) !Ed25519.PublicKey { + try parser.expectString("ed25519/"); + + const h = try parser.hex(); + + std.debug.print("publickey = {s}\n", .{h}); + + var out_buf: [Ed25519.PublicKey.encoded_length]u8 = undefined; + + _ = try std.fmt.hexToBytes(out_buf[0..], h); + + return try Ed25519.PublicKey.fromBytes(out_buf); + } + + fn peek(parser: *Parser) ?u8 { + if (parser.input[parser.offset..].len == 0) return null; + + return parser.input[parser.offset]; + } + + fn rest(parser: *Parser) []const u8 { + return parser.input[parser.offset..]; + } + + /// Expect (and consume) char. + fn expect(parser: *Parser, char: u8) !void { + const peeked = parser.peek() orelse return error.ExpectedMoreInput; + if (peeked != char) return error.ExpectedChar; + + parser.offset += 1; + } + + /// Expect and consume string. + fn expectString(parser: *Parser, str: []const u8) !void { + if (!std.mem.startsWith(u8, parser.rest(), str)) return error.UnexpectedString; + + parser.offset += str.len; + } + + fn startsWith(parser: *Parser, str: []const u8) bool { + return std.mem.startsWith(u8, parser.rest(), str); + } + + /// Reject char. Does not consume the character + fn reject(parser: *Parser, char: u8) !void { + const peeked = parser.peek() orelse return error.ExpectedMoreInput; + if (peeked == char) return error.DisallowedChar; + } + + fn hex(parser: *Parser) ![]const u8 { + const start = parser.offset; + + for (parser.rest()) |c| { + if (ziglyph.isHexDigit(c)) { + parser.offset += 1; + continue; + } + + break; + } + + return parser.input[start..parser.offset]; + } + + // FIXME: this should error? + fn readName(parser: *Parser) []const u8 { + const start = parser.offset; + + for (parser.rest()) |c| { + if (ziglyph.isAlphaNum(c) or c == '_' or c == ':') { + parser.offset += 1; + continue; + } + + break; + } + + return parser.input[start..parser.offset]; + } + + fn skipWhiteSpace(parser: *Parser) void { + for (parser.rest()) |c| { + if (ziglyph.isWhiteSpace(c)) { + parser.offset += 1; + continue; + } + + break; + } + } +}; + +const ParserError = error{ + ExpectedMoreInput, + DisallowedChar, + UnexpectedString, + ExpectedChar, + NoFactTermFound, + UnexpectedOp, + MissingLeftParen, + MissingRightParen, + OutOfMemory, + UnexpectedToken, +}; + +// test "parse fact predicate" { +// const testing = std.testing; +// const input: []const u8 = +// \\read(true) +// ; + +// var parser = Parser.init(input); + +// const r = try parser.factPredicate(testing.allocator); +// defer r.deinit(); + +// std.debug.print("{any}\n", .{r}); +// } + +// test "parse rule body" { +// const testing = std.testing; +// const input: []const u8 = +// \\query(false) <- read(true), write(false) +// ; + +// var parser = Parser.init(testing.allocator, input); + +// const r = try parser.rule(); +// defer r.deinit(); + +// std.debug.print("{any}\n", .{r}); +// } + +// test "parse rule body with variables" { +// const testing = std.testing; +// const input: []const u8 = +// \\query($0) <- read($0), write(false) +// ; + +// var parser = Parser.init(testing.allocator, input); + +// const r = try parser.rule(); +// defer r.deinit(); + +// std.debug.print("{any}\n", .{r}); +// } + +// test "parse check" { +// const testing = std.testing; +// const input: []const u8 = +// \\check if right($0, $1), resource($0), operation($1) +// ; + +// var parser = Parser.init(testing.allocator, input); + +// const r = try parser.check(); +// defer r.deinit(); + +// std.debug.print("{any}\n", .{r}); +// } + +test "parse check with expression" { + const testing 
= std.testing; + const input: []const u8 = + \\check if right($0, $1), resource($0), operation($1), $0.contains("file") + ; + + var parser = Parser.init(testing.allocator, input); + + const r = try parser.check(); + defer r.deinit(); + + std.debug.print("{any}\n", .{r}); +} diff --git a/biscuit-samples/src/main.zig b/biscuit-samples/src/main.zig index bf73738..a7dda48 100644 --- a/biscuit-samples/src/main.zig +++ b/biscuit-samples/src/main.zig @@ -2,6 +2,7 @@ const std = @import("std"); const mem = std.mem; const decode = @import("biscuit-format").decode; const Biscuit = @import("biscuit").Biscuit; +const AuthorizerError = @import("biscuit").AuthorizerError; const Samples = @import("sample.zig").Samples; const Result = @import("sample.zig").Result; @@ -39,24 +40,28 @@ pub fn main() anyerror!void { const token = try std.fs.cwd().readFileAlloc(alloc, testcase.filename, 0xFFFFFFF); - for (testcase.validations.map.values()) |validation| { - try validate(alloc, token, public_key, validation.result); + for (testcase.validations.map.values(), 0..) |validation, i| { + errdefer std.debug.print("Error on validation {} of {s}\n", .{ i, testcase.filename }); + try validate(alloc, token, public_key, validation.result, validation.authorizer_code); } } } -pub fn validate(alloc: mem.Allocator, token: []const u8, public_key: std.crypto.sign.Ed25519.PublicKey, result: Result) !void { +pub fn validate(alloc: mem.Allocator, token: []const u8, public_key: std.crypto.sign.Ed25519.PublicKey, result: Result, authorizer_code: []const u8) !void { + var errors = std.ArrayList(AuthorizerError).init(alloc); + defer errors.deinit(); + switch (result) { - .Ok => try runValidation(alloc, token, public_key), + .Ok => try runValidation(alloc, token, public_key, authorizer_code, &errors), .Err => |e| { switch (e) { .Format => |f| switch (f) { - .InvalidSignatureSize => runValidation(alloc, token, public_key) catch |err| switch (err) { + .InvalidSignatureSize => runValidation(alloc, token, public_key, authorizer_code, &errors) catch |err| switch (err) { error.IncorrectBlockSignatureLength => return, else => return err, }, .Signature => |s| switch (s) { - .InvalidSignature => runValidation(alloc, token, public_key) catch |err| switch (err) { + .InvalidSignature => runValidation(alloc, token, public_key, authorizer_code, &errors) catch |err| switch (err) { error.SignatureVerificationFailed, error.InvalidEncoding, => return, @@ -65,14 +70,72 @@ pub fn validate(alloc: mem.Allocator, token: []const u8, public_key: std.crypto. 
}, }, .FailedLogic => |f| switch (f) { - .Unauthorized => runValidation(alloc, token, public_key) catch |err| switch (err) { + .Unauthorized => |u| runValidation(alloc, token, public_key, authorizer_code, &errors) catch |err| switch (err) { + error.AuthorizationFailed => { + + // Check that we have expected check failures + for (u.checks) |expected_failed_check| { + var check_accounted_for = false; + + switch (expected_failed_check) { + .Block => |expected_failed_block_check| { + for (errors.items) |found_failed_check| { + switch (found_failed_check) { + .no_matching_policy => continue, + .denied_by_policy => continue, + .failed_block_check => |failed_block_check| { + if (failed_block_check.block_id == expected_failed_block_check.block_id and failed_block_check.check_id == expected_failed_block_check.check_id) { + // continue :blk; + check_accounted_for = true; + } + }, + .failed_authorizer_check => return error.NotImplemented, + .unbound_variable => continue, + } + } + }, + .Authorizer => |expected_failed_authorizer_check| { + for (errors.items) |found_failed_check| { + switch (found_failed_check) { + .no_matching_policy => continue, + .denied_by_policy => continue, + .failed_block_check => return error.NotImplemented, + .failed_authorizer_check => |failed_block_check| { + if (failed_block_check.check_id == expected_failed_authorizer_check.check_id) { + // continue :blk; + check_accounted_for = true; + } + }, + .unbound_variable => continue, + } + } + }, + } + + if (!check_accounted_for) return error.ExpectedFailedCheck; + } + + return; + }, else => return err, }, - .InvalidBlockRule => runValidation(alloc, token, public_key) catch |err| switch (err) { + .InvalidBlockRule => |_| runValidation(alloc, token, public_key, authorizer_code, &errors) catch |err| switch (err) { + error.AuthorizationFailed => { + for (errors.items) |found_failed_check| { + switch (found_failed_check) { + .no_matching_policy => continue, + .denied_by_policy => continue, + .failed_block_check => continue, + .failed_authorizer_check => return error.NotImplemented, + .unbound_variable => return, + } + } + }, else => return err, }, }, - .Execution => runValidation(alloc, token, public_key) catch |err| switch (err) { + .Execution => runValidation(alloc, token, public_key, authorizer_code, &errors) catch |err| switch (err) { + error.Overflow => return, else => return err, }, } @@ -82,10 +145,31 @@ pub fn validate(alloc: mem.Allocator, token: []const u8, public_key: std.crypto. 
} } -pub fn runValidation(alloc: mem.Allocator, token: []const u8, public_key: std.crypto.sign.Ed25519.PublicKey) !void { - var b = try Biscuit.initFromBytes(alloc, token, public_key); +pub fn runValidation(alloc: mem.Allocator, token: []const u8, public_key: std.crypto.sign.Ed25519.PublicKey, authorizer_code: []const u8, errors: *std.ArrayList(AuthorizerError)) !void { + var b = try Biscuit.fromBytes(alloc, token, public_key); defer b.deinit(); - var a = b.authorizer(alloc); + var a = try b.authorizer(alloc); defer a.deinit(); + + var it = std.mem.split(u8, authorizer_code, ";"); + while (it.next()) |code| { + const text = std.mem.trim(u8, code, " \n"); + if (text.len == 0) continue; + + if (std.mem.startsWith(u8, text, "check if") or std.mem.startsWith(u8, text, "check all")) { + try a.addCheck(text); + } else if (std.mem.startsWith(u8, text, "allow if") or std.mem.startsWith(u8, text, "deny if")) { + try a.addPolicy(text); + } else if (std.mem.startsWith(u8, text, "revocation_id")) { + // + } else { + try a.addFact(text); + } + } + + _ = a.authorize(errors) catch |err| { + std.debug.print("Authorization failed with errors: {any}\n", .{errors.items}); + return err; + }; } diff --git a/biscuit-samples/src/sample.zig b/biscuit-samples/src/sample.zig index f603edb..13e92da 100644 --- a/biscuit-samples/src/sample.zig +++ b/biscuit-samples/src/sample.zig @@ -60,8 +60,12 @@ pub const Result = union(enum) { }; const World = struct { - facts: [][]const u8, - rules: [][]const u8, - checks: [][]const u8, + facts: []FactOrigin, + rules: []RuleOrigin, + checks: []CheckOrigin, policies: [][]const u8, }; + +const FactOrigin = struct { facts: [][]const u8, origin: []?usize }; +const RuleOrigin = struct { origin: usize, rules: [][]const u8 }; +const CheckOrigin = struct { origin: usize, checks: [][]const u8 }; diff --git a/biscuit-samples/src/samples/samples.json b/biscuit-samples/src/samples/samples.json index 86011e9..432c45d 100644 --- a/biscuit-samples/src/samples/samples.json +++ b/biscuit-samples/src/samples/samples.json @@ -1,19 +1,24 @@ { - "root_private_key": "12aca40167fbdd1a11037e9fd440e3d510d9d9dea70a6646aa4aaf84d718d75a", - "root_public_key": "acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189", + "root_private_key": "99e87b0e9158531eeeb503ff15266e2b23c2a2507b138c9d1b1f2ab458df2d61", + "root_public_key": "1055c750b1a1505937af1537c626ba3263995c33a64758aaafb1275b0312e284", "testcases": [ { "title": "basic token", "filename": "test001_basic.bc", "token": [ { - "symbols": ["file1", "file2"], + "symbols": [ + "file1", + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\nright(\"file2\", \"read\");\nright(\"file1\", \"write\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" @@ -23,16 +28,37 @@ "": { "world": { "facts": [ - "resource(\"file1\")", - "right(\"file1\", \"read\")", - "right(\"file1\", \"write\")", - "right(\"file2\", \"read\")" + { + "origin": [ + null + ], + "facts": [ + "resource(\"file1\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "right(\"file1\", \"read\")", + "right(\"file1\", \"write\")", + "right(\"file2\", \"read\")" + ] + } ], "rules": [], "checks": [ - "check if resource($0), operation(\"read\"), right($0, \"read\")" + { + "origin": 1, + "checks": [ + "check if resource($0), operation(\"read\"), right($0, \"read\")" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if 
true" + ] }, "result": { "Err": { @@ -56,8 +82,8 @@ }, "authorizer_code": "resource(\"file1\");\n\nallow if true;\n", "revocation_ids": [ - "3ee1c0f42ba69ec63b1f39a6b3c57d25a4ccec452233ca6d40530ecfe83af4918fa78d9346f8b7c498545b54663960342b9ed298b2c8bbe2085b80c237b56f09", - "e16ccf0820b02092adb531e36c2e82884c6c6c647b1c85184007f2ace601648afb71faa261b11f9ab352093c96187870f868588b664579c8018864b306bd5007" + "7595a112a1eb5b81a6e398852e6118b7f5b8cbbff452778e655100e5fb4faa8d3a2af52fe2c4f9524879605675fae26adbc4783e0cafc43522fa82385f396c03", + "45f4c14f9d9e8fa044d68be7a2ec8cddb835f575c7b913ec59bd636c70acae9a90db9064ba0b3084290ed0c422bbb7170092a884f5e0202b31e9235bbcc1650d" ] } } @@ -67,13 +93,17 @@ "filename": "test002_different_root_key.bc", "token": [ { - "symbols": ["file1"], + "symbols": [ + "file1" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" @@ -101,13 +131,18 @@ "filename": "test003_invalid_signature_format.bc", "token": [ { - "symbols": ["file1", "file2"], + "symbols": [ + "file1", + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\nright(\"file2\", \"read\");\nright(\"file1\", \"write\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" @@ -133,13 +168,18 @@ "filename": "test004_random_block.bc", "token": [ { - "symbols": ["file1", "file2"], + "symbols": [ + "file1", + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\nright(\"file2\", \"read\");\nright(\"file1\", \"write\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" @@ -167,13 +207,18 @@ "filename": "test005_invalid_signature.bc", "token": [ { - "symbols": ["file1", "file2"], + "symbols": [ + "file1", + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\nright(\"file2\", \"read\");\nright(\"file1\", \"write\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" @@ -201,13 +246,18 @@ "filename": "test006_reordered_blocks.bc", "token": [ { - "symbols": ["file1", "file2"], + "symbols": [ + "file1", + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\nright(\"file2\", \"read\");\nright(\"file1\", \"write\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" @@ -241,19 +291,28 @@ "filename": "test007_scoped_rules.bc", "token": [ { - "symbols": ["user_id", "alice", "file1"], + "symbols": [ + "user_id", + "alice", + "file1" + ], "public_keys": [], "external_key": null, "code": "user_id(\"alice\");\nowner(\"alice\", \"file1\");\n" }, { - "symbols": ["0", "1"], + "symbols": [ + "0", + "1" + ], "public_keys": [], "external_key": null, "code": "right($0, \"read\") <- resource($0), user_id($1), owner($1, $0);\ncheck if resource($0), operation(\"read\"), right($0, \"read\");\n" }, { - "symbols": ["file2"], + "symbols": [ + "file2" + ], "public_keys": [], "external_key": null, "code": 
"owner(\"alice\", \"file2\");\n" @@ -263,19 +322,52 @@ "": { "world": { "facts": [ - "operation(\"read\")", - "owner(\"alice\", \"file1\")", - "owner(\"alice\", \"file2\")", - "resource(\"file2\")", - "user_id(\"alice\")" + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file2\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "owner(\"alice\", \"file1\")", + "user_id(\"alice\")" + ] + }, + { + "origin": [ + 2 + ], + "facts": [ + "owner(\"alice\", \"file2\")" + ] + } ], "rules": [ - "right($0, \"read\") <- resource($0), user_id($1), owner($1, $0)" + { + "origin": 1, + "rules": [ + "right($0, \"read\") <- resource($0), user_id($1), owner($1, $0)" + ] + } ], "checks": [ - "check if resource($0), operation(\"read\"), right($0, \"read\")" + { + "origin": 1, + "checks": [ + "check if resource($0), operation(\"read\"), right($0, \"read\")" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -299,9 +391,9 @@ }, "authorizer_code": "resource(\"file2\");\noperation(\"read\");\n\nallow if true;\n", "revocation_ids": [ - "02d287b0e5b22780192f8351538583c17f7d0200e064b32a1fcf07899e64ffb10e4de324f5c5ebc72c89a63e424317226cf555eb42dae81b2fd4639cf7591108", - "22e75ea200cf7b2b62b389298fe0dec973b7f9c7e54e76c3c41811d72ea82c68227bc9079b7d05986de17ef9301cccdc08f5023455386987d1e6ee4391b19f06", - "140a3631fecae550b51e50b9b822b947fb485c80070b34482fa116cdea560140164a1d0a959b40fed8a727e2f62c0b57635760c488c8bf0eda80ee591558c409" + "4d86c9af808dc2e0583f47282e6f5df3e09dc264d5231ec360b4519e15ddaeec60b25a9bbcb22e8d192f4d36a0da3f9243711e30535b00ee55c53cb1395f230a", + "63208c668c66f3ba6927140ba37533593b25e03459447805d4b2a8b75adeef45794c3d7249afe506ed77ccee276160bb4052a4009302bd34871a440f070b4509", + "d8da982888eae8c038e4894a8c06fc57d8e5f06ad2e972b9cf4bde49ad60804558a0d1938192596c702d8e4f7f12ec19201d7c33d0cd77774a0d879a33880d02" ] } } @@ -311,19 +403,25 @@ "filename": "test008_scoped_checks.bc", "token": [ { - "symbols": ["file1"], + "symbols": [ + "file1" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" }, { - "symbols": ["file2"], + "symbols": [ + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file2\", \"read\");\n" @@ -333,16 +431,44 @@ "": { "world": { "facts": [ - "operation(\"read\")", - "resource(\"file2\")", - "right(\"file1\", \"read\")", - "right(\"file2\", \"read\")" + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file2\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "right(\"file1\", \"read\")" + ] + }, + { + "origin": [ + 2 + ], + "facts": [ + "right(\"file2\", \"read\")" + ] + } ], "rules": [], "checks": [ - "check if resource($0), operation(\"read\"), right($0, \"read\")" + { + "origin": 1, + "checks": [ + "check if resource($0), operation(\"read\"), right($0, \"read\")" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -366,9 +492,9 @@ }, "authorizer_code": "resource(\"file2\");\noperation(\"read\");\n\nallow if true;\n", "revocation_ids": [ - "567682495bf002eb84c46491e40fad8c55943d918c65e2c110b1b88511bf393072c0305a243e3d632ca5f1e9b0ace3e3582de84838c3a258480657087c267f02", - 
"71f0010b1034dbc62c53f67a23947b92ccba46495088567ac7ad5c4d7d65476964bee42053a6a35088110c5918f9c9606057689271fef89d84253cf98e6d4407", - "6d00d5f2a5d25dbfaa19152a81b44328b368e8fb8300b25e36754cfe8b2ce1eb2d1452ce9b1502e6f377a23aa87098fb05b5b073541624a8815ba0610f793005" + "a80c985ddef895518c216f64c65dcd50a5d97d012a94453d79159aed2981654b1fe9748c686c5667604026a94fb8db8a1d02de747df61e99fa9a63ff2878ad00", + "77df45442be86a416aa02fd9d98d6d4703c634a9e3b1d293b41f5dc97849afbe7faeec8c22a210574888acc008fb64fe691ec9e8d2655586f970d9a6b6577000", + "b31398aefe97d3db41ebc445760f216fb3aa7bf7439adcfc3a07489bfcc163970af3f4e20f5460aa24cf841101a5ab114d21acc0ee8d442bae7793b121284900" ] } } @@ -384,7 +510,9 @@ "code": "" }, { - "symbols": ["file1", "expiration"], + "symbols": [ + "file1" + ], "public_keys": [], "external_key": null, "code": "check if resource(\"file1\");\ncheck if time($time), $time <= 2018-12-20T00:00:00Z;\n" @@ -394,16 +522,30 @@ "": { "world": { "facts": [ - "operation(\"read\")", - "resource(\"file1\")", - "time(2020-12-21T09:23:12Z)" + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file1\")", + "time(2020-12-21T09:23:12Z)" + ] + } ], "rules": [], "checks": [ - "check if resource(\"file1\")", - "check if time($time), $time <= 2018-12-20T00:00:00Z" + { + "origin": 1, + "checks": [ + "check if resource(\"file1\")", + "check if time($time), $time <= 2018-12-20T00:00:00Z" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -427,8 +569,8 @@ }, "authorizer_code": "resource(\"file1\");\noperation(\"read\");\ntime(2020-12-21T09:23:12Z);\n\nallow if true;\n", "revocation_ids": [ - "b2474f3e0a5788cdeff811f2599497a04d1ad71ca48dbafb90f20a950d565dda0b86bd6c9072a727c19b6b20a1ae10d8cb88155186550b77016ffd1dca9a6203", - "0d12152670cbefe2fa504af9a92b513f1a48ae460ae5e66aaac4ed9f7dc3cc1c4c510693312b351465062169a2169fc520ce4e17e548d21982c81a74c66a3c0c" + "c248907bb6e5f433bbb5edf6367b399ebefca0d321d0b2ea9fc67f66dc1064ce926adb0c05d90c3e8a2833328b3578f79c4e1bca43583d9bcfb2ba6c37303d00", + "a4edf7aaea8658bb9ae19b3ffe2adcc77cc9f16c249aeb0a85a584b5362f89f27f7c67ac0af16d7170673d6d1fb1563d1934b25ec5a461f6c01fa49805cd5e07" ] } } @@ -438,13 +580,17 @@ "filename": "test010_authorizer_scope.bc", "token": [ { - "symbols": ["file1"], + "symbols": [ + "file1" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\n" }, { - "symbols": ["file2"], + "symbols": [ + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file2\", \"read\");\n" @@ -454,14 +600,44 @@ "": { "world": { "facts": [ - "operation(\"read\")", - "resource(\"file2\")", - "right(\"file1\", \"read\")", - "right(\"file2\", \"read\")" + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file2\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "right(\"file1\", \"read\")" + ] + }, + { + "origin": [ + 1 + ], + "facts": [ + "right(\"file2\", \"read\")" + ] + } ], "rules": [], - "checks": ["check if right($0, $1), resource($0), operation($1)"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 18446744073709551615, + "checks": [ + "check if right($0, $1), resource($0), operation($1)" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -484,8 +660,8 @@ }, "authorizer_code": "resource(\"file2\");\noperation(\"read\");\n\ncheck if right($0, $1), resource($0), operation($1);\n\nallow if true;\n", "revocation_ids": [ - 
"b9ecf192ecb1bbb10e45320c1c86661f0c6b6bd28e89fdd8fa838fe0ab3f754229f7fbbf92ad978d36f744c345c69bc156a2a91a2979a3c235a9d936d401b404", - "839728735701e589c2612e655afa2b53f573480e6a0477ae68ed71587987d1af398a31296bdec0b6eccee9348f4b4c23ca1031e809991626c579fef80b1d380d" + "a80c985ddef895518c216f64c65dcd50a5d97d012a94453d79159aed2981654b1fe9748c686c5667604026a94fb8db8a1d02de747df61e99fa9a63ff2878ad00", + "966eceb2aa937c41b25368808bab6e0698c02a4038de669d007c9c3d43602638a640083558d1576ac80cf3eb2ac6a7585527e0f6c1a65402f0935cf7f4df8005" ] } } @@ -495,7 +671,9 @@ "filename": "test011_authorizer_authority_caveats.bc", "token": [ { - "symbols": ["file1"], + "symbols": [ + "file1" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\n" @@ -505,13 +683,36 @@ "": { "world": { "facts": [ - "operation(\"read\")", - "resource(\"file2\")", - "right(\"file1\", \"read\")" + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file2\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "right(\"file1\", \"read\")" + ] + } ], "rules": [], - "checks": ["check if right($0, $1), resource($0), operation($1)"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 18446744073709551615, + "checks": [ + "check if right($0, $1), resource($0), operation($1)" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -534,7 +735,7 @@ }, "authorizer_code": "resource(\"file2\");\noperation(\"read\");\n\ncheck if right($0, $1), resource($0), operation($1);\n\nallow if true;\n", "revocation_ids": [ - "593d273d141bf23a3e89b55fffe1b3f96f683a022bb763e78f4e49f31a7cf47668c3fd5e0f580727ac9113ede302d34264597f6f1e6c6dd4167836d57aedf504" + "a80c985ddef895518c216f64c65dcd50a5d97d012a94453d79159aed2981654b1fe9748c686c5667604026a94fb8db8a1d02de747df61e99fa9a63ff2878ad00" ] } } @@ -544,7 +745,9 @@ "filename": "test012_authority_caveats.bc", "token": [ { - "symbols": ["file1"], + "symbols": [ + "file1" + ], "public_keys": [], "external_key": null, "code": "check if resource(\"file1\");\n" @@ -553,25 +756,63 @@ "validations": { "file1": { "world": { - "facts": ["operation(\"read\")", "resource(\"file1\")"], + "facts": [ + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file1\")" + ] + } + ], "rules": [], - "checks": ["check if resource(\"file1\")"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 0, + "checks": [ + "check if resource(\"file1\")" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "resource(\"file1\");\noperation(\"read\");\n\nallow if true;\n", "revocation_ids": [ - "0a1d14a145debbb0a2f4ce0631d3a0a48a2e0eddabefda7fabb0414879ec6be24b9ae7295c434609ada3f8cc47b8845bbd5a0d4fba3d96748ff1b824496e0405" + "6a8f90dad67ae2ac188460463914ae7326fda431c80785755f4edcc15f1a53911f7366e606ad80cbbeba94672e42713e88632a932128f1d796ce9ba7d7a0b80a" ] }, "file2": { "world": { - "facts": ["operation(\"read\")", "resource(\"file2\")"], + "facts": [ + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file2\")" + ] + } + ], "rules": [], - "checks": ["check if resource(\"file1\")"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 0, + "checks": [ + "check if resource(\"file1\")" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -595,7 +836,7 @@ }, "authorizer_code": "resource(\"file2\");\noperation(\"read\");\n\nallow if true;\n", "revocation_ids": [ - 
"0a1d14a145debbb0a2f4ce0631d3a0a48a2e0eddabefda7fabb0414879ec6be24b9ae7295c434609ada3f8cc47b8845bbd5a0d4fba3d96748ff1b824496e0405" + "6a8f90dad67ae2ac188460463914ae7326fda431c80785755f4edcc15f1a53911f7366e606ad80cbbeba94672e42713e88632a932128f1d796ce9ba7d7a0b80a" ] } } @@ -605,13 +846,20 @@ "filename": "test013_block_rules.bc", "token": [ { - "symbols": ["file1", "file2"], + "symbols": [ + "file1", + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\nright(\"file2\", \"read\");\n" }, { - "symbols": ["valid_date", "0", "1"], + "symbols": [ + "valid_date", + "0", + "1" + ], "public_keys": [], "external_key": null, "code": "valid_date(\"file1\") <- time($0), resource(\"file1\"), $0 <= 2030-12-31T12:59:59Z;\nvalid_date($1) <- time($0), resource($1), $0 <= 1999-12-31T12:59:59Z, ![\"file1\"].contains($1);\ncheck if valid_date($0), resource($0);\n" @@ -621,42 +869,106 @@ "file1": { "world": { "facts": [ - "resource(\"file1\")", - "right(\"file1\", \"read\")", - "right(\"file2\", \"read\")", - "time(2020-12-21T09:23:12Z)", - "valid_date(\"file1\")" + { + "origin": [ + null + ], + "facts": [ + "resource(\"file1\")", + "time(2020-12-21T09:23:12Z)" + ] + }, + { + "origin": [ + null, + 1 + ], + "facts": [ + "valid_date(\"file1\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "right(\"file1\", \"read\")", + "right(\"file2\", \"read\")" + ] + } ], "rules": [ - "valid_date(\"file1\") <- time($0), resource(\"file1\"), $0 <= 2030-12-31T12:59:59Z", - "valid_date($1) <- time($0), resource($1), $0 <= 1999-12-31T12:59:59Z, ![\"file1\"].contains($1)" + { + "origin": 1, + "rules": [ + "valid_date(\"file1\") <- time($0), resource(\"file1\"), $0 <= 2030-12-31T12:59:59Z", + "valid_date($1) <- time($0), resource($1), $0 <= 1999-12-31T12:59:59Z, ![\"file1\"].contains($1)" + ] + } ], - "checks": ["check if valid_date($0), resource($0)"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 1, + "checks": [ + "check if valid_date($0), resource($0)" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "resource(\"file1\");\ntime(2020-12-21T09:23:12Z);\n\nallow if true;\n", "revocation_ids": [ - "d251352efd4e4c72e8a1609fce002f558f1a0bb5e36cd3d8b3a6c6599e3960880f21bea6fe1857f4ecbc2c399dd77829b154e75f1323e9dec413aad70f97650d", - "9de4f51e6019540598a957515dad52f5403e5c6cd8d2adbca1bff42a4fbc0eb8c6adab499da2fe894a8a9c9c581276bfb0fdc3d35ab2ff9f920a2c4690739903" + "c46d071ff3f33434223c8305fdad529f62bf78bb5d9cbfc2a345d4bca6bf314014840e18ba353f86fdb9073d58b12b8c872ac1f8e593c2e9064b90f6c2ede006", + "a0c4c163a0b3ca406df4ece3d1371356190df04208eccef72f77e875ed0531b5d37e243d6f388b1967776a5dfd16ef228f19c5bdd6d2820f145c5ed3c3dcdc00" ] }, "file2": { "world": { "facts": [ - "resource(\"file2\")", - "right(\"file1\", \"read\")", - "right(\"file2\", \"read\")", - "time(2020-12-21T09:23:12Z)" + { + "origin": [ + null + ], + "facts": [ + "resource(\"file2\")", + "time(2020-12-21T09:23:12Z)" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "right(\"file1\", \"read\")", + "right(\"file2\", \"read\")" + ] + } ], "rules": [ - "valid_date(\"file1\") <- time($0), resource(\"file1\"), $0 <= 2030-12-31T12:59:59Z", - "valid_date($1) <- time($0), resource($1), $0 <= 1999-12-31T12:59:59Z, ![\"file1\"].contains($1)" + { + "origin": 1, + "rules": [ + "valid_date(\"file1\") <- time($0), resource(\"file1\"), $0 <= 2030-12-31T12:59:59Z", + "valid_date($1) <- time($0), resource($1), $0 <= 1999-12-31T12:59:59Z, ![\"file1\"].contains($1)" + ] + } ], - "checks": 
["check if valid_date($0), resource($0)"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 1, + "checks": [ + "check if valid_date($0), resource($0)" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -680,8 +992,8 @@ }, "authorizer_code": "resource(\"file2\");\ntime(2020-12-21T09:23:12Z);\n\nallow if true;\n", "revocation_ids": [ - "d251352efd4e4c72e8a1609fce002f558f1a0bb5e36cd3d8b3a6c6599e3960880f21bea6fe1857f4ecbc2c399dd77829b154e75f1323e9dec413aad70f97650d", - "9de4f51e6019540598a957515dad52f5403e5c6cd8d2adbca1bff42a4fbc0eb8c6adab499da2fe894a8a9c9c581276bfb0fdc3d35ab2ff9f920a2c4690739903" + "c46d071ff3f33434223c8305fdad529f62bf78bb5d9cbfc2a345d4bca6bf314014840e18ba353f86fdb9073d58b12b8c872ac1f8e593c2e9064b90f6c2ede006", + "a0c4c163a0b3ca406df4ece3d1371356190df04208eccef72f77e875ed0531b5d37e243d6f388b1967776a5dfd16ef228f19c5bdd6d2820f145c5ed3c3dcdc00" ] } } @@ -691,7 +1003,10 @@ "filename": "test014_regex_constraint.bc", "token": [ { - "symbols": ["0", "file[0-9]+.txt"], + "symbols": [ + "0", + "file[0-9]+.txt" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), $0.matches(\"file[0-9]+.txt\");\n" @@ -700,10 +1015,28 @@ "validations": { "file1": { "world": { - "facts": ["resource(\"file1\")"], + "facts": [ + { + "origin": [ + null + ], + "facts": [ + "resource(\"file1\")" + ] + } + ], "rules": [], - "checks": ["check if resource($0), $0.matches(\"file[0-9]+.txt\")"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 0, + "checks": [ + "check if resource($0), $0.matches(\"file[0-9]+.txt\")" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -727,22 +1060,40 @@ }, "authorizer_code": "resource(\"file1\");\n\nallow if true;\n", "revocation_ids": [ - "1c158e1e12c8670d3f4411597276fe1caab17b7728adb7f7e9c44eeec3e3d85676e6ebe2d28c287e285a45912386cfa53e1752997630bd7a4ca6c2cd9f143500" + "da42718ad2631c12d3a44b7710dcc76c6c7809c6bc3a2d7eb0378c4154eae10e0884a8d54a2cd25ca3dfe01091d816ebbb9d246227baf7a359a787cb2344ad07" ] }, "file123": { "world": { - "facts": ["resource(\"file123.txt\")"], + "facts": [ + { + "origin": [ + null + ], + "facts": [ + "resource(\"file123.txt\")" + ] + } + ], "rules": [], - "checks": ["check if resource($0), $0.matches(\"file[0-9]+.txt\")"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 0, + "checks": [ + "check if resource($0), $0.matches(\"file[0-9]+.txt\")" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "resource(\"file123.txt\");\n\nallow if true;\n", "revocation_ids": [ - "1c158e1e12c8670d3f4411597276fe1caab17b7728adb7f7e9c44eeec3e3d85676e6ebe2d28c287e285a45912386cfa53e1752997630bd7a4ca6c2cd9f143500" + "da42718ad2631c12d3a44b7710dcc76c6c7809c6bc3a2d7eb0378c4154eae10e0884a8d54a2cd25ca3dfe01091d816ebbb9d246227baf7a359a787cb2344ad07" ] } } @@ -752,7 +1103,10 @@ "filename": "test015_multi_queries_caveats.bc", "token": [ { - "symbols": ["must_be_present", "hello"], + "symbols": [ + "must_be_present", + "hello" + ], "public_keys": [], "external_key": null, "code": "must_be_present(\"hello\");\n" @@ -761,17 +1115,35 @@ "validations": { "": { "world": { - "facts": ["must_be_present(\"hello\")"], + "facts": [ + { + "origin": [ + 0 + ], + "facts": [ + "must_be_present(\"hello\")" + ] + } + ], "rules": [], - "checks": ["check if must_be_present($0) or must_be_present($0)"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 18446744073709551615, + "checks": [ + "check if must_be_present($0) or 
must_be_present($0)" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "check if must_be_present($0) or must_be_present($0);\n\nallow if true;\n", "revocation_ids": [ - "d3eee8a74eacec9c51d4d1eb29b479727dfaafa9df7d4c651d07c493c56f3a5f037a51139ebd036f50d1159d12bccec3e377bbd32db90a39dd52c4776757ad0b" + "b0d466d31e015fa85a075fa875f7e1c9017edd503fee9f62a5f033e1fcfa811074b6e39dfe5af2f452043db97a3f98650592a370f5685b62c5d6abf9dd10b603" ] } } @@ -781,13 +1153,17 @@ "filename": "test016_caveat_head_name.bc", "token": [ { - "symbols": ["hello"], + "symbols": [ + "hello" + ], "public_keys": [], "external_key": null, "code": "check if resource(\"hello\");\n" }, { - "symbols": ["test"], + "symbols": [ + "test" + ], "public_keys": [], "external_key": null, "code": "query(\"test\");\n" @@ -796,10 +1172,28 @@ "validations": { "": { "world": { - "facts": ["query(\"test\")"], + "facts": [ + { + "origin": [ + 1 + ], + "facts": [ + "query(\"test\")" + ] + } + ], "rules": [], - "checks": ["check if resource(\"hello\")"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 0, + "checks": [ + "check if resource(\"hello\")" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -823,8 +1217,8 @@ }, "authorizer_code": "allow if true;\n", "revocation_ids": [ - "e79679e019f1d7d3a9f9a309673aceadc7b2b2d67c0df3e7a1dccec25218e9b5935b9c8f8249243446406e3cdd86c1b35601a21cf1b119df48ca5e897cc6cd0d", - "2042ea2dca41ba3eb31196f49b211e615dcba46067be126e6035b8549bb57cdfeb24d07f2b44241bc0f70cc8ddc31e30772116d785b82bc91be8440dfdab500f" + "ce6f804f4390e693a8853d9a4a10bd4f3c94b86b7c6d671993a6e19346bc4d20bbb52cc945e5d0d02e4e75fa5da2caa99764050190353564a0a0b4b276809402", + "916d566cc724e0773046fc5266e9d0d804311435b8d6955b332f823ab296be9a78dfea190447732ac9f6217234cf5726becf88f65169c6de56a766af55451b0f" ] } } @@ -845,13 +1239,12 @@ "b", "de", "abcD12", - "abcD12x", "abc", "def" ], "public_keys": [], "external_key": null, - "code": "check if true;\ncheck if !false;\ncheck if !false && true;\ncheck if false or true;\ncheck if (true || false) && true;\ncheck if 1 < 2;\ncheck if 2 > 1;\ncheck if 1 <= 2;\ncheck if 1 <= 1;\ncheck if 2 >= 1;\ncheck if 2 >= 2;\ncheck if 3 == 3;\ncheck if 1 != 3;\ncheck if 1 + 2 * 3 - 4 / 2 == 5;\ncheck if 1 | 2 ^ 3 == 0;\ncheck if \"hello world\".starts_with(\"hello\") && \"hello world\".ends_with(\"world\");\ncheck if \"aaabde\".matches(\"a*c?.e\");\ncheck if \"aaabde\".contains(\"abd\");\ncheck if \"aaabde\" == \"aaa\" + \"b\" + \"de\";\ncheck if \"abcD12\" == \"abcD12\";\ncheck if \"abcD12x\" != \"abcD12\";\ncheck if 2019-12-04T09:46:41Z < 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z > 2019-12-04T09:46:41Z;\ncheck if 2019-12-04T09:46:41Z <= 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z >= 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z >= 2019-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z >= 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z == 2020-12-04T09:46:41Z;\ncheck if 2022-12-04T09:46:41Z != 2020-12-04T09:46:41Z;\ncheck if hex:12ab == hex:12ab;\ncheck if hex:12abcd != hex:12ab;\ncheck if [1, 2].contains(2);\ncheck if [2019-12-04T09:46:41Z, 2020-12-04T09:46:41Z].contains(2020-12-04T09:46:41Z);\ncheck if [false, true].contains(true);\ncheck if [\"abc\", \"def\"].contains(\"abc\");\ncheck if [hex:12ab, hex:34de].contains(hex:34de);\ncheck if [1, 2] == [1, 2];\ncheck if [1, 4] != [1, 2];\n" + "code": "check if true;\ncheck if !false;\ncheck if !false && true;\ncheck if false || true;\ncheck if (true || 
false) && true;\ncheck if true == true;\ncheck if false == false;\ncheck if 1 < 2;\ncheck if 2 > 1;\ncheck if 1 <= 2;\ncheck if 1 <= 1;\ncheck if 2 >= 1;\ncheck if 2 >= 2;\ncheck if 3 == 3;\ncheck if 1 + 2 * 3 - 4 / 2 == 5;\ncheck if \"hello world\".starts_with(\"hello\") && \"hello world\".ends_with(\"world\");\ncheck if \"aaabde\".matches(\"a*c?.e\");\ncheck if \"aaabde\".contains(\"abd\");\ncheck if \"aaabde\" == \"aaa\" + \"b\" + \"de\";\ncheck if \"abcD12\" == \"abcD12\";\ncheck if 2019-12-04T09:46:41Z < 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z > 2019-12-04T09:46:41Z;\ncheck if 2019-12-04T09:46:41Z <= 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z >= 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z >= 2019-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z >= 2020-12-04T09:46:41Z;\ncheck if 2020-12-04T09:46:41Z == 2020-12-04T09:46:41Z;\ncheck if hex:12ab == hex:12ab;\ncheck if [1, 2].contains(2);\ncheck if [2019-12-04T09:46:41Z, 2020-12-04T09:46:41Z].contains(2020-12-04T09:46:41Z);\ncheck if [false, true].contains(true);\ncheck if [\"abc\", \"def\"].contains(\"abc\");\ncheck if [hex:12ab, hex:34de].contains(hex:34de);\ncheck if [1, 2].contains([2]);\ncheck if [1, 2] == [1, 2];\ncheck if [1, 2].intersection([2, 3]) == [2];\ncheck if [1, 2].union([2, 3]) == [1, 2, 3];\ncheck if [1, 2, 3].intersection([1, 2]).contains(1);\ncheck if [1, 2, 3].intersection([1, 2]).length() == 2;\n" } ], "validations": { @@ -860,52 +1253,61 @@ "facts": [], "rules": [], "checks": [ - "check if !false", - "check if !false && true", - "check if \"aaabde\" == \"aaa\" + \"b\" + \"de\"", - "check if \"aaabde\".contains(\"abd\")", - "check if \"aaabde\".matches(\"a*c?.e\")", - "check if \"abcD12\" == \"abcD12\"", - "check if \"abcD12x\" != \"abcD12\"", - "check if \"hello world\".starts_with(\"hello\") && \"hello world\".ends_with(\"world\")", - "check if (true || false) && true", - "check if 1 != 3", - "check if 1 + 2 * 3 - 4 / 2 == 5", - "check if 1 < 2", - "check if 1 <= 1", - "check if 1 <= 2", - "check if 1 | 2 ^ 3 == 0", - "check if 2 > 1", - "check if 2 >= 1", - "check if 2 >= 2", - "check if 2019-12-04T09:46:41Z < 2020-12-04T09:46:41Z", - "check if 2019-12-04T09:46:41Z <= 2020-12-04T09:46:41Z", - "check if 2020-12-04T09:46:41Z == 2020-12-04T09:46:41Z", - "check if 2020-12-04T09:46:41Z > 2019-12-04T09:46:41Z", - "check if 2020-12-04T09:46:41Z >= 2019-12-04T09:46:41Z", - "check if 2020-12-04T09:46:41Z >= 2020-12-04T09:46:41Z", - "check if 2022-12-04T09:46:41Z != 2020-12-04T09:46:41Z", - "check if 3 == 3", - "check if [\"abc\", \"def\"].contains(\"abc\")", - "check if [1, 2] == [1, 2]", - "check if [1, 2].contains(2)", - "check if [1, 4] != [1, 2]", - "check if [2019-12-04T09:46:41Z, 2020-12-04T09:46:41Z].contains(2020-12-04T09:46:41Z)", - "check if [false, true].contains(true)", - "check if [hex:12ab, hex:34de].contains(hex:34de)", - "check if false or true", - "check if hex:12ab == hex:12ab", - "check if hex:12abcd != hex:12ab", - "check if true" + { + "origin": 0, + "checks": [ + "check if !false", + "check if !false && true", + "check if \"aaabde\" == \"aaa\" + \"b\" + \"de\"", + "check if \"aaabde\".contains(\"abd\")", + "check if \"aaabde\".matches(\"a*c?.e\")", + "check if \"abcD12\" == \"abcD12\"", + "check if \"hello world\".starts_with(\"hello\") && \"hello world\".ends_with(\"world\")", + "check if (true || false) && true", + "check if 1 + 2 * 3 - 4 / 2 == 5", + "check if 1 < 2", + "check if 1 <= 1", + "check if 1 <= 2", + "check if 2 > 1", + "check if 2 >= 1", + "check if 2 >= 
2", + "check if 2019-12-04T09:46:41Z < 2020-12-04T09:46:41Z", + "check if 2019-12-04T09:46:41Z <= 2020-12-04T09:46:41Z", + "check if 2020-12-04T09:46:41Z == 2020-12-04T09:46:41Z", + "check if 2020-12-04T09:46:41Z > 2019-12-04T09:46:41Z", + "check if 2020-12-04T09:46:41Z >= 2019-12-04T09:46:41Z", + "check if 2020-12-04T09:46:41Z >= 2020-12-04T09:46:41Z", + "check if 2020-12-04T09:46:41Z >= 2020-12-04T09:46:41Z", + "check if 3 == 3", + "check if [\"abc\", \"def\"].contains(\"abc\")", + "check if [1, 2, 3].intersection([1, 2]).contains(1)", + "check if [1, 2, 3].intersection([1, 2]).length() == 2", + "check if [1, 2] == [1, 2]", + "check if [1, 2].contains(2)", + "check if [1, 2].contains([2])", + "check if [1, 2].intersection([2, 3]) == [2]", + "check if [1, 2].union([2, 3]) == [1, 2, 3]", + "check if [2019-12-04T09:46:41Z, 2020-12-04T09:46:41Z].contains(2020-12-04T09:46:41Z)", + "check if [false, true].contains(true)", + "check if [hex:12ab, hex:34de].contains(hex:34de)", + "check if false == false", + "check if false || true", + "check if hex:12ab == hex:12ab", + "check if true", + "check if true == true" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "allow if true;\n", "revocation_ids": [ - "3e51db5f0453929a596485b59e89bf628a301a33d476132c48a1c0a208805809f15bdf99593733c1b5f30e8c1f473ee2f78042f81fd0557081bafb5370e65d0c" + "f61b4cb4fc58777fec6c8d39fe62259dc3c78511868236c391e9f67ffd03a3a8b8e3042d4bacce0d5756d053f5afccd4c5e4df0597af44b36bdfab492e5fe50e" ] } } @@ -921,7 +1323,11 @@ "code": "check if operation(\"read\");\n" }, { - "symbols": ["unbound", "any1", "any2"], + "symbols": [ + "unbound", + "any1", + "any2" + ], "public_keys": [], "external_key": null, "code": "operation($unbound, \"read\") <- operation($any1, $any2);\n" @@ -942,8 +1348,8 @@ }, "authorizer_code": "", "revocation_ids": [ - "c536d07f08f6f73da69a2f49310045168e059b8c07e3ddf25afd524df358a0397744b31a139eced043cb5f7a29dacbe3a510ce449fc792e53623186767cefc0c", - "8588c74c3701e8d4be770769b4e1054dbb5ea5f231a89d205000802b8718859ea1d596af207a41b1b0f7d05959180c227ea8954e903f13ade3ce3384d1e6a70a" + "a44210c6a01e55eadefc7d8540c2e6eff80ab6eeedde4751de734f9d780435780680d3f42d826b7e0f0dcf4a5ba303fd4c116984bb30978813d46ed867924307", + "b0a33e3f4cd0994c0766c196c4d11c15e5a0f9bfba79a3a2b35ddd04ddb890282a7c63336ada5c680b9f9c940c1fa7127d2699754cbc77c21e1a2d85c5ef700c" ] } } @@ -959,7 +1365,9 @@ "code": "check if operation(\"read\");\n" }, { - "symbols": ["any"], + "symbols": [ + "any" + ], "public_keys": [], "external_key": null, "code": "operation(\"read\") <- operation($any);\n" @@ -968,10 +1376,44 @@ "validations": { "": { "world": { - "facts": ["operation(\"read\")", "operation(\"write\")"], - "rules": ["operation(\"read\") <- operation($any)"], - "checks": ["check if operation(\"read\")"], - "policies": ["allow if true"] + "facts": [ + { + "origin": [ + null + ], + "facts": [ + "operation(\"write\")" + ] + }, + { + "origin": [ + null, + 1 + ], + "facts": [ + "operation(\"read\")" + ] + } + ], + "rules": [ + { + "origin": 1, + "rules": [ + "operation(\"read\") <- operation($any)" + ] + } + ], + "checks": [ + { + "origin": 0, + "checks": [ + "check if operation(\"read\")" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -995,8 +1437,8 @@ }, "authorizer_code": "operation(\"write\");\n\nallow if true;\n", "revocation_ids": [ - 
"4819e7360fdb840e54e94afcbc110e9b0652894dba2b8bf3b8b8f2254aaf00272bba7eb603c153c7e50cca0e5bb8e20449d70a1b24e7192e902c64f94848a703", - "4a4c59354354d2f91b3a2d1e7afa2c5eeaf8be9f7b163c6b9091817551cc8661f0f3e0523b525ef9a5e597c0dd1f32e09e97ace531c150dba335bb3e1d329d00" + "a44210c6a01e55eadefc7d8540c2e6eff80ab6eeedde4751de734f9d780435780680d3f42d826b7e0f0dcf4a5ba303fd4c116984bb30978813d46ed867924307", + "d3f8822a9b9bc0ee3933283c493ca9e711be5dd8339b5fe2eba1de3805aad4e84d3e2fb4affb4a743f1289915c167582b9425343635e45b70573ea1ee7a1ea03" ] } } @@ -1006,13 +1448,18 @@ "filename": "test020_sealed.bc", "token": [ { - "symbols": ["file1", "file2"], + "symbols": [ + "file1", + "file2" + ], "public_keys": [], "external_key": null, "code": "right(\"file1\", \"read\");\nright(\"file2\", \"read\");\nright(\"file1\", \"write\");\n" }, { - "symbols": ["0"], + "symbols": [ + "0" + ], "public_keys": [], "external_key": null, "code": "check if resource($0), operation(\"read\"), right($0, \"read\");\n" @@ -1022,25 +1469,46 @@ "": { "world": { "facts": [ - "operation(\"read\")", - "resource(\"file1\")", - "right(\"file1\", \"read\")", - "right(\"file1\", \"write\")", - "right(\"file2\", \"read\")" + { + "origin": [ + null + ], + "facts": [ + "operation(\"read\")", + "resource(\"file1\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "right(\"file1\", \"read\")", + "right(\"file1\", \"write\")", + "right(\"file2\", \"read\")" + ] + } ], "rules": [], "checks": [ - "check if resource($0), operation(\"read\"), right($0, \"read\")" + { + "origin": 1, + "checks": [ + "check if resource($0), operation(\"read\"), right($0, \"read\")" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "resource(\"file1\");\noperation(\"read\");\n\nallow if true;\n", "revocation_ids": [ - "b279f8c6fee5ea3c3fcb5109d8c6b35ba3fecea64d83a4dc387102b9401633a1558ac6ac50ddd7fd9e9877f936f9f4064abd467faeca2bef3114b9695eb0580e", - "e1f0aca12704c1a3b9bb6292504ca6070462d9e043756dd209e625084e7d4053078bd4e55b6eebebbeb771d26d7794aa95f6b39ff949431548b32585a7379f0c" + "7595a112a1eb5b81a6e398852e6118b7f5b8cbbff452778e655100e5fb4faa8d3a2af52fe2c4f9524879605675fae26adbc4783e0cafc43522fa82385f396c03", + "45f4c14f9d9e8fa044d68be7a2ec8cddb835f575c7b913ec59bd636c70acae9a90db9064ba0b3084290ed0c422bbb7170092a884f5e0202b31e9235bbcc1650d" ] } } @@ -1050,7 +1518,10 @@ "filename": "test021_parsing.bc", "token": [ { - "symbols": ["ns::fact_123", "hello é\t😁"], + "symbols": [ + "ns::fact_123", + "hello é\t😁" + ], "public_keys": [], "external_key": null, "code": "ns::fact_123(\"hello é\t😁\");\n" @@ -1059,17 +1530,35 @@ "validations": { "": { "world": { - "facts": ["ns::fact_123(\"hello é\t😁\")"], + "facts": [ + { + "origin": [ + 0 + ], + "facts": [ + "ns::fact_123(\"hello é\t😁\")" + ] + } + ], "rules": [], - "checks": ["check if ns::fact_123(\"hello é\t😁\")"], - "policies": ["allow if true"] + "checks": [ + { + "origin": 18446744073709551615, + "checks": [ + "check if ns::fact_123(\"hello é\t😁\")" + ] + } + ], + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "check if ns::fact_123(\"hello é\t😁\");\n\nallow if true;\n", "revocation_ids": [ - "4797a528328c8b5fb7939cc8956d8cda2513f552466eee501e26ea13a6cf6b4a381fd74ae547a9b50b627825142287d899b9d7bd1b5cfb18664a1be78320ea06" + "d4b2f417b6e906434fdf5058afcabfcb98d3628f814f1c9dd7e64250d9beec4465aff51bd0cb2e85d0e67dc9f613c2a42af6158c678bc6f8b4684cd3a2d0d302" ] } } @@ -1089,47 +1578,61 @@ "": { "world": { "facts": [ - 
"admin(13)", - "client(18)", - "client_ip(19)", - "cluster(23)", - "domain(20)", - "email(14)", - "group(15)", - "hostname(25)", - "ip_address(17)", - "member(16)", - "namespace(9)", - "node(24)", - "nonce(26)", - "operation(3)", - "owner(7)", - "path(21)", - "query(27)", - "read(0)", - "resource(2)", - "right(4)", - "role(6)", - "service(12)", - "team(11)", - "tenant(8)", - "time(5)", - "user(10)", - "version(22)", - "write(1)" + { + "origin": [ + 0 + ], + "facts": [ + "admin(13)", + "client(18)", + "client_ip(19)", + "cluster(23)", + "domain(20)", + "email(14)", + "group(15)", + "hostname(25)", + "ip_address(17)", + "member(16)", + "namespace(9)", + "node(24)", + "nonce(26)", + "operation(3)", + "owner(7)", + "path(21)", + "query(27)", + "read(0)", + "resource(2)", + "right(4)", + "role(6)", + "service(12)", + "team(11)", + "tenant(8)", + "time(5)", + "user(10)", + "version(22)", + "write(1)" + ] + } ], "rules": [], "checks": [ - "check if read(0), write(1), resource(2), operation(3), right(4), time(5), role(6), owner(7), tenant(8), namespace(9), user(10), team(11), service(12), admin(13), email(14), group(15), member(16), ip_address(17), client(18), client_ip(19), domain(20), path(21), version(22), cluster(23), node(24), hostname(25), nonce(26), query(27)" + { + "origin": 18446744073709551615, + "checks": [ + "check if read(0), write(1), resource(2), operation(3), right(4), time(5), role(6), owner(7), tenant(8), namespace(9), user(10), team(11), service(12), admin(13), email(14), group(15), member(16), ip_address(17), client(18), client_ip(19), domain(20), path(21), version(22), cluster(23), node(24), hostname(25), nonce(26), query(27)" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "check if read(0), write(1), resource(2), operation(3), right(4), time(5), role(6), owner(7), tenant(8), namespace(9), user(10), team(11), service(12), admin(13), email(14), group(15), member(16), ip_address(17), client(18), client_ip(19), domain(20), path(21), version(22), cluster(23), node(24), hostname(25), nonce(26), query(27);\n\nallow if true;\n", "revocation_ids": [ - "38094260b324eff92db2ef79e715d88c18503c0dafa400bff900399f2ab0840cedc5ac25bdd3e97860b3f9e78ca5e0df67a113eb87be50265d49278efb13210f" + "75ce48d496fd28f99905901783a1ba46d7ff8d69f9d364d1546fd73006026eae51849ad1190a4ae521a0a1269f9c6951e226afba8fcd24fa50f679162439ae09" ] } } @@ -1139,19 +1642,25 @@ "filename": "test023_execution_scope.bc", "token": [ { - "symbols": ["authority_fact"], + "symbols": [ + "authority_fact" + ], "public_keys": [], "external_key": null, "code": "authority_fact(1);\n" }, { - "symbols": ["block1_fact"], + "symbols": [ + "block1_fact" + ], "public_keys": [], "external_key": null, "code": "block1_fact(1);\n" }, { - "symbols": ["var"], + "symbols": [ + "var" + ], "public_keys": [], "external_key": null, "code": "check if authority_fact($var);\ncheck if block1_fact($var);\n" @@ -1160,13 +1669,37 @@ "validations": { "": { "world": { - "facts": ["authority_fact(1)", "block1_fact(1)"], + "facts": [ + { + "origin": [ + 0 + ], + "facts": [ + "authority_fact(1)" + ] + }, + { + "origin": [ + 1 + ], + "facts": [ + "block1_fact(1)" + ] + } + ], "rules": [], "checks": [ - "check if authority_fact($var)", - "check if block1_fact($var)" + { + "origin": 2, + "checks": [ + "check if authority_fact($var)", + "check if block1_fact($var)" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -1190,9 +1723,9 
@@ }, "authorizer_code": "allow if true;\n", "revocation_ids": [ - "6a3606836bc63b858f96ce5000c9bead8eda139ab54679a2a8d7a9984c2e5d864b93280acc1b728bed0be42b5b1c3be10f48a13a4dbd05fd5763de5be3855108", - "5f1468fc60999f22c4f87fa088a83961188b4e654686c5b04bdc977b9ff4666d51a3d8be5594f4cef08054d100f31d1637b50bb394de7cccafc643c9b650390b", - "3eda05ddb65ee90d715cefc046837c01de944d8c4a7ff67e3d9a9d8470b5e214a20a8b9866bfe5e0d385e530b75ec8fcfde46b7dd6d4d6647d1e955c9d2fb90d" + "f9b49866caef5ece7be14ec5a9b36d98ca81d06b306eb0b4c57cd7436af176f40ee972f40903f87ec4460ab8b1adfcbfa9b19b20a6955a1e8dae7d88b2076005", + "889054b9119e4440e54da1b63266a98d0f6646cde195fef206efd8b133cfb2ee7be49b32a9a5925ece452e64f9e6f6d80dab422e916c599675dd68cdea053802", + "0a85ffbf27e08aa23665ba0d96a985b274d747556c9f016fd7f590c641ed0e4133291521aa442b320ee9ce80f5ad701b914a0c87b3dfa0cc92629dce94201806" ] } } @@ -1204,36 +1737,65 @@ { "symbols": [], "public_keys": [ - "ed25519/a424157b8c00c25214ea39894bf395650d88426147679a9dd43a64d65ae5bc25" + "ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189" ], "external_key": null, - "code": "right(\"read\");\ncheck if group(\"admin\") trusting ed25519/a424157b8c00c25214ea39894bf395650d88426147679a9dd43a64d65ae5bc25;\n" + "code": "right(\"read\");\ncheck if group(\"admin\") trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189;\n" }, { "symbols": [], "public_keys": [], - "external_key": "ed25519/a424157b8c00c25214ea39894bf395650d88426147679a9dd43a64d65ae5bc25", + "external_key": "ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189", "code": "group(\"admin\");\ncheck if right(\"read\");\n" } ], "validations": { "": { "world": { - "facts": ["group(\"admin\")", "right(\"read\")"], + "facts": [ + { + "origin": [ + 0 + ], + "facts": [ + "right(\"read\")" + ] + }, + { + "origin": [ + 1 + ], + "facts": [ + "group(\"admin\")" + ] + } + ], "rules": [], "checks": [ - "check if group(\"admin\") trusting ed25519/a424157b8c00c25214ea39894bf395650d88426147679a9dd43a64d65ae5bc25", - "check if right(\"read\")" + { + "origin": 0, + "checks": [ + "check if group(\"admin\") trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189" + ] + }, + { + "origin": 1, + "checks": [ + "check if right(\"read\")" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "allow if true;\n", "revocation_ids": [ - "4f61f2f2f9cefdcad03a82803638e459bef70d6fd72dbdf2bdcab78fbd23f33146e4ff9700e23acb547b820b871fa9b9fd3bb6d7a1a755afce47e9907c65600c", - "683b23943b73f53f57f473571ba266f79f1fca0633be249bc135054371a11ffb101c57150ab2f1b9a6a160b45d09567a314b7dbc84224edf6188afd5b86d9305" + "470e4bf7aa2a01ab39c98150bd06aa15b4aa5d86509044a8809a8634cd8cf2b42269a51a774b65d10bac9369d013070b00187925196a8e680108473f11cf8f03", + "93a7315ab1272da9eeef015f6fecbc9ac96fe4660e6204bf64ea2105ebe309e9c9cadc0a26c5604f13910fae3f2cd0800756afb6b6b208bf77adeb1ab2f42405" ] } } @@ -1243,7 +1805,13 @@ "filename": "test025_check_all.bc", "token": [ { - "symbols": ["allowed_operations", "A", "B", "op", "allowed"], + "symbols": [ + "allowed_operations", + "A", + "B", + "op", + "allowed" + ], "public_keys": [], "external_key": null, "code": "allowed_operations([\"A\", \"B\"]);\ncheck all operation($op), allowed_operations($allowed), $allowed.contains($op);\n" @@ -1253,36 +1821,78 @@ "A, B": { "world": { "facts": [ - "allowed_operations([ \"A\", \"B\"])", - "operation(\"A\")", - "operation(\"B\")" + { + "origin": [ 
+ null + ], + "facts": [ + "operation(\"A\")", + "operation(\"B\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "allowed_operations([\"A\", \"B\"])" + ] + } ], "rules": [], "checks": [ - "check all operation($op), allowed_operations($allowed), $allowed.contains($op)" + { + "origin": 0, + "checks": [ + "check all operation($op), allowed_operations($allowed), $allowed.contains($op)" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Ok": 0 }, "authorizer_code": "operation(\"A\");\noperation(\"B\");\n\nallow if true;\n", "revocation_ids": [ - "b4ee591001e4068a7ee8efb7a0586c3ca3a785558f34d1fa8dbfa21b41ace70de0b670ac49222c7413066d0d83e6d9edee94fb0fda4b27ea11e837304dfb4b0b" + "c456817012e1d523c6d145b6d6a3475d9f7dd4383c535454ff3f745ecf4234984ce09b9dec0551f3d783abe850f826ce43b12f1fd91999a4753a56ecf4c56d0d" ] }, "A, invalid": { "world": { "facts": [ - "allowed_operations([ \"A\", \"B\"])", - "operation(\"A\")", - "operation(\"invalid\")" + { + "origin": [ + null + ], + "facts": [ + "operation(\"A\")", + "operation(\"invalid\")" + ] + }, + { + "origin": [ + 0 + ], + "facts": [ + "allowed_operations([\"A\", \"B\"])" + ] + } ], "rules": [], "checks": [ - "check all operation($op), allowed_operations($allowed), $allowed.contains($op)" + { + "origin": 0, + "checks": [ + "check all operation($op), allowed_operations($allowed), $allowed.contains($op)" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -1306,7 +1916,7 @@ }, "authorizer_code": "operation(\"A\");\noperation(\"invalid\");\n\nallow if true;\n", "revocation_ids": [ - "b4ee591001e4068a7ee8efb7a0586c3ca3a785558f34d1fa8dbfa21b41ace70de0b670ac49222c7413066d0d83e6d9edee94fb0fda4b27ea11e837304dfb4b0b" + "c456817012e1d523c6d145b6d6a3475d9f7dd4383c535454ff3f745ecf4234984ce09b9dec0551f3d783abe850f826ce43b12f1fd91999a4753a56ecf4c56d0d" ] } } @@ -1318,79 +1928,161 @@ { "symbols": [], "public_keys": [ - "ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59" + "ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189" ], "external_key": null, - "code": "query(0);\ncheck if true trusting previous, ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59;\n" + "code": "query(0);\ncheck if true trusting previous, ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189;\n" }, { "symbols": [], "public_keys": [ - "ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee" + "ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463" ], - "external_key": "ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59", - "code": "query(1);\nquery(1, 2) <- query(1), query(2) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee;\ncheck if query(2), query(3) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee;\ncheck if query(1) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59;\n" + "external_key": "ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189", + "code": "query(1);\nquery(1, 2) <- query(1), query(2) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463;\ncheck if query(2), query(3) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463;\ncheck if query(1) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189;\n" }, { "symbols": [], "public_keys": [], - 
"external_key": "ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee", - "code": "query(2);\ncheck if query(2), query(3) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee;\ncheck if query(1) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59;\n" + "external_key": "ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463", + "code": "query(2);\ncheck if query(2), query(3) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463;\ncheck if query(1) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189;\n" }, { "symbols": [], "public_keys": [], - "external_key": "ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee", - "code": "query(3);\ncheck if query(2), query(3) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee;\ncheck if query(1) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59;\n" + "external_key": "ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463", + "code": "query(3);\ncheck if query(2), query(3) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463;\ncheck if query(1) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189;\n" }, { "symbols": [], "public_keys": [ - "ed25519/2e0118e63beb7731dab5119280ddb117234d0cdc41b7dd5dc4241bcbbb585d14" + "ed25519/f98da8c1cf907856431bfc3dc87531e0eaadba90f919edc232405b85877ef136" ], "external_key": null, - "code": "query(4);\ncheck if query(2) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee;\ncheck if query(4) trusting ed25519/2e0118e63beb7731dab5119280ddb117234d0cdc41b7dd5dc4241bcbbb585d14;\n" + "code": "query(4);\ncheck if query(2) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463;\ncheck if query(4) trusting ed25519/f98da8c1cf907856431bfc3dc87531e0eaadba90f919edc232405b85877ef136;\n" } ], "validations": { "": { "world": { "facts": [ - "query(0)", - "query(1)", - "query(1, 2)", - "query(2)", - "query(3)", - "query(4)" + { + "origin": [ + 0 + ], + "facts": [ + "query(0)" + ] + }, + { + "origin": [ + 1 + ], + "facts": [ + "query(1)" + ] + }, + { + "origin": [ + 1, + 2 + ], + "facts": [ + "query(1, 2)" + ] + }, + { + "origin": [ + 2 + ], + "facts": [ + "query(2)" + ] + }, + { + "origin": [ + 3 + ], + "facts": [ + "query(3)" + ] + }, + { + "origin": [ + 4 + ], + "facts": [ + "query(4)" + ] + } ], "rules": [ - "query(1, 2) <- query(1), query(2) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee" + { + "origin": 1, + "rules": [ + "query(1, 2) <- query(1), query(2) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463" + ] + } ], "checks": [ - "check if query(1) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59", - "check if query(1, 2) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59, ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee", - "check if query(2) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee", - "check if query(2), query(3) trusting ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee", - "check if query(4) trusting ed25519/2e0118e63beb7731dab5119280ddb117234d0cdc41b7dd5dc4241bcbbb585d14", - "check if true trusting previous, 
ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59" + { + "origin": 0, + "checks": [ + "check if true trusting previous, ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189" + ] + }, + { + "origin": 1, + "checks": [ + "check if query(1) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189", + "check if query(2), query(3) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463" + ] + }, + { + "origin": 2, + "checks": [ + "check if query(1) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189", + "check if query(2), query(3) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463" + ] + }, + { + "origin": 3, + "checks": [ + "check if query(1) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189", + "check if query(2), query(3) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463" + ] + }, + { + "origin": 4, + "checks": [ + "check if query(2) trusting ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463", + "check if query(4) trusting ed25519/f98da8c1cf907856431bfc3dc87531e0eaadba90f919edc232405b85877ef136" + ] + }, + { + "origin": 18446744073709551615, + "checks": [ + "check if query(1, 2) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189, ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463" + ] + } ], "policies": [ - "allow if true", - "deny if query(0) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59", + "deny if query(3)", "deny if query(1, 2)", - "deny if query(3)" + "deny if query(0) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189", + "allow if true" ] }, "result": { "Ok": 3 }, - "authorizer_code": "check if query(1, 2) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59, ed25519/ecfb8ed11fd9e6be133ca4dd8d229d39c7dcb2d659704c39e82fd7acf0d12dee;\n\ndeny if query(3);\ndeny if query(1, 2);\ndeny if query(0) trusting ed25519/3c8aeced6363b8a862552fb2b0b4b8b0f8244e8cef3c11c3e55fd553f3a90f59;\nallow if true;\n", + "authorizer_code": "check if query(1, 2) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189, ed25519/a060270db7e9c9f06e8f9cc33a64e99f6596af12cb01c4b638df8afc7b642463;\n\ndeny if query(3);\ndeny if query(1, 2);\ndeny if query(0) trusting ed25519/acdd6d5b53bfee478bf689f8e012fe7988bf755e3d7c5152947abc149bc20189;\nallow if true;\n", "revocation_ids": [ - "bc144fef824b7ba4b266eac53e9b4f3f2d3cd443c6963833f2f8d4073bef9553f92034c2350fdd50966a9f0c09db35b142d61e0476b0133429885c787052060b", - "aba1631f8d0bea1c81447e73269f560973d03287c2b44325d1b42d10a496156dc8e78648b946bc7db7a3111d787a10c1a9da8d53fc066b1f207de7415a2e9b0b", - "539cff0f5c311dcac843a9e6c8bb445aff0d6510bfa9b17d5350747be92dc365217e89e1d733f3ead1ecc05f287f312c41831338708e788503b55517af3ad000", - "5b10f7a7b4487f4421cf7f7f6d00b24a7a71939037b65b2e44241909564082a3e1e70cf7d866eb96f0a5119b9ea395adb772faaa33252fa62a579eb15a108a0b", - "3905351588cdfc4433b510cc1ed9c11ca5c1a7bd7d9cef338bcd3f6d374c711f34edd83dd0d53c25b63bf05b49fc78addceb47905d5495580c2fd36c11bc1e0a" + "3771cefe71beb21ead35a59c8116ee82627a5717c0295f35980662abccb159fe1b37848cb1818e548656bd4fd882d0094a2daab631c76b2b72e3a093914bfe04", + "45133b90f228a81fe4d3042a79f6c6b7608e656e903d6b1f4db32cd774b09b8315af360879a5f210ad7be37ff55e3eb34f237bcc9711407b6329ac6018bfb400", + 
"179f054f3c572646aba5013159ae192ac42f5666dbdd984129955f4652b6829e59f54aa251e451f96329d42a2524ce569c3e1ec52e708b642dd8994af51dd703", + "edab54789d6656936fcd28200b9c61643434842d531f09f209fad555e11ff53174db174dafba126e6de448983a56f78d2042bc5782d71a45799c022fe69fb30d", + "6a62306831e9dbe83e7b33db96b758c77dd690930f2d2d87e239b210b1944c5582bf6d7e1bfea8e7f928c27f2fff0e2ee2e0adc41e11e0c3abe8d7b96b9ede07" ] } } @@ -1412,11 +2104,18 @@ "facts": [], "rules": [], "checks": [ - "check if true || -9223372036854775808 - 1 != 0", - "check if true || 10000000000 * 10000000000 != 0", - "check if true || 9223372036854775807 + 1 != 0" + { + "origin": 0, + "checks": [ + "check if true || -9223372036854775808 - 1 != 0", + "check if true || 10000000000 * 10000000000 != 0", + "check if true || 9223372036854775807 + 1 != 0" + ] + } ], - "policies": ["allow if true"] + "policies": [ + "allow if true" + ] }, "result": { "Err": { @@ -1425,7 +2124,53 @@ }, "authorizer_code": "allow if true;\n", "revocation_ids": [ - "70d8941198ab5daa445a11357994d93278876ee95b6500f4c4a265ad668a0111440942b762e02513e471d40265d586ea76209921068524f588dc46eb4260db07" + "3346a22aae0abfc1ffa526f02f7650e90af909e5e519989026441e78cdc245b7fd126503cfdc8831325fc04307edc65238db319724477915f7040a2f6a719a05" + ] + } + } + }, + { + "title": "test expression syntax and all available operations (v4 blocks)", + "filename": "test028_expressions_v4.bc", + "token": [ + { + "symbols": [ + "abcD12x", + "abcD12" + ], + "public_keys": [], + "external_key": null, + "code": "check if 1 != 3;\ncheck if 1 | 2 ^ 3 == 0;\ncheck if \"abcD12x\" != \"abcD12\";\ncheck if 2022-12-04T09:46:41Z != 2020-12-04T09:46:41Z;\ncheck if hex:12abcd != hex:12ab;\ncheck if [1, 4] != [1, 2];\n" + } + ], + "validations": { + "": { + "world": { + "facts": [], + "rules": [], + "checks": [ + { + "origin": 0, + "checks": [ + "check if \"abcD12x\" != \"abcD12\"", + "check if 1 != 3", + "check if 1 | 2 ^ 3 == 0", + "check if 2022-12-04T09:46:41Z != 2020-12-04T09:46:41Z", + "check if [1, 4] != [1, 2]", + "check if hex:12abcd != hex:12ab" + ] + } + ], + "policies": [ + "allow if true" + ] + }, + "result": { + "Ok": 0 + }, + "authorizer_code": "allow if true;\n", + "revocation_ids": [ + "117fa653744c859561555e6a6f5990e3a8e7817f91b87aa6991b6d64297158b4e884c92d10f49f74c96069df722aa676839b72751ca9d1fe83a7025b591de00b" ] } } diff --git a/biscuit-samples/src/samples/test001_basic.bc b/biscuit-samples/src/samples/test001_basic.bc index c28905d..f45e91b 100644 Binary files a/biscuit-samples/src/samples/test001_basic.bc and b/biscuit-samples/src/samples/test001_basic.bc differ diff --git a/biscuit-samples/src/samples/test002_different_root_key.bc b/biscuit-samples/src/samples/test002_different_root_key.bc index cf6d960..edf2dd7 100644 Binary files a/biscuit-samples/src/samples/test002_different_root_key.bc and b/biscuit-samples/src/samples/test002_different_root_key.bc differ diff --git a/biscuit-samples/src/samples/test003_invalid_signature_format.bc b/biscuit-samples/src/samples/test003_invalid_signature_format.bc index d8aff84..8c683a6 100644 Binary files a/biscuit-samples/src/samples/test003_invalid_signature_format.bc and b/biscuit-samples/src/samples/test003_invalid_signature_format.bc differ diff --git a/biscuit-samples/src/samples/test004_random_block.bc b/biscuit-samples/src/samples/test004_random_block.bc index bd0ba4a..e8882d3 100644 Binary files a/biscuit-samples/src/samples/test004_random_block.bc and b/biscuit-samples/src/samples/test004_random_block.bc differ diff --git 
a/biscuit-samples/src/samples/test005_invalid_signature.bc b/biscuit-samples/src/samples/test005_invalid_signature.bc index d8133b5..b33c6ab 100644 Binary files a/biscuit-samples/src/samples/test005_invalid_signature.bc and b/biscuit-samples/src/samples/test005_invalid_signature.bc differ diff --git a/biscuit-samples/src/samples/test006_reordered_blocks.bc b/biscuit-samples/src/samples/test006_reordered_blocks.bc index 20fc406..486981c 100644 Binary files a/biscuit-samples/src/samples/test006_reordered_blocks.bc and b/biscuit-samples/src/samples/test006_reordered_blocks.bc differ diff --git a/biscuit-samples/src/samples/test007_scoped_rules.bc b/biscuit-samples/src/samples/test007_scoped_rules.bc index 19d98ef..905f935 100644 Binary files a/biscuit-samples/src/samples/test007_scoped_rules.bc and b/biscuit-samples/src/samples/test007_scoped_rules.bc differ diff --git a/biscuit-samples/src/samples/test008_scoped_checks.bc b/biscuit-samples/src/samples/test008_scoped_checks.bc index ef87553..82e4527 100644 Binary files a/biscuit-samples/src/samples/test008_scoped_checks.bc and b/biscuit-samples/src/samples/test008_scoped_checks.bc differ diff --git a/biscuit-samples/src/samples/test009_expired_token.bc b/biscuit-samples/src/samples/test009_expired_token.bc index 599a420..1ae9e59 100644 Binary files a/biscuit-samples/src/samples/test009_expired_token.bc and b/biscuit-samples/src/samples/test009_expired_token.bc differ diff --git a/biscuit-samples/src/samples/test010_authorizer_scope.bc b/biscuit-samples/src/samples/test010_authorizer_scope.bc index e6664f2..b0188a7 100644 Binary files a/biscuit-samples/src/samples/test010_authorizer_scope.bc and b/biscuit-samples/src/samples/test010_authorizer_scope.bc differ diff --git a/biscuit-samples/src/samples/test011_authorizer_authority_caveats.bc b/biscuit-samples/src/samples/test011_authorizer_authority_caveats.bc index 46e1f8a..a368cf3 100644 Binary files a/biscuit-samples/src/samples/test011_authorizer_authority_caveats.bc and b/biscuit-samples/src/samples/test011_authorizer_authority_caveats.bc differ diff --git a/biscuit-samples/src/samples/test012_authority_caveats.bc b/biscuit-samples/src/samples/test012_authority_caveats.bc index 3a7c850..6d852ec 100644 Binary files a/biscuit-samples/src/samples/test012_authority_caveats.bc and b/biscuit-samples/src/samples/test012_authority_caveats.bc differ diff --git a/biscuit-samples/src/samples/test013_block_rules.bc b/biscuit-samples/src/samples/test013_block_rules.bc index cdf4957..149b4ee 100644 Binary files a/biscuit-samples/src/samples/test013_block_rules.bc and b/biscuit-samples/src/samples/test013_block_rules.bc differ diff --git a/biscuit-samples/src/samples/test014_regex_constraint.bc b/biscuit-samples/src/samples/test014_regex_constraint.bc index 92193c9..6c3c87e 100644 Binary files a/biscuit-samples/src/samples/test014_regex_constraint.bc and b/biscuit-samples/src/samples/test014_regex_constraint.bc differ diff --git a/biscuit-samples/src/samples/test015_multi_queries_caveats.bc b/biscuit-samples/src/samples/test015_multi_queries_caveats.bc index 55f5072..7c775b7 100644 Binary files a/biscuit-samples/src/samples/test015_multi_queries_caveats.bc and b/biscuit-samples/src/samples/test015_multi_queries_caveats.bc differ diff --git a/biscuit-samples/src/samples/test016_caveat_head_name.bc b/biscuit-samples/src/samples/test016_caveat_head_name.bc index 5923f40..c506641 100644 Binary files a/biscuit-samples/src/samples/test016_caveat_head_name.bc and 
b/biscuit-samples/src/samples/test016_caveat_head_name.bc differ diff --git a/biscuit-samples/src/samples/test017_expressions.bc b/biscuit-samples/src/samples/test017_expressions.bc index 8992f91..10f5024 100644 Binary files a/biscuit-samples/src/samples/test017_expressions.bc and b/biscuit-samples/src/samples/test017_expressions.bc differ diff --git a/biscuit-samples/src/samples/test018_unbound_variables_in_rule.bc b/biscuit-samples/src/samples/test018_unbound_variables_in_rule.bc index dc26cc7..3679959 100644 Binary files a/biscuit-samples/src/samples/test018_unbound_variables_in_rule.bc and b/biscuit-samples/src/samples/test018_unbound_variables_in_rule.bc differ diff --git a/biscuit-samples/src/samples/test019_generating_ambient_from_variables.bc b/biscuit-samples/src/samples/test019_generating_ambient_from_variables.bc index b31ab8e..f486a3c 100644 Binary files a/biscuit-samples/src/samples/test019_generating_ambient_from_variables.bc and b/biscuit-samples/src/samples/test019_generating_ambient_from_variables.bc differ diff --git a/biscuit-samples/src/samples/test020_sealed.bc b/biscuit-samples/src/samples/test020_sealed.bc index 04186fa..6ce2207 100644 Binary files a/biscuit-samples/src/samples/test020_sealed.bc and b/biscuit-samples/src/samples/test020_sealed.bc differ diff --git a/biscuit-samples/src/samples/test021_parsing.bc b/biscuit-samples/src/samples/test021_parsing.bc index 1d8de24..3f92636 100644 Binary files a/biscuit-samples/src/samples/test021_parsing.bc and b/biscuit-samples/src/samples/test021_parsing.bc differ diff --git a/biscuit-samples/src/samples/test022_default_symbols.bc b/biscuit-samples/src/samples/test022_default_symbols.bc index 7560afd..c12449d 100644 Binary files a/biscuit-samples/src/samples/test022_default_symbols.bc and b/biscuit-samples/src/samples/test022_default_symbols.bc differ diff --git a/biscuit-samples/src/samples/test023_execution_scope.bc b/biscuit-samples/src/samples/test023_execution_scope.bc index 24691b8..b10a686 100644 Binary files a/biscuit-samples/src/samples/test023_execution_scope.bc and b/biscuit-samples/src/samples/test023_execution_scope.bc differ diff --git a/biscuit-samples/src/samples/test024_third_party.bc b/biscuit-samples/src/samples/test024_third_party.bc index 78f1602..7bca415 100644 Binary files a/biscuit-samples/src/samples/test024_third_party.bc and b/biscuit-samples/src/samples/test024_third_party.bc differ diff --git a/biscuit-samples/src/samples/test025_check_all.bc b/biscuit-samples/src/samples/test025_check_all.bc index 6b76c3e..221df2c 100644 Binary files a/biscuit-samples/src/samples/test025_check_all.bc and b/biscuit-samples/src/samples/test025_check_all.bc differ diff --git a/biscuit-samples/src/samples/test026_public_keys_interning.bc b/biscuit-samples/src/samples/test026_public_keys_interning.bc index 0016c7a..49e417b 100644 Binary files a/biscuit-samples/src/samples/test026_public_keys_interning.bc and b/biscuit-samples/src/samples/test026_public_keys_interning.bc differ diff --git a/biscuit-samples/src/samples/test027_integer_wraparound.bc b/biscuit-samples/src/samples/test027_integer_wraparound.bc index fc3a585..50aa63b 100644 Binary files a/biscuit-samples/src/samples/test027_integer_wraparound.bc and b/biscuit-samples/src/samples/test027_integer_wraparound.bc differ diff --git a/biscuit-samples/src/samples/test028_expressions_v4.bc b/biscuit-samples/src/samples/test028_expressions_v4.bc new file mode 100644 index 0000000..c34d7a1 Binary files /dev/null and 
b/biscuit-samples/src/samples/test028_expressions_v4.bc differ diff --git a/biscuit/build.zig b/biscuit/build.zig index 48bdc2c..d4ff429 100644 --- a/biscuit/build.zig +++ b/biscuit/build.zig @@ -17,6 +17,8 @@ pub fn build(b: *std.Build) void { const schema = b.dependency("biscuit_schema", .{ .target = target, .optimize = optimize }); const format = b.dependency("biscuit_format", .{ .target = target, .optimize = optimize }); + const builder = b.dependency("biscuit-builder", .{ .target = target, .optimize = optimize }); + const parser = b.dependency("biscuit-parser", .{ .target = target, .optimize = optimize }); const datalog = b.dependency("biscuit_datalog", .{ .target = target, .optimize = optimize }); _ = b.addModule("biscuit", .{ @@ -24,6 +26,8 @@ pub fn build(b: *std.Build) void { .imports = &.{ .{ .name = "biscuit-schema", .module = schema.module("biscuit-schema") }, .{ .name = "biscuit-format", .module = format.module("biscuit-format") }, + .{ .name = "biscuit-builder", .module = builder.module("biscuit-builder") }, + .{ .name = "biscuit-parser", .module = parser.module("biscuit-parser") }, .{ .name = "biscuit-datalog", .module = datalog.module("biscuit-datalog") }, }, }); @@ -38,6 +42,7 @@ pub fn build(b: *std.Build) void { }); lib_unit_tests.root_module.addImport("biscuit-schema", schema.module("biscuit-schema")); lib_unit_tests.root_module.addImport("biscuit-format", format.module("biscuit-format")); + lib_unit_tests.root_module.addImport("biscuit-builder", builder.module("biscuit-builder")); lib_unit_tests.root_module.addImport("biscuit-datalog", datalog.module("biscuit-datalog")); const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests); diff --git a/biscuit/build.zig.zon b/biscuit/build.zig.zon index e01fe07..89c5cf7 100644 --- a/biscuit/build.zig.zon +++ b/biscuit/build.zig.zon @@ -41,6 +41,8 @@ .biscuit_schema = .{ .path = "../biscuit-schema" }, .biscuit_format = .{ .path = "../biscuit-format" }, + .@"biscuit-builder" = .{ .path = "../biscuit-builder" }, + .@"biscuit-parser" = .{ .path = "../biscuit-parser" }, .biscuit_datalog = .{ .path = "../biscuit-datalog" }, }, diff --git a/biscuit/src/authorizer.zig b/biscuit/src/authorizer.zig index 8a3afab..82942ea 100644 --- a/biscuit/src/authorizer.zig +++ b/biscuit/src/authorizer.zig @@ -2,47 +2,150 @@ const std = @import("std"); const mem = std.mem; const Biscuit = @import("biscuit.zig").Biscuit; const World = @import("biscuit-datalog").world.World; +const Origin = @import("biscuit-datalog").Origin; +const TrustedOrigins = @import("biscuit-datalog").TrustedOrigins; const Check = @import("biscuit-datalog").check.Check; const SymbolTable = @import("biscuit-datalog").symbol_table.SymbolTable; +const Scope = @import("biscuit-datalog").Scope; +const Parser = @import("biscuit-parser").Parser; +const builder = @import("biscuit-builder"); +const PolicyResult = @import("biscuit-builder").PolicyResult; pub const Authorizer = struct { allocator: mem.Allocator, + checks: std.ArrayList(builder.Check), + policies: std.ArrayList(builder.Policy), biscuit: ?Biscuit, world: World, symbols: SymbolTable, + public_key_to_block_id: std.AutoHashMap(usize, std.ArrayList(usize)), + scopes: std.ArrayList(Scope), + + pub fn init(allocator: std.mem.Allocator, biscuit: Biscuit) !Authorizer { + var symbols = SymbolTable.init("authorizer", allocator); + var public_key_to_block_id = std.AutoHashMap(usize, std.ArrayList(usize)).init(allocator); + + // Map public key symbols into authorizer symbols and public_key_to_block_id map + var it = 
biscuit.public_key_to_block_id.iterator(); + while (it.next()) |entry| { + const biscuit_public_key_index = entry.key_ptr.*; + const block_ids = entry.value_ptr.*; + + const public_key = try biscuit.symbols.getPublicKey(biscuit_public_key_index); + + const authorizer_public_key_index = try symbols.insertPublicKey(public_key); + + try public_key_to_block_id.put(authorizer_public_key_index, try block_ids.clone()); + } - pub fn init(allocator: std.mem.Allocator, biscuit: Biscuit) Authorizer { return .{ .allocator = allocator, + .checks = std.ArrayList(builder.Check).init(allocator), + .policies = std.ArrayList(builder.Policy).init(allocator), .biscuit = biscuit, .world = World.init(allocator), - .symbols = SymbolTable.init(allocator), + .symbols = symbols, + .public_key_to_block_id = public_key_to_block_id, + .scopes = std.ArrayList(Scope).init(allocator), }; } pub fn deinit(authorizer: *Authorizer) void { authorizer.world.deinit(); authorizer.symbols.deinit(); + authorizer.scopes.deinit(); + + for (authorizer.checks.items) |check| { + check.deinit(); + } + authorizer.checks.deinit(); + + for (authorizer.policies.items) |policy| { + policy.deinit(); + } + authorizer.policies.deinit(); + + { + var it = authorizer.public_key_to_block_id.valueIterator(); + while (it.next()) |block_ids| { + block_ids.deinit(); + } + authorizer.public_key_to_block_id.deinit(); + } + } + + pub fn authorizerTrustedOrigins(authorizer: *Authorizer) !TrustedOrigins { + return try TrustedOrigins.fromScopes( + authorizer.allocator, + authorizer.scopes.items, + try TrustedOrigins.defaultOrigins(authorizer.allocator), + Origin.AUTHORIZER_ID, + authorizer.public_key_to_block_id, + ); + } + + /// Add fact from string to authorizer + pub fn addFact(authorizer: *Authorizer, input: []const u8) !void { + std.debug.print("authorizer.addFact = {s}\n", .{input}); + var parser = Parser.init(authorizer.allocator, input); + + const fact = try parser.fact(); + + std.debug.print("fact = {any}\n", .{fact}); + + const origin = try Origin.initWithId(authorizer.allocator, Origin.AUTHORIZER_ID); + + try authorizer.world.addFact(origin, try fact.convert(authorizer.allocator, &authorizer.symbols)); + } + + /// Add check from string to authorizer + pub fn addCheck(authorizer: *Authorizer, input: []const u8) !void { + var parser = Parser.init(authorizer.allocator, input); + + const check = try parser.check(); + + try authorizer.checks.append(check); + } + + /// Add policy from string to authorizer + pub fn addPolicy(authorizer: *Authorizer, input: []const u8) !void { + var parser = Parser.init(authorizer.allocator, input); + + const policy = try parser.policy(); + + try authorizer.policies.append(policy); } /// authorize /// /// authorize the Authorizer /// - /// The following high level steps take place during authorization: - /// - If we have a biscuit load the biscuit's authority block's facts and - /// and rules into the Authorizer's world - /// - Run the world to generate new facts - /// - Loop over and apply all of checks _of the authorizer_ - /// - Again, if we have a biscuit, loop over and apply the biscuit's authority block's checks - /// - Loop over the policies _of the authorizer_ (we won't have policies anywhere else) - /// - Finally, again if we have a biscuit, loop over all of the biscuits non-authority - /// blocks and apply the checks therein. 
- pub fn authorize(authorizer: *Authorizer) !void { - var errors = std.ArrayList(AuthorizerError).init(authorizer.allocator); - defer errors.deinit(); - - std.debug.print("authorizing biscuit:\n", .{}); + /// The following high-level steps take place during authorization: + /// 1. _biscuit_ (where it exists): load _all_ of the facts and rules + /// in the biscuit. We can add all the facts and rules as this time because + /// the facts and rules are scoped, i.e. the facts / rules are added to particular + /// scopes within the world. + /// 2. Run the world to generate new facts. + /// 3. _authorizer_: Run the _authorizer's_ checks + /// 4. _biscuit_ (where it exists): run the authority block's checks + /// 5. _authorizer_: Run the _authorizer's_ policies + /// 6. _biscuit_ (where it exists): run the checks from all the non-authority blocks + pub fn authorize(authorizer: *Authorizer, errors: *std.ArrayList(AuthorizerError)) !usize { + std.debug.print("\nAuthorizing biscuit:\n", .{}); + + std.debug.print("authorizer public keys:\n", .{}); + for (authorizer.symbols.public_keys.items, 0..) |pk, i| { + std.debug.print(" [{}]: {x}\n", .{ i, pk.bytes }); + } + + { + var it = authorizer.public_key_to_block_id.iterator(); + while (it.next()) |entry| { + std.debug.print("public_key_to_block_id: public key id = {}, block_ids = {any}\n", .{ entry.key_ptr.*, entry.value_ptr.items }); + } + } + + // 1. // Load facts and rules from authority block into world. Our block's facts // will have a particular symbol table that we map into the symvol table // of the world. @@ -50,48 +153,242 @@ pub const Authorizer = struct { // For example, the token may have a string "user123" which has id 12. But // when mapped into the world it may have id 5. if (authorizer.biscuit) |biscuit| { - for (biscuit.authority.facts.items) |fact| { - try authorizer.world.addFact(try fact.convert(&biscuit.authority.symbols, &authorizer.symbols)); + std.debug.print("biscuit token public keys:\n", .{}); + for (biscuit.symbols.public_keys.items, 0..) |pk, i| { + std.debug.print(" [{}]: {x}\n", .{ i, pk.bytes }); } + for (biscuit.authority.facts.items) |authority_fact| { + const fact = try authority_fact.convert(&biscuit.symbols, &authorizer.symbols); + const origin = try Origin.initWithId(authorizer.allocator, 0); - for (biscuit.authority.rules.items) |rule| { - // FIXME: remap rule - try authorizer.world.addRule(rule); + try authorizer.world.addFact(origin, fact); + } + + const authority_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + biscuit.authority.scopes.items, + try TrustedOrigins.defaultOrigins(authorizer.allocator), + 0, + authorizer.public_key_to_block_id, + ); + + for (biscuit.authority.rules.items) |authority_rule| { + // Map from biscuit symbol space to authorizer symbol space + const rule = try authority_rule.convert(&biscuit.symbols, &authorizer.symbols); + + if (!rule.validateVariables()) { + try errors.append(.unbound_variable); + } + + // A authority block's rule trusts + const rule_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + rule.scopes.items, + authority_trusted_origins, + 0, + authorizer.public_key_to_block_id, + ); + + try authorizer.world.addRule(0, rule_trusted_origins, rule); + } + + for (biscuit.blocks.items, 1..) 
|block, block_id| { + for (block.facts.items) |block_fact| { + const fact = try block_fact.convert(&biscuit.symbols, &authorizer.symbols); + const origin = try Origin.initWithId(authorizer.allocator, block_id); + + try authorizer.world.addFact(origin, fact); + } + + const block_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + block.scopes.items, + try TrustedOrigins.defaultOrigins(authorizer.allocator), + block_id, + authorizer.public_key_to_block_id, + ); + + for (block.rules.items) |block_rule| { + const rule = try block_rule.convert(&biscuit.symbols, &authorizer.symbols); + std.debug.print("block rule {any} CONVERTED to rule = {any}\n", .{ block_rule, rule }); + + if (!rule.validateVariables()) { + try errors.append(.unbound_variable); + } + + const block_rule_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + rule.scopes.items, + block_trusted_origins, + block_id, + authorizer.public_key_to_block_id, + ); + + try authorizer.world.addRule(block_id, block_rule_trusted_origins, rule); + } } } - try authorizer.world.run(authorizer.symbols); - // TODO: clear rules + // 2. Run the world to generate all facts + std.debug.print("\nGENERATING NEW FACTS\n", .{}); + try authorizer.world.run(&authorizer.symbols); + std.debug.print("\nEND GENERATING NEW FACTS\n", .{}); + + // 3. Run checks that have been added to this authorizer + std.debug.print("\nAUTHORIZER CHECKS\n", .{}); + for (authorizer.checks.items) |c| { + std.debug.print("authorizer check = {any}\n", .{c}); + const check = try c.convert(authorizer.allocator, &authorizer.symbols); + + for (check.queries.items, 0..) |*query, check_id| { + const rule_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + query.scopes.items, + try authorizer.authorizerTrustedOrigins(), + Origin.AUTHORIZER_ID, + authorizer.public_key_to_block_id, + ); - // TODO: Run checks that have been added to this authorizer + const is_match = switch (check.kind) { + .one => try authorizer.world.queryMatch(query, &authorizer.symbols, rule_trusted_origins), + .all => try authorizer.world.queryMatchAll(query, &authorizer.symbols, rule_trusted_origins), + }; + + if (!is_match) try errors.append(.{ .failed_authorizer_check = .{ .check_id = check_id } }); + std.debug.print("match {any} = {}\n", .{ query, is_match }); + } + } + std.debug.print("END AUTHORIZER CHECKS\n", .{}); - // Run checks in the biscuit + // 4. Run checks in the biscuit's authority block if (authorizer.biscuit) |biscuit| { - for (biscuit.authority.checks.items) |check| { - std.debug.print("{any}\n", .{check}); + const authority_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + biscuit.authority.scopes.items, + try TrustedOrigins.defaultOrigins(authorizer.allocator), + 0, + authorizer.public_key_to_block_id, + ); + + for (biscuit.authority.checks.items, 0..) 
|c, check_id| { + const check = try c.convert(&biscuit.symbols, &authorizer.symbols); + std.debug.print("{}: {any}\n", .{ check_id, check }); for (check.queries.items) |*query| { - const is_match = try authorizer.world.queryMatch(query, authorizer.symbols); + const rule_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + query.scopes.items, + authority_trusted_origins, + 0, + authorizer.public_key_to_block_id, + ); + + const is_match = switch (check.kind) { + .one => try authorizer.world.queryMatch(query, &authorizer.symbols, rule_trusted_origins), + .all => try authorizer.world.queryMatchAll(query, &authorizer.symbols, rule_trusted_origins), + }; - if (!is_match) try errors.append(.{ .failed_check = 0 }); + if (!is_match) try errors.append(.{ .failed_block_check = .{ .block_id = 0, .check_id = check_id } }); std.debug.print("match {any} = {}\n", .{ query, is_match }); } } } - // TODO: run policies + // 5. run policies from the authorizer + const allowed_policy_id: ?usize = policy_blk: { + for (authorizer.policies.items) |policy| { + std.debug.print("authorizer policy = {any}\n", .{policy}); + + for (policy.queries.items, 0..) |*q, policy_id| { + var query = try q.convert(authorizer.allocator, &authorizer.symbols); + + const rule_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + query.scopes.items, + try authorizer.authorizerTrustedOrigins(), + Origin.AUTHORIZER_ID, + authorizer.public_key_to_block_id, + ); - // TODO: run other block checks + const is_match = try authorizer.world.queryMatch(&query, &authorizer.symbols, rule_trusted_origins); + std.debug.print("match {any} = {}\n", .{ query, is_match }); + + if (is_match) { + switch (policy.kind) { + .allow => break :policy_blk policy_id, + .deny => { + try errors.append(.{ .denied_by_policy = .{ .deny_policy_id = policy_id } }); + break :policy_blk null; + }, + } + } + } + } + + try errors.append(.{ .no_matching_policy = {} }); + break :policy_blk null; + }; + + // 6. Run checks in the biscuit's other blocks + if (authorizer.biscuit) |biscuit| { + for (biscuit.blocks.items, 1..) |block, block_id| { + const block_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + block.scopes.items, + try TrustedOrigins.defaultOrigins(authorizer.allocator), + block_id, + authorizer.public_key_to_block_id, + ); + + std.debug.print("block = {any}\n", .{block}); + + for (block.checks.items, 0..) 
|c, check_id| { + const check = try c.convert(&biscuit.symbols, &authorizer.symbols); + + std.debug.print("check = {any}\n", .{check}); + + for (check.queries.items) |*query| { + const rule_trusted_origins = try TrustedOrigins.fromScopes( + authorizer.allocator, + query.scopes.items, + block_trusted_origins, + block_id, + authorizer.public_key_to_block_id, + ); + + const is_match = switch (check.kind) { + .one => try authorizer.world.queryMatch(query, &authorizer.symbols, rule_trusted_origins), + .all => try authorizer.world.queryMatchAll(query, &authorizer.symbols, rule_trusted_origins), + }; + + if (!is_match) try errors.append(.{ .failed_block_check = .{ .block_id = block_id, .check_id = check_id } }); + + std.debug.print("match {any} = {}\n", .{ query, is_match }); + } + } + } + } + + if (allowed_policy_id) |policy_id| { + if (errors.items.len == 0) return policy_id; + } - // FIXME: return logic - if (errors.items.len > 0) return error.AuthorizationFailed; + return error.AuthorizationFailed; } }; const AuthorizerErrorKind = enum(u8) { - failed_check, + no_matching_policy, + denied_by_policy, + failed_authorizer_check, + failed_block_check, + unbound_variable, }; -const AuthorizerError = union(AuthorizerErrorKind) { - failed_check: u32, +pub const AuthorizerError = union(AuthorizerErrorKind) { + no_matching_policy: void, + denied_by_policy: struct { deny_policy_id: usize }, + failed_authorizer_check: struct { check_id: usize }, + failed_block_check: struct { block_id: usize, check_id: usize }, + unbound_variable: void, }; diff --git a/biscuit/src/biscuit.zig b/biscuit/src/biscuit.zig index 87d41b5..bf00222 100644 --- a/biscuit/src/biscuit.zig +++ b/biscuit/src/biscuit.zig @@ -2,33 +2,75 @@ const std = @import("std"); const mem = std.mem; const Ed25519 = std.crypto.sign.Ed25519; const Authorizer = @import("authorizer.zig").Authorizer; +const AuthorizerError = @import("authorizer.zig").AuthorizerError; const Block = @import("block.zig").Block; +const SymbolTable = @import("biscuit-datalog").SymbolTable; const World = @import("biscuit-datalog").world.World; -const SerializedBiscuit = @import("biscuit-format").serialized_biscuit.SerializedBiscuit; +const SerializedBiscuit = @import("biscuit-format").SerializedBiscuit; pub const Biscuit = struct { serialized: SerializedBiscuit, authority: Block, blocks: std.ArrayList(Block), - symbols: std.ArrayList([]const u8), + symbols: SymbolTable, + public_key_to_block_id: std.AutoHashMap(usize, std.ArrayList(usize)), - pub fn initFromBytes(allocator: mem.Allocator, bytes: []const u8, public_key: Ed25519.PublicKey) !Biscuit { - std.debug.print("\nInitialising biscuit:\n", .{}); - var serialized = try SerializedBiscuit.initFromBytes(allocator, bytes, public_key); + pub fn fromBytes(allocator: mem.Allocator, token_bytes: []const u8, root_public_key: Ed25519.PublicKey) !Biscuit { + var serialized = try SerializedBiscuit.fromBytes(allocator, token_bytes, root_public_key); errdefer serialized.deinit(); - const authority = try Block.initFromBytes(allocator, serialized.authority.block); + // For each block we will temporarily store the external public key (where it exists). 
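+    // The index into this list corresponds to the block id: the authority block's (possibly null)
+    // external key is appended first at index 0, followed by one entry per non-authority block in order.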
+ var block_external_keys = try std.ArrayList(?Ed25519.PublicKey).initCapacity(allocator, 1 + serialized.blocks.items.len); + defer block_external_keys.deinit(); + defer std.debug.assert(block_external_keys.items.len == 1 + serialized.blocks.items.len); + + var token_symbols = SymbolTable.init("biscuit", allocator); + + const authority = try Block.fromBytes(allocator, serialized.authority, &token_symbols); + try block_external_keys.append(null); + std.debug.print("authority block =\n{any}\n", .{authority}); var blocks = std.ArrayList(Block).init(allocator); - for (serialized.blocks.items) |b| { - try blocks.append(try Block.initFromBytes(allocator, b.block)); + for (serialized.blocks.items) |signed_block| { + const block = try Block.fromBytes(allocator, signed_block, &token_symbols); + std.debug.print("non-authority block =\n{any}\n", .{block}); + + const external_key = if (signed_block.external_signature) |external_signature| external_signature.public_key else null; + try block_external_keys.append(external_key); + + try blocks.append(block); + } + + // Build map from public key (rather the symbol index associated with the public key) to block id. + // Multiple blocks may be signed by the same external key and so the mapping is from the public + // key to a list of block ids. + var public_key_to_block_id = std.AutoHashMap(usize, std.ArrayList(usize)).init(allocator); + for (block_external_keys.items, 0..) |block_external_key, block_id| { + const key = block_external_key orelse continue; + + const key_index = try token_symbols.insertPublicKey(key); + if (public_key_to_block_id.getPtr(key_index)) |list_ptr| { + try list_ptr.append(block_id); + } else { + var list = std.ArrayList(usize).init(allocator); + try list.append(block_id); + try public_key_to_block_id.put(key_index, list); + } + } + + { + var it = public_key_to_block_id.iterator(); + while (it.next()) |entry| { + std.debug.print("public_key_to_block_id: public key id = {}, block_ids = {any}\n", .{ entry.key_ptr.*, entry.value_ptr.items }); + } } return .{ .serialized = serialized, .authority = authority, .blocks = blocks, - .symbols = std.ArrayList([]const u8).init(allocator), + .symbols = token_symbols, + .public_key_to_block_id = public_key_to_block_id, }; } @@ -38,11 +80,19 @@ pub const Biscuit = struct { } biscuit.blocks.deinit(); biscuit.authority.deinit(); + + // FIXME: think about lifetimes for public_key_to_block_id + var it = biscuit.public_key_to_block_id.valueIterator(); + while (it.next()) |block_ids| { + block_ids.deinit(); + } + biscuit.public_key_to_block_id.deinit(); + biscuit.serialized.deinit(); } - pub fn authorizer(biscuit: *Biscuit, allocator: std.mem.Allocator) Authorizer { - return Authorizer.init(allocator, biscuit.*); + pub fn authorizer(biscuit: *Biscuit, allocator: std.mem.Allocator) !Authorizer { + return try Authorizer.init(allocator, biscuit.*); } }; @@ -72,13 +122,16 @@ test { const bytes = try decode.urlSafeBase64ToBytes(allocator, token); defer allocator.free(bytes); - var b = try Biscuit.initFromBytes(allocator, bytes, public_key); + var b = try Biscuit.fromBytes(allocator, bytes, public_key); defer b.deinit(); - var a = b.authorizer(allocator); + var a = try b.authorizer(allocator); defer a.deinit(); - try a.authorize(); + var errors = std.ArrayList(AuthorizerError).init(allocator); + defer errors.deinit(); + + _ = try a.authorize(&errors); } } @@ -103,12 +156,15 @@ test "Tokens that should fail to validate" { const bytes = try decode.urlSafeBase64ToBytes(allocator, token); defer 
allocator.free(bytes); - var b = try Biscuit.initFromBytes(allocator, bytes, public_key); + var b = try Biscuit.fromBytes(allocator, bytes, public_key); defer b.deinit(); - var a = b.authorizer(allocator); + var a = try b.authorizer(allocator); defer a.deinit(); - try testing.expectError(error.AuthorizationFailed, a.authorize()); + var errors = std.ArrayList(AuthorizerError).init(allocator); + defer errors.deinit(); + + try testing.expectError(error.AuthorizationFailed, a.authorize(&errors)); } } diff --git a/biscuit/src/block.zig b/biscuit/src/block.zig index e7f33be..304dbc3 100644 --- a/biscuit/src/block.zig +++ b/biscuit/src/block.zig @@ -1,37 +1,59 @@ const std = @import("std"); -const format = @import("biscuit-format"); +const Ed25519 = std.crypto.sign.Ed25519; +const SignedBlock = @import("biscuit-format").SignedBlock; +const MIN_SCHEMA_VERSION = @import("biscuit-format").MIN_SCHEMA_VERSION; +const MAX_SCHEMA_VERSION = @import("biscuit-format").MAX_SCHEMA_VERSION; const schema = @import("biscuit-schema"); const Fact = @import("biscuit-datalog").fact.Fact; const Rule = @import("biscuit-datalog").rule.Rule; const Check = @import("biscuit-datalog").check.Check; +const Scope = @import("biscuit-datalog").Scope; const SymbolTable = @import("biscuit-datalog").symbol_table.SymbolTable; -const MIN_SCHEMA_VERSION = format.serialized_biscuit.MIN_SCHEMA_VERSION; -const MAX_SCHEMA_VERSION = format.serialized_biscuit.MAX_SCHEMA_VERSION; pub const Block = struct { version: u32, context: []const u8, - symbols: SymbolTable, + symbols: SymbolTable, // Do we need symbol table here? When we deserialize a biscuit we build up a complete symbol table for the entire biscuit. So what is the purpose? facts: std.ArrayList(Fact), rules: std.ArrayList(Rule), checks: std.ArrayList(Check), + scopes: std.ArrayList(Scope), + public_keys: std.ArrayList(Ed25519.PublicKey), - pub fn init(allocator: std.mem.Allocator) !Block { + pub fn init(allocator: std.mem.Allocator) Block { return .{ .version = 0, .context = "", - .symbols = SymbolTable.init(allocator), + .symbols = SymbolTable.init("block", allocator), .facts = std.ArrayList(Fact).init(allocator), .rules = std.ArrayList(Rule).init(allocator), .checks = std.ArrayList(Check).init(allocator), + .scopes = std.ArrayList(Scope).init(allocator), + .public_keys = std.ArrayList(Ed25519.PublicKey).init(allocator), }; } - pub fn initFromBytes(allocator: std.mem.Allocator, data: []const u8) !Block { + pub fn deinit(block: *Block) void { + for (block.checks.items) |*check| check.deinit(); + for (block.rules.items) |*rule| rule.deinit(); + for (block.facts.items) |*fact| fact.deinit(); + + block.checks.deinit(); + block.rules.deinit(); + block.facts.deinit(); + block.scopes.deinit(); + block.public_keys.deinit(); + block.symbols.deinit(); + } + + /// Given a blocks contents as bytes, derserialize into runtime block + pub fn fromBytes(allocator: std.mem.Allocator, signed_block: SignedBlock, token_symbols: *SymbolTable) !Block { + const data = signed_block.block; + std.debug.print("Block.fromBytes\n", .{}); const decoded_block = try schema.decodeBlock(allocator, data); defer decoded_block.deinit(); - var block = try init(allocator); + var block = Block.init(allocator); errdefer block.deinit(); const version = decoded_block.version orelse return error.ExpectedVersion; @@ -44,6 +66,16 @@ pub const Block = struct { _ = try block.symbols.insert(symbol.getSlice()); } + // If we have an external signature we add the external public key that to the parent biscuit's symbols and we don't 
add the blocks symbols + // Otherwise add the blocks symbols to the biscuit's symbol table. + if (signed_block.external_signature) |external_signature| { + _ = try token_symbols.insertPublicKey(external_signature.public_key); + } else { + for (decoded_block.symbols.items) |symbol| { + _ = try token_symbols.insert(symbol.getSlice()); + } + } + for (decoded_block.facts_v2.items) |fact| { try block.facts.append(try Fact.fromSchema(allocator, fact)); } @@ -56,17 +88,49 @@ pub const Block = struct { try block.checks.append(try Check.fromSchema(allocator, check)); } + for (decoded_block.publicKeys.items) |public_key| { + var pubkey_buf: [Ed25519.PublicKey.encoded_length]u8 = undefined; + @memcpy(&pubkey_buf, public_key.key.getSlice()); + + const key = try Ed25519.PublicKey.fromBytes(pubkey_buf); + + _ = try token_symbols.insertPublicKey(key); + try block.public_keys.append(key); + } + return block; } - pub fn deinit(block: *Block) void { - for (block.checks.items) |*check| check.deinit(); - for (block.rules.items) |*rule| rule.deinit(); - for (block.facts.items) |*fact| fact.deinit(); + pub fn format(block: Block, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try writer.print("block:\n", .{}); + try writer.print(" version: {}\n", .{block.version}); + try writer.print(" context: {s}\n", .{block.context}); - block.checks.deinit(); - block.rules.deinit(); - block.facts.deinit(); - block.symbols.deinit(); + try writer.print(" symbols:\n", .{}); + for (block.symbols.symbols.items, 0..) |symbol, i| { + try writer.print(" [{}]: {s}\n", .{ i, symbol }); + } + + try writer.print(" facts:\n", .{}); + for (block.facts.items, 0..) |fact, i| { + try writer.print(" [{}]: {any}\n", .{ i, fact }); + } + + try writer.print(" rules:\n", .{}); + for (block.rules.items, 0..) |rule, i| { + try writer.print(" [{}]: {any}\n", .{ i, rule }); + } + + try writer.print(" checks:\n", .{}); + for (block.checks.items, 0..) |check, i| { + try writer.print(" [{}]: {any}\n", .{ i, check }); + } + + try writer.print(" public keys:\n", .{}); + for (block.public_keys.items, 0..) |public_key, i| { + try writer.print(" [{}]: {x}\n", .{ i, public_key.bytes }); + } + + return; } }; diff --git a/biscuit/src/main.zig b/biscuit/src/main.zig index d20feb9..ce0ad3a 100644 --- a/biscuit/src/main.zig +++ b/biscuit/src/main.zig @@ -1,4 +1,5 @@ pub const Biscuit = @import("biscuit.zig").Biscuit; +pub const AuthorizerError = @import("authorizer.zig").AuthorizerError; test { _ = @import("biscuit.zig");