From 12d3a25aba311e47632e474417d4d39257ce0156 Mon Sep 17 00:00:00 2001
From: Fantix King
Date: Tue, 10 Dec 2024 17:36:46 -0500
Subject: [PATCH 1/6] Add structured config compilation

Structured config values are Python objects from environment variables
or (future) TOML config files. The compilation reassembles the objects
into ConfigOp ASTs and uses the static evaluation mechanism to generate
verified config values. As a side effect, such config sources now
support specifying EdgeQL values directly in double braces, for example:

    env GEL_SERVER_CONFIG_cfg::session_idle_timeout \
        = "{{'5s' * 2}}"

Technically, this allows setting nested config objects with INSERT
statements too.
---
 edb/ir/ast.py                    |  59 +++++++
 edb/ir/staeval.py                | 135 +++++++++++++-
 edb/server/compiler/compiler.py  |  21 +++
 edb/server/compiler/config.py    | 292 +++++++++++++++++++++++++++++++
 edb/server/compiler_pool/pool.py |   4 +
 edb/server/config/ops.py         |  37 ++--
 edb/server/config/types.py       |  10 +-
 tests/test_server_compiler.py    | 147 +++++++++++++++-
 8 files changed, 686 insertions(+), 19 deletions(-)
 create mode 100644 edb/server/compiler/config.py

diff --git a/edb/ir/ast.py b/edb/ir/ast.py
index 3602b592047..fc5b5eeb70e 100644
--- a/edb/ir/ast.py
+++ b/edb/ir/ast.py
@@ -77,6 +77,7 @@
 from edb.schema import modules as s_mod
 from edb.schema import name as sn
 from edb.schema import objects as so
+from edb.schema import objtypes as s_objtypes
 from edb.schema import pointers as s_pointers
 from edb.schema import schema as s_schema
 from edb.schema import types as s_types
@@ -1400,3 +1401,61 @@ class FTSDocument(ImmutableExpr):
     weight: typing.Optional[str]

     typeref: TypeRef
+
+
+# StaticIntrospection is only used in static evaluation (staeval.py),
+# but unfortunately the IR AST node can only be defined here.
+class StaticIntrospection(Tuple):
+
+    ir: TypeIntrospection
+    schema: s_schema.Schema
+
+    @property
+    def meta_type(self) -> s_objtypes.ObjectType:
+        return self.schema.get_by_id(
+            self.ir.typeref.id, type=s_objtypes.ObjectType
+        )
+
+    @property
+    def output_type(self) -> s_types.Type:
+        return self.schema.get_by_id(
+            self.ir.output_typeref.id, type=s_types.Type
+        )
+
+    @property
+    def elements(self) -> typing.List[TupleElement]:
+        from . import staeval
+
+        rv = []
+        schema = self.schema
+        output_type = self.output_type
+        for ptr in self.meta_type.get_pointers(schema).objects(schema):
+            field_sn = ptr.get_shortname(schema)
+            field_name = field_sn.name
+            field_type = ptr.get_target(schema)
+            assert field_type is not None
+            try:
+                field_value = output_type.get_field_value(schema, field_name)
+            except LookupError:
+                continue
+            try:
+                val = staeval.coerce_py_const(field_type.id, field_value)
+            except staeval.UnsupportedExpressionError:
+                continue
+            ref = TypeRef(id=field_type.id, name_hint=field_sn)
+            vset = Set(expr=val, typeref=ref, path_id=PathId.from_typeref(ref))
+            rv.append(TupleElement(name=field_name, val=vset))
+        return rv
+
+    @elements.setter
+    def elements(self, elements: typing.List[TupleElement]) -> None:
+        pass
+
+    def get_field_value(self, name: sn.QualName) -> ConstExpr | TypeCast:
+        from . 
import staeval + + ptr = self.meta_type.getptr(self.schema, name.get_local_name()) + rv_type = ptr.get_target(self.schema) + assert rv_type is not None + rv_value = self.output_type.get_field_value(self.schema, name.name) + return staeval.coerce_py_const(rv_type.id, rv_value) diff --git a/edb/ir/staeval.py b/edb/ir/staeval.py index d3a9000e3d7..7301cc7e41d 100644 --- a/edb/ir/staeval.py +++ b/edb/ir/staeval.py @@ -34,6 +34,7 @@ import decimal import functools +import uuid import immutables @@ -43,6 +44,7 @@ from edb.common import typeutils from edb.common import parsing from edb.common import uuidgen +from edb.common import value_dispatch from edb.edgeql import ast as qlast from edb.edgeql import compiler as qlcompiler from edb.edgeql import qltypes @@ -52,6 +54,7 @@ from edb.ir import statypes as statypes from edb.ir import utils as irutils +from edb.schema import name as sn from edb.schema import objects as s_obj from edb.schema import objtypes as s_objtypes from edb.schema import types as s_types @@ -100,6 +103,40 @@ def evaluate_SelectStmt( 'expression is not constant', span=ir_stmt.span) +@evaluate.register(irast.InsertStmt) +def evaluate_InsertStmt( + ir: irast.InsertStmt, schema: s_schema.Schema +) -> EvaluationResult: + # XXX: raise for unsupported InsertStmt? + + return irast.Tuple( + named=True, + typeref=ir.subject.typeref, + elements=[ + irast.TupleElement( + name=ptr_set.expr.ptrref.shortname.name, + val=irast.Set( + expr=evaluate(ptr_set.expr.expr, schema), + typeref=ptr_set.typeref, + path_id=ptr_set.path_id, + ), + ) + for ptr_set, _ in ir.subject.shape + if ptr_set.expr.ptrref.shortname.name != "id" + and ptr_set.expr.expr is not None + ], + ) + + +@evaluate.register(irast.TypeIntrospection) +def evaluate_TypeIntrospection( + ir: irast.TypeIntrospection, schema: s_schema.Schema +) -> EvaluationResult: + return irast.StaticIntrospection( + named=True, ir=ir, schema=schema, elements=[], typeref=ir.typeref + ) + + @evaluate.register(irast.TypeCast) def evaluate_TypeCast( ir_cast: irast.TypeCast, schema: s_schema.Schema @@ -108,7 +145,7 @@ def evaluate_TypeCast( schema, from_type = irtyputils.ir_typeref_to_type( schema, ir_cast.from_type) schema, to_type = irtyputils.ir_typeref_to_type( - schema, ir_cast.from_type) + schema, ir_cast.to_type) if ( not isinstance(from_type, s_scalars.ScalarType) @@ -141,11 +178,39 @@ def evaluate_Pointer( ) -> EvaluationResult: if ptr.expr is not None: return evaluate(ptr.expr, schema=schema) + + elif ( + ptr.direction == s_pointers.PointerDirection.Outbound + and isinstance(ptr.ptrref, irast.PointerRef) + and ptr.ptrref.out_cardinality.is_single() + and ptr.ptrref.out_target.is_scalar + ): + return evaluate_pointer_ref( + evaluate(ptr.source.expr, schema=schema), ptr.ptrref + ) + else: raise UnsupportedExpressionError( 'expression is not constant', span=ptr.span) +@functools.singledispatch +def evaluate_pointer_ref( + evaluated_source: EvaluationResult, ptrref: irast.PointerRef +) -> EvaluationResult: + raise UnsupportedExpressionError( + f'unsupported PointerRef on source {evaluated_source}', + span=ptrref.span, + ) + + +@evaluate_pointer_ref.register(irast.StaticIntrospection) +def evaluate_pointer_ref_StaticIntrospection( + source: irast.StaticIntrospection, ptrref: irast.PointerRef +) -> EvaluationResult: + return source.get_field_value(ptrref.shortname) + + @evaluate.register(irast.ConstExpr) def evaluate_BaseConstant( ir_const: irast.ConstExpr, schema: s_schema.Schema @@ -645,3 +710,71 @@ def evaluate_config_reset( setting_name=ir.name, 
value=None, ) + + +@evaluate_to_config_op.register(irast.ConfigInsert) +def evaluate_config_insert( + ir: irast.ConfigInsert, schema: s_schema.Schema +) -> config.Operation: + return config.Operation( + opcode=config.OpCode.CONFIG_ADD, + scope=ir.scope, + setting_name=ir.name, + value=evaluate_to_python_val( + irast.InsertStmt(subject=ir.expr), schema=schema + ), + ) + + +@value_dispatch.value_dispatch +def coerce_py_const( + type_id: uuid.UUID, val: Any +) -> irast.ConstExpr | irast.TypeCast: + raise UnsupportedExpressionError(f"unimplemented coerce type: {type_id}") + + +@coerce_py_const.register(s_obj.get_known_type_id("std::str")) +def evaluate_std_str( + type_id: uuid.UUID, val: Any +) -> irast.ConstExpr | irast.TypeCast: + return irast.StringConstant( + typeref=irast.TypeRef( + id=type_id, name_hint=sn.name_from_string("std::str") + ), + value=str(val), + ) + + +@coerce_py_const.register(s_obj.get_known_type_id("std::bool")) +def evaluate_std_bool( + type_id: uuid.UUID, val: Any +) -> irast.ConstExpr | irast.TypeCast: + return irast.BooleanConstant( + typeref=irast.TypeRef( + id=type_id, name_hint=sn.name_from_string("std::bool") + ), + value=str(bool(val)).lower(), + ) + + +@coerce_py_const.register(s_obj.get_known_type_id("std::uuid")) +def evaluate_std_uuid( + type_id: uuid.UUID, val: Any +) -> irast.ConstExpr | irast.TypeCast: + str_type_id = s_obj.get_known_type_id("std::str") + str_typeref = irast.TypeRef( + id=str_type_id, name_hint=sn.name_from_string("std::str") + ) + return irast.TypeCast( + from_type=str_typeref, + to_type=irast.TypeRef( + id=type_id, name_hint=sn.name_from_string("std::uuid") + ), + expr=irast.Set( + expr=irast.StringConstant(typeref=str_typeref, value=str(val)), + typeref=str_typeref, + path_id=irast.PathId.from_typeref(str_typeref), + ), + sql_cast=True, + sql_expr=False, + ) diff --git a/edb/server/compiler/compiler.py b/edb/server/compiler/compiler.py index 0d06f73bc0b..27c73d71058 100644 --- a/edb/server/compiler/compiler.py +++ b/edb/server/compiler/compiler.py @@ -96,6 +96,7 @@ from edb.pgsql import types as pg_types from edb.pgsql import delta as pg_delta +from . import config as config_compiler from . import dbstate from . import enums from . import explain @@ -1290,6 +1291,26 @@ def validate_schema_equivalence( pickle.loads(global_schema), ) + def compile_structured_config( + self, + objects: Mapping[str, config_compiler.ConfigObject], + source: str | None = None, + ) -> dict[str, immutables.Map[str, config.SettingValue]]: + # XXX: only config in the stdlib is supported currently, so the only + # key allowed in objects is "cfg::Config". API for future compatibility + if list(objects) != ["cfg::Config"]: + difference = set(objects) - {"cfg::Config"} + raise NotImplementedError( + f"unsupported config: {', '.join(difference)}" + ) + + return config_compiler.compile_structured_config( + objects, + spec=self.state.config_spec, + schema=self.state.std_schema, + source=source, + ) + def compile_schema_storage_in_delta( ctx: CompileContext, diff --git a/edb/server/compiler/config.py b/edb/server/compiler/config.py new file mode 100644 index 00000000000..048e4376c0b --- /dev/null +++ b/edb/server/compiler/config.py @@ -0,0 +1,292 @@ +# +# This source file is part of the EdgeDB open source project. +# +# Copyright 2024-present MagicStack Inc. and the EdgeDB authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from __future__ import annotations +from typing import Iterable, Mapping, Sequence, Type + +import dataclasses +import datetime +import functools + +import immutables + +from edb import errors +from edb.common import typeutils +from edb.edgeql import ast as qlast +from edb.edgeql import parser as qlparser +from edb.edgeql import qltypes +from edb.edgeql import compiler as qlcompiler +from edb.ir import ast as irast +from edb.ir import staeval as ireval +from edb.server import config +from edb.schema import name as sn +from edb.schema import objtypes as s_objtypes +from edb.schema import pointers as s_pointers +from edb.schema import schema as s_schema +from edb.schema import types as s_types +from edb.schema import utils as s_utils + +ConfigInput = ( + str + | int + | float + | bool + | datetime.datetime + | datetime.date + | datetime.time + | Sequence["ConfigInput"] + | Mapping[str, "ConfigInput"] + | None +) +ConfigObject = Mapping[str, ConfigInput] + + +@dataclasses.dataclass +class Context: + schema: s_schema.Schema + obj_type: s_objtypes.ObjectType + qual_name: str + options: qlcompiler.CompilerOptions + + def get_ptr(self, name: str) -> s_pointers.Pointer: + un = sn.UnqualName(name) + schema = self.schema + ty = self.obj_type + ancestors = ty.get_ancestors(schema).objects(schema) + for t in (ty,) + ancestors: + if (rv := t.maybe_get_ptr(schema, un)) is not None: + return rv + raise errors.ConfigurationError( + f"{ty.get_shortname(schema)!s} does not have field: {name!r}" + ) + + def get_full_name(self, ptr: s_pointers.Pointer) -> str: + return f"{self.qual_name}::{ptr.get_local_name(self.schema)}" + + def is_multi(self, ptr: s_pointers.Pointer) -> bool: + return ptr.get_cardinality(self.schema).is_multi() + + def get_type( + self, ptr: s_pointers.Pointer, *, type: Type[s_types.TypeT] + ) -> s_types.TypeT: + rv = ptr.get_target(self.schema) + if not isinstance(rv, type): + raise TypeError(f"{ptr!r}.target is not {type:r}") + return rv + + def get_ref(self, ptr: s_pointers.Pointer) -> qlast.ObjectRef: + ty = self.get_type(ptr, type=s_types.QualifiedType) + ty_name = ty.get_shortname(self.schema) + return qlast.ObjectRef(name=ty_name.name, module=ty_name.module) + + def cast( + self, expr: qlast.Expr, *, ptr: s_pointers.Pointer + ) -> qlast.TypeCast: + return qlast.TypeCast( + expr=expr, + type=qlast.TypeName(maintype=self.get_ref(ptr)), + ) + + +@functools.singledispatch +def compile_input_to_ast( + value: ConfigInput, *, ptr: s_pointers.Pointer, ctx: Context +) -> qlast.Expr: + raise errors.ConfigurationError( + f"unsupported input type {type(value)!r} for {ctx.get_full_name(ptr)}" + ) + + +@compile_input_to_ast.register +def compile_input_str( + value: str, *, ptr: s_pointers.Pointer, ctx: Context +) -> qlast.Expr: + if value.startswith("{{") and value.endswith("}}"): + return qlparser.parse_fragment(value[2:-2]) + ty = ctx.get_type(ptr, type=s_types.QualifiedType) + if ty.is_enum(ctx.schema): + ty_name = ty.get_shortname(ctx.schema) + return qlast.Path( + steps=[ + qlast.ObjectRef(name=ty_name.name, module=ty_name.module), + qlast.Ptr(name=value), + ] + ) + else: + 
return ctx.cast(qlast.Constant.string(value), ptr=ptr) + + +@compile_input_to_ast.register +def compile_input_scalar( + value: int | float | bool, *, ptr: s_pointers.Pointer, ctx: Context +) -> qlast.Expr: + return ctx.cast(s_utils.const_ast_from_python(value), ptr=ptr) + + +@compile_input_to_ast.register(dict) +@compile_input_to_ast.register(immutables.Map) +def compile_input_mapping( + value: Mapping[str, ConfigInput], + *, + ptr: s_pointers.Pointer, + ctx: Context, +) -> qlast.Expr: + if "_tname" in value: + tname = value["_tname"] + if not isinstance(tname, str): + raise errors.ConfigurationError( + f"type of `_tname` must be str, got: {type(tname)!r}" + ) + obj_type = ctx.schema.get(tname, type=s_objtypes.ObjectType) + else: + try: + obj_type = ctx.get_type(ptr, type=s_objtypes.ObjectType) + except TypeError: + raise errors.ConfigurationError( + f"unsupported input type {type(value)!r} " + f"for {ctx.get_full_name(ptr)}" + ) + obj_name = obj_type.get_shortname(ctx.schema) + new_ctx = Context( + schema=ctx.schema, + obj_type=obj_type, + qual_name=ctx.get_full_name(ptr), + options=ctx.options, + ) + return qlast.InsertQuery( + subject=qlast.ObjectRef(name=obj_name.name, module=obj_name.module), + shape=list(compile_dict_to_shape(value, ctx=new_ctx).values()), + ) + + +def compile_dict_to_shape( + values: Mapping[str, ConfigInput], *, ctx: Context +) -> dict[str, qlast.ShapeElement]: + rv = {} + for name, value in values.items(): + if name == "_tname": + continue + ptr = ctx.get_ptr(name) + expr: qlast.Expr + if ctx.is_multi(ptr) and not isinstance(value, str): + if not typeutils.is_container(value) or isinstance(value, Mapping): + raise errors.ConfigurationError( + f"{ctx.get_full_name(ptr)} must be a sequence, " + f"got type: {type(value)!r}" + ) + assert isinstance(value, Iterable) + expr = qlast.Set( + elements=[ + compile_input_to_ast(v, ptr=ptr, ctx=ctx) for v in value + ] + ) + else: + expr = compile_input_to_ast(value, ptr=ptr, ctx=ctx) + rv[name] = qlast.ShapeElement( + expr=qlast.Path(steps=[qlast.Ptr(name=name)]), compexpr=expr + ) + return rv + + +def compile_ast_to_operation( + obj_name: str, + field_name: str, + expr: qlast.Expr, + *, + schema: s_schema.Schema, + options: qlcompiler.CompilerOptions, +) -> config.Operation: + cmd: qlast.ConfigOp + if isinstance(expr, qlast.InsertQuery): + cmd = qlast.ConfigInsert( + name=expr.subject, + scope=qltypes.ConfigScope.INSTANCE, + shape=expr.shape, + ) + else: + field_name_ref = qlast.ObjectRef(name=field_name) + if obj_name != "cfg::Config": + field_name_ref.module = obj_name + cmd = qlast.ConfigSet( + name=field_name_ref, + scope=qltypes.ConfigScope.INSTANCE, + expr=expr, + ) + ir = qlcompiler.compile_ast_to_ir(cmd, schema=schema, options=options) + if ( + isinstance(ir, irast.ConfigSet) + or isinstance(ir, irast.Statement) + and isinstance((ir := ir.expr.expr), irast.ConfigInsert) + ): + return ireval.evaluate_to_config_op(ir, schema=schema) + + raise errors.InternalServerError(f"unrecognized IR: {type(ir)!r}") + + +def compile_structured_config( + objects: Mapping[str, ConfigObject], + *, + spec: config.Spec, + schema: s_schema.Schema, + source: str | None = None, +) -> dict[str, immutables.Map[str, config.SettingValue]]: + options = qlcompiler.CompilerOptions( + modaliases={None: "cfg"}, + in_server_config_op=True, + ) + rv = {} + for obj_name, input_values in objects.items(): + storage: immutables.Map[str, config.SettingValue] = immutables.Map() + ctx = Context( + schema=schema, + obj_type=schema.get(obj_name, 
type=s_objtypes.ObjectType), + qual_name=obj_name, + options=options, + ) + shape = compile_dict_to_shape(input_values, ctx=ctx) + for field_name, shape_el in shape.items(): + if isinstance(shape_el.compexpr, qlast.Set): + elements = shape_el.compexpr.elements + if not elements: + continue + + if isinstance(elements[0], qlast.InsertQuery): + for ast in shape_el.compexpr.elements: + op = compile_ast_to_operation( + obj_name, + field_name, + ast, + schema=schema, + options=options, + ) + storage = op.apply(spec, storage, source=source) + continue + + assert shape_el.compexpr is not None + op = compile_ast_to_operation( + obj_name, + field_name, + shape_el.compexpr, + schema=schema, + options=options, + ) + storage = op.apply(spec, storage, source=source) + + rv[obj_name] = storage + + return rv diff --git a/edb/server/compiler_pool/pool.py b/edb/server/compiler_pool/pool.py index 97820b3fe1e..4d3d69ca91c 100644 --- a/edb/server/compiler_pool/pool.py +++ b/edb/server/compiler_pool/pool.py @@ -607,6 +607,10 @@ async def validate_schema_equivalence(self, *args, **kwargs): return await self._simple_call( 'validate_schema_equivalence', *args, **kwargs) + async def compile_structured_config(self, *args, **kwargs): + return await self._simple_call( + 'compile_structured_config', *args, **kwargs) + def get_debug_info(self): return {} diff --git a/edb/server/config/ops.py b/edb/server/config/ops.py index c7282099896..fb66e90ea31 100644 --- a/edb/server/config/ops.py +++ b/edb/server/config/ops.py @@ -229,7 +229,13 @@ def coerce_global_value( # the value has explicitly been set to {}. return b[4:] if b[:4] != b'\xff\xff\xff\xff' else None - def apply(self, spec: spec.Spec, storage: SettingsMap) -> SettingsMap: + def apply( + self, + spec: spec.Spec, + storage: SettingsMap, + *, + source: str | None = None, + ) -> SettingsMap: allow_missing = ( self.opcode is OpCode.CONFIG_REM @@ -245,7 +251,7 @@ def apply(self, spec: spec.Spec, storage: SettingsMap) -> SettingsMap: value = self.coerce_global_value(allow_missing=allow_missing) if self.opcode is OpCode.CONFIG_SET: - storage = self._set_value(storage, value) + storage = self._set_value(storage, value, source=source) elif self.opcode is OpCode.CONFIG_RESET: try: @@ -269,7 +275,7 @@ def apply(self, spec: spec.Spec, storage: SettingsMap) -> SettingsMap: new_value = _check_object_set_uniqueness( setting, list(exist_value) + [value]) - storage = self._set_value(storage, new_value) + storage = self._set_value(storage, new_value, source=source) elif self.opcode is OpCode.CONFIG_REM: assert setting @@ -285,7 +291,7 @@ def apply(self, spec: spec.Spec, storage: SettingsMap) -> SettingsMap: else: exist_value = setting.default new_value = exist_value - {value} - storage = self._set_value(storage, new_value) + storage = self._set_value(storage, new_value, source=source) return storage @@ -293,18 +299,21 @@ def _set_value( self, storage: SettingsMap, value: Any, + *, + source: str | None = None, ) -> SettingsMap: - if self.scope is qltypes.ConfigScope.INSTANCE: - source = 'system override' - elif self.scope is qltypes.ConfigScope.DATABASE: - source = 'database' - elif self.scope is qltypes.ConfigScope.SESSION: - source = 'session' - elif self.scope is qltypes.ConfigScope.GLOBAL: - source = 'global' - else: - raise AssertionError(f'unexpected config scope: {self.scope}') + if source is None: + if self.scope is qltypes.ConfigScope.INSTANCE: + source = 'system override' + elif self.scope is qltypes.ConfigScope.DATABASE: + source = 'database' + elif self.scope is 
qltypes.ConfigScope.SESSION: + source = 'session' + elif self.scope is qltypes.ConfigScope.GLOBAL: + source = 'global' + else: + raise AssertionError(f'unexpected config scope: {self.scope}') return set_value( storage, diff --git a/edb/server/config/types.py b/edb/server/config/types.py index f895a2db1b3..975cbbd8dbb 100644 --- a/edb/server/config/types.py +++ b/edb/server/config/types.py @@ -193,9 +193,15 @@ def from_pyvalue( value = cls.from_pyvalue(value, tspec=actual_f_type, spec=spec) - elif _issubclass(f_type, statypes.Duration): + elif ( + _issubclass(f_type, statypes.Duration) + and isinstance(value, str) + ): value = statypes.Duration.from_iso8601(value) - elif _issubclass(f_type, statypes.ConfigMemory): + elif ( + _issubclass(f_type, statypes.ConfigMemory) + and isinstance(value, str | int) + ): value = statypes.ConfigMemory(value) elif not isinstance(f_type, type) or not isinstance(value, f_type): diff --git a/tests/test_server_compiler.py b/tests/test_server_compiler.py index b6d3135e21a..51673de1e89 100644 --- a/tests/test_server_compiler.py +++ b/tests/test_server_compiler.py @@ -16,6 +16,8 @@ # limitations under the License. # +from typing import Any + import asyncio import contextlib import os @@ -31,6 +33,8 @@ import immutables from edb import edgeql +from edb import errors +from edb.ir import statypes from edb.testbase import lang as tb from edb.testbase import server as tbs from edb.pgsql import params as pg_params @@ -60,10 +64,13 @@ def setUpClass(cls): super().setUpClass() cls._std_schema = tb._load_std_schema() + def setUp(self): + super().setUp() + self.compiler = tb.new_compiler() + def test_server_compiler_compile_edgeql_script(self): - compiler = tb.new_compiler() context = edbcompiler.new_compiler_context( - compiler_state=compiler.state, + compiler_state=self.compiler.state, user_schema=self.schema, modaliases={None: 'default'}, ) @@ -77,6 +84,142 @@ def test_server_compiler_compile_edgeql_script(self): ''', ) + def _test_compile_structured_config( + self, + values: dict[str, Any], + *, + source: str = "config file", + **expected: Any, + ) -> dict[str, config.SettingValue]: + result = self.compiler.compile_structured_config( + {"cfg::Config": values}, source=source + ) + rv = dict(result["cfg::Config"]) + for name, setting in rv.items(): + self.assertEqual(setting.name, name) + self.assertEqual(setting.scope, config.ConfigScope.INSTANCE) + self.assertEqual(setting.source, source) + self.assertDictEqual({k: v.value for k, v in rv.items()}, expected) + return rv + + def composite_obj(self, _type_name, **values): + return config.CompositeConfigType( + self.compiler.state.config_spec.get_type_by_name(_type_name), + **values, + ) + + def test_server_compiler_compile_structured_config_01(self): + self._test_compile_structured_config( + { + "singleprop": "value", + "memprop": 512, + "durprop": "16 seconds", + "enumprop": "One", + "multiprop": ["v1", "v2", "v3"], + "listen_port": 5, + "sysobj": [ + { + "name": "1", + "obj": { + "_tname": "cfg::Subclass1", + "name": "aa", + "sub1": "bb", + }, + }, + { + "name": "2", + "_tname": "cfg::TestInstanceConfigStatTypes", + "memprop": 128, + }, + ], + }, + singleprop="value", + memprop=statypes.ConfigMemory(512), + durprop=statypes.Duration.from_microseconds(16 * 1_000_000), + enumprop="One", + multiprop=frozenset(["v1", "v2", "v3"]), + listen_port=5, + sysobj=frozenset([ + self.composite_obj( + "cfg::TestInstanceConfig", + name="1", + obj=self.composite_obj( + "cfg::Subclass1", name="aa", sub1="bb", + ), + ), + 
self.composite_obj( + "cfg::TestInstanceConfigStatTypes", + name="2", + memprop=statypes.ConfigMemory(128), + ), + ]) + ) + + def test_server_compiler_compile_structured_config_02(self): + self._test_compile_structured_config( + {"singleprop": 42}, singleprop="42" + ) + + def test_server_compiler_compile_structured_config_03(self): + self._test_compile_structured_config( + {"singleprop": "{{'4' ++ 2}}"}, singleprop="42" + ) + + def test_server_compiler_compile_structured_config_04(self): + with self.assertRaisesRegex( + errors.ConfigurationError, "unsupported input type" + ): + self._test_compile_structured_config({"singleprop": ["1", "2"]}) + + def test_server_compiler_compile_structured_config_05(self): + with self.assertRaisesRegex( + errors.ConfigurationError, "unsupported input type" + ): + self._test_compile_structured_config({"singleprop": {"a": "x"}}) + + def test_server_compiler_compile_structured_config_06(self): + self._test_compile_structured_config( + {"listen_port": "8080"}, listen_port=8080 + ) + + def test_server_compiler_compile_structured_config_07(self): + self._test_compile_structured_config( + {"multiprop": "single"}, multiprop=frozenset(["single"]) + ) + + def test_server_compiler_compile_structured_config_08(self): + with self.assertRaisesRegex( + errors.ConfigurationError, "must be a sequence" + ): + self._test_compile_structured_config({"multiprop": {"a": 1}}) + + def test_server_compiler_compile_structured_config_09(self): + with self.assertRaisesRegex( + errors.InvalidReferenceError, "has no member" + ): + self._test_compile_structured_config({"enumprop": "non_exist"}) + + def test_server_compiler_compile_structured_config_10(self): + with self.assertRaisesRegex( + errors.ConfigurationError, "does not have field" + ): + self._test_compile_structured_config({"non_exist": 123}) + + def test_server_compiler_compile_structured_config_11(self): + with self.assertRaisesRegex( + errors.ConfigurationError, "type of `_tname` must be str" + ): + self._test_compile_structured_config({"sysobj": [{"_tname": 123}]}) + + def test_server_compiler_compile_structured_config_12(self): + with self.assertRaisesRegex( + errors.ConstraintViolationError, + "name violates exclusivity constraint", + ): + self._test_compile_structured_config( + {"sysobj": [{"name": "same"}, {"name": "same"}]} + ) + class ServerProtocol(amsg.ServerProtocol): def __init__(self): From 621be4fe28b3fe0618ae16ffa12d096db928b80c Mon Sep 17 00:00:00 2001 From: Fantix King Date: Wed, 11 Dec 2024 11:40:00 -0500 Subject: [PATCH 2/6] Use new API to validate env var config --- edb/ir/staeval.py | 3 +- edb/server/bootstrap.py | 18 ++-- edb/server/inplace_upgrade.py | 2 +- edb/server/main.py | 164 +++++++++++++++------------------- tests/test_server_config.py | 2 +- 5 files changed, 85 insertions(+), 104 deletions(-) diff --git a/edb/ir/staeval.py b/edb/ir/staeval.py index 7301cc7e41d..e8176fa739c 100644 --- a/edb/ir/staeval.py +++ b/edb/ir/staeval.py @@ -512,7 +512,8 @@ def python_cast_str(sval: str, pytype: type) -> Any: return False else: raise errors.InvalidValueError( - f"invalid input syntax for type bool: {sval!r}" + f"invalid input syntax for type bool: {sval!r}", + hint="bool value can only be one of: true, false" ) else: return pytype(sval) diff --git a/edb/server/bootstrap.py b/edb/server/bootstrap.py index d2646cc6977..f322e99e062 100644 --- a/edb/server/bootstrap.py +++ b/edb/server/bootstrap.py @@ -2436,7 +2436,7 @@ async def _pg_ensure_database_not_connected( f'database {dbname!r} is being accessed by 
other users') -async def _start(ctx: BootstrapContext) -> edbcompiler.CompilerState: +async def _start(ctx: BootstrapContext) -> edbcompiler.Compiler: conn = await _check_catalog_compatibility(ctx) try: @@ -2445,7 +2445,7 @@ async def _start(ctx: BootstrapContext) -> edbcompiler.CompilerState: ctx.cluster.overwrite_capabilities(struct.Struct('!Q').unpack(caps)[0]) _check_capabilities(ctx) - return (await edbcompiler.new_compiler_from_pg(conn)).state + return await edbcompiler.new_compiler_from_pg(conn) finally: conn.terminate() @@ -2473,7 +2473,7 @@ async def _bootstrap_edgedb_super_roles(ctx: BootstrapContext) -> uuid.UUID: async def _bootstrap( ctx: BootstrapContext, no_template: bool=False, -) -> edbcompiler.CompilerState: +) -> edbcompiler.Compiler: args = ctx.args cluster = ctx.cluster backend_params = cluster.get_runtime_params() @@ -2690,13 +2690,13 @@ async def _bootstrap( args.default_database_user or edbdef.EDGEDB_SUPERUSER, ) - return compiler.state + return compiler async def ensure_bootstrapped( cluster: pgcluster.BaseCluster, args: edbargs.ServerConfig, -) -> tuple[bool, edbcompiler.CompilerState]: +) -> tuple[bool, edbcompiler.Compiler]: """Bootstraps Gel instance if it hasn't been bootstrapped already. Returns True if bootstrap happened and False if the instance was already @@ -2712,10 +2712,10 @@ async def ensure_bootstrapped( mode = await _get_cluster_mode(ctx) ctx = dataclasses.replace(ctx, mode=mode) if mode == ClusterMode.pristine: - state = await _bootstrap(ctx) - return True, state + compiler = await _bootstrap(ctx) + return True, compiler else: - state = await _start(ctx) - return False, state + compiler = await _start(ctx) + return False, compiler finally: pgconn.terminate() diff --git a/edb/server/inplace_upgrade.py b/edb/server/inplace_upgrade.py index 7350aabbaf1..d07ca92bee6 100644 --- a/edb/server/inplace_upgrade.py +++ b/edb/server/inplace_upgrade.py @@ -516,7 +516,7 @@ async def _upgrade_all( ) -> None: cluster = ctx.cluster - state = await bootstrap._bootstrap(ctx) + state = (await bootstrap._bootstrap(ctx)).state databases = await _get_databases(ctx) assert ctx.args.inplace_upgrade_prepare diff --git a/edb/server/main.py b/edb/server/main.py index 4352c87d0bc..c883911c93f 100644 --- a/edb/server/main.py +++ b/edb/server/main.py @@ -36,6 +36,7 @@ import asyncio import contextlib +import enum import logging import os import os.path @@ -171,7 +172,7 @@ def _internal_state_dir( async def _init_cluster( cluster, args: srvargs.ServerConfig -) -> tuple[bool, edbcompiler.CompilerState]: +) -> tuple[bool, edbcompiler.Compiler]: from edb.server import bootstrap new_instance = await bootstrap.ensure_bootstrapped(cluster, args) @@ -501,6 +502,7 @@ async def run_server( user_schema=stdlib.reflschema, reflection=reflection, ) + del reflection compiler_state = edbcompiler.CompilerState( std_schema=compiler.state.std_schema, refl_schema=compiler.state.refl_schema, @@ -512,6 +514,7 @@ async def run_server( local_intro_query=local_intro_sql, global_intro_query=global_intro_sql, ) + del local_intro_sql, global_intro_sql ( sys_queries, report_configs_typedesc_1_0, @@ -525,8 +528,9 @@ async def run_server( sys_config, backend_settings = initialize_static_cfg( args, is_remote_cluster=True, - config_spec=compiler_state.config_spec, + compiler=compiler, ) + del compiler with _internal_state_dir(runstate_dir, args) as ( int_runstate_dir, args, @@ -604,12 +608,12 @@ async def run_server( await inplace_upgrade.inplace_upgrade(cluster, args) return - new_instance, compiler_state = 
await _init_cluster(cluster, args) + new_instance, compiler = await _init_cluster(cluster, args) _, backend_settings = initialize_static_cfg( args, is_remote_cluster=not is_local_cluster, - config_spec=compiler_state.config_spec, + compiler=compiler, ) if is_local_cluster and (new_instance or backend_settings): @@ -662,7 +666,7 @@ async def run_server( int_runstate_dir, do_setproctitle=do_setproctitle, new_instance=new_instance, - compiler_state=compiler_state, + compiler_state=compiler.state, ) except server.StartupError as e: @@ -797,108 +801,84 @@ def main_dev(): main() -def _coerce_cfg_value(setting: config.Setting, value): - if setting.set_of: - return frozenset( - config.coerce_single_value(setting, v) for v in value - ) - else: - return config.coerce_single_value(setting, value) +class Source(enum.StrEnum): + command_line_argument = "A" + environment_variable = "E" + + +sources = { + Source.command_line_argument: "command line argument", + Source.environment_variable: "environment variable", +} def initialize_static_cfg( args: srvargs.ServerConfig, is_remote_cluster: bool, - config_spec: config.Spec + compiler: edbcompiler.Compiler, ) -> Tuple[Mapping[str, config.SettingValue], Dict[str, str]]: result = {} init_con_script_data = [] backend_settings = {} - command_line_argument = "A" - environment_variable = "E" - sources = { - command_line_argument: "command line argument", - environment_variable: "environment variable", + config_spec = compiler.state.config_spec + + def add_config_values(obj: dict[str, Any], source: Source): + settings = compiler.compile_structured_config( + {"cfg::Config": obj}, source=sources[source] + )["cfg::Config"] + for name, value in settings.items(): + setting = config_spec[name] + + if is_remote_cluster: + if setting.backend_setting and setting.requires_restart: + if source == Source.command_line_argument: + where = "on command line" + else: + where = "as an environment variable" + raise server.StartupError( + f"Can't set config {name!r} {where} when using " + f"a remote Postgres cluster" + ) + init_con_script_data.append({ + "name": name, + "value": config.value_to_json_value(setting, value.value), + "type": source, + }) + result[name] = value + if setting.backend_setting: + backend_val = value.value + if isinstance(backend_val, statypes.ScalarType): + backend_val = backend_val.to_backend_str() + backend_settings[setting.backend_setting] = str(backend_val) + + values: dict[str, Any] = {} + translate_env = { + "EDGEDB_SERVER_BIND_ADDRESS": "listen_addresses", + "EDGEDB_SERVER_PORT": "listen_port", + "GEL_SERVER_BIND_ADDRESS": "listen_addresses", + "GEL_SERVER_PORT": "listen_port", } - - def add_config(name, value, type_): - setting = config_spec[name] - if is_remote_cluster: - if setting.backend_setting and setting.requires_restart: - if type_ == command_line_argument: - where = "on command line" - else: - where = "as an environment variable" - raise server.StartupError( - f"Can't set config {name!r} {where} when using " - f"a remote Postgres cluster" - ) - value = _coerce_cfg_value(setting, value) - init_con_script_data.append({ - "name": name, - "value": config.value_to_json_value(setting, value), - "type": type_, - }) - result[name] = config.SettingValue( - name=name, - value=value, - source=sources[type_], - scope=config.ConfigScope.INSTANCE, - ) - if setting.backend_setting: - if isinstance(value, statypes.ScalarType): - value = value.to_backend_str() - backend_settings[setting.backend_setting] = str(value) - - def iter_environ(): - translate_env = { 
- "EDGEDB_SERVER_BIND_ADDRESS": "listen_addresses", - "EDGEDB_SERVER_PORT": "listen_port", - "GEL_SERVER_BIND_ADDRESS": "listen_addresses", - "GEL_SERVER_PORT": "listen_port", - } - for name, value in os.environ.items(): - if cfg := translate_env.get(name): - yield name, value, cfg + for name, value in os.environ.items(): + if cfg := translate_env.get(name): + values[cfg] = value + else: + cfg = name.removeprefix("EDGEDB_SERVER_CONFIG_cfg::") + if cfg != name: + values[cfg] = value else: - cfg = name.removeprefix("EDGEDB_SERVER_CONFIG_cfg::") + cfg = name.removeprefix("GEL_SERVER_CONFIG_cfg::") if cfg != name: - yield name, value, cfg - else: - cfg = name.removeprefix("GEL_SERVER_CONFIG_cfg::") - if cfg != name: - yield name, value, cfg - - env_value: Any - setting: config.Setting - for env_name, env_value, cfg_name in iter_environ(): - try: - setting = config_spec[cfg_name] - except KeyError: - continue - choices = setting.enum_values - if setting.type is bool: - choices = ['true', 'false'] - env_value = env_value.lower() - if choices is not None and env_value not in choices: - raise server.StartupError( - f"Environment variable {env_name!r} can only be one of: " + - ", ".join(choices) - ) - if setting.type is bool: - env_value = env_value == 'true' - elif not issubclass(setting.type, statypes.ScalarType): # type: ignore - env_value = setting.type(env_value) # type: ignore - if setting.set_of: - env_value = (env_value,) - add_config(cfg_name, env_value, environment_variable) + values[cfg] = value + if values: + add_config_values(values, Source.environment_variable) + values = {} if args.bind_addresses: - add_config( - "listen_addresses", args.bind_addresses, command_line_argument - ) + values["listen_addresses"] = args.bind_addresses if args.port: - add_config("listen_port", args.port, command_line_argument) + values["listen_port"] = args.port + if values: + add_config_values(values, Source.command_line_argument) if init_con_script_data: from . 
import pgcon diff --git a/tests/test_server_config.py b/tests/test_server_config.py index 02f79f66723..a2c83881c7b 100644 --- a/tests/test_server_config.py +++ b/tests/test_server_config.py @@ -2248,7 +2248,7 @@ async def test_server_config_env_02(self): } with self.assertRaisesRegex( cluster.ClusterError, - "can only be one of: AlwaysAllow, NeverAllow" + "'cfg::AllowBareDDL' enum has no member called 'illegal_input'" ): async with tb.start_edgedb_server(env=env): pass From 89b2809d2a49a9eb453d5b60f24056a6b2f05313 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Thu, 12 Dec 2024 16:57:00 -0500 Subject: [PATCH 3/6] Drop now-unused export --- edb/server/config/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/edb/server/config/__init__.py b/edb/server/config/__init__.py index 6ddd9fdd2a4..4cf98290979 100644 --- a/edb/server/config/__init__.py +++ b/edb/server/config/__init__.py @@ -29,7 +29,7 @@ from .ops import ( spec_to_json, to_json_obj, to_json, from_json, set_value, to_edgeql ) -from .ops import value_from_json, value_to_json_value, coerce_single_value +from .ops import value_from_json, value_to_json_value from .spec import ( Spec, FlatSpec, ChainedSpec, Setting, load_spec_from_schema, load_ext_spec_from_schema, @@ -49,7 +49,6 @@ 'load_spec_from_schema', 'load_ext_spec_from_schema', 'load_ext_settings_from_schema', 'get_compilation_config', - 'coerce_single_value', 'QueryCacheMode', ) From 9de684ac8877f53d0c73f82dacb8fc480ddef6a7 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Fri, 13 Dec 2024 10:57:03 -0500 Subject: [PATCH 4/6] CRF: turn off inserting nested ConfigObject and add type check --- edb/ir/staeval.py | 46 +++++++++++++++++++++------------ edb/server/compiler/compiler.py | 2 ++ edb/server/compiler/config.py | 8 ++++++ tests/test_server_compiler.py | 2 +- 4 files changed, 40 insertions(+), 18 deletions(-) diff --git a/edb/ir/staeval.py b/edb/ir/staeval.py index e8176fa739c..81fcbd32ce0 100644 --- a/edb/ir/staeval.py +++ b/edb/ir/staeval.py @@ -107,24 +107,36 @@ def evaluate_SelectStmt( def evaluate_InsertStmt( ir: irast.InsertStmt, schema: s_schema.Schema ) -> EvaluationResult: - # XXX: raise for unsupported InsertStmt? + # InsertStmt should NOT be statically evaluated in general; + # This is a special case for inserting nested cfg::ConfigObject + # when it's evaluated into a named tuple and then squashed into + # a Python dict to be used in compile_structured_config(). 
+ tmp_schema, subject_type = irtyputils.ir_typeref_to_type( + schema, ir.subject.expr.typeref + ) + config_obj = schema.get("cfg::ConfigObject") + assert isinstance(config_obj, s_obj.SubclassableObject) + if subject_type.issubclass(tmp_schema, config_obj): + return irast.Tuple( + named=True, + typeref=ir.subject.typeref, + elements=[ + irast.TupleElement( + name=ptr_set.expr.ptrref.shortname.name, + val=irast.Set( + expr=evaluate(ptr_set.expr.expr, schema), + typeref=ptr_set.typeref, + path_id=ptr_set.path_id, + ), + ) + for ptr_set, _ in ir.subject.shape + if ptr_set.expr.ptrref.shortname.name != "id" + and ptr_set.expr.expr is not None + ], + ) - return irast.Tuple( - named=True, - typeref=ir.subject.typeref, - elements=[ - irast.TupleElement( - name=ptr_set.expr.ptrref.shortname.name, - val=irast.Set( - expr=evaluate(ptr_set.expr.expr, schema), - typeref=ptr_set.typeref, - path_id=ptr_set.path_id, - ), - ) - for ptr_set, _ in ir.subject.shape - if ptr_set.expr.ptrref.shortname.name != "id" - and ptr_set.expr.expr is not None - ], + raise UnsupportedExpressionError( + f'no static IR evaluation handler for general {ir.__class__}' ) diff --git a/edb/server/compiler/compiler.py b/edb/server/compiler/compiler.py index 27c73d71058..58649566c15 100644 --- a/edb/server/compiler/compiler.py +++ b/edb/server/compiler/compiler.py @@ -1295,6 +1295,7 @@ def compile_structured_config( self, objects: Mapping[str, config_compiler.ConfigObject], source: str | None = None, + allow_nested: bool = False, ) -> dict[str, immutables.Map[str, config.SettingValue]]: # XXX: only config in the stdlib is supported currently, so the only # key allowed in objects is "cfg::Config". API for future compatibility @@ -1309,6 +1310,7 @@ def compile_structured_config( spec=self.state.config_spec, schema=self.state.std_schema, source=source, + allow_nested=allow_nested, ) diff --git a/edb/server/compiler/config.py b/edb/server/compiler/config.py index 048e4376c0b..8bb33851550 100644 --- a/edb/server/compiler/config.py +++ b/edb/server/compiler/config.py @@ -210,9 +210,14 @@ def compile_ast_to_operation( *, schema: s_schema.Schema, options: qlcompiler.CompilerOptions, + allow_nested: bool = True, ) -> config.Operation: cmd: qlast.ConfigOp if isinstance(expr, qlast.InsertQuery): + if not allow_nested: + raise errors.ConfigurationError( + "nested config object is not allowed" + ) cmd = qlast.ConfigInsert( name=expr.subject, scope=qltypes.ConfigScope.INSTANCE, @@ -244,6 +249,7 @@ def compile_structured_config( spec: config.Spec, schema: s_schema.Schema, source: str | None = None, + allow_nested: bool = True, ) -> dict[str, immutables.Map[str, config.SettingValue]]: options = qlcompiler.CompilerOptions( modaliases={None: "cfg"}, @@ -273,6 +279,7 @@ def compile_structured_config( ast, schema=schema, options=options, + allow_nested=allow_nested, ) storage = op.apply(spec, storage, source=source) continue @@ -284,6 +291,7 @@ def compile_structured_config( shape_el.compexpr, schema=schema, options=options, + allow_nested=allow_nested, ) storage = op.apply(spec, storage, source=source) diff --git a/tests/test_server_compiler.py b/tests/test_server_compiler.py index 51673de1e89..f6c5df2cab6 100644 --- a/tests/test_server_compiler.py +++ b/tests/test_server_compiler.py @@ -92,7 +92,7 @@ def _test_compile_structured_config( **expected: Any, ) -> dict[str, config.SettingValue]: result = self.compiler.compile_structured_config( - {"cfg::Config": values}, source=source + {"cfg::Config": values}, source=source, allow_nested=True ) rv = 
dict(result["cfg::Config"]) for name, setting in rv.items(): From 04af3be1265b07b89bffa960ee57d48348c9eef9 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Fri, 13 Dec 2024 12:37:59 -0500 Subject: [PATCH 5/6] Use the new API for EDGEDB_MAGIC_SMTP_CONFIG --- edb/server/main.py | 20 +++++++++++++++----- edb/server/server.py | 15 --------------- edb/server/tenant.py | 13 +++++++++++-- 3 files changed, 26 insertions(+), 22 deletions(-) diff --git a/edb/server/main.py b/edb/server/main.py index c883911c93f..f418a407298 100644 --- a/edb/server/main.py +++ b/edb/server/main.py @@ -37,6 +37,7 @@ import asyncio import contextlib import enum +import json import logging import os import os.path @@ -197,7 +198,7 @@ async def _run_server( *, do_setproctitle: bool, new_instance: bool, - compiler_state: edbcompiler.CompilerState, + compiler: edbcompiler.Compiler, ): sockets = service_manager.get_activation_listen_sockets() @@ -244,7 +245,7 @@ async def _run_server( new_instance=new_instance, admin_ui=args.admin_ui, disable_dynamic_system_config=args.disable_dynamic_system_config, - compiler_state=compiler_state, + compiler_state=compiler.state, tenant=tenant, use_monitor_fs=args.reload_config_files in [ srvargs.ReloadTrigger.Default, @@ -252,10 +253,19 @@ async def _run_server( ], net_worker_mode=args.net_worker_mode, ) + magic_smtp = os.getenv('EDGEDB_MAGIC_SMTP_CONFIG') + if magic_smtp: + magic_smtp = json.loads(magic_smtp) + if isinstance(magic_smtp, dict): + # for backward compatibility + magic_smtp = [magic_smtp] + await tenant.load_sidechannel_configs( + magic_smtp, compiler=compiler + ) # This coroutine runs as long as the server, - # and compiler_state is *heavy*, so make sure we don't + # and compiler(.state) is *heavy*, so make sure we don't # keep a reference to it. 
- del compiler_state + del compiler await sc.wait_for(ss.init()) ( @@ -666,7 +676,7 @@ async def run_server( int_runstate_dir, do_setproctitle=do_setproctitle, new_instance=new_instance, - compiler_state=compiler.state, + compiler=compiler, ) except server.StartupError as e: diff --git a/edb/server/server.py b/edb/server/server.py index 73540b198ed..59caa6311f9 100644 --- a/edb/server/server.py +++ b/edb/server/server.py @@ -1274,7 +1274,6 @@ async def init(self) -> None: await self._load_instance_data() await self._maybe_patch() await self._tenant.init() - self._load_sidechannel_configs() await super().init() def get_default_tenant(self) -> edbtenant.Tenant: @@ -1283,20 +1282,6 @@ def get_default_tenant(self) -> edbtenant.Tenant: def iter_tenants(self) -> Iterator[edbtenant.Tenant]: yield self._tenant - def _load_sidechannel_configs(self) -> None: - # TODO(fantix): Do something like this for multitenant - magic_smtp = os.getenv('EDGEDB_MAGIC_SMTP_CONFIG') - if magic_smtp: - email_type = self._config_settings['email_providers'].type - assert not isinstance(email_type, type) - configs = [ - config.CompositeConfigType.from_json_value( - entry, tspec=email_type, spec=self._config_settings - ) - for entry in json.loads(magic_smtp) - ] - self._tenant.set_sidechannel_configs(configs) - async def _get_patch_log( self, conn: pgcon.PGConnection, idx: int ) -> Optional[bootstrap.PatchEntry]: diff --git a/edb/server/tenant.py b/edb/server/tenant.py index f8a1e9304c1..b1c1656e975 100644 --- a/edb/server/tenant.py +++ b/edb/server/tenant.py @@ -249,8 +249,17 @@ def set_server(self, server: edbserver.BaseServer) -> None: self._server = server self.__loop = server.get_loop() - def set_sidechannel_configs(self, configs: list[Any]) -> None: - self._sidechannel_email_configs = configs + async def load_sidechannel_configs( + self, value: Any, *, compiler: edbcompiler.Compiler | None = None + ) -> None: + if compiler is None: + compiler = self._server.get_compiler_pool() + result = compiler.compile_structured_config( + {"cfg::Config": {"email_providers": value}}, source="magic", + allow_nested=True, + ) + email_providers = result["cfg::Config"]["email_providers"] + self._sidechannel_email_configs = list(email_providers.value) def get_http_client(self, *, originator: str) -> HttpClient: if self._http_client is None: From a2dc673d8cafc019f41d37a9938f30a10f5be4a6 Mon Sep 17 00:00:00 2001 From: Fantix King Date: Fri, 13 Dec 2024 14:34:01 -0500 Subject: [PATCH 6/6] crf: drop compat patch --- edb/server/main.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/edb/server/main.py b/edb/server/main.py index f418a407298..de1de206150 100644 --- a/edb/server/main.py +++ b/edb/server/main.py @@ -255,12 +255,8 @@ async def _run_server( ) magic_smtp = os.getenv('EDGEDB_MAGIC_SMTP_CONFIG') if magic_smtp: - magic_smtp = json.loads(magic_smtp) - if isinstance(magic_smtp, dict): - # for backward compatibility - magic_smtp = [magic_smtp] await tenant.load_sidechannel_configs( - magic_smtp, compiler=compiler + json.loads(magic_smtp), compiler=compiler ) # This coroutine runs as long as the server, # and compiler(.state) is *heavy*, so make sure we don't