Merge remote-tracking branch 'origin/master' into query-stats-4
fantix committed Oct 23, 2024
2 parents 44154bb + 584795d commit ac3b25e
Showing 39 changed files with 1,022 additions and 605 deletions.
2 changes: 1 addition & 1 deletion .github/workflows.src/build.inc.yml
@@ -152,7 +152,7 @@
uses: actions/setup-python@v5
if: << 'false' if tgt.runs_on and 'self-hosted' in tgt.runs_on else 'true' >>
with:
python-version: "3.x"
python-version: "3.12"

- name: Set up NodeJS
uses: actions/setup-node@v4
4 changes: 2 additions & 2 deletions .github/workflows/dryrun.yml


4 changes: 2 additions & 2 deletions .github/workflows/nightly.yml


4 changes: 2 additions & 2 deletions .github/workflows/release.yml


4 changes: 2 additions & 2 deletions .github/workflows/testing.yml


5 changes: 2 additions & 3 deletions docs/reference/configuration.rst
@@ -74,9 +74,8 @@ Resource usage
:eql:synopsis:`shared_buffers -> cfg::memory`
The amount of memory used for shared memory buffers.

:eql:synopsis:`net_http_max_connections -> int64`
The maximum number of concurrent HTTP connections to allow when using the
``std::net::http`` module.
:eql:synopsis:`http_max_connections -> int64`
The maximum number of concurrent outbound HTTP connections to allow.

Query planning
--------------
2 changes: 1 addition & 1 deletion edb/buildmeta.py
@@ -60,7 +60,7 @@
# The merge conflict there is a nice reminder that you probably need
# to write a patch in edb/pgsql/patches.py, and then you should preserve
# the old value.
EDGEDB_CATALOG_VERSION = 2024_10_21_00_00
EDGEDB_CATALOG_VERSION = 2024_10_23_14_39
EDGEDB_MAJOR_VERSION = 6


4 changes: 2 additions & 2 deletions edb/edgeql/compiler/options.py
@@ -37,7 +37,7 @@
SourceOrPathId = s_types.Type | s_pointers.Pointer | pathid.PathId


@dataclass
@dataclass(kw_only=True)
class GlobalCompilerOptions:
"""Compiler toggles that affect compilation as a whole."""

@@ -102,7 +102,7 @@ class GlobalCompilerOptions:
dump_restore_mode: bool = False


@dataclass
@dataclass(kw_only=True)
class CompilerOptions(GlobalCompilerOptions):

#: Module name aliases.
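Note: with kw_only=True these option dataclasses can only be constructed with keyword arguments. A minimal standalone sketch of the effect (illustrative field names, not the real CompilerOptions fields):

from dataclasses import dataclass

@dataclass(kw_only=True)
class Options:
    modaliases: dict | None = None
    testmode: bool = False

Options(testmode=True)   # OK: every field is keyword-only
# Options(True)          # TypeError: takes 1 positional argument but 2 were given
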
4 changes: 2 additions & 2 deletions edb/lib/cfg.edgeql
@@ -238,8 +238,8 @@ ALTER TYPE cfg::AbstractConfig {
'Where the query cache is finally stored';
};

# std::net::http Configuration
CREATE PROPERTY net_http_max_connections -> std::int64 {
# HTTP Worker Configuration
CREATE PROPERTY http_max_connections -> std::int64 {
SET default := 10;
CREATE ANNOTATION std::description :=
'The maximum number of concurrent HTTP connections.';
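The renamed setting is an ordinary instance-level config value, so it can be read and changed like any other. A hedged sketch using the Python client (assumes edgedb-python and a running instance; the EdgeQL strings are the part that matters):

import edgedb

client = edgedb.create_client()

# Read the current value from the cfg::Config singleton.
print(client.query_single('select cfg::Config.http_max_connections'))

# Raise the cap on concurrent outbound HTTP connections.
# client.execute('configure instance set http_max_connections := 20;')
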
14 changes: 13 additions & 1 deletion edb/pgsql/compiler/relgen.py
@@ -3323,7 +3323,13 @@ def _compile_func_epilogue(
aspect=pgce.PathAspect.VALUE,
)

aspects: Tuple[pgce.PathAspect, ...] = (pgce.PathAspect.VALUE,)
aspects: Tuple[pgce.PathAspect, ...]
if expr.body:
# For inlined functions, we want all of the aspects provided.
aspects = tuple(pathctx.list_path_aspects(func_rel, ir_set.path_id))
else:
# Otherwise we just know we have value.
aspects = (pgce.PathAspect.VALUE,)

func_rvar = relctx.new_rel_rvar(ir_set, func_rel, ctx=ctx)
relctx.include_rvar(
@@ -3605,6 +3611,12 @@ def process_set_as_func_expr(
_compile_inlined_call_args(ir_set, ctx=newctx)

set_expr = dispatch.compile(expr.body, ctx=newctx)
# Map the path id so that we can extract source aspects
# from it, which we want so that we can directly select
# from an INSERT instead of using overlays.
pathctx.put_path_id_map(
newctx.rel, ir_set.path_id, expr.body.path_id
)

else:
args = _compile_call_args(ir_set, ctx=newctx)
19 changes: 11 additions & 8 deletions edb/pgsql/delta.py
@@ -1629,13 +1629,16 @@ def apply(

if not overload:
variadic = func.get_params(schema).find_variadic(schema)
self.pgops.add(
dbops.DropFunction(
name=self.get_pgname(func, schema),
args=self.compile_args(func, schema),
has_variadic=variadic is not None,
if func.get_volatility(schema) != ql_ft.Volatility.Modifying:
# Modifying functions are not compiled.
# See: compile_edgeql_function
self.pgops.add(
dbops.DropFunction(
name=self.get_pgname(func, schema),
args=self.compile_args(func, schema),
has_variadic=variadic is not None,
)
)
)

return super().apply(schema, context)

@@ -5370,10 +5373,10 @@ def _create_table(

id = sn.QualName(
module=prop.get_name(schema).module, name=str(prop.id))
index_name = common.convert_name(id, 'idx0', catenate=True)
index_name = common.convert_name(id, 'idx0', catenate=False)

pg_index = dbops.Index(
name=index_name, table_name=new_table_name,
name=index_name[1], table_name=new_table_name,
unique=False, columns=[src_col],
metadata={'code': DEFAULT_INDEX_CODE},
)
8 changes: 7 additions & 1 deletion edb/pgsql/deltadbops.py
@@ -485,7 +485,13 @@ def create_constr_trigger_function(
return [dbops.CreateFunction(func, or_replace=True)]

def drop_constr_trigger_function(self, proc_name: Tuple[str, ...]):
return [dbops.DropFunction(name=proc_name, args=(), if_exists=True)]
return [dbops.DropFunction(
name=proc_name,
args=(),
# Use a condition instead of if_exists to reduce annoying
# debug spew from postgres.
conditions=[dbops.FunctionExists(name=proc_name, args=())],
)]

def create_constraint(self, constraint: SchemaConstraintTableConstraint):
# Add the constraint normally to our table
12 changes: 6 additions & 6 deletions edb/pgsql/metaschema.py
@@ -6673,15 +6673,16 @@ def _generate_sql_information_schema(
SELECT attrelid,
attname,
atttypid,
attstattarget,
attlen,
attnum,
attnum as attnum_internal,
attndims,
attcacheoff,
atttypmod,
attbyval,
attalign,
attstorage,
attalign,
attnotnull,
atthasdef,
atthasmissing,
@@ -6691,7 +6692,6 @@
attislocal,
attinhcount,
attcollation,
attstattarget,
attacl,
attoptions,
attfdwoptions,
@@ -6716,6 +6716,7 @@
SELECT pc_oid as attrelid,
col_name as attname,
COALESCE(atttypid, 25) as atttypid, -- defaults to TEXT
COALESCE(attstattarget, -1) as attstattarget,
COALESCE(attlen, -1) as attlen,
(ROW_NUMBER() OVER (
PARTITION BY pc_oid
@@ -6726,8 +6727,8 @@
COALESCE(attcacheoff, -1) as attcacheoff,
COALESCE(atttypmod, -1) as atttypmod,
COALESCE(attbyval, FALSE) as attbyval,
COALESCE(attalign, 'i') as attalign,
COALESCE(attstorage, 'x') as attstorage,
COALESCE(attalign, 'i') as attalign,
required as attnotnull,
-- Always report no default, to avoid expr trouble
false as atthasdef,
@@ -6738,7 +6739,6 @@
COALESCE(attislocal, TRUE) as attislocal,
COALESCE(attinhcount, 0) as attinhcount,
COALESCE(attcollation, 0) as attcollation,
COALESCE(attstattarget, -1) as attstattarget,
attacl,
attoptions,
attfdwoptions,
@@ -6839,14 +6839,15 @@ def _generate_sql_information_schema(
attrelid,
attname,
atttypid,
attstattarget,
attlen,
attnum,
attndims,
attcacheoff,
atttypmod,
attbyval,
attalign,
attstorage,
attalign,
attnotnull,
atthasdef,
atthasmissing,
@@ -6856,7 +6857,6 @@
attislocal,
attinhcount,
attcollation,
attstattarget,
attacl,
attoptions,
attfdwoptions,
13 changes: 12 additions & 1 deletion edb/server/bootstrap.py
@@ -1135,6 +1135,9 @@ async def create_branch(
elif line.startswith('CREATE TYPE'):
if any(skip in line for skip in to_skip):
skipping = True
elif line == 'SET transaction_timeout = 0;':
continue

if skipping:
continue
new_lines.append(line)
@@ -1535,6 +1538,15 @@ def cleanup_tpldbdump(tpldbdump: bytes) -> bytes:
flags=re.MULTILINE,
)

# PostgreSQL 17 adds a transaction_timeout config setting that
# didn't exist on earlier versions.
tpldbdump = re.sub(
rb'^SET transaction_timeout = 0;$',
rb'',
tpldbdump,
flags=re.MULTILINE,
)

return tpldbdump


Expand Down Expand Up @@ -2144,7 +2156,6 @@ async def _populate_misc_instance_data(
json.dumps(json_single_role_metadata),
)

assert backend_params.has_create_database
if not backend_params.has_create_database:
await _store_static_json_cache(
ctx,
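PostgreSQL 17's pg_dump emits a SET transaction_timeout = 0; line that earlier server versions do not recognize, so bootstrap now skips it when replaying dump statements and strips it in cleanup_tpldbdump(). A self-contained sketch of the regex used above (the sample dump text is made up):

import re

dump = (
    b'SET statement_timeout = 0;\n'
    b'SET transaction_timeout = 0;\n'
    b'SET lock_timeout = 0;\n'
)
cleaned = re.sub(
    rb'^SET transaction_timeout = 0;$',
    rb'',
    dump,
    flags=re.MULTILINE,
)
assert b'transaction_timeout' not in cleaned
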
2 changes: 2 additions & 0 deletions edb/server/compiler/__init__.py
@@ -30,9 +30,11 @@
from .enums import InputFormat, OutputFormat
from .explain import analyze_explain_output
from .ddl import repair_schema
from .rpc import CompilationRequest

__all__ = (
'Cardinality',
'CompilationRequest',
'Compiler',
'CompilerState',
'CompileContext',
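With the re-export in place, callers can pull the request type straight from the package root; a trivial usage sketch:

from edb.server.compiler import CompilationRequest
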
