Merge remote-tracking branch 'origin/master' into macos_alt_fix
fantix committed Nov 27, 2024
2 parents 1263c85 + 97a3d40 commit 726b7df
Showing 15 changed files with 169 additions and 33 deletions.
7 changes: 7 additions & 0 deletions docs/reference/protocol/messages.rst
@@ -229,6 +229,13 @@ Format:
.. eql:struct:: edb.protocol.Annotation
.. eql:struct:: edb.protocol.DumpFlag
Use:

* ``DUMP_SECRETS`` to include secrets in the backup. By default, secrets are
not included.


.. _ref_protocol_msg_command_data_description:

2 changes: 1 addition & 1 deletion edb/buildmeta.py
@@ -60,7 +60,7 @@
# The merge conflict there is a nice reminder that you probably need
# to write a patch in edb/pgsql/patches.py, and then you should preserve
# the old value.
EDGEDB_CATALOG_VERSION = 2024_11_26_16_10
EDGEDB_CATALOG_VERSION = 2024_11_26_22_08
EDGEDB_MAJOR_VERSION = 6


4 changes: 4 additions & 0 deletions edb/lib/sys.edgeql
@@ -130,6 +130,10 @@ CREATE TYPE sys::QueryStats EXTENDING sys::ExternalObject {
CREATE ANNOTATION std::description :=
"Type of the query.";
};
CREATE PROPERTY tag -> std::str {
CREATE ANNOTATION std::description :=
"Query tag, commonly specifies the origin of the query, e.g 'gel/cli' for queries originating from the CLI. Clients can specify a tag for easier query identification.";
};

CREATE PROPERTY compilation_config -> std::json;
CREATE PROPERTY protocol_version -> tuple<major: std::int16,
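
The new property is queryable like any other field on sys::QueryStats. A minimal sketch, assuming the edgedb Python client package and a server built from this commit (the other selected fields already exist on the type):

    import edgedb  # also published under the newer "gel" name

    client = edgedb.create_client()

    # Fetch recorded statements together with the new "tag" property.
    stats = client.query(
        """
        select sys::QueryStats { query, query_type, tag }
        filter exists .tag
        """
    )
    for row in stats:
        print(row.tag, row.query)

    client.close()
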
1 change: 1 addition & 0 deletions edb/pgsql/metaschema.py
@@ -6153,6 +6153,7 @@ def float64_to_duration_t(val: str) -> str:
'branch': "((d.description)->>'id')::uuid",
'query': "s.query",
'query_type': f"(t.mapping->>s.stmt_type::text)::{query_type_domain}",
'tag': "s.tag",

'plans': 's.plans',
'total_plan_time': float64_to_duration_t('s.total_plan_time'),
6 changes: 6 additions & 0 deletions edb/protocol/messages.py
@@ -519,6 +519,11 @@ class CompilationFlag(enum.IntFlag):
INJECT_OUTPUT_OBJECT_IDS = 1 << 2 # noqa


class DumpFlag(enum.IntFlag):

DUMP_SECRETS = 1 << 0 # noqa


class ErrorSeverity(enum.Enum):
ERROR = 120
FATAL = 200
@@ -746,6 +751,7 @@ class Dump(ClientMessage):
mtype = MessageType('>')
message_length = MessageLength
annotations = Annotations
flags = EnumOf(UInt64, DumpFlag, 'A bit mask of dump options.')


class Sync(ClientMessage):
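
The flag travels on the wire as a UInt64 bit mask in the Dump message. A standalone sketch of setting and testing the bit, mirroring the enum above (the local class here is illustrative, not imported from edb):

    import enum

    class DumpFlag(enum.IntFlag):
        # Mirrors edb.protocol.messages.DumpFlag above.
        DUMP_SECRETS = 1 << 0

    # A client that wants secrets in the dump sets the bit; the default
    # value of 0 leaves secrets out, matching the docs change above.
    flags = DumpFlag.DUMP_SECRETS
    wire_value = int(flags)          # serialized as the UInt64 "flags" field
    decoded = DumpFlag(wire_value)   # server-side view of the same value

    assert decoded & DumpFlag.DUMP_SECRETS
    assert not (DumpFlag(0) & DumpFlag.DUMP_SECRETS)
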
3 changes: 3 additions & 0 deletions edb/server/dbview/dbview.pxd
@@ -45,6 +45,9 @@ cdef class CompiledQuery:
cdef public object request
cdef public object recompiled_cache
cdef public bint use_pending_func_cache
cdef public object tag

cdef bytes make_query_prefix(self)


cdef class DatabaseIndex:
10 changes: 10 additions & 0 deletions edb/server/dbview/dbview.pyx
@@ -113,6 +113,16 @@ cdef class CompiledQuery:
self.recompiled_cache = recompiled_cache
self.use_pending_func_cache = use_pending_func_cache

cdef bytes make_query_prefix(self):
data = {}
if self.tag:
data['tag'] = self.tag
if data:
data_bytes = json.dumps(data).encode(defines.EDGEDB_ENCODING)
return b''.join([b'-- ', data_bytes, b'\n'])
else:
return b''


cdef class Database:

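make_query_prefix() turns the tag into a one-line SQL comment that is later prepended to the backend SQL (see the pgcon.pyx change below). A plain-Python sketch of the same logic, assuming defines.EDGEDB_ENCODING is UTF-8:

    import json

    def make_query_prefix(tag):
        # Same shape as CompiledQuery.make_query_prefix() above.
        data = {'tag': tag} if tag else {}
        if not data:
            return b''
        return b'-- ' + json.dumps(data).encode('utf-8') + b'\n'

    prefix = make_query_prefix('gel/cli')
    # prefix == b'-- {"tag": "gel/cli"}\n'; prepending a line comment leaves
    # the statement's behavior unchanged while keeping the tag visible in
    # the query text seen by the stats machinery.
    sql = prefix + b'SELECT 1'
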
3 changes: 2 additions & 1 deletion edb/server/pgcon/pgcon.pxd
@@ -162,7 +162,8 @@ cdef class PGConnection:
cdef send_query_unit_group(
self, object query_unit_group, bint sync,
object bind_datas, bytes state,
ssize_t start, ssize_t end, int dbver, object parse_array
ssize_t start, ssize_t end, int dbver, object parse_array,
object query_prefix,
)

cdef _rewrite_copy_data(
12 changes: 9 additions & 3 deletions edb/server/pgcon/pgcon.pyx
@@ -587,7 +587,8 @@ cdef class PGConnection:
cdef send_query_unit_group(
self, object query_unit_group, bint sync,
object bind_datas, bytes state,
ssize_t start, ssize_t end, int dbver, object parse_array
ssize_t start, ssize_t end, int dbver, object parse_array,
object query_prefix,
):
# parse_array is an array of booleans for output with the same size as
# the query_unit_group, indicating if each unit is freshly parsed
@@ -633,6 +634,8 @@
query_unit_group.units[start:end], bind_datas):
stmt_name = query_unit.sql_hash
sql = query_unit.sql
if query_prefix:
sql = query_prefix + sql
if stmt_name:
if parse_array[idx]:
buf = WriteBuffer.new_message(b'P')
@@ -968,6 +971,7 @@
bint use_pending_func_cache,
tx_isolation,
list param_data_types,
bytes query_prefix,
):
cdef:
WriteBuffer out
@@ -1047,9 +1051,9 @@

if use_pending_func_cache and query.cache_func_call:
sql, stmt_name = query.cache_func_call
sqls = (sql,)
sqls = (query_prefix + sql,)
else:
sqls = (query.sql,) + query.db_op_trailer
sqls = (query_prefix + query.sql,) + query.db_op_trailer
stmt_name = query.sql_hash

msgs_num = <uint64_t>(len(sqls))
@@ -1298,6 +1302,7 @@
int dbver = 0,
bint use_pending_func_cache = 0,
tx_isolation = None,
query_prefix = None,
):
self.before_command()
started_at = time.monotonic()
@@ -1312,6 +1317,7 @@
use_pending_func_cache,
tx_isolation,
param_data_types,
query_prefix or b'',
)
finally:
metrics.backend_query_duration.observe(
1 change: 1 addition & 0 deletions edb/server/protocol/binary.pxd
@@ -98,6 +98,7 @@ cdef class EdgeConnection(frontend.FrontendConnection):
cdef dict parse_headers(self)
cdef dict parse_annotations(self)
cdef inline ignore_annotations(self)
cdef get_checked_tag(self, dict annotations)

cdef write_status(self, bytes name, bytes value)
cdef write_edgedb_error(self, exc)
26 changes: 23 additions & 3 deletions edb/server/protocol/binary.pyx
@@ -862,6 +862,15 @@ cdef class EdgeConnection(frontend.FrontendConnection):
)
return rv, allow_capabilities

cdef get_checked_tag(self, dict annotations):
tag = annotations.get("tag")
if not tag:
return None
if len(tag) > 128:
raise errors.BinaryProtocolError(
'bad annotation: tag too long (> 128 bytes)')
return tag

async def parse(self):
cdef:
bytes eql
@@ -907,9 +916,10 @@
uint64_t allow_capabilities

if self.protocol_version >= (3, 0):
self.ignore_annotations()
tag = self.get_checked_tag(self.parse_annotations())
else:
self.ignore_headers()
tag = None

_dbview = self.get_dbview()
if _dbview.get_state_serializer() is None:
@@ -941,6 +951,8 @@
else:
compiled = _dbview.as_compiled(query_req, query_unit_group)

compiled.tag = tag

if query_req.input_language is LANG_SQL and len(query_unit_group) > 1:
raise errors.UnsupportedFeatureError(
"multi-statement SQL scripts are not supported yet")
@@ -1322,9 +1334,16 @@
cdef:
WriteBuffer msg_buf
dbview.DatabaseConnectionView _dbview
uint64_t flags

headers = self.parse_headers()
include_secrets = headers.get(QUERY_HEADER_DUMP_SECRETS) == b'\x01'
# Parse the "Dump" message
if self.protocol_version >= (3, 0):
self.ignore_annotations()
flags = <uint64_t>self.buffer.read_int64()
include_secrets = flags & messages.DumpFlag.DUMP_SECRETS
else:
headers = self.parse_headers()
include_secrets = headers.get(QUERY_HEADER_DUMP_SECRETS) == b'\x01'

self.buffer.finish_message()

@@ -1898,6 +1917,7 @@ async def run_script(
branch_name=database,
),
)
compiled.tag = "gel/startup-script"
if len(compiled.query_unit_group) > 1:
await conn._execute_script(compiled, b'')
else:
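
End to end, the tag arrives as a protocol annotation on Parse (protocol >= 3.0), is validated by get_checked_tag(), and is attached to the compiled query so make_query_prefix() can emit it. A simplified pure-Python sketch of the validation step (the real method is the cdef shown above):

    def get_checked_tag(annotations):
        # Simplified version of EdgeConnection.get_checked_tag() above.
        tag = annotations.get("tag")
        if not tag:
            return None
        if len(tag) > 128:
            raise ValueError('bad annotation: tag too long (> 128 bytes)')
        return tag

    assert get_checked_tag({"tag": "gel/cli"}) == "gel/cli"
    assert get_checked_tag({}) is None
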
5 changes: 4 additions & 1 deletion edb/server/protocol/execute.pyx
@@ -90,6 +90,7 @@ cdef class ExecutionGroup:
len(self.group), # end
dbver,
parse_array,
None, # query_prefix
)
if state is not None:
await be_conn.wait_for_state_resp(state, state_sync=0)
@@ -231,7 +232,6 @@ async def execute(
cdef:
bytes state = None, orig_state = None
WriteBuffer bound_args_buf
ExecutionGroup group

query_unit = compiled.query_unit_group[0]

@@ -296,6 +296,7 @@
dbver=dbv.dbver,
use_pending_func_cache=compiled.use_pending_func_cache,
tx_isolation=tx_isolation,
query_prefix=compiled.make_query_prefix(),
)

if query_unit.needs_readback and data:
@@ -434,6 +435,7 @@ async def execute_script(
feature_used_metrics = None
global_schema = roles = None
unit_group = compiled.query_unit_group
query_prefix = compiled.make_query_prefix()

sync = False
no_sync = False
@@ -491,6 +493,7 @@
sent,
dbver,
parse_array,
query_prefix,
)

if idx == 0 and state is not None:
1 change: 1 addition & 0 deletions edb_stat_statements/edb_stat_statements--1.0.sql
@@ -23,6 +23,7 @@ CREATE FUNCTION edb_stat_statements(IN showtext boolean,
OUT queryid bigint,
OUT query text,
OUT extras jsonb,
OUT tag text,
OUT id uuid,
OUT stmt_type int2,
OUT plans int8,
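
The extension gains a tag output column to go with the server-side prefix. How edb_stat_statements itself extracts the tag from the recorded query text is not shown in this diff; the sketch below only demonstrates that the '-- {...}' prefix produced by make_query_prefix() is trivially machine-readable (extract_tag is a hypothetical helper, not part of the extension):

    import json

    def extract_tag(sql):
        # Hypothetical helper: recover the tag from the leading comment
        # written by make_query_prefix(); not the extension's actual code.
        first_line, _, _ = sql.partition('\n')
        if not first_line.startswith('-- '):
            return None
        try:
            data = json.loads(first_line[3:])
        except ValueError:
            return None
        return data.get('tag') if isinstance(data, dict) else None

    assert extract_tag('-- {"tag": "gel/cli"}\nSELECT 1') == 'gel/cli'
    assert extract_tag('SELECT 1') is None
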