Commit
add test
colin-sentry committed Dec 27, 2024
1 parent 3fcec5f commit 40e74f7
Showing 3 changed files with 58 additions and 15 deletions.
10 changes: 5 additions & 5 deletions snuba/clickhouse/translators/snuba/mappers.py
@@ -241,11 +241,11 @@ class SubscriptableHashBucketMapper(SubscriptableReferenceMapper):
     to_col_table: Optional[str]
     to_col_name: str
     # if specified, casts the result to the specified type.
-    data_type: Optional[str]
+    data_type: Optional[str] = None
     # if you add {'sentry.span_id': 'span_id'} here, then if the user requests attr_blah[sentry.span_id],
     # this mapper will return a reference to the actual column instead of attr_str.
     # if specified, data_type must also be specified.
-    normalized_columns: Optional[Mapping[str, str]]
+    normalized_columns: Optional[Mapping[str, str]] = None
 
     def attempt_map(
         self,
@@ -266,7 +266,7 @@ def attempt_map(
         if (
             self.normalized_columns
             and key.value in self.normalized_columns
-            and self.cast_as
+            and self.data_type
         ):
             return f.CAST(
                 column(self.normalized_columns[key.value]),
@@ -277,7 +277,7 @@ def attempt_map(
         bucket_idx = fnv_1a(key.value.encode("utf-8")) % ATTRIBUTE_BUCKETS
         if self.data_type:
             return f.CAST(
-                f.arrayElement(
+                arrayElement(
                     None,
                     ColumnExpr(
                         None, self.to_col_table, f"{self.to_col_name}_{bucket_idx}"
@@ -288,7 +288,7 @@ def attempt_map(
                 alias=expression.alias,
             )
         else:
-            return f.arrayElement(
+            return arrayElement(
                 expression.alias,
                 ColumnExpr(None, self.to_col_table, f"{self.to_col_name}_{bucket_idx}"),
                 key,
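For context on the bucketed path above: when the requested key is not one of the normalized columns, the mapper hashes the key with FNV-1a and reads from one of ATTRIBUTE_BUCKETS physical columns. A minimal standalone sketch of that selection, assuming 32-bit FNV-1a parameters and a placeholder bucket count (the real fnv_1a helper and ATTRIBUTE_BUCKETS constant live elsewhere in the snuba codebase):

# Sketch only: 32-bit FNV-1a parameters and a placeholder bucket count, for illustration.
ATTRIBUTE_BUCKETS = 20  # placeholder value; the real constant is defined in snuba

def fnv_1a_sketch(data: bytes) -> int:
    h = 0x811C9DC5  # FNV-1a 32-bit offset basis
    for byte in data:
        h ^= byte
        h = (h * 0x01000193) & 0xFFFFFFFF  # FNV-1a 32-bit prime, masked to 32 bits
    return h

# A request like attr_str[some.key] would then be served from the physical column
# f"{to_col_name}_{bucket_idx}" (for example attr_str_<bucket_idx>).
bucket_idx = fnv_1a_sketch(b"some.key") % ATTRIBUTE_BUCKETS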
10 changes: 0 additions & 10 deletions (file path not shown in this view)
@@ -29,7 +29,6 @@ schema:
     { name: attr_str, type: Map, args: { key: { type: String }, value: { type: String } } },
     { name: attr_f64, type: Map, args: { key: { type: String }, value: { type: Float, args: { size: 64 } } } },
     { name: attr_i64, type: Map, args: { key: { type: String }, value: { type: Int, args: { size: 64 } } } },
-    { name: attr_bool, type: Map, args: { key: { type: String }, value: { type: DateTime64 } } },
   ]
 
 storages:
@@ -82,15 +81,6 @@ storages:
             normalized_columns:
               sentry.organization_id: organization_id
               sentry.project_id: project_id
-        - mapper: SubscriptableHashBucketMapper
-          args:
-            from_column_table: null
-            from_column_name: attr_bool
-            to_col_table: null
-            to_col_name: attr_num
-            data_type: Boolean
-            normalized_columns:
-              sentry.is_segment: is_segment
 
 storage_selector:
   selector: DefaultQueryStorageSelector
53 changes: 53 additions & 0 deletions tests/clickhouse/translators/snuba/test_translation.py
@@ -146,6 +146,59 @@ def test_hash_bucket_tag_translation() -> None:
     )
 
 
+def test_hash_bucket_normalized() -> None:
+    mapper = SubscriptableHashBucketMapper(
+        from_column_table=None,
+        from_column_name="tags_str",
+        to_col_table=None,
+        to_col_name="tags_float",
+        data_type="String",
+        normalized_columns={"derp.hello": "some_column"},
+    )
+
+    non_normalized_mapped = mapper.attempt_map(
+        SubscriptableReference(
+            "tags_str[z]", Column(None, None, "tags_str"), Literal(None, "z")
+        ),
+        SnubaClickhouseMappingTranslator(TranslationMappers()),
+    )
+
+    normalized_mapped = mapper.attempt_map(
+        SubscriptableReference(
+            "tags_str[derp.hello]",
+            Column(None, None, "tags_str"),
+            Literal(None, "derp.hello"),
+        ),
+        SnubaClickhouseMappingTranslator(TranslationMappers()),
+    )
+
+    assert non_normalized_mapped == FunctionCall(
+        "tags_str[z]",
+        "CAST",
+        (
+            FunctionCall(
+                None,
+                "arrayElement",
+                (
+                    Column(
+                        None,
+                        None,
+                        f"tags_float_{fnv_1a(b'z') % constants.ATTRIBUTE_BUCKETS}",
+                    ),
+                    Literal(None, "z"),
+                ),
+            ),
+            Literal(None, "String"),
+        ),
+    )
+
+    assert normalized_mapped == FunctionCall(
+        "tags_str[derp.hello]",
+        "CAST",
+        (Column(None, None, "some_column"), Literal(None, "String")),
+    )
+
+
 def _get_nullable_expr(alias: str) -> FunctionCall:
     return FunctionCall(
         alias,
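A related case the test above does not exercise: with the new `= None` defaults, the mapper can be constructed without data_type or normalized_columns, in which case attempt_map should return the bare arrayElement call with no surrounding CAST. A hedged sketch of that path, assuming the same imports as the test module (exact import paths may differ):

# Sketch only: relies on the `= None` defaults added in this commit; imports are
# assumed to match the surrounding test module.
mapper = SubscriptableHashBucketMapper(
    from_column_table=None,
    from_column_name="tags_str",
    to_col_table=None,
    to_col_name="tags_float",
)

mapped = mapper.attempt_map(
    SubscriptableReference(
        "tags_str[z]", Column(None, None, "tags_str"), Literal(None, "z")
    ),
    SnubaClickhouseMappingTranslator(TranslationMappers()),
)

# With data_type unset, the expected shape is an aliased arrayElement with no CAST,
# reading from the hashed bucket column, e.g. arrayElement("tags_str[z]", tags_float_<bucket>, 'z').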
