Skip to content

Commit

Permalink
Add integration tests
Browse files Browse the repository at this point in the history
  • Loading branch information
bobbyiliev committed Sep 24, 2024
1 parent 38fdbdb commit 5b43cee
Showing 1 changed file with 176 additions and 0 deletions.
176 changes: 176 additions & 0 deletions integration/source.tf
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,21 @@ resource "materialize_source_load_generator" "load_generator_auction" {
}
}

# Create source table from Auction load generator source
resource "materialize_source_table_load_generator" "load_generator_auction_table" {
  name          = "load_gen_auction_table"
  schema_name   = "public"
  database_name = "materialize"

  # Fully qualify the parent source so the lookup does not depend on the
  # session default schema/database — consistent with the other
  # materialize_source_table_* resources in this file.
  source {
    name          = materialize_source_load_generator.load_generator_auction.name
    schema_name   = materialize_source_load_generator.load_generator_auction.schema_name
    database_name = materialize_source_load_generator.load_generator_auction.database_name
  }

  comment = "source table load generator comment"

  # Subsource of the multi-output Auction generator exposed by this table.
  upstream_name = "bids"
}

resource "materialize_source_load_generator" "load_generator_marketing" {
name = "load_gen_marketing"
schema_name = materialize_schema.schema.name
Expand All @@ -82,6 +97,21 @@ resource "materialize_source_load_generator" "load_generator_marketing" {
}
}

# Create source table from Marketing load generator source
resource "materialize_source_table_load_generator" "load_generator_marketing_table" {
  name          = "load_gen_marketing_table"
  schema_name   = "public"
  database_name = "materialize"

  # The marketing source is created in materialize_schema.schema, not the
  # default schema, so the reference must carry schema/database — matching
  # the other materialize_source_table_* resources in this file.
  source {
    name          = materialize_source_load_generator.load_generator_marketing.name
    schema_name   = materialize_source_load_generator.load_generator_marketing.schema_name
    database_name = materialize_source_load_generator.load_generator_marketing.database_name
  }

  comment = "source table load generator comment"

  # Subsource of the multi-output Marketing generator exposed by this table.
  upstream_name = "leads"
}

resource "materialize_source_load_generator" "load_generator_tpch" {
name = "load_gen_tpch"
schema_name = materialize_schema.schema.name
Expand Down Expand Up @@ -144,6 +174,26 @@ resource "materialize_source_postgres" "example_source_postgres" {
}
}

# Source table exposing a single upstream table from the Postgres source.
resource "materialize_source_table_postgres" "source_table_postgres" {
  database_name = "materialize"
  schema_name   = "public"
  name          = "source_table2_postgres"

  # Parent Postgres source, fully qualified.
  source {
    database_name = materialize_source_postgres.example_source_postgres.database_name
    schema_name   = materialize_source_postgres.example_source_postgres.schema_name
    name          = materialize_source_postgres.example_source_postgres.name
  }

  # Upstream Postgres table to ingest, qualified by its Postgres schema.
  upstream_schema_name = "public"
  upstream_name        = "table2"

  # Decode this column as text rather than its native Postgres type.
  text_columns = ["updated_at"]
}

resource "materialize_source_kafka" "example_source_kafka_format_text" {
name = "source_kafka_text"
comment = "source kafka comment"
Expand All @@ -168,6 +218,32 @@ resource "materialize_source_kafka" "example_source_kafka_format_text" {
depends_on = [materialize_sink_kafka.sink_kafka]
}

# Source table reading topic1 from the text-format Kafka source.
resource "materialize_source_table_kafka" "source_table_kafka" {
  database_name = "materialize"
  schema_name   = "public"
  name          = "source_table_kafka"

  # Parent Kafka source, fully qualified.
  source {
    database_name = materialize_source_kafka.example_source_kafka_format_text.database_name
    schema_name   = materialize_source_kafka.example_source_kafka_format_text.schema_name
    name          = materialize_source_kafka.example_source_kafka_format_text.name
  }

  topic = "topic1"

  # Decode both key and value as text; no envelope semantics.
  key_format_text   = true
  value_format_text = true
  envelope_none     = true

  # Expose Kafka metadata columns, each under an explicit alias.
  include_key       = true
  include_key_alias = "key_alias"

  include_offset       = true
  include_offset_alias = "offset_alias"

  include_partition       = true
  include_partition_alias = "partition_alias"

  include_timestamp       = true
  include_timestamp_alias = "timestamp_alias"
}

resource "materialize_source_kafka" "example_source_kafka_format_bytes" {
name = "source_kafka_bytes"
cluster_name = materialize_cluster.cluster_source.name
Expand All @@ -185,6 +261,27 @@ resource "materialize_source_kafka" "example_source_kafka_format_bytes" {
depends_on = [materialize_sink_kafka.sink_kafka]
}

# Source table reading topic1 from the Kafka source as raw bytes.
resource "materialize_source_table_kafka" "source_table_kafka_bytes" {
  database_name = "materialize"
  schema_name   = "public"
  name          = "source_table_kafka_bytes"

  # Parent Kafka source, fully qualified.
  source {
    database_name = materialize_source_kafka.example_source_kafka_format_bytes.database_name
    schema_name   = materialize_source_kafka.example_source_kafka_format_bytes.schema_name
    name          = materialize_source_kafka.example_source_kafka_format_bytes.name
  }

  topic = "topic1"

  # Deliver messages undecoded, as raw bytes.
  format {
    bytes = true
  }

  # Ensure the sink producing topic1 exists before this table is created.
  depends_on = [materialize_sink_kafka.sink_kafka]
}

resource "materialize_source_kafka" "example_source_kafka_format_avro" {
name = "source_kafka_avro"
cluster_name = materialize_cluster.cluster_source.name
Expand All @@ -211,6 +308,33 @@ resource "materialize_source_kafka" "example_source_kafka_format_avro" {
depends_on = [materialize_sink_kafka.sink_kafka]
}

# Source table reading topic1 from the Kafka source, Avro-decoded via the
# Confluent schema registry connection.
resource "materialize_source_table_kafka" "source_table_kafka_avro" {
  database_name = "materialize"
  schema_name   = "public"
  name          = "source_table_kafka_avro"

  # Parent Kafka source, fully qualified.
  source {
    database_name = materialize_source_kafka.example_source_kafka_format_avro.database_name
    schema_name   = materialize_source_kafka.example_source_kafka_format_avro.schema_name
    name          = materialize_source_kafka.example_source_kafka_format_avro.name
  }

  topic = "topic1"

  # Avro decoding; schemas are resolved through the registry connection.
  format {
    avro {
      schema_registry_connection {
        database_name = materialize_connection_confluent_schema_registry.schema_registry.database_name
        schema_name   = materialize_connection_confluent_schema_registry.schema_registry.schema_name
        name          = materialize_connection_confluent_schema_registry.schema_registry.name
      }
    }
  }

  # Ensure the sink producing topic1 exists before this table is created.
  depends_on = [materialize_sink_kafka.sink_kafka]
}

resource "materialize_source_webhook" "example_webhook_source" {
name = "example_webhook_source"
comment = "source webhook comment"
Expand Down Expand Up @@ -271,6 +395,22 @@ resource "materialize_source_mysql" "test" {
}
}

# Source table exposing a single upstream table from the MySQL source.
resource "materialize_source_table_mysql" "source_table_mysql" {
  database_name = "materialize"
  schema_name   = "public"
  name          = "source_table1_mysql"

  # Parent MySQL source, fully qualified.
  source {
    database_name = materialize_source_mysql.test.database_name
    schema_name   = materialize_source_mysql.test.schema_name
    name          = materialize_source_mysql.test.name
  }

  # Upstream MySQL table to ingest, qualified by its MySQL schema.
  upstream_schema_name = "shop"
  upstream_name        = "mysql_table1"
}

resource "materialize_source_grant" "source_grant_select" {
role_name = materialize_role.role_1.name
privilege = "SELECT"
Expand Down Expand Up @@ -317,6 +457,42 @@ resource "materialize_source_kafka" "kafka_upsert_options_source" {
include_key_alias = "key_alias"
}

# Source table reading topic1 with UPSERT envelope semantics and inline
# handling of value-decoding errors.
resource "materialize_source_table_kafka" "source_table_kafka_upsert_options" {
  database_name = "materialize"
  schema_name   = "public"
  name          = "source_table_kafka_upsert_options"

  # Parent Kafka source, fully qualified.
  source {
    database_name = materialize_source_kafka.kafka_upsert_options_source.database_name
    schema_name   = materialize_source_kafka.kafka_upsert_options_source.schema_name
    name          = materialize_source_kafka.kafka_upsert_options_source.name
  }

  topic = "topic1"

  # Text decoding for key and value, interpreted with the UPSERT envelope.
  key_format_text   = true
  value_format_text = true
  envelope_upsert   = true

  # Surface value-decoding failures inline in a dedicated column instead of
  # failing the source.
  upsert_options {
    value_decoding_errors {
      inline {
        enabled = true
        alias   = "my_error_col"
      }
    }
  }

  # Metadata columns and aliases.
  # NOTE(review): include_timestamp_alias is set without include_timestamp,
  # unlike the offset/partition pairs above — confirm the alias alone is
  # sufficient to include the column.
  include_timestamp_alias = "timestamp_alias"

  include_offset       = true
  include_offset_alias = "offset_alias"

  include_partition       = true
  include_partition_alias = "partition_alias"

  include_key_alias = "key_alias"
}

# Expose the fully qualified SQL name of the load generator source.
output "qualified_load_generator" {
value = materialize_source_load_generator.load_generator.qualified_sql_name
}
Expand Down

0 comments on commit 5b43cee

Please sign in to comment.