Skip to content

Commit

Permalink
feat: add support for Log Analytics in log bucket destination (#179)
Browse files Browse the repository at this point in the history
Co-authored-by: Imran Nayer <[email protected]>
  • Loading branch information
daniel-cit and imrannayer authored Nov 6, 2023
1 parent 63d105b commit 511585e
Show file tree
Hide file tree
Showing 11 changed files with 107 additions and 17 deletions.
2 changes: 1 addition & 1 deletion build/int.cloudbuild.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ steps:
args: ['/bin/bash', '-c', 'cft test run TestLogBucketProjectModule --stage apply --verbose']
- id: go-verify-logbucket-project
waitFor:
- go-apply-logbucket-project
name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS'
args: ['/bin/bash', '-c', 'cft test run TestLogBucketProjectModule --stage verify --verbose']
- id: go-teardown-logbucket-project
Expand Down
4 changes: 3 additions & 1 deletion examples/logbucket/project/README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Log Export: Log Bucket destination at Project level

These examples configure a project-level log sink that feeds a logging log bucket destination, with the log bucket and log sink in the same project or in separate projects. This example also configures [Log Analytics](https://cloud.google.com/logging/docs/log-analytics) in the log bucket with a linked BigQuery dataset.

<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
## Inputs
Expand All @@ -14,8 +14,10 @@ These examples configures a project-level log sink that feeds a logging log buck

| Name | Description |
|------|-------------|
| log\_bkt\_linked\_ds\_name\_same\_proj | The name for the log bucket linked BigQuery dataset name in same project example. |
| log\_bkt\_name\_same\_proj | The name for the log bucket for sink and logbucket in same project example. |
| log\_bkt\_same\_proj | The project where the log bucket is created for sink and logbucket in same project example. |
| log\_bucket\_linked\_ds\_name | The name for the log bucket linked BigQuery dataset name. |
| log\_bucket\_name | The name for the log bucket. |
| log\_bucket\_project | The project where the log bucket is created. |
| log\_sink\_dest\_uri\_same\_proj | A fully qualified URI for the log sink for sink and logbucket in same project example. |
Expand Down
16 changes: 11 additions & 5 deletions examples/logbucket/project/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,14 @@ module "log_export" {
}

module "destination" {
source = "../../..//modules/logbucket"
project_id = var.project_destination_logbkt_id
name = "logbucket_from_other_project_${random_string.suffix.result}"
location = "global"
log_sink_writer_identity = module.log_export.writer_identity
source = "../../..//modules/logbucket"
project_id = var.project_destination_logbkt_id
name = "logbucket_from_other_project_${random_string.suffix.result}"
location = "global"
enable_analytics = true
linked_dataset_id = "log_analytics_dataset"
linked_dataset_description = "dataset for log bucket"
log_sink_writer_identity = module.log_export.writer_identity
}

#-------------------------------------#
Expand All @@ -56,6 +59,9 @@ module "dest_same_proj" {
project_id = var.project_destination_logbkt_id
name = "logbucket_from_same_project_${random_string.suffix.result}"
location = "global"
enable_analytics = true
linked_dataset_id = "log_analytics_dataset_same"
linked_dataset_description = "dataset for log bucket in the same project"
log_sink_writer_identity = module.log_export_same_proj.writer_identity
grant_write_permission_on_bkt = false
}
10 changes: 10 additions & 0 deletions examples/logbucket/project/outputs.tf
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,11 @@ output "log_bucket_name" {
value = module.destination.resource_name
}

output "log_bucket_linked_ds_name" {
description = "The name for the log bucket linked BigQuery dataset name."
value = module.destination.linked_dataset_name
}

output "log_sink_project_id" {
description = "The project id where the log sink is created."
value = module.log_export.parent_resource_id
Expand Down Expand Up @@ -58,6 +63,11 @@ output "log_bkt_name_same_proj" {
value = module.dest_same_proj.resource_name
}

output "log_bkt_linked_ds_name_same_proj" {
description = "The name for the log bucket linked BigQuery dataset name in same project example."
value = module.dest_same_proj.linked_dataset_name
}

output "log_sink_id_same_proj" {
description = "The project id where the log sink is created for sink and logbucket in same project example."
value = module.log_export_same_proj.parent_resource_id
Expand Down
4 changes: 4 additions & 0 deletions modules/logbucket/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,10 @@ module "destination" {

| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
| enable\_analytics | (Optional) Whether or not Log Analytics is enabled. A Log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled. | `bool` | `false` | no |
| grant\_write\_permission\_on\_bkt | (Optional) Indicates whether the module is responsible for granting write permission on the logbucket. This permission will be given by default, but if the user wants, this module can skip this step. This is the case when the sink route logs to a log bucket in the same Cloud project, no new service account will be created and this module will need to bypass granting permissions. | `bool` | `true` | no |
| linked\_dataset\_description | A user-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters. | `string` | `null` | no |
| linked\_dataset\_id | The ID of the linked BigQuery dataset. A valid link dataset ID must only have alphanumeric characters and underscores within it and have up to 100 characters. | `string` | `null` | no |
| location | The location of the log bucket. | `string` | `"global"` | no |
| log\_sink\_writer\_identity | The service account that logging uses to write log entries to the destination. (This is available as an output coming from the root module). | `string` | n/a | yes |
| name | The name of the log bucket to be created and used for log entries matching the filter. | `string` | n/a | yes |
Expand All @@ -50,6 +53,7 @@ module "destination" {
|------|-------------|
| console\_link | The console link to the destination log buckets |
| destination\_uri | The destination URI for the log bucket. |
| linked\_dataset\_name | The resource name of the linked BigQuery dataset. |
| project | The project in which the log bucket was created. |
| resource\_name | The resource name for the destination log bucket |

Expand Down
23 changes: 19 additions & 4 deletions modules/logbucket/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,25 @@ resource "google_project_service" "enable_destination_api" {
#------------#

resource "google_logging_project_bucket_config" "bucket" {
project = google_project_service.enable_destination_api.project
location = var.location
retention_days = var.retention_days
bucket_id = var.name
project = google_project_service.enable_destination_api.project
location = var.location
retention_days = var.retention_days
enable_analytics = var.enable_analytics
bucket_id = var.name
}

#-------------------------#
# Linked BigQuery dataset #
#-------------------------#

resource "google_logging_linked_dataset" "linked_dataset" {
count = var.linked_dataset_id != null ? 1 : 0

link_id = var.linked_dataset_id
description = var.linked_dataset_description
parent = "projects/${google_project_service.enable_destination_api.project}"
bucket = google_logging_project_bucket_config.bucket.id
location = var.location
}

#--------------------------------#
Expand Down
17 changes: 17 additions & 0 deletions modules/logbucket/metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -65,11 +65,26 @@ spec:
- name: splunk-sink
location: examples/splunk-sink
variables:
- name: enable_analytics
description: (Optional) Whether or not Log Analytics is enabled. A Log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled.
type: bool
default: false
required: false
- name: grant_write_permission_on_bkt
description: (Optional) Indicates whether the module is responsible for granting write permission on the logbucket. This permission will be given by default, but if the user wants, this module can skip this step. This is the case when the sink route logs to a log bucket in the same Cloud project, no new service account will be created and this module will need to bypass granting permissions.
type: bool
default: true
required: false
- name: linked_dataset_description
  description: A user-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters.
  type: string
  default: null
  required: false
- name: linked_dataset_id
description: The ID of the linked BigQuery dataset. A valid link dataset ID must only have alphanumeric characters and underscores within it and have up to 100 characters.
type: string
default: null
required: false
- name: location
description: The location of the log bucket.
type: string
Expand Down Expand Up @@ -97,6 +112,8 @@ spec:
description: The console link to the destination log buckets
- name: destination_uri
description: The destination URI for the log bucket.
- name: linked_dataset_name
description: The resource name of the linked BigQuery dataset.
- name: project
description: The project in which the log bucket was created.
- name: resource_name
Expand Down
5 changes: 5 additions & 0 deletions modules/logbucket/outputs.tf
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,8 @@ output "destination_uri" {
description = "The destination URI for the log bucket."
value = local.destination_uri
}

output "linked_dataset_name" {
description = "The resource name of the linked BigQuery dataset."
value = var.linked_dataset_id != null ? google_logging_linked_dataset.linked_dataset[0].name : ""
}
18 changes: 18 additions & 0 deletions modules/logbucket/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -46,3 +46,21 @@ variable "grant_write_permission_on_bkt" {
type = bool
default = true
}

variable "enable_analytics" {
description = "(Optional) Whether or not Log Analytics is enabled. A Log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled."
type = bool
default = false
}

variable "linked_dataset_id" {
description = "The ID of the linked BigQuery dataset. A valid link dataset ID must only have alphanumeric characters and underscores within it and have up to 100 characters."
type = string
default = null
}

variable "linked_dataset_description" {
description = "A use-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters."
type = string
default = null
}
2 changes: 1 addition & 1 deletion modules/logbucket/versions.tf
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ terraform {

google = {
source = "hashicorp/google"
version = ">= 3.53, < 6"
version = ">= 4.59, < 6"
}
}

Expand Down
23 changes: 18 additions & 5 deletions test/integration/logbucket-project/logbucket_project_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@ func TestLogBucketProjectModule(t *testing.T) {
for _, tc := range []struct {
projId string
bktName string
linkedDsName string
linkedDsID string
sinkDest string
sinkProjId string
sinkName string
Expand All @@ -47,17 +49,21 @@ func TestLogBucketProjectModule(t *testing.T) {
{
projId: bpt.GetStringOutput("log_bucket_project"),
bktName: bpt.GetStringOutput("log_bucket_name"),
linkedDsName: bpt.GetStringOutput("log_bucket_linked_ds_name"),
linkedDsID: "log_analytics_dataset",
sinkDest: bpt.GetStringOutput("log_sink_destination_uri"),
sinkProjId: bpt.GetStringOutput("log_sink_project_id"),
sinkName: bpt.GetStringOutput("log_sink_resource_name"),
writerIdentity: bpt.GetStringOutput("log_sink_writer_identity"),
},
{
projId: bpt.GetStringOutput("log_bkt_same_proj"),
bktName: bpt.GetStringOutput("log_bkt_name_same_proj"),
sinkDest: bpt.GetStringOutput("log_sink_dest_uri_same_proj"),
sinkProjId: bpt.GetStringOutput("log_sink_id_same_proj"),
sinkName: bpt.GetStringOutput("log_sink_resource_name_same_proj"),
projId: bpt.GetStringOutput("log_bkt_same_proj"),
bktName: bpt.GetStringOutput("log_bkt_name_same_proj"),
linkedDsName: bpt.GetStringOutput("log_bkt_linked_ds_name_same_proj"),
linkedDsID: "log_analytics_dataset_same",
sinkDest: bpt.GetStringOutput("log_sink_dest_uri_same_proj"),
sinkProjId: bpt.GetStringOutput("log_sink_id_same_proj"),
sinkName: bpt.GetStringOutput("log_sink_resource_name_same_proj"),
// writerIdentity: As sink and bucket are in same project no service account is needed and writerIdentity is empty
},
} {
Expand All @@ -77,6 +83,13 @@ func TestLogBucketProjectModule(t *testing.T) {
assert.Equal(tc.sinkDest, logSinkDetails.Get("destination").String(), "log sink destination should match")
assert.Equal("resource.type = gce_instance", logSinkDetails.Get("filter").String(), "log sink filter should match")
assert.Equal(tc.writerIdentity, logSinkDetails.Get("writerIdentity").String(), "log sink writerIdentity should match")

// assert linked dataset name & BigQuery Dataset ID
projectNumber := gcloud.Runf(t, "projects describe %s", tc.projId).Get("projectNumber").String()
bigqueryDatasetID := fmt.Sprintf("bigquery.googleapis.com/projects/%s/datasets/%s", projectNumber, tc.linkedDsID)
linkedDs := gcloud.Runf(t, "logging links describe %s --bucket=%s --location=%s --project=%s", tc.linkedDsID, tc.bktName, "global", tc.projId)
assert.Equal(tc.linkedDsName, linkedDs.Get("name").String(), "log bucket linked dataset name should match")
assert.Equal(bigqueryDatasetID, linkedDs.Get("bigqueryDataset.datasetId").String(), "log bucket BigQuery dataset ID should match")
}

//*****************************
Expand Down

0 comments on commit 511585e

Please sign in to comment.