diff --git a/.github/workflows/prisma.yml b/.github/workflows/prisma.yml new file mode 100644 index 0000000000..f29d98baf2 --- /dev/null +++ b/.github/workflows/prisma.yml @@ -0,0 +1,61 @@ +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +# A sample workflow that checks for security issues using +# the Prisma Cloud Infrastructure as Code Scan Action on +# the IaC files present in the repository. +# The results are uploaded to GitHub Security Code Scanning +# +# For more details on the Action configuration see https://github.com/prisma-cloud-shiftleft/iac-scan-action + +name: Prisma Cloud IaC Scan + +on: + push: + branches: [ "master" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "master" ] + schedule: + - cron: '22 21 * * 5' + +permissions: + contents: read + +jobs: + prisma_cloud_iac_scan: + permissions: + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/upload-sarif to upload SARIF results + actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status + runs-on: ubuntu-latest + name: Run Prisma Cloud IaC Scan to check + steps: + - name: Checkout + uses: actions/checkout@v3 + - id: iac-scan + name: Run Scan on CFT files in the repository + uses: prisma-cloud-shiftleft/iac-scan-action@53278c231c438216d99b463308a3cbed351ba0c3 + with: + # You will need Prisma Cloud API Access Token + # More details in https://github.com/prisma-cloud-shiftleft/iac-scan-action + prisma_api_url: ${{ secrets.PRISMA_CLOUD_API_URL }} + access_key: ${{ secrets.PRISMA_CLOUD_ACCESS_KEY }} + secret_key: ${{ secrets.PRISMA_CLOUD_SECRET_KEY }} + # Scan sources on Prisma Cloud are uniquely identified by their name + asset_name: 'my-asset-name' + # The service needs to know the type of IaC 
being scanned + template_type: 'CFT' + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v2 + # Results are generated only on a success or failure + # this is required since GitHub by default won't run the next step + # when the previous one has failed. + # An alternative is to add `continue-on-error: true` to the previous step + if: success() || failure() + with: + # The SARIF Log file name is configurable on scan action + # therefore the file name is best read from the steps output + sarif_file: ${{ steps.iac-scan.outputs.iac_scan_result_sarif_path }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000000..61749ab9ca --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,9 @@ +steps: + - name: Checkout repo + uses: actions/checkout@v2 + + - name: Run Bridgecrew + id: Bridgecrew + uses: bridgecrewio/bridgecrew-action@master + with: + api-key: ${{ secrets.BC_API_KEY }} diff --git a/terraform/aws/ec2.tf b/terraform/aws/ec2.tf index 00e0ba940f..b680be0d04 100644 --- a/terraform/aws/ec2.tf +++ b/terraform/aws/ec2.tf @@ -2,7 +2,7 @@ resource "aws_instance" "web_host" { # ec2 have plain text secrets in user data ami = "${var.ami}" instance_type = "t2.nano" - + # this should go boom vpc_security_group_ids = [ "${aws_security_group.web-node.id}"] subnet_id = "${aws_subnet.web_subnet.id}" diff --git a/terraform/aws/s3.tf b/terraform/aws/s3.tf index a2a2ed78fe..432c57c58b 100644 --- a/terraform/aws/s3.tf +++ b/terraform/aws/s3.tf @@ -20,6 +20,28 @@ resource "aws_s3_bucket" "data" { }) } + +resource "aws_s3_bucket" "data_log_bucket" { + bucket = "data-log-bucket" +} + +resource "aws_s3_bucket_logging" "data" { + bucket = aws_s3_bucket.data.id + + target_bucket = aws_s3_bucket.data_log_bucket.id + target_prefix = "log/" +} + + + +resource "aws_s3_bucket_versioning" "data" { + bucket = aws_s3_bucket.data.id + + versioning_configuration { + status = "Enabled" + } +} + resource "aws_s3_bucket_object" 
"data_object" { bucket = aws_s3_bucket.data.id key = "customer-master.xlsx" @@ -62,6 +84,28 @@ resource "aws_s3_bucket" "financials" { } + +resource "aws_s3_bucket" "financials_log_bucket" { + bucket = "financials-log-bucket" +} + +resource "aws_s3_bucket_logging" "financials" { + bucket = aws_s3_bucket.financials.id + + target_bucket = aws_s3_bucket.financials_log_bucket.id + target_prefix = "log/" +} + + + +resource "aws_s3_bucket_versioning" "financials" { + bucket = aws_s3_bucket.financials.id + + versioning_configuration { + status = "Enabled" + } +} + resource "aws_s3_bucket" "operations" { # bucket is not encrypted # bucket does not have access logs @@ -86,6 +130,19 @@ resource "aws_s3_bucket" "operations" { }) } + +resource "aws_s3_bucket" "operations_log_bucket" { + bucket = "operations-log-bucket" +} + +resource "aws_s3_bucket_logging" "operations" { + bucket = aws_s3_bucket.operations.id + + target_bucket = aws_s3_bucket.operations_log_bucket.id + target_prefix = "log/" +} + + resource "aws_s3_bucket" "data_science" { # bucket is not encrypted bucket = "${local.resource_prefix.value}-data-science" @@ -139,3 +196,15 @@ resource "aws_s3_bucket" "logs" { yor_trace = "01946fe9-aae2-4c99-a975-e9b0d3a4696c" }) } + + +resource "aws_s3_bucket" "logs_log_bucket" { + bucket = "logs-log-bucket" +} + +resource "aws_s3_bucket_logging" "logs" { + bucket = aws_s3_bucket.logs.id + + target_bucket = aws_s3_bucket.logs_log_bucket.id + target_prefix = "log/" +} \ No newline at end of file