diff --git a/.markdownlint-cli2.yaml b/.markdownlint-cli2.yaml
deleted file mode 120000
index 8b5fdf87..00000000
--- a/.markdownlint-cli2.yaml
+++ /dev/null
@@ -1 +0,0 @@
-builders/etc/.markdownlint-cli2.yaml
\ No newline at end of file
diff --git a/.markdownlint-cli2.yaml b/.markdownlint-cli2.yaml
new file mode 100644
index 00000000..df968a89
--- /dev/null
+++ b/.markdownlint-cli2.yaml
@@ -0,0 +1,44 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+config:
+  line-length:
+    line_length: 120
+    #stern: true
+    code_blocks: false
+    tables: false
+
+  # these are apparently in conflict with prettier's markdown formatting
+  list-marker-space: false
+  list-indent: false
+  ul-indent: false
+
+  headings: false
+
+  proper-names:
+    code_blocks: false
+    names:
+      - CommonMark
+      - JavaScript
+      - Markdown
+      - markdown-it
+      - markdownlint
+      - markdownlint-cli2
+      - Node.js
+
+fix: true
+
+ignores:
+- CHANGELOG.md
+- google_internal/LATEST_RELEASE.md
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7b0d679f..2e21449e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,62 @@
 # Changelog
 
+## [2.7.0](https://github.com/privacysandbox/aggregation-service/compare/v2.6.0...v2.7.0) (2024-08-01)
+
+- Added support for aggregating reports belonging to multiple reporting origins under the same
+  reporting site in a single aggregation job.
+- [GCP Only] Updated coordinator endpoints to new Google/Third-Party coordinator pair.
+
+## [2.6.0](https://github.com/privacysandbox/aggregation-service/compare/v2.5.0...v2.6.0) (2024-07-19)
+
+- Enabled support for
+  [Aggregate Debug Reporting API](https://github.com/WICG/attribution-reporting-api/blob/main/aggregate_debug_reporting.md).
+- Reduced memory usage by making `AggregatedFacts` mutable and removing redundant object creation.
+- Updated dependencies to address security vulnerabilities.
+- Upgraded control plane shared library dependency to
+  [v1.9.0-rc03](https://github.com/privacysandbox/coordinator-services-and-shared-libraries/releases/tag/v1.9.0-rc03)
+
+## [2.5.1](https://github.com/privacysandbox/aggregation-service/compare/v2.5.0...v2.5.1) (2024-07-19)
+
+- Updated dependencies to address security vulnerabilities.
+
+## [2.5.0](https://github.com/privacysandbox/aggregation-service/compare/v2.4.2...v2.5.0) (2024-05-22)
+
+### Changes
+
+- Deprecated single party key support.
+- Enabled exception caching on private key service and set TTL for exception cache to reduce the
+  load and make aggregation jobs run/fail faster.
+- Enabled non-zero filtering of labeled contributions.
+- Enabled OpenTelemetry for metric and trace collection.
+- Fixed enclave worker AMI
+  [build issue](https://github.com/privacysandbox/aggregation-service/issues/40) by pinning Docker
+  version to 24.0.5.
+- Fixed GCP cloud build by adding -y option to the install commands with Docker.
+- Fixed job failure with `ClassCastException` error.
+- Fixed noised facts mismatch when DebugRun and DomainOptional are both set.
+- Limited Otel memory reporting max to 90%.
+- Made aggregate fact noising parallel.
+- Made the job fail early on reaching a report error threshold.
+- Refactored blob reading logic to handle zero-sized blobs gracefully.
+- Removed redundant SingleFactAggregation class to reduce memory consumption.
+- Updated dependencies to address security vulnerabilities.
+- Updated documentation for batching strategies, aggregatable reports and OpenTelemetry usage.
+- Upgraded control plane shared library dependency to
+  [v1.8.0-rc01](https://github.com/privacysandbox/coordinator-services-and-shared-libraries/releases/tag/v1.8.0-rc01)
+
+## [2.4.4](https://github.com/privacysandbox/aggregation-service/compare/v2.4.3...v2.4.4) (2024-07-19)
+
+### Changes
+
+- Updated dependencies to address security vulnerabilities.
+
+## [2.4.3](https://github.com/privacysandbox/aggregation-service/compare/v2.4.2...v2.4.3) (2024-05-20)
+
+### Changes
+
+- Fixed GCP cloud build by adding -y option to the install commands with Docker.
+- Updated dependencies to address security vulnerabilities.
+
 ## [2.4.2](https://github.com/privacysandbox/aggregation-service/compare/v2.4.1...v2.4.2) (2024-04-09)
 
 ### Changes
@@ -28,6 +85,36 @@
 - Used RxJava for domain reading, reducing overall job execution time and memory consumption.
 - Upgraded Bazel version to 6.5.0.
 
+## [2.3.4](https://github.com/privacysandbox/aggregation-service/compare/v2.3.3...v2.3.4) (2024-07-19)
+
+### Changes
+
+- Updated dependencies to address security vulnerabilities.
+
+## [2.3.3](https://github.com/privacysandbox/aggregation-service/compare/v2.3.2...v2.3.3) (2024-05-20)
+
+### Changes
+
+- Fixed GCP cloud build by adding -y option to the install commands with Docker.
+- Updated dependencies to address security vulnerabilities.
+
+## [2.3.2](https://github.com/privacysandbox/aggregation-service/compare/v2.3.1...v2.3.2) (2024-04-09)
+
+### Changes
+
+- Fixed job failure with `ClassCastException` error.
+- Updated dependencies to address security vulnerabilities.
+
+## [2.3.1](https://github.com/privacysandbox/aggregation-service/compare/v2.3.0...v2.3.1) (2024-03-11)
+
+### Changes
+
+#### [AWS only]
+
+- Fixed enclave worker AMI
+  [build issue](https://github.com/privacysandbox/aggregation-service/issues/40) by pinning Docker
+  version to 24.0.5.
+
 ## [2.3.0](https://github.com/privacysandbox/aggregation-service/compare/v2.2.0...v2.3.0) (2024-01-12)
 
 ### Changes
@@ -36,6 +123,30 @@
   [v1.5.1](https://github.com/privacysandbox/coordinator-services-and-shared-libraries/releases/tag/v1.5.1)
 - Updated dependencies to address security vulnerabilities.
 
+## [2.2.3](https://github.com/privacysandbox/aggregation-service/compare/v2.2.2...v2.2.3) (2024-05-20)
+
+### Changes
+
+- Fixed GCP cloud build by adding -y option to the install commands with Docker.
+- Updated dependencies to address security vulnerabilities.
+
+## [2.2.2](https://github.com/privacysandbox/aggregation-service/compare/v2.2.1...v2.2.2) (2024-04-09)
+
+### Changes
+
+- Fixed job failure with `ClassCastException` error.
+- Updated dependencies to address security vulnerabilities.
+
+## [2.2.1](https://github.com/privacysandbox/aggregation-service/compare/v2.2.0...v2.2.1) (2024-03-11)
+
+### Changes
+
+#### [AWS only]
+
+- Fixed enclave worker AMI
+  [build issue](https://github.com/privacysandbox/aggregation-service/issues/40) by pinning Docker
+  version to 24.0.5.
+
 ## [2.2.0](https://github.com/privacysandbox/aggregation-service/compare/v2.1.0...v2.2.0) (2023-12-07)
 
 ### Changes
@@ -51,6 +162,30 @@
 
 - Enabled parallel upload to cloud storage of the sharded summary reports.
 
+## [2.1.4](https://github.com/privacysandbox/aggregation-service/compare/v2.1.3...v2.1.4) (2024-05-20)
+
+### Changes
+
+- Fixed GCP cloud build by adding -y option to the install commands with Docker.
+- Updated dependencies to address security vulnerabilities.
+
+## [2.1.3](https://github.com/privacysandbox/aggregation-service/compare/v2.1.2...v2.1.3) (2024-04-09)
+
+### Changes
+
+- Fixed job failure with `ClassCastException` error.
+- Updated dependencies to address security vulnerabilities.
+
+## [2.1.2](https://github.com/privacysandbox/aggregation-service/compare/v2.1.1...v2.1.2) (2024-03-11)
+
+### Changes
+
+#### [AWS only]
+
+- Fixed enclave worker AMI
+  [build issue](https://github.com/privacysandbox/aggregation-service/issues/40) by pinning Docker
+  version to 24.0.5.
+
 ## [2.1.1](https://github.com/privacysandbox/aggregation-service/compare/v2.1.0...v2.1.1) (2023-12-05)
 
 ### Changes
@@ -78,6 +213,29 @@
 - Enable uploading sharded summary report to cloud storage in parallel.
 - Stabilized script fetch_terraform.sh by cleaning up existing files.
 
+## [2.0.4](https://github.com/privacysandbox/aggregation-service/compare/v2.0.3...v2.0.4) (2024-05-20)
+
+### Changes
+
+- Updated dependencies to address security vulnerabilities.
+
+## [2.0.3](https://github.com/privacysandbox/aggregation-service/compare/v2.0.2...v2.0.3) (2024-04-09)
+
+### Changes
+
+- Fixed job failure with `ClassCastException` error.
+- Updated dependencies to address security vulnerabilities.
+
+## [2.0.2](https://github.com/privacysandbox/aggregation-service/compare/v2.0.1...v2.0.2) (2024-03-11)
+
+### Changes
+
+#### [AWS only]
+
+- Fixed enclave worker AMI
+  [build issue](https://github.com/privacysandbox/aggregation-service/issues/40) by pinning Docker
+  version to 24.0.5.
+
 ## [2.0.1](https://github.com/privacysandbox/aggregation-service/compare/v2.0.0...v2.0.1) (2023-12-05)
 
 ### Changes
diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index ff6a7b9e..976d4660 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -1,5 +1,7 @@
 # Dependencies and Licenses
 
+## AWS
+
 The deployment of the Amazon Web Services
 [Nitro Enclaves](https://aws.amazon.com/ec2/nitro/nitro-enclaves/) based Aggregation Service depends
 on several packaged artifacts listed below. These artifacts can be downloaded with the
@@ -7,374 +9,549 @@ on several packaged artifacts listed below. These artifacts can be downloaded wi
 information can be found in the
 [README](/docs/aws-aggregation-service.md#download-terraform-scripts-and-prebuilt-dependencies).
-## Packaged AWS Lambda Jars +### Packaged AWS Lambda Jars + +#### AsgCapacityHandlerLambda\_{version}.jar + + +| groupId | artifactId | Version | License | URL | +|--|--|--|--|--| +| com.amazonaws|aws-lambda-java-core|1.2.3|Apache License, Version 2.0 | | +| com.amazonaws|aws-lambda-java-events-sdk-transformer|3.1.0|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-annotations|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-core|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-databind|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.16.1|Apache License, Version 2.0 | | +| com.google.auto.value|auto-value-annotations|1.10.4|Apache License, Version 2.0 | | +| com.google.code.findbugs|jsr305|3.0.2|Apache License, Version 2.0 | | +| com.google.code.gson|gson|2.10.1|Apache License, Version 2.0 | | +| com.google.errorprone|error-prone-annotations|2.24.1|Apache License, Version 2.0 | | +| com.google.guava|failureaccess|1.0.2|Apache License, Version 2.0 | | +| com.google.guava|guava|33.0.0-jre|Apache License, Version 2.0 | | +| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|Apache License, Version 2.0 | | +| com.google.j2objc|j2objc-annotations|2.8|Apache License, Version 2.0 | | +| commons-codec|commons-codec|1.16|Apache License, Version 2.0| | +| commons-logging|commons-logging|1.3.0|Apache License, Version 2.0 | | +| io.netty|netty-buffer|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http2|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-common|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-handler|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-resolver|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport-native-classes-epoll|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-transport-native-unix-common|4.1.100.Final|Apache License, Version 2.0 | +| joda-time|joda-time|2.10.8|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpclient|4.5.14|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpcore|4.4.16|Apache License, Version 2.0 | | +| org.jctools|jctools-core|3.1.0|Apache License, Version 2.0 | | +| org.slf4j|slf4j-api|2.0.11|MIT License | | +| org.slf4j|slf4j-simple|2.0.11|MIT License | | +| software.amazon.awssdk|annotations|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|apache-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-json-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-query-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb|2.21.16|Apache License, Version 2.0 | | +| 
software.amazon.awssdk|dynamodb-enhanced|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|endpoints-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-aws|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-client-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|identity-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|json-utils|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|metrics-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|netty-nio-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|profiles|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|protocol-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|regions|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sdk-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sqs|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|url-connection-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|utils|2.21.16|Apache License, Version 2.0 | | +| software.amazon.eventstream|eventstream|1.0.1|Apache License, Version 2.0 | | + -### AwsChangeHandlerLambda\_{version}.jar +#### AwsApigatewayFrontend\_{version}.jar | groupId | artifactId | Version | License | URL | |--|--|--|--|--| -| aopalliance | aopalliance | 1.0 |Public Domain | N/A | -| com.amazonaws|aws-lambda-java-core|1.2.1|Apache License, Version 2.0 | | +| com.amazonaws|aws-lambda-java-core|1.2.3|Apache License, Version 2.0 | | +| com.amazonaws|aws-lambda-java-events|3.11.3|Apache License, Version 2.0 | | | com.amazonaws|aws-lambda-java-events-sdk-transformer|3.1.0|Apache License, Version 2.0 | | -| com.amazonaws|aws-lambda-java-events|3.8.0|Apache License, Version 2.0 | | -| com.fasterxml.jackson.core|jackson-annotations|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.core|jackson-core|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.core|jackson-databind|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.google.android|annotations|4.1.1.4|Apache 2.0 | | -| com.google.auto.service|auto-service-annotations|1|Apache 2.0 | |.txt | -| com.google.auto.service|auto-service|1|Apache 2.0 | |.txt | -| com.google.auto.value|auto-value-annotations|1.7.4|Apache 2.0 | |.txt | -| com.google.auto.value|auto-value|1.7.4|Apache 2.0 | |.txt | -| com.google.auto|auto-common|1|Apache 2.0 | |.txt | -| com.google.errorprone|error_prone_annotations|2.0.15|Apache 2.0 | |.txt | -| com.google.guava|failureaccess|1.0.1|The Apache Software License, Version 2.0 | |.txt | -| com.google.guava|guava|30.1-jre|Apache License, Version 2.0 | |.txt | -| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|The Apache Software License, Version 2.0 | |.txt | -| com.google.inject|guice|4.2.3|The Apache Software License, Version 2.0 | |.txt | -| 
com.google.j2objc|j2objc-annotations|1.3|The Apache Software License, Version 2.0 | |.txt | -| com.typesafe.netty|netty-reactive-streams-http|2.0.5|Apache License, Version 2.0 | |.txt | -| com.typesafe.netty|netty-reactive-streams|2.0.5|Apache License, Version 2.0 | |.txt | -| commons-codec|commons-codec|1.15|Apache License, Version 2.0| | -| commons-logging|commons-logging|1.1.1|The Apache Software License, Version 2.0 | |.txt | -| io.netty|netty-buffer|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-codec-http2|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-codec-http|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-codec|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-common|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-handler|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-resolver|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-transport-native-epoll|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-transport-native-unix-common|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-transport|4.1.53.Final|Apache License, Version 2.0 | | -| javax.inject|javax.inject|1|The Apache Software License, Version 2.0 | |.txt | -| joda-time|joda-time|2.6|Apache 2 | |.txt | -| org.apache.httpcomponents|httpclient|4.5.13|Apache License, Version 2.0 | |.txt | -| org.apache.httpcomponents|httpcore|4.4.14|Apache License, Version 2.0 | |.txt | -| org.checkerframework|checker-qual|3.8.0|The MIT License | | -| org.json|json|20180813|The JSON License | | -| org.reactivestreams|reactive-streams|1.0.3 | CC0 | | -| org.slf4j|slf4j-api|1.7.30|MIT License | | -| software.amazon.awssdk|annotations|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|apache-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|arns|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|auth|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-json-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-query-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-xml-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|connect|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|dynamodb-enhanced|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|dynamodb|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|http-client-spi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|kms|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|lambda|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|metrics-spi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|netty-nio-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|pi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|profiles|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|protocol-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|ram|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|regions|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|s3|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|sdk-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|sqs|2.16.104|Apache License, Version 2.0 | | -| 
software.amazon.awssdk|url-connection-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|utils|2.16.104|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-annotations|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-core|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-databind|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.16.1|Apache License, Version 2.0 | | +| com.google.auto.service|auto-service-annotations|1.1.1|Apache License, Version 2.0 | | +| com.google.auto.value|auto-value-annotations|1.10.4|Apache License, Version 2.0 | | +| com.google.code.findbugs|jsr305|3.0.2|Apache License, Version 2.0 | | +| com.google.code.gson|gson|2.10.1|Apache License, Version 2.0 | | +| com.google.errorprone|error-prone-annotations|2.24.1|Apache License, Version 2.0 | | +| com.google.guava|failureaccess|1.0.2|Apache License, Version 2.0 | | +| com.google.guava|guava|33.0.0-jre|Apache License, Version 2.0 | | +| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|Apache License, Version 2.0 | | +| com.google.j2objc|j2objc-annotations|2.8|Apache License, Version 2.0 | | +| commons-codec|commons-codec|1.16|Apache License, Version 2.0| | +| commons-logging|commons-logging|1.3.0|Apache License, Version 2.0 | | +| io.netty|netty-buffer|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http2|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-common|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-handler|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-resolver|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport-native-classes-epoll|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-transport-native-unix-common|4.1.100.Final|Apache License, Version 2.0 | +| io.vavr|vavr|0.10.2|Apache License, Version 2.0 | +| io.vavr|vavr-match|0.10.2|Apache License, Version 2.0 | +| joda-time|joda-time|2.10.8|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpclient|4.5.14|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpcore|4.4.16|Apache License, Version 2.0 | | +| org.apache.httpcomponents.client5|httpclient5|5.3|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5|5.2.4|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5-h2|5.2.4|Apache License, Version 2.0 | | +| org.jctools|jctools-core|3.1.0|Apache License, Version 2.0 | | +| org.slf4j|slf4j-api|2.0.11|MIT License | | +| org.slf4j|slf4j-simple|2.0.11|MIT License | | +| software.amazon.awssdk|annotations|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|apache-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|arns|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-json-protocol|2.21.16|Apache License, Version 2.0 | | +| 
software.amazon.awssdk|aws-query-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-xml-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|crt-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb-enhanced|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|endpoints-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-aws|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-client-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|identity-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|json-utils|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|kms|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|metrics-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|netty-nio-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|pricing|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|profiles|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|protocol-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|regions|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|s3|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sdk-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sqs|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sts|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|url-connection-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|utils|2.21.16|Apache License, Version 2.0 | | | software.amazon.eventstream|eventstream|1.0.1|Apache License, Version 2.0 | | -### aws_apigateway_frontend\_{version}.jar +#### AwsChangeHandlerLambda\_{version}.jar | groupId | artifactId | Version | License | URL | |--|--|--|--|--| -| aopalliance | aopalliance | 1.0 |Public Domain | N/A | -| com.amazonaws|aws-lambda-java-core|1.2.1|Apache License, Version 2.0 | | +| com.amazonaws|aws-lambda-java-core|1.2.3|Apache License, Version 2.0 | | | com.amazonaws|aws-lambda-java-events-sdk-transformer|3.1.0|Apache License, Version 2.0 | | -| com.amazonaws|aws-lambda-java-events|3.8.0|Apache License, Version 2.0 | | -| com.fasterxml.jackson.core|jackson-annotations|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.core|jackson-core|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.core|jackson-databind|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.google.android|annotations|4.1.1.4|Apache 2.0 | | -| com.google.auto.service|auto-service-annotations|1|Apache 2.0 | |.txt | -| com.google.auto.service|auto-service|1|Apache 2.0 | 
|.txt | -| com.google.auto.value|auto-value-annotations|1.7.4|Apache 2.0 | |.txt | -| com.google.auto.value|auto-value|1.7.4|Apache 2.0 | |.txt | -| com.google.auto|auto-common|1|Apache 2.0 | |.txt | -| com.google.errorprone|error_prone_annotations|2.0.15|Apache 2.0 | |.txt | -| com.google.guava|failureaccess|1.0.1|The Apache Software License, Version 2.0 | |.txt | -| com.google.guava|guava|30.1-jre|Apache License, Version 2.0 | |.txt | -| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|The Apache Software License, Version 2.0 | |.txt | -| com.google.inject|guice|4.2.3|The Apache Software License, Version 2.0 | |.txt | -| com.google.j2objc|j2objc-annotations|1.3|The Apache Software License, Version 2.0 | |.txt | -| com.typesafe.netty|netty-reactive-streams-http|2.0.5|Apache License, Version 2.0 | |.txt | -| com.typesafe.netty|netty-reactive-streams|2.0.5|Apache License, Version 2.0 | |.txt | -| commons-codec|commons-codec|1.15|Apache License, Version 2.0| | -| commons-logging|commons-logging|1.1.1|The Apache Software License, Version 2.0 | |.txt | -| io.netty|netty-buffer|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-codec-http2|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-codec-http|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-codec|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-common|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-handler|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-resolver|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-transport-native-epoll|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-transport-native-unix-common|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-transport|4.1.53.Final|Apache License, Version 2.0 | | -| javax.inject|javax.inject|1|The Apache Software License, Version 2.0 | |.txt | -| joda-time|joda-time|2.6|Apache 2 | |.txt | -| org.apache.httpcomponents|httpclient|4.5.13|Apache License, Version 2.0 | |.txt | -| org.apache.httpcomponents|httpcore|4.4.14|Apache License, Version 2.0 | |.txt | -| org.checkerframework|checker-qual|3.8.0|The MIT License | | -| org.json|json|20180813|The JSON License | | -| org.reactivestreams|reactive-streams|1.0.3 | CC0 | | -| org.slf4j|slf4j-api|1.7.30|MIT License | | -| software.amazon.awssdk|annotations|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|apache-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|arns|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|auth|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-json-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-query-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-xml-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|connect|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|dynamodb-enhanced|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|dynamodb|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|http-client-spi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|kms|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|lambda|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|metrics-spi|2.16.104|Apache License, Version 2.0 | | -| 
software.amazon.awssdk|netty-nio-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|pi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|profiles|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|protocol-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|ram|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|regions|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|s3|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|sdk-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|sqs|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|url-connection-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|utils|2.16.104|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-annotations|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-core|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-databind|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.16.1|Apache License, Version 2.0 | | +| com.google.auto.service|auto-service-annotations|1.1.1|Apache License, Version 2.0 | | +| com.google.auto.value|auto-value-annotations|1.10.4|Apache License, Version 2.0 | | +| com.google.auto.value|auto-value|1.10.4|Apache License, Version 2.0 | | +| com.google.code.findbugs|jsr305|3.0.2|Apache License, Version 2.0 | | +| com.google.code.gson|gson|2.10.1|Apache License, Version 2.0 | | +| com.google.errorprone|error-prone-annotations|2.24.1|Apache License, Version 2.0 | | +| com.google.guava|failureaccess|1.0.2|Apache License, Version 2.0 | | +| com.google.guava|guava|33.0.0-jre|Apache License, Version 2.0 | | +| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|Apache License, Version 2.0 | | +| com.google.j2objc|j2objc-annotations|2.8|Apache License, Version 2.0 | | +| commons-codec|commons-codec|1.16|Apache License, Version 2.0| | +| commons-logging|commons-logging|1.3.0|Apache License, Version 2.0 | | +| io.netty|netty-buffer|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http2|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-common|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-handler|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-resolver|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport-native-classes-epoll|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-transport-native-unix-common|4.1.100.Final|Apache License, Version 2.0 | +| io.vavr|vavr|0.10.2|Apache License, Version 2.0 | +| io.vavr|vavr-match|0.10.2|Apache License, Version 2.0 | +| joda-time|joda-time|2.10.8|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpclient|4.5.14|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpcore|4.4.16|Apache License, Version 2.0 | | +| org.apache.httpcomponents.client5|httpclient5|5.3|Apache License, Version 2.0 | | +| 
org.apache.httpcomponents.core5|httpcore5|5.2.4|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5-h2|5.2.4|Apache License, Version 2.0 | | +| org.jctools|jctools-core|3.1.0|Apache License, Version 2.0 | | +| org.slf4j|slf4j-api|2.0.11|MIT License | | +| org.slf4j|slf4j-simple|2.0.11|MIT License | | +| software.amazon.awssdk|annotations|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|apache-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|arns|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-json-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-query-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-xml-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|crt-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb-enhanced|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|endpoints-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-aws|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-client-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|identity-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|json-utils|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|kms|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|metrics-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|netty-nio-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|pricing|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|profiles|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|protocol-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|regions|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|s3|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sdk-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sqs|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sts|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|url-connection-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|utils|2.21.16|Apache License, Version 2.0 | | | software.amazon.eventstream|eventstream|1.0.1|Apache License, Version 2.0 | | -### AwsFrontendCleanupLambda\_{version}.jar +#### AwsFrontendCleanupLambda\_{version}.jar | groupId | artifactId | Version | License | URL | |--|--|--|--|--| -| aopalliance | aopalliance | 1.0 |Public Domain | N/A | -| com.amazonaws|aws-lambda-java-core|1.2.1|Apache License, Version 2.0 | | +| com.amazonaws|aws-lambda-java-core|1.2.3|Apache License, Version 2.0 | | +| com.amazonaws|aws-lambda-java-events|3.11.3|Apache License, Version 2.0 | | | com.amazonaws|aws-lambda-java-events-sdk-transformer|3.1.0|Apache License, Version 2.0 | | -| com.amazonaws|aws-lambda-java-events|3.8.0|Apache 
License, Version 2.0 | | -| com.fasterxml.jackson.core|jackson-annotations|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.core|jackson-core|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.core|jackson-databind|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.12.2|The Apache Software License, Version 2.0 | |.txt | -| com.google.android|annotations|4.1.1.4|Apache 2.0 | | -| com.google.auto.service|auto-service-annotations|1|Apache 2.0 | |.txt | -| com.google.auto.service|auto-service|1|Apache 2.0 | |.txt | -| com.google.auto.value|auto-value-annotations|1.7.4|Apache 2.0 | |.txt | -| com.google.auto.value|auto-value|1.7.4|Apache 2.0 | |.txt | -| com.google.auto|auto-common|1|Apache 2.0 | |.txt | -| com.google.errorprone|error_prone_annotations|2.0.15|Apache 2.0 | |.txt | -| com.google.guava|failureaccess|1.0.1|The Apache Software License, Version 2.0 | |.txt | -| com.google.guava|guava|30.1-jre|Apache License, Version 2.0 | |.txt | -| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|The Apache Software License, Version 2.0 | |.txt | -| com.google.inject|guice|4.2.3|The Apache Software License, Version 2.0 | |.txt | -| com.google.j2objc|j2objc-annotations|1.3|The Apache Software License, Version 2.0 | |.txt | -| com.typesafe.netty|netty-reactive-streams-http|2.0.5|Apache License, Version 2.0 | |.txt | -| com.typesafe.netty|netty-reactive-streams|2.0.5|Apache License, Version 2.0 | |.txt | -| commons-codec|commons-codec|1.15|Apache License, Version 2.0| | -| commons-logging|commons-logging|1.1.1|The Apache Software License, Version 2.0 | |.txt | -| io.netty|netty-buffer|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-codec-http2|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-codec-http|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-codec|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-common|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-handler|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-resolver|4.1.53.Final|Apache License, Version 2.0 | | -| io.netty|netty-transport-native-epoll|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-transport-native-unix-common|4.1.63.Final|Apache License, Version 2.0| -| io.netty|netty-transport|4.1.53.Final|Apache License, Version 2.0 | | -| javax.inject|javax.inject|1|The Apache Software License, Version 2.0 | |.txt | -| joda-time|joda-time|2.6|Apache 2 | |.txt | -| org.apache.httpcomponents|httpclient|4.5.13|Apache License, Version 2.0 | |.txt | -| org.apache.httpcomponents|httpcore|4.4.14|Apache License, Version 2.0 | |.txt | -| org.checkerframework|checker-qual|3.8.0|The MIT License | | -| org.json|json|20180813|The JSON License | | -| org.reactivestreams|reactive-streams|1.0.3 | CC0 | | -| org.slf4j|slf4j-api|1.7.30|MIT License | | -| software.amazon.awssdk|annotations|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|apache-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|arns|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|auth|2.16.104|Apache License, Version 2.0 | | -| 
software.amazon.awssdk|aws-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-json-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-query-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|aws-xml-protocol|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|connect|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|dynamodb-enhanced|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|dynamodb|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|http-client-spi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|kms|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|lambda|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|metrics-spi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|netty-nio-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|pi|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|profiles|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|protocol-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|ram|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|regions|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|s3|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|sdk-core|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|sqs|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|url-connection-client|2.16.104|Apache License, Version 2.0 | | -| software.amazon.awssdk|utils|2.16.104|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-annotations|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-core|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-databind|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.16.1|Apache License, Version 2.0 | | +| com.google.auto.service|auto-service-annotations|1.1.1|Apache License, Version 2.0 | | +| com.google.auto.value|auto-value-annotations|1.10.4|Apache License, Version 2.0 | | +| com.google.code.findbugs|jsr305|3.0.2|Apache License, Version 2.0 | | +| com.google.code.gson|gson|2.10.1|Apache License, Version 2.0 | | +| com.google.errorprone|error-prone-annotations|2.24.1|Apache License, Version 2.0 | | +| com.google.guava|failureaccess|1.0.2|Apache License, Version 2.0 | | +| com.google.guava|guava|33.0.0-jre|Apache License, Version 2.0 | | +| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|Apache License, Version 2.0 | | +| com.google.j2objc|j2objc-annotations|2.8|Apache License, Version 2.0 | | +| commons-codec|commons-codec|1.16|Apache License, Version 2.0| | +| commons-logging|commons-logging|1.3.0|Apache License, Version 2.0 | | +| io.netty|netty-buffer|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http2|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-common|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-handler|4.1.100.Final|Apache License, Version 2.0 
| | +| io.netty|netty-resolver|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport-native-classes-epoll|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-transport-native-unix-common|4.1.100.Final|Apache License, Version 2.0 | +| io.vavr|vavr|0.10.2|Apache License, Version 2.0 | +| io.vavr|vavr-match|0.10.2|Apache License, Version 2.0 | +| joda-time|joda-time|2.10.8|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpclient|4.5.14|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpcore|4.4.16|Apache License, Version 2.0 | | +| org.apache.httpcomponents.client5|httpclient5|5.3|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5|5.2.4|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5-h2|5.2.4|Apache License, Version 2.0 | | +| org.jctools|jctools-core|3.1.0|Apache License, Version 2.0 | | +| org.slf4j|slf4j-api|2.0.11|MIT License | | +| org.slf4j|slf4j-simple|2.0.11|MIT License | | +| software.amazon.awssdk|annotations|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|apache-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|arns|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-json-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-query-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-xml-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|crt-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb-enhanced|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|endpoints-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-aws|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-client-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|identity-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|json-utils|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|kms|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|metrics-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|netty-nio-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|pricing|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|profiles|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|protocol-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|regions|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|s3|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sdk-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sqs|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sts|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|url-connection-client|2.21.16|Apache License, Version 2.0 | 
| +| software.amazon.awssdk|utils|2.21.16|Apache License, Version 2.0 | | +| software.amazon.eventstream|eventstream|1.0.1|Apache License, Version 2.0 | | + + +#### TerminatedInstanceHandlerLambda\_{version}.jar + + +| groupId | artifactId | Version | License | URL | +|--|--|--|--|--| +| com.amazonaws|aws-lambda-java-core|1.2.3|Apache License, Version 2.0 | | +| com.amazonaws|aws-lambda-java-events|3.11.3|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-annotations|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-core|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-databind|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.16.1|Apache License, Version 2.0 | | +| com.google.auto.value|auto-value-annotations|1.10.4|Apache License, Version 2.0 | | +| com.google.code.findbugs|jsr305|3.0.2|Apache License, Version 2.0 | | +| com.google.code.gson|gson|2.10.1|Apache License, Version 2.0 | | +| com.google.errorprone|error-prone-annotations|2.24.1|Apache License, Version 2.0 | | +| com.google.guava|failureaccess|1.0.2|Apache License, Version 2.0 | | +| com.google.guava|guava|33.0.0-jre|Apache License, Version 2.0 | | +| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|Apache License, Version 2.0 | | +| com.google.j2objc|j2objc-annotations|2.8|Apache License, Version 2.0 | | +| commons-codec|commons-codec|1.16|Apache License, Version 2.0| | +| commons-logging|commons-logging|1.3.0|Apache License, Version 2.0 | | +| io.netty|netty-buffer|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http2|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-common|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-handler|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-resolver|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport-native-classes-epoll|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-transport-native-unix-common|4.1.100.Final|Apache License, Version 2.0 | +| joda-time|joda-time|2.10.8|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpclient|4.5.14|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpcore|4.4.16|Apache License, Version 2.0 | | +| org.jctools|jctools-core|3.1.0|Apache License, Version 2.0 | | +| org.slf4j|slf4j-api|2.0.11|MIT License | | +| org.slf4j|slf4j-simple|2.0.11|MIT License | | +| software.amazon.awssdk|annotations|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|apache-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|autoscaling|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-json-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|aws-query-protocol|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|checksums|2.21.16|Apache License, Version 2.0 | | +| 
software.amazon.awssdk|checksums-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|dynamodb-enhanced|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|endpoints-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-aws|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-auth-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|http-client-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|identity-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|json-utils|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|metrics-spi|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|netty-nio-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|profiles|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|protocol-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|regions|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sdk-core|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|sqs|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|url-connection-client|2.21.16|Apache License, Version 2.0 | | +| software.amazon.awssdk|utils|2.21.16|Apache License, Version 2.0 | | | software.amazon.eventstream|eventstream|1.0.1|Apache License, Version 2.0 | | -### AsgCapacityHandlerLambda\_{version}.jar +## GCP + +The deployment of the Google Cloud Platform +[Confidential Space](https://cloud.google.com/blog/products/identity-security/confidential-space-is-ga) +based Aggregation Service depends on several packaged artifacts listed below. These artifacts can be +downloaded with the +[download_prebuilt_dependencies.sh](/terraform/gcp/download_prebuilt_dependencies.sh) script. More +information can be found in the +[README](/docs/gcp-aggregation-service.md#download-terraform-scripts-and-prebuilt-dependencies). 
+ +### Packaged GCP Cloud Function Jars + +#### FrontendServiceHttpCloudFunction\_{version}.jar | groupId | artifactId | Version | License | URL | |--|--|--|--|--| -| aopalliance | aopalliance | 1 | Public Domain | -| com.amazonaws | aws-lambda-java-core | 1.2.1 | Apache License, Version 2.0 | | -| com.amazonaws | aws-lambda-java-events | 3.8.0 | Apache License, Version 2.0 | | -| com.fasterxml.jackson.core | jackson-annotations | 2.12.2 | The Apache Software License, Version 2.0 | | -| com.fasterxml.jackson.core | jackson-core | 2.12.2 | The Apache Software License, Version 2.0 | | -| com.fasterxml.jackson.core | jackson-databind | 2.12.2 | The Apache Software License, Version 2.0 | | -| com.google.android | annotations | 4.1.1.4 | Apache 2.0 | | -| com.google.auto.value | auto-value-annotations | 1.7.4 | Apache 2.0 | | -| com.google.auto.value | auto-value | 1.7.4 | Apache 2.0 | | -| com.google.code.findbugs | jsr305 | 3.0.2 | The Apache Software License, Version 2.0 | | -| com.google.errorprone | error_prone_annotations | 2.0.15 | Apache 2.0 | | -| com.google.guava | failureaccess | 1.0.1 | The Apache Software License, Version 2.0 | | -| com.google.guava | guava | 30.1-jre | Apache License, Version 2.0 | | -| com.google.guava | listenablefuture | 9999.0-empty-to-avoid-conflict-with-guava | The Apache Software License, Version 2.0 | | -| com.google.inject | guice | 4.2.3 | The Apache Software License, Version 2.0 | | -| com.google.j2objc | j2objc-annotations | 1.3 | The Apache Software License, Version 2.0 | | -| com.typesafe.netty | netty-reactive-streams-http | 2.0.5 | Apache License, Version 2.0 | | -| com.typesafe.netty | netty-reactive-streams | 2.0.5 | Apache License, Version 2.0 | | -| commons-codec | commons-codec | 1.15 | Apache License, Version 2.0 | | -| commons-logging | commons-logging | 1.1.1 | The Apache Software License, Version 2.0 | | -| io.netty | netty-buffer | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-codec-http2 | 4.1.63.Final | Apache License, Version 2.0 | | -| io.netty | netty-codec-http | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-codec | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-common | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-handler | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-resolver | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-transport-native-epoll | 4.1.63.Final | Apache License, Version 2.0 | | -| io.netty | netty-transport-native-unix-common | 4.1.63.Final | Apache License, Version 2.0 | | -| io.netty | netty-transport | 4.1.53.Final | Apache License, Version 2.0 | | -| javax.inject | javax.inject | 1 | The Apache Software License, Version 2.0 | | -| joda-time | joda-time | 2.6 | Apache 2 | | -| org.apache.httpcomponents | httpclient | 4.5.13 | Apache License, Version 2.0 | | -| org.apache.httpcomponents | httpcore | 4.4.14 | Apache License, Version 2.0 | | -| org.checkerframework | checker-qual | 3.8.0 | The MIT License | | -| org.reactivestreams | reactive-streams | 1.0.3 | CC0 | | -| org.slf4j | slf4j-api | 1.7.30 | MIT License | | -| org.slf4j | slf4j-simple | 1.7.30 | MIT License | | -| software.amazon.awssdk | annotations | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | apache-client | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | auth | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | autoscaling | 2.16.104 | Apache License, 
Version 2.0 | | -| software.amazon.awssdk | aws-core | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | aws-query-protocol | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | connect | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | http-client-spi | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | lambda | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | metrics-spi | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | netty-nio-client | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | pi | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | profiles | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | protocol-core | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | ram | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | regions | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | sdk-core | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | sqs | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | url-connection-client | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | utils | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.eventstream | eventstream | 1.0.1 | Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-annotations|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-core|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-databind|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.16.1|Apache License, Version 2.0 | | +| com.google.android|annotations|4.1.1.4|Apache License, Version 2.0 | | +| com.google.api|gax|2.41.0|BSD 3-Clause | | +| com.google.api|gax-grpc|2.41.0|BSD 3-Clause | | +| com.google.api|gax-httpjson|2.41.0|BSD 3-Clause | | +| com.google.api-client|google-api-client|2.2.0|Apache License, Version 2.0 | | +| com.google.api.grpc|grpc-google-cloud-spanner-admin-database-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|grpc-google-cloud-spanner-admin-instance-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|grpc-google-cloud-spanner-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|grpc-google-common-protos|2.31.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-pubsub-v1|1.108.2|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-admin-database-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-admin-instance-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-executor-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-common-protos|2.32.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-iam-v1|1.27.0|Apache License, Version 2.0 | | +| com.google.auth|google-auth-library-credentials|1.22.0|BSD 3-Clause | | +| com.google.auth|google-auth-library-oauth2-http|1.22.0|BSD 3-Clause | | +| 
com.google.auto.value|auto-value-annotations|1.10.4|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-core|2.31.0|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-core-grpc|2.31.0|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-pubsub|1.126.2|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-spanner|6.56.0|Apache License, Version 2.0 | | +| com.google.cloud.functions|functions-framework-api|1.1.0|Apache License, Version 2.0 | | +| com.google.code.findbugs|jsr305|3.0.2|Apache License, Version 2.0 | | +| com.google.code.gson|gson|2.10.1|Apache License, Version 2.0 | | +| com.google.errorprone|error-prone-annotations|2.24.1|Apache License, Version 2.0 | | +| com.google.guava|failureaccess|1.0.2|Apache License, Version 2.0 | | +| com.google.guava|guava|33.0.0-jre|Apache License, Version 2.0 | | +| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|Apache License, Version 2.0 | | +| com.google.http-client|google-http-client|1.43.3|Apache License, Version 2.0 | | +| com.google.http-client|google-http-client-gson|1.43.3|Apache License, Version 2.0 | | +| com.google.j2objc|j2objc-annotations|2.8|Apache License, Version 2.0 | | +| com.google.oauth-client|google-oauth-client|1.35.0|Apache License, Version 2.0 | | +| commons-codec|commons-codec|1.16|Apache License, Version 2.0| | +| commons-logging|commons-logging|1.3.0|Apache License, Version 2.0 | | +| io.cloudevents|cloudevents-api|2.5.0|Apache License, Version 2.0 | | +| io.netty|netty-buffer|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http2|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-common|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-handler|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-resolver|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport-native-classes-epoll|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-transport-native-unix-common|4.1.100.Final|Apache License, Version 2.0 | +| io.vavr|vavr|0.10.2|Apache License, Version 2.0 | +| io.vavr|vavr-match|0.10.2|Apache License, Version 2.0 | +| javax.annotation|javax.annotation-api|1.3.2|CDDL, GPL 2.0 | | +| org.apache.httpcomponents|httpclient|4.5.14|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpcore|4.4.16|Apache License, Version 2.0 | | +| org.apache.httpcomponents.client5|httpclient5|5.3|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5|5.2.4|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5-h2|5.2.4|Apache License, Version 2.0 | | +| org.codehaus.mojo|animal-sniffer-annotations|1.23|MIT License | | +| org.jctools|jctools-core|3.1.0|Apache License, Version 2.0 | | +| org.slf4j|slf4j-api|2.0.11|MIT License | | +| org.slf4j|slf4j-simple|2.0.11|MIT License | | +| org.threeten|threetenbp|1.6.8|BSD 3-Clause | | -### TerminatedInstanceHandlerLambda\_{version}.jar +#### WorkerScaleInCloudFunction\_{version}.jar | groupId | artifactId | Version | License | URL | |--|--|--|--|--| -| aopalliance | aopalliance | 1 | Public Domain | -| com.amazonaws | aws-lambda-java-core | 1.2.1 | Apache License, Version 2.0 | | -| com.amazonaws | aws-lambda-java-events | 3.8.0 | Apache License, Version 2.0 | | -| 
com.fasterxml.jackson.core | jackson-annotations | 2.12.2 | The Apache Software License, Version 2.0 | | -| com.fasterxml.jackson.core | jackson-core | 2.12.2 | The Apache Software License, Version 2.0 | | -| com.fasterxml.jackson.core | jackson-databind | 2.12.2 | The Apache Software License, Version 2.0 | | -| com.google.android | annotations | 4.1.1.4 | Apache 2.0 | | -| com.google.auto.value | auto-value-annotations | 1.7.4 | Apache 2.0 | | -| com.google.auto.value | auto-value | 1.7.4 | Apache 2.0 | | -| com.google.code.findbugs | jsr305 | 3.0.2 | The Apache Software License, Version 2.0 | | -| com.google.errorprone | error_prone_annotations | 2.0.15 | Apache 2.0 | | -| com.google.guava | failureaccess | 1.0.1 | The Apache Software License, Version 2.0 | | -| com.google.guava | guava | 30.1-jre | Apache License, Version 2.0 | | -| com.google.guava | listenablefuture | 9999.0-empty-to-avoid-conflict-with-guava | The Apache Software License, Version 2.0 | | -| com.google.inject | guice | 4.2.3 | The Apache Software License, Version 2.0 | | -| com.google.j2objc | j2objc-annotations | 1.3 | The Apache Software License, Version 2.0 | | -| com.typesafe.netty | netty-reactive-streams-http | 2.0.5 | Apache License, Version 2.0 | | -| com.typesafe.netty | netty-reactive-streams | 2.0.5 | Apache License, Version 2.0 | | -| commons-codec | commons-codec | 1.15 | Apache License, Version 2.0 | | -| commons-logging | commons-logging | 1.1.1 | The Apache Software License, Version 2.0 | | -| io.netty | netty-buffer | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-codec-http2 | 4.1.63.Final | Apache License, Version 2.0 | | -| io.netty | netty-codec-http | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-codec | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-common | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-handler | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-resolver | 4.1.53.Final | Apache License, Version 2.0 | | -| io.netty | netty-transport-native-epoll | 4.1.63.Final | Apache License, Version 2.0 | | -| io.netty | netty-transport-native-unix-common | 4.1.63.Final | Apache License, Version 2.0 | | -| io.netty | netty-transport | 4.1.53.Final | Apache License, Version 2.0 | | -| javax.inject | javax.inject | 1 | The Apache Software License, Version 2.0 | | -| joda-time | joda-time | 2.6 | Apache 2 | | -| org.apache.httpcomponents | httpclient | 4.5.13 | Apache License, Version 2.0 | | -| org.apache.httpcomponents | httpcore | 4.4.14 | Apache License, Version 2.0 | | -| org.checkerframework | checker-qual | 3.8.0 | The MIT License | | -| org.reactivestreams | reactive-streams | 1.0.3 | CC0 | | -| org.slf4j | slf4j-api | 1.7.30 | MIT License | | -| org.slf4j | slf4j-simple | 1.7.30 | MIT License | | -| software.amazon.awssdk | annotations | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | apache-client | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | auth | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | autoscaling | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | aws-core | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | aws-query-protocol | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | connect | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | http-client-spi | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | 
lambda | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | metrics-spi | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | netty-nio-client | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | pi | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | profiles | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | protocol-core | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | ram | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | regions | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | sdk-core | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | sqs | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | url-connection-client | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.awssdk | utils | 2.16.104 | Apache License, Version 2.0 | | -| software.amazon.eventstream | eventstream | 1.0.1 | Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-annotations|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-core|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.core|jackson-databind|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-guava|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jdk8|2.16.1|Apache License, Version 2.0 | | +| com.fasterxml.jackson.datatype|jackson-datatype-jsr310|2.16.1|Apache License, Version 2.0 | | +| com.google.android|annotations|4.1.1.4|Apache License, Version 2.0 | | +| com.google.api|gax|2.41.0|BSD 3-Clause | | +| com.google.api|gax-grpc|2.41.0|BSD 3-Clause | | +| com.google.api|gax-httpjson|2.41.0|BSD 3-Clause | | +| com.google.api.grpc|grpc-google-cloud-spanner-admin-database-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|grpc-google-cloud-spanner-admin-instance-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|grpc-google-cloud-spanner-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|grpc-google-common-protos|2.31.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloudcompute-v1|1.44.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-pubsub-v1|1.108.2|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-admin-database-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-admin-instance-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-executor-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-cloud-spanner-v1|6.56.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-common-protos|2.32.0|Apache License, Version 2.0 | | +| com.google.api.grpc|proto-google-iam-v1|1.27.0|Apache License, Version 2.0 | | +| com.google.auth|google-auth-library-credentials|1.22.0|BSD 3-Clause | | +| com.google.auth|google-auth-library-oauth2-http|1.22.0|BSD 3-Clause | | +| com.google.auto.value|auto-value-annotations|1.10.4|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-compute|1.44.0|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-core|2.31.0|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-core-grpc|2.31.0|Apache License, Version 2.0 | | +| com.google.cloud|google-cloud-spanner|6.56.0|Apache License, Version 2.0 | | +| 
com.google.cloud.functions|functions-framework-api|1.1.0|Apache License, Version 2.0 | | +| com.google.code.findbugs|jsr305|3.0.2|Apache License, Version 2.0 | | +| com.google.code.gson|gson|2.10.1|Apache License, Version 2.0 | | +| com.google.errorprone|error-prone-annotations|2.24.1|Apache License, Version 2.0 | | +| com.google.guava|failureaccess|1.0.2|Apache License, Version 2.0 | | +| com.google.guava|guava|33.0.0-jre|Apache License, Version 2.0 | | +| com.google.guava|listenablefuture|9999.0-empty-to-avoid-conflict-with-guava|Apache License, Version 2.0 | | +| com.google.http-client|google-http-client|1.43.3|Apache License, Version 2.0 | | +| com.google.http-client|google-http-client-gson|1.43.3|Apache License, Version 2.0 | | +| com.google.j2objc|j2objc-annotations|2.8|Apache License, Version 2.0 | | +| com.google.oauth-client|google-oauth-client|1.35.0|Apache License, Version 2.0 | | +| commons-codec|commons-codec|1.16|Apache License, Version 2.0| | +| commons-logging|commons-logging|1.3.0|Apache License, Version 2.0 | | +| io.cloudevents|cloudevents-api|2.5.0|Apache License, Version 2.0 | | +| io.netty|netty-buffer|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-codec-http2|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-common|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-handler|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-resolver|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport|4.1.100.Final|Apache License, Version 2.0 | | +| io.netty|netty-transport-native-classes-epoll|4.1.100.Final|Apache License, Version 2.0| +| io.netty|netty-transport-native-unix-common|4.1.100.Final|Apache License, Version 2.0 | +| io.vavr|vavr|0.10.2|Apache License, Version 2.0 | +| io.vavr|vavr-match|0.10.2|Apache License, Version 2.0 | +| javax.annotation|javax.annotation-api|1.3.2|CDDL, GPL 2.0 | | +| org.apache.httpcomponents|httpclient|4.5.14|Apache License, Version 2.0 | | +| org.apache.httpcomponents|httpcore|4.4.16|Apache License, Version 2.0 | | +| org.apache.httpcomponents.client5|httpclient5|5.3|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5|5.2.4|Apache License, Version 2.0 | | +| org.apache.httpcomponents.core5|httpcore5-h2|5.2.4|Apache License, Version 2.0 | | +| org.codehaus.mojo|animal-sniffer-annotations|1.23|MIT License | | +| org.jctools|jctools-core|3.1.0|Apache License, Version 2.0 | | +| org.slf4j|slf4j-api|2.0.11|MIT License | | +| org.slf4j|slf4j-simple|2.0.11|MIT License | | +| org.threeten|threetenbp|1.6.8|BSD 3-Clause | | ## License of artifacts in this repository -Apache 2.0 - See [LICENSE](./LICENSE) for more information. +Apache License, Version 2.0 - See [LICENSE](./LICENSE) for more information. diff --git a/VERSION b/VERSION index 8e8299dc..24ba9a38 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.4.2 +2.7.0 diff --git a/WORKSPACE b/WORKSPACE index 5fa84963..f8e2f725 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -16,12 +16,15 @@ http_archive( url = "https://github.com/bazelbuild/rules_jvm_external/archive/%s.zip" % RULES_JVM_EXTERNAL_TAG, ) +# IMPORTANT: If you added dependencies and/or updated dependency versions below, run +# `$ bazel run @unpinned_maven//:pin` and include `maven_install.json` in your change. + # Declare explicit protobuf version, to override any implicit dependencies. 
PROTOBUF_CORE_VERSION = "3.25.2" PROTOBUF_SHA_256 = "3c83e4301b968d0b4f29a0c29c0b3cde1da81d790ffd344b111c523ba1954392" -COORDINATOR_VERSION = "v1.5.1" # version updated on 2024-01-09 +COORDINATOR_VERSION = "v1.9.0-rc03" # version updated on 2024-07-17 JACKSON_VERSION = "2.16.1" @@ -76,15 +79,14 @@ git_repository( remote = "https://github.com/privacysandbox/coordinator-services-and-shared-libraries", patches = [ "//build_defs/shared_libraries:coordinator.patch", - "//build_defs/shared_libraries:gcs_storage_client.patch", - "//build_defs/shared_libraries:dependency_update.patch", - "//build_defs/shared_libraries:key_cache_ttl.patch", - "//build_defs/shared_libraries:pin_pkr_docker.patch", + "//build_defs/shared_libraries:rules_pkg_build_fix.patch", ], tag = COORDINATOR_VERSION, workspace_file = "@shared_libraries_workspace//file", ) +# IMPORTANT: If you added dependencies and/or updated dependency versions below, run +# `$ bazel run @unpinned_maven//:pin` and include `maven_install.json` in your change. OTEL_ARTIFACTS = [ "com.google.errorprone:error_prone_annotations:2.+", "io.opentelemetry:opentelemetry-api:" + OTEL_VERSION, @@ -99,6 +101,8 @@ OTEL_ARTIFACTS = [ "io.opentelemetry.contrib:opentelemetry-aws-xray:" + OTEL_VERSION, ] +# IMPORTANT: If you added dependencies and/or updated dependency versions below, run +# `$ bazel run @unpinned_maven//:pin` and include `maven_install.json` in your change. maven_install( artifacts = [ "com.amazonaws:aws-lambda-java-core:1.2.3", @@ -137,6 +141,9 @@ maven_install( "com.google.api.grpc:proto-google-cloud-compute-v1:1.44.0", "com.google.cloud.functions:functions-framework-api:1.1.0", "commons-logging:commons-logging:1.3.0", + "com.google.api.grpc:proto-google-common-protos:2.34.0", + "com.google.cloud:google-cloud-trace:2.35.0", + "com.google.api.grpc:proto-google-cloud-trace-v1:2.35.0", "com.google.api:gax:" + GOOGLE_GAX_VERSION, "com.google.http-client:google-http-client-jackson2:1.43.3", "io.reactivex.rxjava3:rxjava:3.1.8", @@ -157,6 +164,7 @@ maven_install( "io.github.resilience4j:resilience4j-retry:1.7.1", "junit:junit:4.13.2", "org.apache.avro:avro:1.11.3", + "org.apache.commons:commons-compress:1.26.2", "org.apache.commons:commons-math3:3.6.1", "org.apache.httpcomponents:httpcore:4.4.16", "org.apache.httpcomponents:httpclient:4.5.14", @@ -191,7 +199,7 @@ maven_install( "software.amazon.awssdk:utils:" + AWS_SDK_VERSION, "software.amazon.awssdk:auth:" + AWS_SDK_VERSION, "software.amazon.awssdk:lambda:" + AWS_SDK_VERSION, - "com.google.crypto.tink:tink:1.12.0", + "com.google.crypto.tink:tink:1.13.0", "com.google.crypto.tink:tink-gcpkms:1.9.0", "com.google.oauth-client:google-oauth-client:1.35.0", ] + OTEL_ARTIFACTS, diff --git a/build-scripts/DEBIAN_CONTAINER_DIGEST b/build-scripts/DEBIAN_CONTAINER_DIGEST index 48daa319..81df05b0 100644 --- a/build-scripts/DEBIAN_CONTAINER_DIGEST +++ b/build-scripts/DEBIAN_CONTAINER_DIGEST @@ -1 +1 @@ -sha256:346dd1cba3caf44de9467ae428a9d38573f14665408acb80a615e2a7c3f9a2a4 +sha256:16112ae93b810eb1ec6d1db6e01835d2444c8ca99aa678e03dd104ea3ec68408 diff --git a/build-scripts/aws/publish.sh b/build-scripts/aws/publish.sh index c41d18f6..84c17e98 100644 --- a/build-scripts/aws/publish.sh +++ b/build-scripts/aws/publish.sh @@ -36,3 +36,7 @@ bazel run //terraform/aws:aws_frontend_cleanup_handler_lambda_release \ bazel run //terraform/aws:local_testing_tool_release \ --//terraform/aws:bucket_flag=$JARS_PUBLISH_BUCKET --//terraform/aws:bucket_path_flag=$JARS_PUBLISH_BUCKET_PATH \ -- --version=$VERSION + +bazel run 
//terraform/aws:privacy_budget_unit_extraction_tool_release \ +--//terraform/aws:bucket_flag=$JARS_PUBLISH_BUCKET --//terraform/aws:bucket_path_flag=$JARS_PUBLISH_BUCKET_PATH \ +-- --version=$VERSION diff --git a/build-scripts/gcp/build-container/Dockerfile b/build-scripts/gcp/build-container/Dockerfile index bf7f82d4..007679f1 100644 --- a/build-scripts/gcp/build-container/Dockerfile +++ b/build-scripts/gcp/build-container/Dockerfile @@ -54,13 +54,13 @@ RUN \ echo \ "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \ $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null && \ - apt-get -y update && \ + apt-get update && \ apt-get install -y docker-ce docker-ce-cli RUN \ echo "deb [signed-by=/usr/share/keyrings/cloud.google.asc] https://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | tee /usr/share/keyrings/cloud.google.asc && \ - apt-get update && apt-get install google-cloud-cli && \ + apt-get update && apt-get -y install google-cloud-cli && \ apt-get -y autoclean && apt-get -y autoremove # Store the Bazel outputs under /workspace so that the symlinks under bazel-bin (et al) are accessible diff --git a/build-scripts/gcp/cloudbuild.yaml b/build-scripts/gcp/cloudbuild.yaml index d4661bfe..af59f7b4 100644 --- a/build-scripts/gcp/cloudbuild.yaml +++ b/build-scripts/gcp/cloudbuild.yaml @@ -15,7 +15,7 @@ steps: - name: '$_BUILD_IMAGE_REPO_PATH/bazel-build-container:$_VERSION' script: | - bazel run worker/gcp:worker_mp_gcp_prod -- -dst "$_IMAGE_REPO_PATH/$_IMAGE_NAME:$_IMAGE_TAG" + bazel run worker/gcp:worker_mp_gcp_g3p_prod -- -dst "$_IMAGE_REPO_PATH/$_IMAGE_NAME:$_IMAGE_TAG" bazel run //terraform/gcp:frontend_service_http_cloud_function_release \ --//terraform/gcp:bucket_flag=$_JARS_PUBLISH_BUCKET --//terraform/gcp:bucket_path_flag=$_JARS_PUBLISH_BUCKET_PATH \ -- --version=$_VERSION @@ -25,6 +25,9 @@ steps: bazel run //terraform/gcp:local_testing_tool_release \ --//terraform/gcp:bucket_flag=$_JARS_PUBLISH_BUCKET --//terraform/gcp:bucket_path_flag=$_JARS_PUBLISH_BUCKET_PATH \ -- --version=$_VERSION + bazel run //terraform/gcp:privacy_budget_unit_extraction_tool_release \ + --//terraform/gcp:bucket_flag=$_JARS_PUBLISH_BUCKET --//terraform/gcp:bucket_path_flag=$_JARS_PUBLISH_BUCKET_PATH \ + -- --version=$_VERSION automapSubstitutions: true options: diff --git a/build_defs/container_dependencies.bzl b/build_defs/container_dependencies.bzl index ef4184f1..153cb435 100644 --- a/build_defs/container_dependencies.bzl +++ b/build_defs/container_dependencies.bzl @@ -24,11 +24,11 @@ # - java_base: Distroless image for running Java. 
################################################################################ -# Updated as of: 2024-04-11 +# Updated as of: 2024-07-26 CONTAINER_DEPS = { "amazonlinux_2": { - "digest": "sha256:0752515c545922fa6f1fff89d8ca181e8f4071aa9b4b0551243b50e987e0f60e", + "digest": "sha256:b2ed30084a71c34c0f41a5add7dd623a2e623f2c3b50117c720bbc02d7653fa1", "registry": "index.docker.io", "repository": "amazonlinux", }, @@ -38,7 +38,7 @@ CONTAINER_DEPS = { "repository": "aws-observability/aws-otel-collector", }, "java_base": { - "digest": "sha256:64967fe3051702640c68bd434813b91a3fc9182f8894962f7638f79a5986c31d", + "digest": "sha256:c7846b62436ccf2961972fea5b776527610a1a51b48d8e7b434287146904cf2d", "registry": "gcr.io", "repository": "distroless/java17-debian11", }, diff --git a/build_defs/shared_libraries/dependency_update.patch b/build_defs/shared_libraries/dependency_update.patch deleted file mode 100644 index 65332cba..00000000 --- a/build_defs/shared_libraries/dependency_update.patch +++ /dev/null @@ -1,49 +0,0 @@ -diff --git a/java/com/google/scp/operator/cpio/cryptoclient/BUILD b/java/com/google/scp/operator/cpio/cryptoclient/BUILD -index c4a8c5ad4..169b610e9 100644 ---- a/java/com/google/scp/operator/cpio/cryptoclient/BUILD -+++ b/java/com/google/scp/operator/cpio/cryptoclient/BUILD -@@ -124,6 +124,7 @@ java_library( - "//java/external:autovalue_annotations", - "//java/external:google_api_client", - "//java/external:google_auth_library_oauth2_http", -+ "//java/external:google_oauth_client", - "//java/external:guava", - "//java/external:guice", - "//java/external:jackson_annotations", -diff --git a/java/com/google/scp/operator/frontend/service/gcp/BUILD b/java/com/google/scp/operator/frontend/service/gcp/BUILD -index e9ba9df40..ae3d96cc2 100644 ---- a/java/com/google/scp/operator/frontend/service/gcp/BUILD -+++ b/java/com/google/scp/operator/frontend/service/gcp/BUILD -@@ -40,6 +40,7 @@ java_library( - "//java/external:gcp_cloud_function", - "//java/external:google_api_client", - "//java/external:google_http_client_gson", -+ "//java/external:google_oauth_client", - "//java/external:guava", - "//java/external:guice", - "//java/external:jackson_core", -diff --git a/java/com/google/scp/shared/gcp/util/BUILD b/java/com/google/scp/shared/gcp/util/BUILD -index 6fd4c40ea..2a731fad4 100644 ---- a/java/com/google/scp/shared/gcp/util/BUILD -+++ b/java/com/google/scp/shared/gcp/util/BUILD -@@ -29,6 +29,7 @@ java_library( - "//java/external:apache_httpcore5", - "//java/external:gcp_cloud_function", - "//java/external:google_auth_library_oauth2_http", -+ "//java/external:google_oauth_client", - "//java/external:guava", - "//java/external:jackson_core", - "//java/external:jackson_databind", -diff --git a/java/external/BUILD b/java/external/BUILD -index b77e89996..a03059f43 100644 ---- a/java/external/BUILD -+++ b/java/external/BUILD -@@ -707,3 +707,8 @@ alias( - name = "grcp_netty_shaded", - actual = "@maven//:io_grpc_grpc_netty_shaded", - ) -+ -+alias( -+ name = "google_oauth_client", -+ actual = "@maven//:com_google_oauth_client_google_oauth_client", -+) diff --git a/build_defs/shared_libraries/gcs_storage_client.patch b/build_defs/shared_libraries/gcs_storage_client.patch deleted file mode 100644 index 13ca8791..00000000 --- a/build_defs/shared_libraries/gcs_storage_client.patch +++ /dev/null @@ -1,24 +0,0 @@ -diff --git a/java/com/google/scp/operator/cpio/blobstorageclient/gcp/GcsBlobStorageClient.java b/java/com/google/scp/operator/cpio/blobstorageclient/gcp/GcsBlobStorageClient.java -index 
06e01f56f..c1fb97464 100644 ---- a/java/com/google/scp/operator/cpio/blobstorageclient/gcp/GcsBlobStorageClient.java -+++ b/java/com/google/scp/operator/cpio/blobstorageclient/gcp/GcsBlobStorageClient.java -@@ -33,7 +33,6 @@ import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation.BlobSto - import java.io.IOException; - import java.io.InputStream; - import java.nio.channels.Channels; --import java.nio.file.Files; - import java.nio.file.Path; - import java.util.Arrays; - import java.util.Optional; -@@ -79,9 +78,9 @@ public final class GcsBlobStorageClient implements BlobStorageClient { - Storage storageClient = createGcsClient(accountIdentity, Scope.READ_AND_WRITE); - BlobStoreDataLocation blobLocation = location.blobStoreDataLocation(); - try { -- storageClient.create( -+ storageClient.createFrom( - BlobInfo.newBuilder(BlobId.of(blobLocation.bucket(), blobLocation.key())).build(), -- Files.readAllBytes(filePath)); -+ filePath); - } catch (IOException exception) { - throw new BlobStorageClientException(exception); - } diff --git a/build_defs/shared_libraries/key_cache_ttl.patch b/build_defs/shared_libraries/key_cache_ttl.patch deleted file mode 100644 index f0b0aa4c..00000000 --- a/build_defs/shared_libraries/key_cache_ttl.patch +++ /dev/null @@ -1,112 +0,0 @@ -diff --git a/java/com/google/scp/operator/cpio/cryptoclient/Annotations.java b/java/com/google/scp/operator/cpio/cryptoclient/Annotations.java -index b4b52bb..b3148ff 100644 ---- a/java/com/google/scp/operator/cpio/cryptoclient/Annotations.java -+++ b/java/com/google/scp/operator/cpio/cryptoclient/Annotations.java -@@ -56,4 +56,12 @@ - @Target({FIELD, PARAMETER, METHOD}) - @Retention(RUNTIME) - public @interface CoordinatorBEncryptionKeyServiceBaseUrl {} -+ -+ /** -+ * TTL for private key cache. To be used only for load testing and should not be used by clients. 
-+ */ -+ @BindingAnnotation -+ @Target({FIELD, PARAMETER, METHOD}) -+ @Retention(RUNTIME) -+ public @interface DecrypterCacheEntryTtlSec {} - } -diff --git a/java/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImpl.java b/java/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImpl.java -index d4cd66d..e72ca30 100644 ---- a/java/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImpl.java -+++ b/java/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImpl.java -@@ -36,6 +36,7 @@ - import com.google.scp.coordinator.protos.keymanagement.shared.api.v1.KeyDataProto.KeyData; - import com.google.scp.operator.cpio.cryptoclient.Annotations.CoordinatorAAead; - import com.google.scp.operator.cpio.cryptoclient.Annotations.CoordinatorBAead; -+import com.google.scp.operator.cpio.cryptoclient.Annotations.DecrypterCacheEntryTtlSec; - import com.google.scp.operator.cpio.cryptoclient.EncryptionKeyFetchingService.EncryptionKeyFetchingServiceException; - import com.google.scp.operator.cpio.cryptoclient.model.ErrorReason; - import com.google.scp.shared.api.exception.ServiceException; -@@ -56,24 +57,13 @@ - public final class MultiPartyDecryptionKeyServiceImpl implements DecryptionKeyService { - - private static final int MAX_CACHE_SIZE = 100; -- private static final long CACHE_ENTRY_TTL_SEC = 3600; -+ private final long decrypterCacheEntryTtlSec; - private static final int CONCURRENCY_LEVEL = Runtime.getRuntime().availableProcessors(); - private final CloudAeadSelector coordinatorAAeadService; - private final CloudAeadSelector coordinatorBAeadService; - private final EncryptionKeyFetchingService coordinatorAEncryptionKeyFetchingService; - private final EncryptionKeyFetchingService coordinatorBEncryptionKeyFetchingService; -- private final LoadingCache decypterCache = -- CacheBuilder.newBuilder() -- .maximumSize(MAX_CACHE_SIZE) -- .expireAfterWrite(CACHE_ENTRY_TTL_SEC, TimeUnit.SECONDS) -- .concurrencyLevel(CONCURRENCY_LEVEL) -- .build( -- new CacheLoader() { -- @Override -- public HybridDecrypt load(final String keyId) throws KeyFetchException { -- return createDecrypter(keyId); -- } -- }); -+ private final LoadingCache decrypterCache; - - /** Creates a new instance of the {@code MultiPartyDecryptionKeyServiceImpl} class. */ - @Inject -@@ -83,18 +73,30 @@ - @CoordinatorBEncryptionKeyFetchingService - EncryptionKeyFetchingService coordinatorBEncryptionKeyFetchingService, - @CoordinatorAAead CloudAeadSelector coordinatorAAeadService, -- @CoordinatorBAead CloudAeadSelector coordinatorBAeadService) { -+ @CoordinatorBAead CloudAeadSelector coordinatorBAeadService, @DecrypterCacheEntryTtlSec long decrypterCacheEntryTtlSec) { - this.coordinatorAEncryptionKeyFetchingService = coordinatorAEncryptionKeyFetchingService; - this.coordinatorBEncryptionKeyFetchingService = coordinatorBEncryptionKeyFetchingService; - this.coordinatorAAeadService = coordinatorAAeadService; - this.coordinatorBAeadService = coordinatorBAeadService; -+ this.decrypterCacheEntryTtlSec = decrypterCacheEntryTtlSec; -+ this.decrypterCache = CacheBuilder.newBuilder() -+ .maximumSize(MAX_CACHE_SIZE) -+ .expireAfterWrite(this.decrypterCacheEntryTtlSec, TimeUnit.SECONDS) -+ .concurrencyLevel(CONCURRENCY_LEVEL) -+ .build( -+ new CacheLoader() { -+ @Override -+ public HybridDecrypt load(final String keyId) throws KeyFetchException { -+ return createDecrypter(keyId); -+ } -+ }); - } - - /** Returns the decrypter for the provided key. 
*/ - @Override - public HybridDecrypt getDecrypter(String keyId) throws KeyFetchException { - try { -- return decypterCache.get(keyId); -+ return decrypterCache.get(keyId); - } catch (ExecutionException | UncheckedExecutionException e) { - ErrorReason reason = ErrorReason.UNKNOWN_ERROR; - if (e.getCause() instanceof KeyFetchException) { -diff --git a/javatests/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImplTest.java b/javatests/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImplTest.java -index 89f59ca..4dc7e46 100644 ---- a/javatests/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImplTest.java -+++ b/javatests/com/google/scp/operator/cpio/cryptoclient/MultiPartyDecryptionKeyServiceImplTest.java -@@ -68,6 +68,8 @@ - private MockTinkUtils mockTinkUtils; - private MultiPartyDecryptionKeyServiceImpl multiPartyDecryptionKeyServiceImpl; - -+ private long decrypterCacheEntryTtlSec = 3600; -+ - @Before - public void setup() throws Exception { - mockTinkUtils = new MockTinkUtils(); -@@ -76,7 +78,8 @@ - coordinatorAKeyFetchingService, - coordinatorBKeyFetchingService, - aeadServicePrimary, -- aeadServiceSecondary); -+ aeadServiceSecondary, -+ decrypterCacheEntryTtlSec); - } - - @Test diff --git a/build_defs/shared_libraries/pin_pkr_docker.patch b/build_defs/shared_libraries/pin_pkr_docker.patch deleted file mode 100644 index fdd9e82d..00000000 --- a/build_defs/shared_libraries/pin_pkr_docker.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/operator/worker/aws/setup_enclave.sh b/operator/worker/aws/setup_enclave.sh -index e4bd30371..8bf2e0fb1 100644 ---- a/operator/worker/aws/setup_enclave.sh -+++ b/operator/worker/aws/setup_enclave.sh -@@ -19,7 +19,7 @@ sudo yum install -y https://s3.amazonaws.com/ec2-downloads-windows/SSMAgent/late - # - # Builds enclave image inside the /home/ec2-user directory as part of automatic - # AMI generation. --sudo yum install docker -y -+sudo yum install docker-24.0.5-1.amzn2023.0.3 -y - sudo systemctl enable docker - sudo systemctl start docker - diff --git a/build_defs/shared_libraries/rules_pkg_build_fix.patch b/build_defs/shared_libraries/rules_pkg_build_fix.patch new file mode 100644 index 00000000..f447f7dd --- /dev/null +++ b/build_defs/shared_libraries/rules_pkg_build_fix.patch @@ -0,0 +1,227 @@ +diff --git a/BUILD b/BUILD +index 12cf08abb..9e25a6747 100644 +--- a/BUILD ++++ b/BUILD +@@ -12,7 +12,6 @@ + # See the License for the specific language governing permissions and + # limitations under the License. + +-load("@bazel_skylib//rules:copy_directory.bzl", "copy_directory") + load("@com_github_bazelbuild_buildtools//buildifier:def.bzl", "buildifier") + load("@rules_pkg//:mappings.bzl", "pkg_files") + load("@rules_pkg//:pkg.bzl", "pkg_tar") +@@ -30,53 +29,6 @@ buildifier( + mode = "fix", + ) + +-# pkg_tar no longer allows directories to be specified. +-# Must use copy_directory to create Tree Artifacts. +-# https://github.com/bazelbuild/rules_pkg/issues/611 +-# +-# The srcs directory is prefixed to avoid the error conflicting with +-# other build rules: +-# "One of the output paths ... is a prefix of the other. +-# These actions cannot be simultaneously present; +-# please rename one of the output files or build just one of them" +-# It will be stripped by pkg_tar remap_paths. 
+- +-copy_directory( +- name = "build_defs_dir", +- src = "build_defs", +- out = "srcs/build_defs", +-) +- +-copy_directory( +- name = "cc_dir", +- src = "cc", +- out = "srcs/cc", +-) +- +-copy_directory( +- name = "java_dir", +- src = "java", +- out = "srcs/java", +-) +- +-copy_directory( +- name = "javatests_dir", +- src = "javatests", +- out = "srcs/javatests", +-) +- +-copy_directory( +- name = "licenses_dir", +- src = "licenses", +- out = "srcs/licenses", +-) +- +-copy_directory( +- name = "operator_dir", +- src = "operator", +- out = "srcs/operator", +-) +- + # This rule is used to copy the source code from other bazel rules. + # This can be used for reproducible builds. + # Only cc targets are needed at this point, so only the files needed to build +@@ -88,18 +40,15 @@ pkg_tar( + ".bazelversion", + "BUILD", + "WORKSPACE", +- ":build_defs_dir", +- ":cc_dir", +- ":java_dir", +- ":javatests_dir", +- ":licenses_dir", +- ":operator_dir", ++ "build_defs", ++ "cc", ++ "java", ++ "javatests", ++ "licenses", ++ "operator", + ] + glob(["*.bzl"]), + mode = "0777", + package_dir = "scp", +- remap_paths = { +- "srcs/": "", +- }, + ) + + pkg_files( +diff --git a/build_defs/aws/kmstool/kmstool.BUILD b/build_defs/aws/kmstool/kmstool.BUILD +index 022ca6791..5a5d95a2e 100644 +--- a/build_defs/aws/kmstool/kmstool.BUILD ++++ b/build_defs/aws/kmstool/kmstool.BUILD +@@ -1,62 +1,19 @@ +-load("@bazel_skylib//rules:copy_directory.bzl", "copy_directory") + load("@rules_pkg//:pkg.bzl", "pkg_tar") + + package(default_visibility = ["//visibility:public"]) + + exports_files(glob(["*"])) + +-copy_directory( +- name = "bin_dir", +- src = "bin", +- out = "bin", +-) +- +-copy_directory( +- name = "cmake_dir", +- src = "cmake", +- out = "cmake", +-) +- +-copy_directory( +- name = "containers_dir", +- src = "containers", +- out = "containers", +-) +- +-copy_directory( +- name = "docs_dir", +- src = "docs", +- out = "docs", +-) +- +-copy_directory( +- name = "include_dir", +- src = "include", +- out = "include", +-) +- +-copy_directory( +- name = "source_dir", +- src = "source", +- out = "source", +-) +- +-copy_directory( +- name = "tests_dir", +- src = "tests", +- out = "tests", +-) +- + pkg_tar( + name = "source_code_tar", + srcs = [ +- ":bin_dir", +- ":cmake_dir", +- ":containers_dir", +- ":docs_dir", +- ":include_dir", +- ":source_dir", +- ":tests_dir", ++ "bin", ++ "cmake", ++ "containers", ++ "docs", ++ "include", ++ "source", ++ "tests", + ] + glob(["*"]), + mode = "0777", + package_dir = "aws_nitro_enclaves_sdk_c", +diff --git a/operator/terraform/aws/environments/demo/BUILD b/operator/terraform/aws/environments/demo/BUILD +index 82726daf5..fdde467c2 100644 +--- a/operator/terraform/aws/environments/demo/BUILD ++++ b/operator/terraform/aws/environments/demo/BUILD +@@ -64,20 +64,20 @@ pkg_files( + # copied. 
+ pkg_mklink( + name = "operator_service_link", +- link_name = "demo/operator_service.tf", +- target = "../shared/operator_service.tf", ++ src = "../shared/operator_service.tf", ++ dest = "demo/operator_service.tf", + ) + + pkg_mklink( + name = "operator_service_vars_link", +- link_name = "demo/operator_service_variables.tf", +- target = "../shared/operator_service_variables.tf", ++ src = "../shared/operator_service_variables.tf", ++ dest = "demo/operator_service_variables.tf", + ) + + pkg_mklink( + name = "ami_params_link", +- link_name = "demo/ami_params.auto.tfvars", +- target = "../shared/ami_params.auto.tfvars", ++ src = "../shared/ami_params.auto.tfvars", ++ dest = "demo/ami_params.auto.tfvars", + ) + + pkg_filegroup( +diff --git a/operator/terraform/gcp/environments/demo/BUILD b/operator/terraform/gcp/environments/demo/BUILD +index 5f5d9f54d..ee109ecf9 100644 +--- a/operator/terraform/gcp/environments/demo/BUILD ++++ b/operator/terraform/gcp/environments/demo/BUILD +@@ -20,20 +20,20 @@ package(default_visibility = ["//visibility:public"]) + # copied. + pkg_mklink( + name = "job_service_link", +- link_name = "demo/job_service.tf", +- target = "../shared/job_service.tf", ++ src = "../shared/job_service.tf", ++ dest = "demo/job_service.tf", + ) + + pkg_mklink( + name = "job_service_vars_link", +- link_name = "demo/job_service_variables.tf", +- target = "../shared/job_service_variables.tf", ++ src = "../shared/job_service_variables.tf", ++ dest = "demo/job_service_variables.tf", + ) + + pkg_mklink( + name = "job_service_outputs_link", +- link_name = "demo/job_service_outputs.tf", +- target = "../shared/job_service_outputs.tf", ++ src = "../shared/job_service_outputs.tf", ++ dest = "demo/job_service_outputs.tf", + ) + + pkg_files( diff --git a/docs/api.md b/docs/api.md index 2d5108d9..c46ab1c7 100644 --- a/docs/api.md +++ b/docs/api.md @@ -31,6 +31,8 @@ POST // "folder1/shard" would take all files with paths starting with // "folder1/shard" such as "folder1/shard1.avro", "folder1/shard/test1.avro" // and "folder1/shard1/folder2/test1.avro". + // It is recommended to keep the number of shards between the number of CPUs + // available to the enclave and 1000. "input_data_blob_prefix": , // Storage bucket for input data. @@ -55,6 +57,8 @@ POST // domain files, it's a prefix in the file path. For example, inputting // "folder1/shard" would include "folder1/shard/domain1.avro", // "folder1/shard_domain.avro" and "folder1/shard/folder2/domain.avro". + // It is recommended to keep the number of shards between the number of CPUs + // available to the enclave and 1000. "output_domain_blob_prefix": , // Domain file bucket. @@ -64,17 +68,42 @@ POST // This should be same as the reporting_origin present in the reports' shared_info. "attribution_report_to": , - // [Optional] differential privacy epsilon value to be used + // [Optional] Reporting Site. + // This should be the reporting site that is onboarded to the aggregation service. + // Note: All reports in the request should have reporting origins which + // belong to the reporting site mentioned in this parameter. This parameter + // and the "attribution_report_to" parameter are mutually exclusive; exactly + // one of the two parameters should be provided in the request. + "reporting_site": "" + + // [Optional] Differential privacy epsilon value to be used + // for this job. 0.0 < debug_privacy_epsilon <= 64.0. The // value can be varied so that tests with different epsilon // values can be performed during the origin trial.
- "debug_privacy_epsilon": , + "debug_privacy_epsilon": , // [Optional] The percentage of reports, if excluded from // aggregation due to an error, will fail the job. // Values can be from 0 to 100. If left empty, default value of 10% // will be used, - "report_error_threshold_percentage": + "report_error_threshold_percentage": , + + // [Optional] Total number of reports provided as input data for this job. + // This value, in conjunction with "report_error_threshold_percentage", will + // enable early failure of the job when reports are excluded due to errors. + "input_report_count": , + + // [Optional] A list of unsigned filtering IDs separated by commas. All the + // contributions other than the matching filtering ID will be filtered out. + // e.g. "filtering_ids":"12345,34455,12". Default value is "0". + "filtering_ids":, + + // [Optional] When executing a debug run, noised and unnoised debug summary + // report and annotations are added to indicate which keys are present in the + // domain input and/or reports. Additionally, duplicates across batches are + // also not enforced. Note that the debug run only considers reports that have the flag + // "debug_mode": "enabled". Read /docs/debugging.md for details. + "debug_run": } } ``` @@ -135,6 +164,10 @@ These are the validations that are done before the aggregation begins. ATTRIBUTION_REPORT_TO_MISMATCH error counter. Aggregatable report validations and error counters can be found in the [Input Aggregatable Report Validations](#input-aggregatable-report-validations) below +4. Job request's `job_parameters` should contain exactly one of `attribution_report_to` and + `reporting_site`. +5. If `job_parameters.reporting_site` is provided, `shared_info.reporting_origin` of all + aggregatable reports should belong to this reporting site. Return code: [INVALID_JOB](java/com/google/aggregate/adtech/worker/AggregationWorkerReturnCode.java#L38) @@ -206,18 +239,28 @@ Not found: 404 Not Found "output_domain_bucket_name": , // Reporting URL "attribution_report_to" : , + // [Optional] Reporting site value from the CreateJob request, if provided. + "reporting_site": // [Optional] differential privacy epsilon value to be used // for this job. 0.0 < debug_privacy_epsilon <= 64.0. The // value can be varied so that tests with different epsilon // values can be performed during the origin trial. A greater // epsilon value results in less noise in the output. Default // value for epsilon is 10. - "debug_privacy_epsilon": , + "debug_privacy_epsilon": , // [Optional] The percentage of reports, if excluded from // aggregation due to an error, will fail the job. // Values can be from 0 to 100. If left empty, default value of 10% // will be used. - "report_error_threshold_percentage": + "report_error_threshold_percentage": , + // [Optional] Total number of reports provided as input data for this job. + // This value, in conjunction with "report_error_threshold_percentage", will + // enable early failure of the job when reports are excluded due to errors. + "input_report_count": , + // [Optional] A list of unsigned filtering IDs separated by commas. All the + // contributions other than the matching filtering ID will be filtered out. + // e.g. "filtering_ids":"12345,34455,12". Default value is "0". 
+ "filtering_ids":, }, // The time when worker starts processing request in the latest processing // attempt @@ -377,15 +420,8 @@ If the invalid reports in a job exceed the `report_error_threshold_percentage` ( [createJob](#createjob-endpoint) request job parameters above), the job will fail with REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD error. -Invalid aggregatable report error counters corresponding to various validations - - -| shared_info field | ErrorCode | Error Reason | -| --------------------- | ------------------------------- | ---------------------------------------------- | -| api | UNSUPPORTED_REPORT_API_TYPE | api is unsupported | -| report_id | INVALID_REPORT_ID | report_id is empty | -| reporting_origin | ATTRIBUTION_REPORT_TO_MALFORMED | syntactically invalid domain | -| scheduled_report_time | ORIGINAL_REPORT_TIME_TOO_OLD | older than 90 days at the time of aggregation. | -| version | UNSUPPORTED_SHAREDINFO_VERSION | unsupported report shared_info.version | +A summary of all report error counters along with their mitigations can be found in +[Aggregation Service Report Error Codes and Mitigations](/docs/error-codes-and-mitigation.md#aggregation-service-report-error-codes-and-mitigations). If report `shared_info.version` is higher than supported major version, the aggregation job will fail without consuming privacy budget with `result_info.return_code` UNSUPPORTED_REPORT_VERSION. diff --git a/docs/aws-aggregation-service.md b/docs/aws-aggregation-service.md index e3285777..d146017b 100644 --- a/docs/aws-aggregation-service.md +++ b/docs/aws-aggregation-service.md @@ -6,14 +6,11 @@ To test the aggregation service with support for encrypted reports, you need the - Have an [AWS account](https://portal.aws.amazon.com/gp/aws/developer/registration/index.html) available to you. -- [Register](https://developer.chrome.com/origintrials/#/view_trial/771241436187197441) for the - Privacy Sandbox Relevance and Measurement origin trial (OT) - Complete the aggregation service [onboarding form](https://forms.gle/EHoecersGKhpcLPNA) Once you've submitted the onboarding form, we will contact you to verify your information. Then, we'll send you the remaining instructions and information needed for this setup.
_You won't be -able to successfully setup your AWS system without registering for the origin trial and completing -the onboarding process!_ +able to successfully setup your AWS system without completing the onboarding process!_ To set up aggregation service in AWS you'll use [Terraform](https://www.terraform.io/). @@ -279,15 +276,6 @@ file into smaller shards. ## Updating the system -If you have deployed the system before, we recommend to run `terraform destroy` in your environment -folder (e.g. `/terraform/aws/environments/dev`) when upgrading from `0.3.z` to -`0.4.z+` and follow the [setup steps](#set-up-your-deployment-environment) again. - -After your upgrade to `0.4.z+` and if you have followed the above setup, next time you can update -your system to the latest version by checking out the latest tagged version and running -`terraform apply` in your environment folder (e.g. -`/terraform/aws/environments/dev`). - Run the following in the ``. ```sh diff --git a/docs/batching-strategies.md b/docs/batching-strategies.md index 3f0347bf..d6f1ed0d 100644 --- a/docs/batching-strategies.md +++ b/docs/batching-strategies.md @@ -1,34 +1,79 @@ # Strategies for batching -When batching aggregatable reports, it is important to optimize batching strategies so that privacy limits are not exceeded. The following are a few recommended strategies for sending batches of reports to the Aggregation Service. + +When batching aggregatable reports, it is important to optimize batching strategies so that privacy +limits are not exceeded. The following are a few recommended strategies for sending batches of +reports to the Aggregation Service. ## Collect reports + When collecting reports to include in a batch, keep the following in mind: #### Report upload retries -**Note:** Retry criteria are subject to change. The information in this section will be updated in that case. - -On both the web and OS platforms, a platform will attempt to send the report three times, but if the report fails to be sent after the third try, it will not be sent. The original `scheduled_report_time` value is preserved no matter when the report is able to be sent. The timeline for retries is different per platform: -* A web browser will send reports when the browser is online. If the report fails to send, it will wait five minutes for the second retry, and then 15 minutes for the third. If the browser goes offline, the next retry will be one minute after it comes back online. There is no maximum delay in sending reports on the web; this means, if the browser goes offline, no matter how long ago the report was generated, once the browser goes back online, it will try and send the report in accordance with the retry policy. -* An Android phone has a consistent network connection. As such, it will run the job to send reports once per hour. This means that if a report fails to send, it will be retried the next hour, and again in the hour after that. If the device doesn’t have a connection, the device will retry sending the report with the next reporting job that runs after the device connects to the network again. The maximum delay is 28 days, which means that the device will not send a report that was generated more than 28 days ago. +**Note:** Retry criteria are subject to change. The information in this section will be updated in +that case. + +On both the web and OS platforms, a platform will attempt to send the report three times, but if the +report fails to be sent after the third try, it will not be sent. 
The original +`scheduled_report_time` value is preserved no matter when the report is able to be sent. The +timeline for retries is different per platform: + +- A web browser will send reports when the browser is online. If the report fails to send, it will + wait five minutes for the second retry, and then 15 minutes for the third. If the browser goes + offline, the next retry will be one minute after it comes back online. There is no maximum delay + in sending reports on the web; this means, if the browser goes offline, no matter how long ago + the report was generated, once the browser goes back online, it will try and send the report in + accordance with the retry policy. +- An Android phone has a consistent network connection. As such, it will run the job to send + reports once per hour. This means that if a report fails to send, it will be retried the next + hour, and again in the hour after that. If the device doesn't have a connection, the device will + retry sending the report with the next reporting job that runs after the device connects to the + network again. The maximum delay is 28 days, which means that the device will not send a report + that was generated more than 28 days ago. #### Wait on reports -It is recommended to wait for late arriving reports when collecting reports for batching. Late reports can be determined by checking the `scheduled_report_time` value against when the report was received. The time difference between those reports will help determine how long you may want to consider waiting for late arriving reports. For example, as delayed reports are collected, check the `scheduled_report_time` field and note the time delay in hours as 90%, 95%, and 99% of reports are received. That data can be used to determine how long to wait for late arriving reports. [Instant aggregate reports](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATE.md#optional-reduce-report-delay-with-trigger-context-id) can be used to reduce the chance of delayed reports. -The following visual shows late arriving reports being stored in the appropriate batches according to scheduled report time. Batch T represents scheduled_report_time, and T+X represents time waited for delayed reports. This results in a summary report that includes the majority of reports that are included in the batch which corresponds to their scheduled report time. +It is recommended to wait for late arriving reports when collecting reports for batching. Late +reports can be determined by checking the `scheduled_report_time` value against when the report was +received. The time difference between those reports will help determine how long you may want to +consider waiting for late arriving reports. For example, as delayed reports are collected, check the +`scheduled_report_time` field and note the time delay in hours as 90%, 95%, and 99% of reports are +received. That data can be used to determine how long to wait for late arriving reports. +[Instant aggregate reports](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATE.md#optional-reduce-report-delay-with-trigger-context-id) +can be used to reduce the chance of delayed reports. + +The following visual shows late arriving reports being stored in the appropriate batches according +to scheduled report time. Batch T represents scheduled_report_time, and T+X represents time waited +for delayed reports. 
This results in a summary report that includes the majority of reports that are +included in the batch which corresponds to their scheduled report time. ![batching](assets/batching.png) ### Aggregatable report accounting -The Aggregation Service maintains a [“no duplicates” rule](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATION_SERVICE_TEE.md#no-duplicates-rule). This rule enforces that all Aggregatable reports with the same shared ID must be included in the same batch. -After the reports are collected, they should be batched in such a way that all reports with the same shared ID are part of one batch. +The Aggregation Service maintains a +["no duplicates" rule](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATION_SERVICE_TEE.md#no-duplicates-rule). +This rule enforces that all Aggregatable reports with the same shared ID must be included in the +same batch. + +After the reports are collected, they should be batched in such a way that all reports with the same +shared ID are part of one batch. -If a report has already been processed in another batch, the processing can result in a [privacy budget exhausted error](https://github.com/privacysandbox/aggregation-service/blob/main/java/com/google/aggregate/adtech/worker/AggregationWorkerReturnCode.java#L26). Batching reports correctly helps prevent batches from being rejected due to the “no duplicates” rule. +If a report has already been processed in another batch, the processing can result in a +[privacy budget exhausted error](https://github.com/privacysandbox/aggregation-service/blob/2a83ed7c6e7c99d0a8fa4dc107edec86264aeaad/java/com/google/aggregate/adtech/worker/AggregationWorkerReturnCode.java#L26). +Batching reports correctly helps prevent batches from being rejected due to the "no duplicates" +rule. -A [shared ID](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATION_SERVICE_TEE.md#disjoint-batches) is a key that is generated for each report to track aggregatable report accounting. The shared ID ensures that reports with the same shared ID contribute to only one summary report. This means that the reports that map to one shared ID together must all be included in a single batch. For example, if Report X and Report Y both have the same shared ID, they must be included in the same batch to avoid reports being dropped for duplication. +A +[shared ID](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATION_SERVICE_TEE.md#disjoint-batches) +is a key that is generated for each report to track aggregatable report accounting. The shared ID +ensures that reports with the same shared ID contribute to only one summary report. This means that +the reports that map to one shared ID together must all be included in a single batch. For example, +if Report X and Report Y both have the same shared ID, they must be included in the same batch to +avoid reports being dropped for duplication. -The following image demonstrates the shared_info components that are hashed together to generate a Shared ID. +The following image demonstrates the shared_info components that are hashed together to generate a +Shared ID. ![shared-id](assets/shared-id.png) @@ -36,31 +81,66 @@ The following image demonstrates how two different reports can have the same sha ![scheduled-report-time](assets/scheduled-report-time.png) -**Note:** scheduled_report_time is truncated by hour, and source_registration_time is truncated by day. Report_id is not used in shared ID creation. 
Time granularity may be updated in the future.
+**Note:** scheduled_report_time is truncated by hour, and source_registration_time is truncated by
+day. Report_id is not used in shared ID creation. Time granularity may be updated in the future.

#### Duplicate reports within batches

-The `shared_info` field in an aggregatable report contains a UUID in the `report_id` field, which is used to identify duplicate reports within a batch. If there is more than one report with the same `report_id` in a batch, only the first report will be aggregated, and the others will be considered duplicates and silently dropped; the aggregation will proceed as normal and no errors will be sent. Although not required, Adtech can expect to see some performance gains by filtering out the duplicate reports with same reports ids before aggregation.
-The `report_id` is unique to each report.
+The `shared_info` field in an aggregatable report contains a UUID in the `report_id` field, which is
+used to identify duplicate reports within a batch. If there is more than one report with the same
+`report_id` in a batch, only the first report will be aggregated, and the others will be considered
+duplicates and silently dropped; the aggregation will proceed as normal and no errors will be sent.
+Although not required, adtechs can expect to see some performance gains by filtering out duplicate
+reports with the same report IDs before aggregation.
+
+The `report_id` is unique to each report.

#### Duplicate reports across batches

-Each report is assigned a shared ID, which is an ID generated from combined data points that come from the report's `shared_info` field. Multiple reports can have the same shared ID, and each batch can contain multiple shared IDs. All reports with the same shared ID must go in the same batch. If reports with the same shared ID end up in multiple batches, only the first batch will be accepted, and the others will be rejected as duplicates. To prevent this, [batches must be created appropriately](#batch-reports).
-The following image shows an example where reports with the same shared ID across batches can cause the later batch to fail. In the image, you can see that two or more reports with the same shared ID e679aa are batched into different batches #1 and #2. Since the budget for all reports with shared ID e679aa is consumed during Batch #1 summary report generation, Batch #2 is not allowed and fails with an error.
+Each report is assigned a shared ID, which is an ID generated from combined data points that come
+from the report's `shared_info` field. Multiple reports can have the same shared ID, and each batch
+can contain multiple shared IDs. All reports with the same shared ID must go in the same batch. If
+reports with the same shared ID end up in multiple batches, only the first batch will be accepted,
+and the others will be rejected as duplicates. To prevent this,
+[batches must be created appropriately](#batch-reports).
+
+The following image shows an example where reports with the same shared ID across batches can cause
+the later batch to fail. In the image, you can see that two or more reports with the same shared ID
+e679aa are batched into different batches #1 and #2. Since the budget for all reports with shared ID
+e679aa is consumed during Batch #1 summary report generation, Batch #2 is not allowed and fails with
+an error.

![duplication](assets/duplication.png)
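+
+One way to avoid the failure illustrated above is to drop duplicate `report_id`s and then key
+batches only on fields that are inputs to shared ID generation. The sketch below is illustrative
+only: the class and field names are not part of any Aggregation Service API, and it assumes
+`shared_info` has already been parsed out of each collected report.
+
+```java
+import java.time.Instant;
+import java.time.temporal.ChronoUnit;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/** Sketch: batch reports so that reports sharing a shared ID always land in the same batch. */
+public final class ReportBatcher {
+
+  /** Minimal view of the parsed shared_info fields used below. */
+  record CollectedReport(
+      String reportId,
+      String api,
+      String reportingOrigin,
+      String attributionDestination,
+      Instant scheduledReportTime) {}
+
+  /** Batch key built only from fields that feed shared ID generation. */
+  record BatchKey(
+      String api, String reportingOrigin, String attributionDestination, Instant hour) {}
+
+  static Map<BatchKey, List<CollectedReport>> batch(List<CollectedReport> collected) {
+    // Drop duplicate report_ids up front; the service would silently drop them during aggregation.
+    Map<String, CollectedReport> deduped =
+        collected.stream()
+            .collect(
+                Collectors.toMap(
+                    CollectedReport::reportId, Function.identity(), (first, dup) -> first));
+
+    // Because every field of the key is also an input to the shared ID, two reports with the same
+    // shared ID always map to the same key, and therefore to the same batch.
+    return deduped.values().stream()
+        .collect(
+            Collectors.groupingBy(
+                r ->
+                    new BatchKey(
+                        r.api(),
+                        r.reportingOrigin(),
+                        r.attributionDestination(),
+                        r.scheduledReportTime().truncatedTo(ChronoUnit.HOURS))));
+  }
+}
+```
+
+Keying batches on anything that is not a shared ID input (for example, the time a report was
+received) can split reports that share a shared ID across batches and trigger the error above. The
+strategies in the next section build on the same idea.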
## Batch reports

-The following are recommended ways to batch reports to avoid duplicates and optimize aggregate report accounting.
+
+The following are recommended ways to batch reports to avoid duplicates and optimize aggregatable
+report accounting.

### Batch by advertiser

-**Note:** This strategy is only recommended for Attribution Reporting aggregation.
-Private Aggregation does not have an attribution_destination field, which is the advertiser.
-It is recommended to batch by advertiser, meaning to include reports belonging to a single advertiser in the same batch, to avoid hitting the aggregatable report account limit for each batch. Advertiser is a field considered in sharedID generation, so reports with the same advertiser could also have the same sharedID, which would require them to be in the same batch to avoid errors.
+**Note:** This strategy is only recommended for Attribution Reporting aggregation.
+
+Private Aggregation does not have an attribution_destination field (the advertiser). It is
+recommended to batch by advertiser, meaning to include reports belonging to a single advertiser in
+the same batch, to avoid hitting the aggregatable report accounting limit for each batch. Advertiser
+is a field considered in shared ID generation, so reports with the same advertiser could also have
+the same shared ID, which would require them to be in the same batch to avoid errors.

### Batch by time

-It is recommended to consider the report's scheduled report time (`shared_info.scheduled_report_time`) when batching. Scheduled report time is truncated to the hour in the shared ID generation, so at a minimum reports should be batched at hour intervals, meaning all reports with scheduled report time within the same hour should go in the same batch to avoid having reports with same shared ID across multiple batches, which will lead to job errors.
+
+It is recommended to consider the report's scheduled report time
+(`shared_info.scheduled_report_time`) when batching. Scheduled report time is truncated to the hour
+in shared ID generation, so at a minimum reports should be batched at hour intervals, meaning all
+reports with a scheduled report time within the same hour should go in the same batch. This avoids
+having reports with the same shared ID across multiple batches, which would lead to job errors.

### Batching frequency and noise

-It is recommended to consider the [impact of noise](https://developers.google.com/privacy-sandbox/relevance/attribution-reporting/design-decisions#batching-frequency) on how often aggregatable reports are processed. If aggregatable reports are batched more frequently—for example, reports are processed once an hour—fewer conversion events will be included and noise will have a larger relative impact. If the frequency is decreased and reports are processed once a week, noise will have a smaller relative impact. To better understand the impact of noise on batches, experiment with the [Noise Lab](https://noise-lab.uc.r.appspot.com/?mode=simple).
+
+It is recommended to consider the
+[impact of noise](https://developers.google.com/privacy-sandbox/relevance/attribution-reporting/design-decisions#batching-frequency)
+on how often aggregatable reports are processed. If aggregatable reports are batched more
+frequently (for example, reports are processed once an hour), fewer conversion events will be
+included and noise will have a larger relative impact; the sketch below gives a rough sense of the
+difference.
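+
+For intuition only, the following back-of-the-envelope comparison assumes the Attribution Reporting
+contribution budget of 65,536 per source and a Laplace-style noise scale of roughly budget /
+epsilon; the exact noise distribution applied by the service may differ, so treat the numbers as
+illustrative rather than as the service's implementation.
+
+```java
+/** Rough illustration: same per-bucket signal rate, different batching windows, fixed noise scale. */
+public final class NoiseImpactSketch {
+
+  public static void main(String[] args) {
+    // Assumptions for illustration: L1 contribution budget and a chosen epsilon.
+    double contributionBudget = 65_536.0;
+    double epsilon = 10.0;
+    double noiseScale = contributionBudget / epsilon; // Assumed Laplace-style scale.
+
+    // Suppose a single bucket accumulates about 1,000 of aggregatable value per hour.
+    double hourlyTrueValue = 1_000.0;
+    double weeklyTrueValue = hourlyTrueValue * 24 * 7;
+
+    // Relative impact: noise scale as a fraction of the true aggregated value in the batch.
+    System.out.printf("hourly batch: noise/value ~ %.2f%n", noiseScale / hourlyTrueValue);
+    System.out.printf("weekly batch: noise/value ~ %.3f%n", noiseScale / weeklyTrueValue);
+  }
+}
+```
+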
If the frequency is decreased and reports are +processed once a week, noise will have a smaller relative impact. To better understand the impact of +noise on batches, experiment with the [Noise Lab](https://noise-lab.uc.r.appspot.com/?mode=simple). diff --git a/docs/collecting.md b/docs/collecting.md index 0e3bc3ed..420b8562 100644 --- a/docs/collecting.md +++ b/docs/collecting.md @@ -36,19 +36,17 @@ This is a sample aggregatable report produced with the ```json { + "aggregation_coordinator_origin": "https://publickeyservice.msmt.aws.privacysandboxservices.com", "aggregation_service_payloads": [ { - "debug_cleartext_payload": "omRkYXRhgaJldmFsdWVEAACAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAFWWlvcGVyYXRpb25paGlzdG9ncmFt", - "key_id": "e101cca5-3dec-4d4f-9823-9c7984b0bafe", - "payload": "26/oZSjHABFqsIxR4Gyh/DpmJLNA/fcp43Wdc1/sblss3eAkAPsqJLphnKjAC2eLFR2bQolMTOneOU5sMWuCfag2tmFlQKLjTkNv85Wq6HAmLg+Zq+YU0gxF573yzK38Cj2pWtb65lhnq9dl4Yiz" + "debug_cleartext_payload": "omRkYXRhlKJldmFsdWVEAACAAGZidWNrZXRQPPhnkD+7c+wm1RjAlowp3KJldmFsdWVEAAARMGZidWNrZXRQJFJl9DLxbnMm1RjAlowp3KJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAKJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAGlvcGVyYXRpb25paGlzdG9ncmFt", + "key_id": "27c22e1b-dc77-4fc0-aee7-4d291262071c", + "payload": "vxia0shX3KjvsgdNWZdznk2IUWtOi8+fZCTGFniGeRTzAv5LgSeGLd3PhDANDyazjDNSm8GxjtsPmoG08rd5nL8+qc6NwKJfwnAQUVgg/iCS8alpCcrD9aTSTyjeUdrJBmfwvjjydpm6NA6o+RVbpv1N7v+Z3arSvUzqw3k3UfGy12n10+TkaH/ymCo5Ke/9mbSwxy68aJfAGutEt91fo7qHtvvrTAULBIgQTgNsy9LkSeCTQ2YeEr7wyxBssZLrPEanjh6LGtzj2gXRiz3OiVAqXxnSn7eW8K9V9TosFsIVHijk9o5oz6+9LhkDx5SHHvx048m5OqyFe48KSBAlMcHHSSk+GCwQtHcHWgSVb8TmHDE9UnTc7+tSR60IK/UK4351my7aEaKaacF3q28pYjjoy92idwzcw0IWUUMAb4c1z1RetxZ602txSnjOsFLUcW02ReeUJcgynbi5M/DIbpltqTlg2FeSsMzLvf0yGvP6NDUF6jqgaD9kfHcLgwLR61G/SnjXVtdUU31VdS5bPMyrfBthesHytmNFKQQtBqLZ32uyf60NKed9xZku70IW0r/z06hPlvRG+ME+FqPEgbWC/YsBXv/Ua3wPjQ1Q3/BvQWHIPuaEyA2aeDilE0h3wfHMLsGbHxq/5Tw+pqwR12g069Qf5jiYyCiBdo96Vd40iKbZL4DVt85QHevpHFQYTmANBHYqW09Gl3TWnay8BgbIE+38IX2O9lwR59tiRCXFNwLmE6nUtGjfTBFpz1VCIVRos9K14tE4yGfxyyNVt8dHy6CFABG5wYtnT/+izKMhPzZCVA65wqaKEehk26+inHa4GzXCJfWvX5QNV1FxEaSrktM22/91NWsQXyMC2fT7NtK7MEKsXSmkqL0VdlxNR4b6WCz2yd5hVufSKZA8e4Wfljr43Lc22om54o2Y6qtteoIsj9FhJbHmnke/NbrbmEPf9Jk37d/KxGu96X0E46ACA4WQa18hBxKTir+LKI0g+rXJlfwUn4vvNZz1vInX7VUHelZi0mpVylFD5XP09O0PX4oUPx+WVCCf" } ], - "attribution_destination": "http://shoes.localhost", - "shared_info": "{\"debug_mode\":\"enabled\",\"privacy_budget_key\":\"OtLi6K1k0yNpebFbh92gUh/Cf8HgVBVXLo/BU50SRag=\",\"report_id\":\"00cf2236-a4fa-40e5-a7aa-d2ceb33a4d9d\",\"reporting_origin\":\"http://adtech.localhost:3000\",\"scheduled_report_time\":\"1649652363\",\"version\":\"\"}", - "source_debug_key": "531933890459023", - "source_registration_time": "1649635200", - "source_site": 
"http://news.localhost", - "trigger_debug_key": "531933890459023" + "shared_info": "{\"api\":\"attribution-reporting\",\"attribution_destination\":\"https://arapi-advertiser.web.app\",\"debug_mode\":\"enabled\",\"report_id\":\"6334058b-301d-40c8-be58-3f63eed454a1\",\"reporting_origin\":\"https://arapi-adtech.web.app\",\"scheduled_report_time\":\"1700089276\",\"source_registration_time\":\"0\",\"version\":\"0.1\"}", + "source_debug_key": "685645209142579", + "trigger_debug_key": "685645209142579" } ``` @@ -58,18 +56,44 @@ The above CBOR payload decodes into the following data in JSON format (Decoded w representing the underlying bytes. While some bytes may be represented as ASCII characters, others are unicode escaped. +Using CBOR, you will get the bucket and value in hex format. You can convert the value into decimal +while the bucket can be converted into an escaped unicode format by converting the characters into +ASCII or using the JavaScript code below. + ```json { "data": [ { - "value": "\u0000\u0000\x80\u0000", - "bucket": "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0005Y" - } + "value": h'00008000', + "bucket": h'3CF867903FBB73EC26D518C0968C29DC' + }, + { + "value": h'00001130', + "bucket": h'245265F432F16E7326D518C0968C29DC' + }, + { + "value": h'00000000', + "bucket": h'00000000000000000000000000000000' + }, + ... ], "operation": "histogram" } ``` +```javascript +function hexToAscii(hexString) { + if (hexString.length % 2 != 0) { + hexString = '0' + hexString; + } + let asciiStr = ''; + for (let i = 0; i < hexString.length; i += 2) { + asciiStr += String.fromCharCode(parseInt(hexString.substr(i, 2), 16)); + } + return asciiStr; +} +``` + ## Convert the aggregatable report into Avro binary representation Both the local testing tool and the aggregation service running on AWS Nitro Enclave expect @@ -314,7 +338,7 @@ sample uses unicode escaped "characters" to encode the byte array bucket value. ```json { - "bucket": "\u0005Y" + "bucket": "<øg\u0090?»sì&Õ\u0018À\u0096\u008c)Ü" } ``` @@ -329,43 +353,6 @@ Another sample of a valid output domain json file - ```json { - "bucket": "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0005Y" + "bucket": "\u003c\u00f8\u0067\u0090\u003f\u00bb\u0073\u00ec\u0026\u00d5\u0018\u00c0\u0096\u008c\u0029\u00dc" } ``` - -### Produce a summary report locally - -Using the local testing tool, you now can generate a summary report. -[See all flags and descriptions](/docs/local-testing-tool.md#local-testing-tool-flags-and-descriptions) - -Follow the instructions in the -[using-the-local-testing-tool](/docs/local-testing-tool.md#using-the-local-testing-tool) to download -the local testing tool. - -We will run the tool, without adding noise to the summary report, to receive the expected value of -`32768` from the [sample aggregatable report](#aggregatable-report-sample). - -```sh -java -jar LocalRunner_deploy.jar \ ---input_data_avro_file output_debug_reports_.avro \ ---domain_avro_file output_domain.avro \ ---json_output \ ---no_noising -``` - -The output of above tool execution will be in `output.json` with the following content - -```json -[ - { - "bucket": "\u0005Y", - "metric": 32768 - } -] -``` - -Note: The local testing tool also supports aggregation of -[Protected Audience](https://github.com/patcg-individual-drafts/private-aggregation-api#reports) and -[Shared Storage](https://github.com/patcg-individual-drafts/private-aggregation-api#reports) -reports. 
Simply pass the batch of FLEDGE or shared-storage unencrypted reports in the ---input_data_avro_file param. diff --git a/docs/error-codes-and-mitigation.md b/docs/error-codes-and-mitigation.md new file mode 100644 index 00000000..e24e5f74 --- /dev/null +++ b/docs/error-codes-and-mitigation.md @@ -0,0 +1,49 @@ +# Error Codes and Mitigations for Aggregation Service + +## Aggregation Service Return Codes and Mitigations + +This section covers the return code provided by the Aggregation Service job once it finishes +running. The `return_code` field within the `result_info` section of the getJob API response will +provide these return codes listed in [/docs/api.md#getjob-endpoint](/docs/api.md#getjob-endpoint). + +| Return Code | Message | When does this error happen? | Job can be retried | Adtech actionable? | Mitigation | +| ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------- | ------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| SUCCESS | Aggregation Job completed successfully. | Aggregation Job completed successfully. | N/A | N/A | N/A | +| SUCCESS_WITH_ERRORS | "Aggregation job successfully processed but some reports have errors." | Some of the reports had processing errors. | No | No | Check the Error Summary to see Aggregation Service Report Error Codes. More details about error summary at [/docs/api.md#error-response-body-1](/docs/api.md#error-response-body-1) | +| PRIVACY_BUDGET_EXHAUSTED | "Insufficient privacy budget for one or more aggregatable reports. No aggregatable report can appear in more than one aggregation job." | When a job is trying to process reports that have been processed before or have the same shared ID as another report that is already processed. | No | Yes | Remove reports that have shared IDs with no budget left. Note: All reports regardless of production or debug endpoint will need to be batched according to shared ID. If you have a batch where the shared ID both exists in production and debug, you will encounter a privacy budget exhausted error. See our batching strategies [/docs/batching-strategies.md](/docs/batching-strategies.md) for more information. | +| PRIVACY_BUDGET_ERROR | "Exception while consuming privacy budget. Exception message: " + {exception message} | An error happened while consuming the privacy budget. | Yes | Yes | Retry the job. We also recommend each batch to be per advertiser or destination. Batching strategies can be found at [/docs/batching-strategies.md](/docs/batching-strategies.md) | +| INVALID_JOB | "Error due to validation exception." | When the job parameters fail validation. | Yes | Yes | Correct the job parameters for the job based on the validation message returned. 
Please see the createJob request parameter documentation for more details : [/docs/api.md#payload](/docs/api.md#payload) | +| RESULT_WRITE_ERROR | "Exception occured while writing result." | When the write to the output directory fails. | No | Yes | 1. Check that the account running Aggregation Service has write permissions for the output directory. 2. If a budget recovery process is available then contact Aggregation Service support for recovering the budget. | +| INTERNAL_ERROR | "Exception in processing domain." or "Internal Service Exception when processing reports." | An error occurred while processing output domains or Internal Error encountered. | Yes | Yes | Ensure that output domain location is a valid path. Retry the job. If error persists, contact Aggregation Service support. | +| UNSUPPORTED_REPORT_VERSION | "Exception due to unsupported report version" | An aggregatable report with a higher major sharedInfo version was provided and Aggregation Service is not up-to-date to support this version. | No | Yes | Update Aggregation Service deployment to a version that supports the report versions. | +| PERMISSION_ERROR | "Exception because of missing permission." | Aggregation service did not have access to storage or other requested resources. | No | Yes | 1. Ensure that Aggregation Service has access to the storage and requested resources to run the job. 2. This could be due to permission error while fetching decryption keys. Ensure that the account running Aggregation Service is the same as the one provided during onboarding. | +| INPUT_DATA_READ_FAILED | "No report shards found for location: " + reportsLocationor "Exception while reading reports input data." or "Exception while reading domain input data." | No reports or output domain shards were found, or the Aggregation Service was unable to read them. | Yes | Yes | 1.Ensure that the input report data location has the reports to be processed. 2.Ensure that the job has the right permissions to read the input/domain shards. Make sure the worker/job has the correct permissions to read the shards. 3.Ensure that the input_data_bucket_name, input_data_blob_prefix, output_data_bucket_name and output_data_blob_prefix fields are correct in your createJob request | +| DEBUG_SUCCESS_WITH_PRIVACY_BUDGET_ERROR | "Aggregation would have failed in non-debug mode due to a privacy budget error." | Job run using debug_run param succeeded but would have failed due to privacy budget error if run in normal mode. | Yes | No | N/A | +| DEBUG_SUCCESS_WITH_PRIVACY_BUDGET_EXHAUSTED | "Aggregation would have failed in non-debug mode due to privacy budget exhaustion" | Job running in Debug Mode succeeded but would have failed if running in non-debug mode due to privacy budget exhaustion. | Yes | No | N/A | +| REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD | "Aggregation job failed early because the number of reports excluded from aggregation exceeded threshold." | The number reports with an issue exceeded the threshold. | Yes. | Yes | Check the Error Summary to see Aggregation Service Report Error Codes. Retry the job once reports with errors are removed from the batch. | +| PRIVACY_BUDGET_AUTHENTICATION_ERROR | "Aggregation service is not authenticated to call privacy budget service. This could happen due to a misconfiguration during enrollment. Please contact support for resolution." | Aggregation Service is not set up correctly for Aggregatable report accounting. | Yes, retry after setup is fixed. 
| Yes | Check that the account running the Aggregation Service matches the account that was provided during onboarding. | +| PRIVACY_BUDGET_AUTHORIZATION_ERROR | "Aggregation service is not authorized to call privacy budget service. This could happen if the createJob API job_paramaters.attribution_report_to does not match the one registered at enrollment. Please verify and contact support if needed." | The attribution reporting origin in the job parameters doesn't match the one registered during enrollment. | Yes, retry after changing the reporting origin job parameter. | Yes | Correct the job parameter `attribution_report_to`. | + +## Aggregation Service Report Error Codes and Mitigations + +This section covers the report-level errors present in the error summary. These are reports excluded +from aggregation due to errors and can be found in the `getJob` response +`result_info.error_summary.error_counts`. See +[/docs/api.md#error-response-body-1](/docs/api.md#error-response-body-1) for more details. + +| Return Code | Message | When does this error happen? | Mitigation | +| --------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| DECRYPTION_KEY_NOT_FOUND | "Could not find decryption key on private key endpoint." | The key_id supplied in the report's payload was not found. | For Attribution Reporting API, this error may be caused by an issue with the trigger registration. 
Check that the trigger has been registered with the correct cloud using the `aggregation_coordinator_origin` field (instructions here: [AGGREGATE.md#data-processing-through-a-secure-aggregation-service in github.com/WICG/attribution-reporting-api](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATE.md#data-processing-through-a-secure-aggregation-service)). This can also happen when aggregatable reports encrypted for one cloud are aggregated with an Aggregation service running on another cloud provider. Validate the public key endpoint used to encrypt the aggregatable reports. On your raw aggregatable report received in your .well-known reporting endpoint, you should be able to see the `aggregation_coordinator_origin` field. For GCP, the value should be `https://publickeyservice.msmt.gcp.privacysandboxservices.com`. For AWS, it's `https://publickeyservice.msmt.aws.privacysandboxservices.com`. If `aggregation_coordinator_origin` is not stated in the Attribution Reporting API / Private Aggregation API, the default will be AWS. For Attribution Reporting API, you can specify your cloud provider by passing it in the trigger registration. For Private Aggregation API, you will have to define the `aggregationCoordinatorOrigin` using the example in Aggregation coordinator choice section in the Private Aggregation API explainer. Please specify `https://publickeyservice.msmt.gcp.privacysandboxservices.com` as the "aggregationCoordinatorOrigin". | +| DECRYPTION_KEY_FETCH_ERROR | "Fetching the decryption key for report decryption failed. This can happen using an unapproved aggregation service binary, running the aggregation service binary in debug mode, key corruption or service availability issues." | This happens if running an unapproved Aggregation Service binary or due to Key Service issues. | No action required in case of Key Service disruptions. Job can be retried. In case of unapproved binary, using the right binary will fix the issue. Follow instructions here to use prebuilt image or self-build your image: [/docs/aws-aggregation-service.md#download-terraform-scripts-and-prebuilt-dependencies](/docs/aws-aggregation-service.md#download-terraform-scripts-and-prebuilt-dependencies) | +| DECRYPTION_ERROR | "Unable to decrypt the report. This may be caused by: a tampered aggregatable report file, a corrupt encrypted report, or other such issues." | This happens when report decryption fails. | 1. Ensure that the Aggregatable AVRO reports are generated correctly. Payload will need to be base64 decoded and converted into a byte array. More information on how to generate an AVRO report can be found at [/docs/collecting.md#convert-the-aggregatable-report-into-avro-binary-representation](/docs/collecting.md#convert-the-aggregatable-report-into-avro-binary-representation) 2. Ensure that the report is in avro format. 3.Check if the output domain AVRO is correct. Buckets are converted to escaped unicode hex format and then converted into a byte array. Contact Aggregation Service support for next steps. | +| ATTRIBUTION_REPORT_TO_MALFORMED | Report's reporting_origin domain is malformed. Domain must be syntactically valid and have an Effective Top Level Domain (eTLD). | This happens when the reporting origin's domain is malformed/invalid. | Ensure that the reports have a valid reporting origin domain. | +| ATTRIBUTION_REPORT_TO_MISMATCH | Report's reporting_origin value does not match attribution_report_to value set in the Aggregation job parameters. 
Aggregation request job parameters must have attribution_report_to set to report's reporting_origin value. | This occurs when there is a mismatch between the report's origin and the aggregation job parameters. | Change the reporting origin in the job parameter attribution_report_to to match the reporting origin of the reports being processed. | +| DEBUG_NOT_ENABLED | "Reports without .debug_mode enabled cannot be processed in a debug run." | This happens when the job is trying to process reports without debug mode enabled in a debug run. | Process only debug mode enabled reports in the debug run. A debug run only considers reports that have the flag "debug_mode": "enabled" in the report shared_info, see example at: [collecting.md#aggregatable-report-sample](/docs/collecting.md#aggregatable-report-sample) | +| NUM_REPORTS_WITH_ERRORS | "Total number of reports had an error. These reports were not considered in aggregation. See additional error messages for details on specific reasons. | This indicates that some reports had errors and were excluded from aggregation. | Exclude the processed reports and rerun the job with the unprocessed reports. | +| ORIGINAL_REPORT_TIME_TOO_OLD | "Report's scheduled_report_time is too old, reports cannot be older than 90 days." | This error occurs when the report's timestamp is older than the allowed 90-day limit. | N/A - The reports are too old to be processed. | +| INTERNAL_ERROR | "Internal error occurred during operation." | An internal error within the Aggregation Service service during operation. | Retry the job. If you encounter repeated Internal Errors, contact Aggregation Service support for help. | +| UNSUPPORTED_OPERATION | "Report's operation is unsupported. Supported operations are {SUPPORTED_OPERATIONS}." | This means that the report's operation is not among the supported operations. | N/A - The report's operation is not supported by Aggregation Service. | +| UNSUPPORTED_REPORT_API_TYPE | "The report's API type is not supported for aggregation. Supported APIs are {SUPPORTED_APIS}" | This error occurs when the report's API type is not supported by Aggregation Service. | N/A - The report's API type is not supported by Aggregation Service. | +| REQUIRED_SHAREDINFO_FIELD_INVALID | "One or more required SharedInfo fields are empty or invalid." | This indicates that essential SharedInfo fields are missing or contain invalid values. | Ensure that the reports have all the required SharedInfo fields. | +| INVALID_REPORT_ID | "Report ID missing or invalid in SharedInfo." | This error happens when the report ID is missing or invalid in the SharedInfo. | N/A - Aggregation Service cannot process reports with invalid report ID. | +| UNSUPPORTED_SHAREDINFO_VERSION | "Report has an unsupported version value in its shared_info. Supported values for report shared_info major version(s) are:{SUPPORTED_MAJOR_VERSIONS}" | This error arises when the report's shared_info version is incompatible with the supported versions. | Process the reports with a version of Aggregation Service that supports the report versions. | diff --git a/docs/gcp-aggregation-service.md b/docs/gcp-aggregation-service.md index 9f4d397b..f8652c21 100644 --- a/docs/gcp-aggregation-service.md +++ b/docs/gcp-aggregation-service.md @@ -5,16 +5,13 @@ To test the aggregation service with support for encrypted reports, you need the following: - Have a [GCP project](https://cloud.google.com/). 
-- [Register](https://developer.chrome.com/origintrials/#/view_trial/771241436187197441) for the - Privacy Sandbox Relevance and Measurement origin trial (OT) - Run the [Adtech Setup Terraform](#adtech-setup-terraform) to create/configure the service account needed for onboarding. - Complete the aggregation service [onboarding form](https://forms.gle/EHoecersGKhpcLPNA) Once you've submitted the onboarding form, we will contact you to verify your information. Then, we'll send you the remaining instructions and information needed for this setup.
_You won't be -able to successfully setup your GCP deployment without registering for the origin trial and -completing the onboarding process!_ +able to successfully setup your GCP deployment without completing the onboarding process!_ To set up aggregation service in GCP you'll use [Terraform](https://www.terraform.io/). @@ -146,6 +143,9 @@ folder: ... ``` +_Note: Please be advised that executing `terraform destroy` for the Adtech Setup environment will +result in the deletion of all resources generated within that environment._ + ## Set up your deployment environment _Note: Please, make sure that you have completed the above [Prerequisites](#prerequisites), @@ -325,3 +325,29 @@ file into smaller shards. _Note: This API requires authentication. Follow the [Google Cloud Function instructions](https://cloud.google.com/functions/docs/securing/authenticating) for sending an authenticated request. [Detailed API spec](/docs/api.md#getjob-endpoint)_ + +## Updating the system + +Run the following in the ``. + +```sh +git fetch origin && git checkout -b dev-v{VERSION} v{VERSION} +cd terraform/gcp +bash download_prebuilt_dependencies.sh +cd environments/dev +terraform apply +``` + +_Note: If you use self-built artifacts described in +[build-scripts/gcp](/build-scripts/gcp/README.md), run `bash fetch_terraform.sh` instead of +`bash download_prebuilt_dependencies.sh` and make sure you updated your dependencies in the `jars` +folder._ + +_Note: When migrating to new coordinator pair from version 2.[4|5|6].z to 2.7.z or later, ensure the +file `/terraform/gcp/environments/shared/release_params.auto.tfvars` was updated with the following +values:_ + +```sh +coordinator_a_impersonate_service_account = "a-opallowedusr@ps-msmt-coord-prd-g3p-svcacc.iam.gserviceaccount.com" +coordinator_b_impersonate_service_account = "b-opallowedusr@ps-prod-msmt-type2-e541.iam.gserviceaccount.com" +``` diff --git a/docs/local-testing-tool.md b/docs/local-testing-tool.md index dd8f35d0..5dd1e97a 100644 --- a/docs/local-testing-tool.md +++ b/docs/local-testing-tool.md @@ -49,6 +49,8 @@ _The `SHA256` of the `LocalTestingTool_{version}.jar` can be found on the ### Generating a summary report +Using the local testing tool, you can generate a summary report. + Simply pass any of the 3 kinds of supported reports as `--input_data_avro_file` param. Follow the instructions on how to @@ -71,10 +73,44 @@ This will create a summary report as output.avro file in the same directory wher You can use [avro tools](https://mvnrepository.com/artifact/org.apache.avro/avro-tools) to read avro output as JSON. +You can also batch the `output_debug_reports.avro` file into a summary report without adding noise +to the summary report. You should expect to receive the value of `32768` and `4400` from the +[sample aggregatable report](collecting.md#aggregatable-report-sample) + +```sh +java -jar LocalTestingTool_.jar \ +--input_data_avro_file output_debug_reports_.avro \ +--domain_avro_file output_domain.avro \ +--json_output \ +--output_directory . \ +--no_noising +``` + +The output of above tool execution will be in `output.json` with the following content + +```json +[ + { + "bucket": "<øg\u0090?»sì&Õ\u0018À\u0096\u008c)Ü", + "metric": 32768 + }, + { + "bucket": "$Reô2ñns&Õ\u0018À\u0096\u008c)Ü", + "metric": 4400 + } +] +``` + To see all supported flags for the local testing tool run `java -jar LocalTestingTool_{version}.jar --help`, e.g. 
you can adjust the noising epsilon with the `--epsilon` flag or disable noising all together with the `--no_noising` flag. +Note: The local testing tool also supports aggregation of +[Protected Audience](https://github.com/patcg-individual-drafts/private-aggregation-api#reports) and +[Shared Storage](https://github.com/patcg-individual-drafts/private-aggregation-api#reports) +reports. Simply pass the batch of FLEDGE or shared-storage unencrypted reports in the +--input_data_avro_file param. + ## Local Testing tool flags and descriptions ```sh diff --git a/docs/supported-api-types.md b/docs/supported-api-types.md new file mode 100644 index 00000000..bc3f95b9 --- /dev/null +++ b/docs/supported-api-types.md @@ -0,0 +1,13 @@ +# API types supported by the Aggregation Service + +The following Aggregatable Report API types are supported by the Aggregation Service + +1. [Attribution Reporting API](https://github.com/WICG/attribution-reporting-api/blob/main/AGGREGATE.md) +2. [Attribution Reporting Debug API](https://github.com/WICG/attribution-reporting-api/blob/main/aggregate_debug_reporting.md) +3. [Protected Audience API](https://github.com/WICG/turtledove/blob/main/FLEDGE.md) +4. [Shared Storage API](https://github.com/WICG/shared-storage) + +Also see +[Private Aggregation API explainer](https://github.com/patcg-individual-drafts/private-aggregation-api) +for more information on measurement of results collected via Protected Audience API and Shared +Storage API. diff --git a/docs/telemetry.md b/docs/telemetry.md new file mode 100644 index 00000000..7d300356 --- /dev/null +++ b/docs/telemetry.md @@ -0,0 +1,54 @@ +# Telemetry in Aggregation Service + +Aggregation Service exports the following metrics and traces through +[OpenTelemetry](https://opentelemetry.io/): CPU usage, memory, total execution time. These +metrics/traces can be helpful during the debugging process or when deciding on the appropriate cloud +[instance size](./sizing-guidance.md). + +Metrics/traces: + +- CPU usage: measured in percentage and exported with an interval of one minute. CPU percentage is + rounded to the nearest integer (e.g. 12.34% rounded to 12%). +- Memory: measured in percentage and exported with an interval of one minute. Memory percentage is + rounded to the nearest 10th (e.g. 12% rounded to 10%) and reported max to 90%. +- Total execution time (in seconds): time spent in worker processing from the time job is picked + for processing to its completion. This is generated per job. + +Memory and CPU usage are tracked for each environment. For debugging purposes only, we recommend +setting the following in your terraform variables file (`{name}.auto.tfvars`) to debug issues with a +single cloud instance. + +E.g. In terraform variables file (`{name}.auto.tfvars`): + +- For AWS, set `max_capacity_ec2_instances = "1"` +- For GCP, set `max_worker_instances = "1"` + +## How to enable metrics/traces collection + +Please note that enabling metrics or traces may add extra cost to your cloud billing. We recommend +referring to cloud provider website for cost details. + +The metrics and traces collection is disabled by default. To enable it, please add the metrics you +want to export in your terraform variables file (`{name}.auto.tfvars`) as shown here: + +```sh +allowed_otel_metrics = ["cpu_usage", "memory", "total_execution_time"] +``` + +In this case, "cpu_usage", "memory" and "total_execution_time" would be exported. 
+ +## Where to find the metrics/traces: + +The env_name here is the same as what was set in your Aggregation Service deployment terraform. + +- AWS + - "cpu_usage" and "memory" graphs can be found in Cloudwatch > all metrics > {env_name} > + OTelLib. + - "total_execution_time" is exported to "Traces" in Cloudwatch. You can run a query with + `annotation.job_id = {job_id}` to get traces for a specific job. +- GCP + - "cpu_usage" and "memory" graphs can be found in Monitoring > Metric explorer > Generic + Node > Process. You can put `custom_namespace={env_name}` in the filter to see the metrics + from a specific environment. + - "total_execution_time" is exported to "Trace Explorer". You can set `job-id: {job_id}` in + the filter to get traces for a specific job. diff --git a/java/com/google/aggregate/adtech/worker/AggregationWorkerArgs.java b/java/com/google/aggregate/adtech/worker/AggregationWorkerArgs.java index d9e2b728..8e29d0c9 100644 --- a/java/com/google/aggregate/adtech/worker/AggregationWorkerArgs.java +++ b/java/com/google/aggregate/adtech/worker/AggregationWorkerArgs.java @@ -28,6 +28,39 @@ import com.google.privacysandbox.otel.OTelExporterSelector; import java.net.URI; +/** + * Worker args are runtime flags that are set when building an image or as CLI args when running a + * standalone binary and set by the Aggregation Service team. They differ from aggregation job + * params, which are set in the Job Request when requesting an aggregation report. For available job + * parameters see API docs. + * + *

+ * <p>To add a new worker arg: declare a new parameter in this class and its getter function, update
+ * the {@link AggregationWorkerModule} to inject it to the appropriate location, and set the param
+ * in the BUILD rules.
+ *
+ * <p>Use the following convention for naming the new param:
+ *
+ * <ul>
+ *   <li>Use "lower_underscore" style for the 'names' attribute.
+ *   <li>Prefer "long_descriptive_names" over "short_names" and noun phrases.
+ *   <li>For Boolean flags:
+ *       <ul>
+ *         <li>Use positive or neutral terms (--foo_enabled rather than --foo_disabled).
+ *         <li>Param name should be "feature_name_enabled"
+ *         <li>Variable name should be "featureNameEnabled"
+ *         <li>Getter name should be "isFeatureNameEnabled(...)"
+ *       </ul>
+ * </ul>
+ */ public final class AggregationWorkerArgs { private static final int NUM_CPUS = Runtime.getRuntime().availableProcessors(); @@ -96,14 +129,6 @@ public final class AggregationWorkerArgs { + " This is used only for the LocalFileDecryptionKeyService.") private String localFileDecryptionKeyPath = ""; - @Parameter( - names = "--private_key_service_base_url", - description = - "Full URL (including protocol and api version path fragment) of the private key vending" - + " service. Do not include trailing slash") - private String privateKeyServiceUrl = - "https://privatekeyservice-staging.aws.admcstesting.dev:443/v1alpha"; - @Parameter( names = "--coordinator_a_encryption_key_service_base_url", description = @@ -344,7 +369,9 @@ public final class AggregationWorkerArgs { @Parameter( names = "--domain_optional", - description = "If set, option to threshold when output domain is not provided is enabled.") + description = + "If set, option to threshold when output domain is not provided is enabled. This feature" + + " is currently not enabled for use in Aggregation Service jobs.") private boolean domainOptional = false; @Parameter(names = "--domain_file_format", description = "Format of the domain generation file.") @@ -385,25 +412,56 @@ public final class AggregationWorkerArgs { + " error, will fail the job. This can be overridden in job request.") private double reportErrorThresholdPercentage = 10.0; - @Parameter(names = "--output_shard_file_size_bytes", description = - "Size of one shard of the output file. The default value is 100,000,000. (100MB)") + @Parameter( + names = "--output_shard_file_size_bytes", + description = + "Size of one shard of the output file. The default value is 100,000,000. (100MB)") private long outputShardFileSizeBytes = 100_000_000L; // 100MB @Parameter( - names = "--parallel-summary-upload", + names = "--parallel_summary_upload_enabled", description = "Flag to enable parallel upload of the sharded summary reports.") - private boolean enableParallelSummaryUpload = false; + private boolean parallelSummaryUploadEnabled = false; @Parameter( names = "--decrypter_cache_entry_ttl_sec", - description = "Flag to set the private key cache time to live. Used for testing only.") - private long decrypterCacheEntryTtlSec = 3600; + description = + "Flag to set the private key cache time to live. Flag exposed for testing only.") + private long decrypterCacheEntryTtlSec = 28800; // 8 hours. @Parameter( - names = "--streaming-output-domain-processing", - description = "Flag to enable RxJava streaming based output domain processing." - ) - private boolean streamingOutputDomainProcessing = false; + names = "--exception_cache_entry_ttl_sec", + description = "Flag to set the exception cache time to live.") + private long exceptionCacheEntryTtlSec = 10; // 10 seconds. + + @Parameter( + names = "--streaming_output_domain_processing_enabled", + description = "Flag to enable RxJava streaming based output domain processing.") + private boolean streamingOutputDomainProcessingEnabled = false; + + @Parameter( + names = "--labeled_privacy_budget_keys_enabled", + description = + "Flag to allow filtering of labeled payload contributions. If enabled, only contributions" + + " corresponding to queried labels/ids are included in aggregation.") + private boolean labeledPrivacyBudgetKeysEnabled = false; + + @Parameter( + names = "--local_job_params_input_filtering_ids", + description = + "Filtering Id to be added in Job Params to filter the labeled payload contributions. 
To" + + " be used only in Local mode.") + private String filteringIds = null; + + @Parameter( + names = "--attribution_reporting_debug_api_enabled", + description = "Flag to enable support for Attribution Reporting Debug API.") + private boolean attributionReportingDebugApiEnabled = true; + + @Parameter( + names = "--parallel_fact_noising_enabled", + description = "Flag to enable parallel aggregated fact noising.") + private boolean parallelAggregatedFactNoisingEnabled = false; ClientConfigSelector getClientConfigSelector() { return clientConfigSelector; @@ -533,10 +591,6 @@ String getLocalFileDecryptionKeyPath() { return localFileDecryptionKeyPath; } - String getPrivateKeyServiceBaseUrl() { - return privateKeyServiceUrl; - } - String getCoordinatorAEncryptionKeyServiceBaseUrl() { return coordinatorAEncryptionKeyServiceBaseUrl; } @@ -683,15 +737,35 @@ public long getOutputShardFileSizeBytes() { return outputShardFileSizeBytes; } - public boolean isEnableParallelSummaryUpload() { - return enableParallelSummaryUpload; + public boolean isParallelSummaryUploadEnabled() { + return parallelSummaryUploadEnabled; } public long getDecrypterCacheEntryTtlSec() { return decrypterCacheEntryTtlSec; } - public boolean isStreamingOutputDomainProcessing() { - return streamingOutputDomainProcessing; + public long getExceptionCacheEntryTtlSec() { + return exceptionCacheEntryTtlSec; + } + + public boolean isStreamingOutputDomainProcessingEnabled() { + return streamingOutputDomainProcessingEnabled; + } + + boolean isLabeledPrivacyBudgetKeysEnabled() { + return labeledPrivacyBudgetKeysEnabled; + } + + String getFilteringIds() { + return filteringIds; + } + + boolean isAttributionReportingDebugApiEnabled() { + return attributionReportingDebugApiEnabled; + } + + public boolean isParallelAggregatedFactNoisingEnabled() { + return parallelAggregatedFactNoisingEnabled; } } diff --git a/java/com/google/aggregate/adtech/worker/AggregationWorkerModule.java b/java/com/google/aggregate/adtech/worker/AggregationWorkerModule.java index 8a7154a6..8d94e1df 100644 --- a/java/com/google/aggregate/adtech/worker/AggregationWorkerModule.java +++ b/java/com/google/aggregate/adtech/worker/AggregationWorkerModule.java @@ -16,18 +16,27 @@ package com.google.aggregate.adtech.worker; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; + import com.fasterxml.jackson.databind.ObjectMapper; import com.google.aggregate.adtech.worker.Annotations.BenchmarkMode; import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; import com.google.aggregate.adtech.worker.Annotations.DomainOptional; import com.google.aggregate.adtech.worker.Annotations.EnableParallelSummaryUpload; +import com.google.aggregate.adtech.worker.Annotations.EnablePrivacyBudgetKeyFiltering; import com.google.aggregate.adtech.worker.Annotations.EnableStackTraceInResponse; import com.google.aggregate.adtech.worker.Annotations.EnableThresholding; import com.google.aggregate.adtech.worker.Annotations.MaxDepthOfStackTrace; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; import 
com.google.aggregate.adtech.worker.Annotations.OutputShardFileSizeBytes; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; import com.google.aggregate.adtech.worker.Annotations.ReportErrorThresholdPercentage; import com.google.aggregate.adtech.worker.Annotations.StreamingOutputDomainProcessing; +import com.google.aggregate.adtech.worker.Annotations.SupportedApis; import com.google.aggregate.adtech.worker.LocalFileToCloudStorageLogger.ResultWorkingDirectory; import com.google.aggregate.adtech.worker.aggregation.concurrent.ConcurrentAggregationProcessor; import com.google.aggregate.adtech.worker.aggregation.domain.OutputDomainProcessor; @@ -39,6 +48,7 @@ import com.google.aggregate.adtech.worker.decryption.RecordDecrypter; import com.google.aggregate.adtech.worker.model.serdes.PayloadSerdes; import com.google.aggregate.adtech.worker.model.serdes.cbor.CborPayloadSerdes; +import com.google.aggregate.adtech.worker.util.JobUtils; import com.google.aggregate.adtech.worker.validation.SimulationValidationModule; import com.google.aggregate.adtech.worker.validation.ValidationModule; import com.google.aggregate.perf.StopwatchExporter; @@ -49,7 +59,9 @@ import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorModule; import com.google.aggregate.privacy.noise.proto.Params.NoiseParameters.Distribution; import com.google.aggregate.shared.mapper.TimeObjectMapper; +import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.AbstractModule; @@ -73,7 +85,7 @@ import com.google.scp.operator.cpio.cryptoclient.Annotations.CoordinatorAEncryptionKeyServiceBaseUrl; import com.google.scp.operator.cpio.cryptoclient.Annotations.CoordinatorBEncryptionKeyServiceBaseUrl; import com.google.scp.operator.cpio.cryptoclient.Annotations.DecrypterCacheEntryTtlSec; -import com.google.scp.operator.cpio.cryptoclient.HttpPrivateKeyFetchingService.PrivateKeyServiceBaseUrl; +import com.google.scp.operator.cpio.cryptoclient.Annotations.ExceptionCacheEntryTtlSec; import com.google.scp.operator.cpio.cryptoclient.aws.Annotations.KmsEndpointOverride; import com.google.scp.operator.cpio.cryptoclient.local.LocalFileDecryptionKeyServiceModule.DecryptionKeyFilePath; import com.google.scp.operator.cpio.distributedprivacybudgetclient.DistributedPrivacyBudgetClientModule.CoordinatorAPrivacyBudgetServiceAuthEndpoint; @@ -104,6 +116,7 @@ import java.nio.file.Paths; import java.util.Optional; import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinPool; import java.util.function.Supplier; import javax.inject.Singleton; import software.amazon.awssdk.http.SdkHttpClient; @@ -156,8 +169,7 @@ protected void configure() { if (!args.getLocalFileJobInfoPath().isEmpty()) { localJobInfoPath = Optional.of(Paths.get(args.getLocalFileJobInfoPath())); } - bind(new TypeLiteral>() { - }) + bind(new TypeLiteral>() {}) .annotatedWith(LocalFileJobHandlerResultPath.class) .toInstance(localJobInfoPath); bind(ObjectMapper.class).to(TimeObjectMapper.class); @@ -246,9 +258,6 @@ protected void configure() { bind(RecordReaderFactory.class).to(args.getEncryptedRecordReader().getReaderFactoryClass()); // Dependencies for decryption and deserialization. 
- bind(String.class) - .annotatedWith(PrivateKeyServiceBaseUrl.class) - .toInstance(args.getPrivateKeyServiceBaseUrl()); bind(String.class) .annotatedWith(CoordinatorAEncryptionKeyServiceBaseUrl.class) .toInstance(args.getCoordinatorAEncryptionKeyServiceBaseUrl()); @@ -281,11 +290,11 @@ protected void configure() { // Dependency for the aggregation processor. bind(JobProcessor.class).to(ConcurrentAggregationProcessor.class); - bind(boolean.class).annotatedWith(StreamingOutputDomainProcessing.class) - .toInstance(args.isStreamingOutputDomainProcessing()); - // Noising module. install(args.getNoisingSelector().getNoisingModule()); + bind(boolean.class) + .annotatedWith(ParallelAggregatedFactNoising.class) + .toInstance(args.isParallelAggregatedFactNoisingEnabled()); // Result logger module. install(args.resultLoggerModuleSelector().getResultLoggerModule()); @@ -296,13 +305,28 @@ protected void configure() { .toInstance(Paths.get(args.getResultWorkingDirectoryPathString())); break; } - bind(boolean.class).annotatedWith(EnableParallelSummaryUpload.class) - .toInstance(args.isEnableParallelSummaryUpload()); + + // Feature flags. + bind(boolean.class) + .annotatedWith(EnableParallelSummaryUpload.class) + .toInstance(args.isParallelSummaryUploadEnabled()); + bind(boolean.class) + .annotatedWith(EnablePrivacyBudgetKeyFiltering.class) + .toInstance(args.isLabeledPrivacyBudgetKeysEnabled()); + bind(boolean.class) + .annotatedWith(StreamingOutputDomainProcessing.class) + .toInstance(args.isStreamingOutputDomainProcessingEnabled()); // Parameter to set key cache. This is a test only flag. - bind(Long.class).annotatedWith(DecrypterCacheEntryTtlSec.class) + bind(Long.class) + .annotatedWith(DecrypterCacheEntryTtlSec.class) .toInstance(args.getDecrypterCacheEntryTtlSec()); + // Parameter to set exception cache. This is a test only flag. + bind(Long.class) + .annotatedWith(ExceptionCacheEntryTtlSec.class) + .toInstance(args.getExceptionCacheEntryTtlSec()); + // Dependencies for privacy budgeting. bind(PrivacyBudgetingServiceBridge.class).to(args.getPrivacyBudgeting().getBridge()); if (args.getPrivacyBudgeting() == PrivacyBudgetingSelector.HTTP) { @@ -357,8 +381,8 @@ protected void configure() { // Otel exporter. switch (args.getOTelExporterSelector()) { - // Specifying CollectorEndpoint is required for GRPC exporter because aggregation service - // would send metric to the CollectorEndpoint and thus collector/exporter could collect. + // Specifying CollectorEndpoint is required for GRPC exporter because aggregation service + // would send metric to the CollectorEndpoint and thus collector/exporter could collect. 
case GRPC: bind(String.class) .annotatedWith(GrpcOtelCollectorEndpoint.class) @@ -384,6 +408,20 @@ protected void configure() { .toInstance(args.getOutputShardFileSizeBytes()); } + @Provides + @SupportedApis + ImmutableSet providesSupportedApis() { + if (args.isAttributionReportingDebugApiEnabled()) { + return ImmutableSet.of( + ATTRIBUTION_REPORTING_API, + ATTRIBUTION_REPORTING_DEBUG_API, + PROTECTED_AUDIENCE_API, + SHARED_STORAGE_API); + } else { + return ImmutableSet.of(ATTRIBUTION_REPORTING_API, PROTECTED_AUDIENCE_API, SHARED_STORAGE_API); + } + } + @Provides @LocalFileJobParameters Supplier> providesLocalFileJobParameters() { @@ -409,6 +447,9 @@ Supplier> providesLocalFileJobParameters() { if (args.isDebugRun()) { jobParametersBuilder.put("debug_run", "true"); } + if (!Strings.isNullOrEmpty(args.getFilteringIds())) { + jobParametersBuilder.put(JobUtils.JOB_PARAM_FILTERING_IDS, args.getFilteringIds()); + } return () -> (jobParametersBuilder.build()); } @@ -429,4 +470,11 @@ ListeningExecutorService provideBlockingThreadPool() { return MoreExecutors.listeningDecorator( Executors.newFixedThreadPool(args.getBlockingThreadPoolSize())); } + + @Provides + @Singleton + @CustomForkJoinThreadPool + ListeningExecutorService provideCustomForkJoinThreadPool() { + return MoreExecutors.listeningDecorator(new ForkJoinPool(args.getNonBlockingThreadPoolSize())); + } } diff --git a/java/com/google/aggregate/adtech/worker/Annotations.java b/java/com/google/aggregate/adtech/worker/Annotations.java index 96d6ba99..c59200e8 100644 --- a/java/com/google/aggregate/adtech/worker/Annotations.java +++ b/java/com/google/aggregate/adtech/worker/Annotations.java @@ -25,142 +25,115 @@ import java.lang.annotation.Retention; import java.lang.annotation.Target; -/** - * Guice annotations for dependency injection - */ +/** Guice annotations for dependency injection */ public final class Annotations { /** - * Annotation for the {@link com.google.common.util.concurrent.ServiceManager} running the - * worker. + * Annotation for the {@link com.google.common.util.concurrent.ServiceManager} running the worker. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface WorkerServiceManager { - - } + public @interface WorkerServiceManager {} - /** - * Annotation for the {@link com.google.common.util.concurrent.Service} doing the work. - */ + /** Annotation for the {@link com.google.common.util.concurrent.Service} doing the work. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface PullWorkService { - - } + public @interface PullWorkService {} - /** - * Annotation for the thread pool doing the non-blocking work. - */ + /** Annotation for the thread pool doing the non-blocking work. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface NonBlockingThreadPool { - - } + public @interface NonBlockingThreadPool {} - /** - * Annotation for the thread pool doing the blocking work. - */ + /** Annotation for the thread pool doing the blocking work. 
*/ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface BlockingThreadPool { - - } + public @interface BlockingThreadPool {} @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface BenchmarkMode { - - } + public @interface BenchmarkMode {} @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface DomainOptional { - - } + public @interface DomainOptional {} @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface EnableThresholding { - - } + public @interface EnableThresholding {} @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - @interface DebugWriter { - - } + @interface DebugWriter {} @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - @interface ResultWriter { + @interface ResultWriter {} - } - - /** - * Annotation to enable or disable returning stack trace in response. - */ + /** Annotation to enable or disable returning stack trace in response. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface EnableStackTraceInResponse { + public @interface EnableStackTraceInResponse {} - } - - /** - * Annotation to declare the max depth of stacktrace to be returned. - */ + /** Annotation to declare the max depth of stacktrace to be returned. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface MaxDepthOfStackTrace { - - } + public @interface MaxDepthOfStackTrace {} - /** - * Annotation to declare the threshold of report errors to fail the job. - */ + /** Annotation to declare the threshold of report errors to fail the job. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface ReportErrorThresholdPercentage { - - } + public @interface ReportErrorThresholdPercentage {} - /** - * Annotation to set a size of shard with single key-value pair. - */ + /** Annotation to set a size of shard with single key-value pair. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface OutputShardFileSizeBytes { + public @interface OutputShardFileSizeBytes {} - } + @BindingAnnotation + @Target({FIELD, PARAMETER, METHOD}) + @Retention(RUNTIME) + public @interface EnableParallelSummaryUpload {} @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface EnableParallelSummaryUpload { + public @interface StreamingOutputDomainProcessing {} - } + @BindingAnnotation + @Target({FIELD, PARAMETER, METHOD}) + @Retention(RUNTIME) + public @interface EnablePrivacyBudgetKeyFiltering {} + /** Annotation for the set of supported APIs. */ @BindingAnnotation @Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME) - public @interface StreamingOutputDomainProcessing { + public @interface SupportedApis {} - } + @BindingAnnotation + @Target({FIELD, PARAMETER, METHOD}) + @Retention(RUNTIME) + public @interface ParallelAggregatedFactNoising {} + /** Annotation for the custom ForkJoin thread pool. 
*/ + @BindingAnnotation + @Target({FIELD, PARAMETER, METHOD}) + @Retention(RUNTIME) + public @interface CustomForkJoinThreadPool {} - private Annotations() { - } + private Annotations() {} } diff --git a/java/com/google/aggregate/adtech/worker/BUILD b/java/com/google/aggregate/adtech/worker/BUILD index 72ea8532..62b2b760 100644 --- a/java/com/google/aggregate/adtech/worker/BUILD +++ b/java/com/google/aggregate/adtech/worker/BUILD @@ -182,6 +182,7 @@ java_library( "//java/com/google/aggregate/adtech/worker/reader/avro", "//java/com/google/aggregate/adtech/worker/selector", "//java/com/google/aggregate/adtech/worker/testing:in_memory_logger", + "//java/com/google/aggregate/adtech/worker/util", "//java/com/google/aggregate/adtech/worker/validation", "//java/com/google/aggregate/adtech/worker/writer", "//java/com/google/aggregate/adtech/worker/writer/avro", @@ -285,8 +286,7 @@ java_binary( main_class = "com.google.aggregate.adtech.worker.AggregationWorkerRunner", runtime_deps = [ # //telemetry library should be before :worker_runner - # TODO(b/305100313) Re-enable prod library when building prod jar. - "//telemetry/noop/java/com/google/privacysandbox/otel:otel_noop", + "//telemetry/prod/java/com/google/privacysandbox/otel:otel_prod", ":worker_runner", "//java/external:commons_logging", "//java/external:slf4j_simple", diff --git a/java/com/google/aggregate/adtech/worker/ErrorSummaryAggregator.java b/java/com/google/aggregate/adtech/worker/ErrorSummaryAggregator.java index f926a45c..4045dba5 100644 --- a/java/com/google/aggregate/adtech/worker/ErrorSummaryAggregator.java +++ b/java/com/google/aggregate/adtech/worker/ErrorSummaryAggregator.java @@ -58,6 +58,9 @@ public static ErrorSummaryAggregator createErrorSummaryAggregator( Optional errorThresholdValue = totalReportCountsOptional.map( totalReportCounts -> Math.round(totalReportCounts * errorThresholdPercentage / 100)); + if (errorThresholdValue.isEmpty() && errorThresholdPercentage == 0) { + errorThresholdValue = Optional.of(0L); + } return new ErrorSummaryAggregator(errorThresholdValue, errorThresholdPercentage); } diff --git a/java/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLogger.java b/java/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLogger.java index 6e9c134e..7d01f2d1 100644 --- a/java/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLogger.java +++ b/java/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLogger.java @@ -17,7 +17,6 @@ package com.google.aggregate.adtech.worker; import static com.google.aggregate.adtech.worker.util.DebugSupportHelper.getDebugFilePrefix; -import static com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient.BlobStorageClientException; import static com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient.getDataLocation; import static com.google.scp.operator.shared.model.BackendModelUtil.toJobKeyString; import static java.lang.annotation.ElementType.FIELD; @@ -31,10 +30,8 @@ import com.google.aggregate.adtech.worker.Annotations.ResultWriter; import com.google.aggregate.adtech.worker.exceptions.ResultLogException; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.aggregate.adtech.worker.util.OutputShardFileHelper; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter; -import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter.FileWriteException; import com.google.common.collect.ImmutableList; import 
com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; @@ -45,7 +42,6 @@ import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient; import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation; import com.google.scp.operator.cpio.jobclient.model.Job; -import java.io.IOException; import java.lang.annotation.Retention; import java.lang.annotation.Target; import java.nio.file.Files; @@ -69,7 +65,6 @@ public final class LocalFileToCloudStorageLogger implements ResultLogger { private final BlobStorageClient blobStorageClient; private final Path workingDirectory; private final ListeningExecutorService blockingThreadPool; - private static final String reencryptedReportFileNamePrefix = "reencrypted-"; @Inject LocalFileToCloudStorageLogger( @@ -165,26 +160,6 @@ public void logResults(ImmutableList results, Job ctx, boolean i } } - @Override - public void logReports(ImmutableList reports, Job ctx, String shardNumber) - throws ResultLogException { - String localFileName = - toJobKeyString(ctx.jobKey()) - + "-" - + reencryptedReportFileNamePrefix - + shardNumber - + ".avro"; - Path localReportsFilePath = - workingDirectory - .getFileSystem() - .getPath(Paths.get(workingDirectory.toString(), localFileName).toString()); - try { - writeReportsToCloud(reports.stream(), ctx, localReportsFilePath, localResultFileWriter); - } catch (FileWriteException | BlobStorageClientException | IOException e) { - throw new ResultLogException(e); - } - } - @SuppressWarnings("UnstableApiUsage") private ListenableFuture writeFile( Stream aggregatedFacts, @@ -220,21 +195,6 @@ private ListenableFuture writeFile( blockingThreadPool); } - private void writeReportsToCloud( - Stream reports, Job ctx, Path localFilepath, LocalResultFileWriter writer) - throws IOException, FileWriteException, BlobStorageClientException { - Files.createDirectories(workingDirectory); - writer.writeLocalReportFile(reports, localFilepath); - - String outputDataBlobBucket = ctx.requestInfo().getOutputDataBucketName(); - String outputDataBlobPrefix = localFilepath.getFileName().toString(); - - DataLocation resultLocation = getDataLocation(outputDataBlobBucket, outputDataBlobPrefix); - - blobStorageClient.putBlob(resultLocation, localFilepath); - Files.deleteIfExists(localFilepath); - } - /** * The local file name has a random UUID in it to prevent cases where an item is processed twice * by the same worker and clobbers other files being written. diff --git a/java/com/google/aggregate/adtech/worker/LocalResultLogger.java b/java/com/google/aggregate/adtech/worker/LocalResultLogger.java index 9d304b0a..60ee2995 100644 --- a/java/com/google/aggregate/adtech/worker/LocalResultLogger.java +++ b/java/com/google/aggregate/adtech/worker/LocalResultLogger.java @@ -23,7 +23,6 @@ import com.google.aggregate.adtech.worker.LibraryAnnotations.LocalOutputDirectory; import com.google.aggregate.adtech.worker.exceptions.ResultLogException; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter.FileWriteException; import com.google.common.collect.ImmutableList; @@ -68,23 +67,6 @@ public void logResults(ImmutableList results, Job ctx, boolean i isDebugRun ? 
localDebugResultFileWriter : localResultFileWriter); } - // TODO(b/315199032): Add local runner test - @Override - public void logReports(ImmutableList reports, Job ctx, String shardNumber) - throws ResultLogException { - String localFileName = "reencrypted_report.avro"; - Path localReportsFilePath = - workingDirectory - .getFileSystem() - .getPath(Paths.get(workingDirectory.toString(), localFileName).toString()); - try { - Files.createDirectories(workingDirectory); - localResultFileWriter.writeLocalReportFile(reports.stream(), localReportsFilePath); - } catch (IOException | FileWriteException e) { - throw new ResultLogException(e); - } - } - private DataLocation writeFile( Stream results, Job ctx, Path filePath, LocalResultFileWriter writer) throws ResultLogException { diff --git a/java/com/google/aggregate/adtech/worker/LocalWorkerArgs.java b/java/com/google/aggregate/adtech/worker/LocalWorkerArgs.java index 5f690c4e..91098556 100644 --- a/java/com/google/aggregate/adtech/worker/LocalWorkerArgs.java +++ b/java/com/google/aggregate/adtech/worker/LocalWorkerArgs.java @@ -21,10 +21,34 @@ import com.beust.jcommander.ParameterException; /** - * Command line args for the standalone library + * Command line args for the standalone library. + * + *

To add a new worker arg: declare a new parameter in this class and its getter function, update
+ * the {@link LocalWorkerModule} to inject it to the appropriate location, and set the param in the
+ * BUILD rules.
+ *
+ * Use the following convention for naming the new param:
+ *
+ *   • Use "lower_underscore" style for the 'names' attribute.
+ *   • Prefer "long_descriptive_names" over "short_names" and noun phrases.
+ *   • For Boolean flags:
+ *     • Use positive or neutral terms (--foo_enabled rather than --foo_disabled).
+ *     • Param name should be "feature_name_enabled"
+ *     • Variable name should be "featureNameEnabled"
+ *     • Getter name should be "isFeatureNameEnabled(...)"
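+ *
+ * Illustrative sketch only (the flag below is hypothetical, not one of the parameters defined in
+ * this class): a Boolean flag following this convention would look like
+ *
+ *   {@code @Parameter(names = "--example_feature_enabled", description = "Enables the hypothetical example feature.")}
+ *   {@code private boolean exampleFeatureEnabled = false;}
+ *   {@code public boolean isExampleFeatureEnabled() { return exampleFeatureEnabled; }}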
*/ public class LocalWorkerArgs { + private static final int NUM_CPUS = Runtime.getRuntime().availableProcessors(); + @Parameter( names = "--input_data_avro_file", order = 0, @@ -109,15 +133,51 @@ public class LocalWorkerArgs { + " error, will fail the job. This can be overridden in job request.") private double reportErrorThresholdPercentage = 10.0; - @Parameter(names = "--output_shard_file_size_bytes", description = - "Size of one shard of the output file. The default value is 100,000,000. (100MB)") + @Parameter( + names = "--output_shard_file_size_bytes", + description = + "Size of one shard of the output file. The default value is 100,000,000. (100MB)") private long outputShardFileSizeBytes = 100_000_000L; // 100MB @Parameter( - names = "--streaming-output-domain-processing", - description = "Flag to enable RxJava streaming based output domain processing." - ) - private boolean streamingOutputDomainProcessing = false; + names = "--streaming_output_domain_processing_enabled", + description = "Flag to enable RxJava streaming based output domain processing.") + private boolean streamingOutputDomainProcessingEnabled = false; + + @Parameter( + names = "--local_job_params_input_filtering_ids", + description = + "Filtering Id to be added in Job Params to filter the labeled payload contributions.") + private String filteringIds = null; + + @Parameter( + names = "--labeled_privacy_budget_keys_enabled", + description = + "Flag to allow filtering of labeled payload contributions. If enabled, only contributions" + + " corresponding to queried labels/ids are included in aggregation.") + private boolean labeledPrivacyBudgetKeysEnabled = false; + + @Parameter( + names = "--attribution_reporting_debug_api_enabled", + description = "Flag to enable support for Attribution Reporting Debug API.") + private boolean attributionReportingDebugApiEnabled = true; + + @Parameter( + names = "--nonblocking_thread_pool_size", + description = "Size of the non-blocking thread pool") + private int nonBlockingThreadPoolSize = Math.max(1, NUM_CPUS); + + @Parameter( + names = "--blocking_thread_pool_size", + description = "Size of the blocking thread pool") + // Blocking thread is for I/O which is faster than non-IO operation in aggregation service. + // Therefore, the thread pool size default is set to be smaller than nonBlockingThreadPool size. 
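+ // Illustrative arithmetic (assumed example value): with NUM_CPUS = 8, the defaults resolve to
+ // nonBlockingThreadPoolSize = max(1, 8) = 8 and blockingThreadPoolSize = max(1, 8 / 2) = 4.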
+ private int blockingThreadPoolSize = Math.max(1, NUM_CPUS / 2); + + @Parameter( + names = "--parallel_fact_noising_enabled", + description = "Flag to enable parallel aggregated fact noising.") + private boolean parallelAggregatedFactNoisingEnabled = false; public String getInputDataAvroFile() { return inputDataAvroFile; @@ -187,8 +247,32 @@ public int getMaximumDepthOfStackTrace() { return reportErrorThresholdPercentage; } - public boolean isStreamingOutputDomainProcessing() { - return streamingOutputDomainProcessing; + public boolean isStreamingOutputDomainProcessingEnabled() { + return streamingOutputDomainProcessingEnabled; + } + + String getFilteringIds() { + return filteringIds; + } + + boolean isLabeledPrivacyBudgetKeysEnabled() { + return labeledPrivacyBudgetKeysEnabled; + } + + boolean isAttributionReportingDebugApiEnabled() { + return attributionReportingDebugApiEnabled; + } + + int getNonBlockingThreadPoolSize() { + return nonBlockingThreadPoolSize; + } + + int getBlockingThreadPoolSize() { + return blockingThreadPoolSize; + } + + public boolean isParallelAggregatedFactNoisingEnabled() { + return parallelAggregatedFactNoisingEnabled; } public void validate() { @@ -212,6 +296,17 @@ public void validate() { "Required Parameter %s missing, should be a writeable directory for writing results.", "--output_directory")); } + + if (getNonBlockingThreadPoolSize() < 1) { + throw new ParameterException( + "NonBlockingThreadPoolSize must be >= 1. Provided value: " + + getNonBlockingThreadPoolSize()); + } + + if (getBlockingThreadPoolSize() < 1) { + throw new ParameterException( + "BlockingThreadPoolSize must be >= 1. Provided value: " + getBlockingThreadPoolSize()); + } } public static class EpsilonValidator implements IParameterValidator { diff --git a/java/com/google/aggregate/adtech/worker/LocalWorkerModule.java b/java/com/google/aggregate/adtech/worker/LocalWorkerModule.java index c7fecf40..ac50da32 100644 --- a/java/com/google/aggregate/adtech/worker/LocalWorkerModule.java +++ b/java/com/google/aggregate/adtech/worker/LocalWorkerModule.java @@ -16,17 +16,26 @@ package com.google.aggregate.adtech.worker; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; + import com.fasterxml.jackson.databind.ObjectMapper; import com.google.aggregate.adtech.worker.Annotations.BenchmarkMode; import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; import com.google.aggregate.adtech.worker.Annotations.DomainOptional; +import com.google.aggregate.adtech.worker.Annotations.EnablePrivacyBudgetKeyFiltering; import com.google.aggregate.adtech.worker.Annotations.EnableStackTraceInResponse; import com.google.aggregate.adtech.worker.Annotations.EnableThresholding; import com.google.aggregate.adtech.worker.Annotations.MaxDepthOfStackTrace; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; import com.google.aggregate.adtech.worker.Annotations.OutputShardFileSizeBytes; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; import com.google.aggregate.adtech.worker.Annotations.ReportErrorThresholdPercentage; import 
com.google.aggregate.adtech.worker.Annotations.StreamingOutputDomainProcessing; +import com.google.aggregate.adtech.worker.Annotations.SupportedApis; import com.google.aggregate.adtech.worker.LibraryAnnotations.LocalOutputDirectory; import com.google.aggregate.adtech.worker.aggregation.concurrent.ConcurrentAggregationProcessor; import com.google.aggregate.adtech.worker.aggregation.domain.OutputDomainProcessor; @@ -39,6 +48,7 @@ import com.google.aggregate.adtech.worker.local.LocalBlobStorageClientModule; import com.google.aggregate.adtech.worker.model.serdes.PayloadSerdes; import com.google.aggregate.adtech.worker.model.serdes.cbor.CborPayloadSerdes; +import com.google.aggregate.adtech.worker.util.JobUtils; import com.google.aggregate.adtech.worker.validation.SimulationValidationModule; import com.google.aggregate.perf.StopwatchExporter; import com.google.aggregate.perf.export.NoOpStopwatchExporter; @@ -48,7 +58,9 @@ import com.google.aggregate.privacy.noise.proto.Params.NoiseParameters.Distribution; import com.google.aggregate.privacy.noise.testing.ConstantNoiseModule; import com.google.aggregate.shared.mapper.TimeObjectMapper; +import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.AbstractModule; @@ -67,6 +79,7 @@ import java.nio.file.Paths; import java.util.Optional; import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinPool; import java.util.function.Supplier; public final class LocalWorkerModule extends AbstractModule { @@ -108,8 +121,9 @@ protected void configure() { bind(RecordDecrypter.class).to(DeserializingReportDecrypter.class); bind(ObjectMapper.class).to(TimeObjectMapper.class); bind(JobProcessor.class).to(ConcurrentAggregationProcessor.class); - bind(Boolean.class).annotatedWith(StreamingOutputDomainProcessing.class) - .toInstance(localWorkerArgs.isStreamingOutputDomainProcessing()); + bind(Boolean.class) + .annotatedWith(StreamingOutputDomainProcessing.class) + .toInstance(localWorkerArgs.isStreamingOutputDomainProcessingEnabled()); bind(boolean.class).annotatedWith(BenchmarkMode.class).toInstance(false); bind(Path.class) .annotatedWith(LocalFileJobHandlerPath.class) @@ -117,8 +131,7 @@ protected void configure() { bind(Path.class) .annotatedWith(LocalOutputDirectory.class) .toInstance(Path.of(localWorkerArgs.getOutputDirectory()).toAbsolutePath()); - bind(new TypeLiteral>() { - }) + bind(new TypeLiteral>() {}) .annotatedWith(LocalFileJobHandlerResultPath.class) .toInstance( Optional.of( @@ -134,6 +147,9 @@ protected void configure() { .annotatedWith(NoisingL1Sensitivity.class) .toInstance(localWorkerArgs.getL1Sensitivity()); bind(double.class).annotatedWith(NoisingDelta.class).toInstance(localWorkerArgs.getDelta()); + bind(boolean.class) + .annotatedWith(ParallelAggregatedFactNoising.class) + .toInstance(localWorkerArgs.isParallelAggregatedFactNoisingEnabled()); if (localWorkerArgs.isNoNoising()) { install(new ConstantNoiseModule()); @@ -154,6 +170,24 @@ protected void configure() { bind(long.class) .annotatedWith(OutputShardFileSizeBytes.class) .toInstance(localWorkerArgs.getOutputShardFileSizeBytes()); + + bind(boolean.class) + .annotatedWith(EnablePrivacyBudgetKeyFiltering.class) + .toInstance(localWorkerArgs.isLabeledPrivacyBudgetKeysEnabled()); + } + + @Provides + @SupportedApis + ImmutableSet providesSupportedApis() { + if 
(localWorkerArgs.isAttributionReportingDebugApiEnabled()) { + return ImmutableSet.of( + ATTRIBUTION_REPORTING_API, + ATTRIBUTION_REPORTING_DEBUG_API, + PROTECTED_AUDIENCE_API, + SHARED_STORAGE_API); + } else { + return ImmutableSet.of(ATTRIBUTION_REPORTING_API, PROTECTED_AUDIENCE_API, SHARED_STORAGE_API); + } } @Provides @@ -173,6 +207,9 @@ Supplier> providesLocalFileJobParameters() { if (localWorkerArgs.isDebugRun()) { jobParametersBuilder.put("debug_run", "true"); } + if (!Strings.isNullOrEmpty(localWorkerArgs.getFilteringIds())) { + jobParametersBuilder.put(JobUtils.JOB_PARAM_FILTERING_IDS, localWorkerArgs.getFilteringIds()); + } return () -> (jobParametersBuilder.build()); } @@ -180,13 +217,23 @@ Supplier> providesLocalFileJobParameters() { @Singleton @NonBlockingThreadPool ListeningExecutorService provideNonBlockingThreadPool() { - return MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4)); + return MoreExecutors.listeningDecorator( + Executors.newFixedThreadPool(localWorkerArgs.getNonBlockingThreadPoolSize())); } @Provides @Singleton @BlockingThreadPool ListeningExecutorService provideBlockingThreadPool() { - return MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4)); + return MoreExecutors.listeningDecorator( + Executors.newFixedThreadPool(localWorkerArgs.getBlockingThreadPoolSize())); + } + + @Provides + @Singleton + @CustomForkJoinThreadPool + ListeningExecutorService provideCustomForkJoinThreadPool() { + return MoreExecutors.listeningDecorator( + new ForkJoinPool(localWorkerArgs.getNonBlockingThreadPoolSize())); } } diff --git a/java/com/google/aggregate/adtech/worker/ReportDecrypterAndValidator.java b/java/com/google/aggregate/adtech/worker/ReportDecrypterAndValidator.java index 20015602..69a83864 100644 --- a/java/com/google/aggregate/adtech/worker/ReportDecrypterAndValidator.java +++ b/java/com/google/aggregate/adtech/worker/ReportDecrypterAndValidator.java @@ -95,7 +95,6 @@ public DecryptionValidationResult decryptAndValidate(EncryptedReport encryptedRe errorMessageBuilder.setCategory(ErrorCounter.DECRYPTION_ERROR); } - errorMessageBuilder.setDetailedErrorMessage(detailedErrorMessage); return DecryptionValidationResult.builder() .addErrorMessage(errorMessageBuilder.build()) .build(); @@ -110,7 +109,7 @@ private static ErrorCounter errorCounterFromCipherCreationException(ErrorReason return ErrorCounter.DECRYPTION_KEY_NOT_FOUND; case INTERNAL: default: - return ErrorCounter.SERVICE_ERROR; + return ErrorCounter.INTERNAL_ERROR; } } } diff --git a/java/com/google/aggregate/adtech/worker/ResultLogger.java b/java/com/google/aggregate/adtech/worker/ResultLogger.java index 0a1662bc..d631dfa4 100644 --- a/java/com/google/aggregate/adtech/worker/ResultLogger.java +++ b/java/com/google/aggregate/adtech/worker/ResultLogger.java @@ -18,7 +18,6 @@ import com.google.aggregate.adtech.worker.exceptions.ResultLogException; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.common.collect.ImmutableList; import com.google.scp.operator.cpio.jobclient.model.Job; @@ -28,8 +27,4 @@ public interface ResultLogger { /** Takes the aggregation results and logs them to results. */ void logResults(ImmutableList results, Job ctx, boolean isDebugRun) throws ResultLogException; - - /** Logs encrypted aggregatable reports. 
*/ - void logReports(ImmutableList results, Job ctx, String shardNumber) - throws ResultLogException; } diff --git a/java/com/google/aggregate/adtech/worker/WorkerPullWorkService.java b/java/com/google/aggregate/adtech/worker/WorkerPullWorkService.java index 796c8562..ca9cb026 100644 --- a/java/com/google/aggregate/adtech/worker/WorkerPullWorkService.java +++ b/java/com/google/aggregate/adtech/worker/WorkerPullWorkService.java @@ -34,6 +34,7 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.privacysandbox.otel.OTelConfiguration; import com.google.privacysandbox.otel.Timer; +import com.google.privacysandbox.otel.TimerUnit; import com.google.scp.operator.cpio.jobclient.JobClient; import com.google.scp.operator.cpio.jobclient.model.Job; import com.google.scp.operator.cpio.jobclient.model.JobResult; @@ -46,9 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Guava service for repeatedly pulling from the pubsub and processing the request - */ +/** Guava service for repeatedly pulling from the pubsub and processing the request */ public final class WorkerPullWorkService extends AbstractExecutionThreadService { private static final Logger logger = LoggerFactory.getLogger(WorkerPullWorkService.class); @@ -138,7 +137,9 @@ protected void run() { Job currentJob = job.get(); JobResult jobResult = null; String jobID = toJobKeyString(currentJob.jobKey()); - try (Timer t = oTelConfiguration.createProdTimerStarted("total_execution_time", jobID)) { + try (Timer t = + oTelConfiguration.createProdTimerStarted( + "total_execution_time", jobID, TimerUnit.SECONDS)) { jobResult = jobProcessor.process(currentJob); } jobClient.markJobCompleted(jobResult); diff --git a/java/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessor.java b/java/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessor.java index cead36fe..41e101a4 100644 --- a/java/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessor.java +++ b/java/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessor.java @@ -33,11 +33,11 @@ import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_OUTPUT_DOMAIN_BUCKET_NAME; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE; import static com.google.common.collect.ImmutableList.toImmutableList; -import static com.google.common.util.concurrent.Futures.immediateFuture; import static com.google.scp.operator.shared.model.BackendModelUtil.toJobKeyString; import com.google.aggregate.adtech.worker.AggregationWorkerReturnCode; import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.EnablePrivacyBudgetKeyFiltering; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; import com.google.aggregate.adtech.worker.Annotations.ReportErrorThresholdPercentage; import com.google.aggregate.adtech.worker.Annotations.StreamingOutputDomainProcessing; @@ -58,7 +58,10 @@ import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.aggregate.adtech.worker.util.DebugSupportHelper; import com.google.aggregate.adtech.worker.util.JobResultHelper; +import com.google.aggregate.adtech.worker.util.JobUtils; import com.google.aggregate.adtech.worker.util.NumericConversions; +import com.google.aggregate.adtech.worker.util.ReportingOriginUtils; +import 
com.google.aggregate.adtech.worker.util.ReportingOriginUtils.InvalidReportingOriginException; import com.google.aggregate.adtech.worker.validation.ValidationException; import com.google.aggregate.perf.StopwatchRegistry; import com.google.aggregate.privacy.budgeting.bridge.PrivacyBudgetingServiceBridge; @@ -71,8 +74,7 @@ import com.google.common.base.Stopwatch; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.google.common.util.concurrent.Futures; -import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.primitives.UnsignedLong; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.errorprone.annotations.Var; import com.google.privacysandbox.otel.OTelConfiguration; @@ -89,7 +91,6 @@ import io.reactivex.rxjava3.schedulers.Schedulers; import java.io.IOException; import java.io.InputStream; -import java.math.BigInteger; import java.security.AccessControlException; import java.util.List; import java.util.Map; @@ -110,6 +111,8 @@ public final class ConcurrentAggregationProcessor implements JobProcessor { public static final String JOB_PARAM_ATTRIBUTION_REPORT_TO = "attribution_report_to"; // Key to indicate whether this is a debug job public static final String JOB_PARAM_DEBUG_RUN = "debug_run"; + // Key for user provided reporting site value in the job params of the job request. + public static final String JOB_PARAM_REPORTING_SITE = "reporting_site"; private static final int NUM_CPUS = Runtime.getRuntime().availableProcessors(); // In aggregation service, reading is much faster than decryption, and most of the time, it waits @@ -145,8 +148,11 @@ public final class ConcurrentAggregationProcessor implements JobProcessor { private final PrivacyBudgetingServiceBridge privacyBudgetingServiceBridge; private final ListeningExecutorService blockingThreadPool; private final ListeningExecutorService nonBlockingThreadPool; + private final boolean enablePrivacyBudgetKeyFiltering; private final OTelConfiguration oTelConfiguration; private final double defaultReportErrorThresholdPercentage; + + // TODO(b/338219415): Reuse this flag to enable full streaming approach. 
private final Boolean streamingOutputDomainProcessing; @Inject @@ -166,7 +172,8 @@ public final class ConcurrentAggregationProcessor implements JobProcessor { @BlockingThreadPool ListeningExecutorService blockingThreadPool, @NonBlockingThreadPool ListeningExecutorService nonBlockingThreadPool, @ReportErrorThresholdPercentage double defaultReportErrorThresholdPercentage, - @StreamingOutputDomainProcessing Boolean streamingOutputDomainProcessing) { + @StreamingOutputDomainProcessing Boolean streamingOutputDomainProcessing, + @EnablePrivacyBudgetKeyFiltering boolean enablePrivacyBudgetKeyFiltering) { this.reportDecrypterAndValidator = reportDecrypterAndValidator; this.aggregationEngineFactory = aggregationEngineFactory; this.outputDomainProcessor = outputDomainProcessor; @@ -183,6 +190,7 @@ public final class ConcurrentAggregationProcessor implements JobProcessor { this.oTelConfiguration = oTelConfiguration; this.defaultReportErrorThresholdPercentage = defaultReportErrorThresholdPercentage; this.streamingOutputDomainProcessing = streamingOutputDomainProcessing; + this.enablePrivacyBudgetKeyFiltering = enablePrivacyBudgetKeyFiltering; } /** @@ -215,7 +223,7 @@ public JobResult process(Job job) if (jobParams.containsKey(JOB_PARAM_OUTPUT_DOMAIN_BUCKET_NAME) && jobParams.containsKey(JOB_PARAM_OUTPUT_DOMAIN_BLOB_PREFIX) && (!jobParams.get(JOB_PARAM_OUTPUT_DOMAIN_BUCKET_NAME).isEmpty() - || !jobParams.get(JOB_PARAM_OUTPUT_DOMAIN_BLOB_PREFIX).isEmpty())) { + || !jobParams.get(JOB_PARAM_OUTPUT_DOMAIN_BLOB_PREFIX).isEmpty())) { outputDomainLocation = Optional.of( BlobStorageClient.getDataLocation( @@ -255,12 +263,19 @@ public JobResult process(Job job) try { double reportErrorThresholdPercentage = getReportErrorThresholdPercentage(jobParams); - AggregationEngine aggregationEngine = aggregationEngineFactory.create(); + @Var ImmutableSet filteringIds = ImmutableSet.of(); + if (enablePrivacyBudgetKeyFiltering) { + filteringIds = + NumericConversions.getUnsignedLongsFromString( + jobParams.get(JobUtils.JOB_PARAM_FILTERING_IDS), + JobUtils.JOB_PARAM_FILTERING_IDS_DELIMITER); + } + AggregationEngine aggregationEngine = aggregationEngineFactory.create(filteringIds); // TODO(b/218924983) Estimate report counts to enable failing early on report errors reaching // threshold. 
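// Illustrative example (hypothetical numbers): if the job's JobUtils.JOB_PARAM_INPUT_REPORT_COUNT
// parameter is 1000 and reportErrorThresholdPercentage is 10.0, the error threshold resolves to
// Math.round(1000 * 10.0 / 100) = 100, so the job can stop early once the counted report errors
// exceed that threshold.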
ErrorSummaryAggregator errorAggregator = ErrorSummaryAggregator.createErrorSummaryAggregator( - Optional.empty(), reportErrorThresholdPercentage); + getInputReportCountFromJobParams(jobParams), reportErrorThresholdPercentage); AtomicLong totalReportCount = new AtomicLong(0); try (Timer reportsProcessTimer = @@ -282,7 +297,6 @@ public JobResult process(Job job) NoisedAggregatedResultSet noisedResultSet; try { - if (streamingOutputDomainProcessing) { noisedResultSet = conflateWithDomainAndAddNoiseStreaming( outputDomainLocation, @@ -290,26 +304,9 @@ public JobResult process(Job job) aggregationEngine, debugPrivacyEpsilon, debugRun); - - } else { - noisedResultSet = - conflateWithDomainAndAddNoise( - outputDomainLocation, - outputDomainShards, - aggregationEngine, - debugPrivacyEpsilon, - debugRun); - } } catch (DomainReadException e) { throw new AggregationJobProcessException( INPUT_DATA_READ_FAILED, "Exception while reading domain input data.", e.getCause()); - } catch (ExecutionException e) { - if (e.getCause() instanceof DomainReadException) { - throw new AggregationJobProcessException( - INPUT_DATA_READ_FAILED, "Exception while reading domain input data.", e.getCause()); - } - throw new AggregationJobProcessException( - INTERNAL_ERROR, "Exception in processing domain.", e); } processingStopwatch.stop(); @@ -339,7 +336,7 @@ public JobResult process(Job job) job, errorSummary, jobCode, /* message= */ Optional.empty()); } catch (ResultLogException e) { throw new AggregationJobProcessException( - RESULT_WRITE_ERROR, "Exception occured while writing result.", e); + RESULT_WRITE_ERROR, "Exception occurred while writing result.", e); } catch (AccessControlException e) { throw new AggregationJobProcessException( PERMISSION_ERROR, "Exception because of missing permission.", e); @@ -360,6 +357,14 @@ public JobResult process(Job job) } } + private static Optional getInputReportCountFromJobParams(Map jobParams) { + String inputReportCount = jobParams.get(JobUtils.JOB_PARAM_INPUT_REPORT_COUNT); + if (inputReportCount == null || inputReportCount.trim().isEmpty()) { + return Optional.empty(); + } + return Optional.ofNullable(Long.parseLong(inputReportCount.trim())); + } + private NoisedAggregatedResultSet conflateWithDomainAndAddNoiseStreaming( Optional outputDomainLocation, ImmutableList outputDomainShards, @@ -376,34 +381,6 @@ private NoisedAggregatedResultSet conflateWithDomainAndAddNoiseStreaming( debugRun); } - private NoisedAggregatedResultSet conflateWithDomainAndAddNoise( - Optional outputDomainLocation, - ImmutableList outputDomainShards, - AggregationEngine engine, - Optional debugPrivacyEpsilon, - Boolean debugRun) - throws DomainReadException, ExecutionException, InterruptedException { - @Var - ListenableFuture> outputDomainFuture = - outputDomainLocation - .map(loc -> outputDomainProcessor.readAndDedupeDomain(loc, outputDomainShards)) - .orElse(immediateFuture(ImmutableSet.of())); - - ListenableFuture aggregationFinalFuture = - Futures.transform( - outputDomainFuture, - outputDomain -> - outputDomainProcessor.adjustAggregationWithDomainAndNoise( - noisedAggregationRunner, - outputDomain, - engine.makeAggregation(), - debugPrivacyEpsilon, - debugRun), - nonBlockingThreadPool); - - return aggregationFinalFuture.get(); - } - private double getReportErrorThresholdPercentage(Map jobParams) { String jobParamsReportErrorThresholdPercentage = jobParams.getOrDefault(JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE, null); @@ -426,16 +403,34 @@ private void consumePrivacyBudgetUnits(ImmutableList 
budgetsT return; } + String claimedIdentity; + // Validations ensure that at least one of the parameters will always exist. + if (job.requestInfo().getJobParametersMap().containsKey(JOB_PARAM_REPORTING_SITE)) { + claimedIdentity = job.requestInfo().getJobParametersMap().get(JOB_PARAM_REPORTING_SITE); + } else { + try { + claimedIdentity = + ReportingOriginUtils.convertReportingOriginToSite( + job.requestInfo().getJobParametersMap().get(JOB_PARAM_ATTRIBUTION_REPORT_TO)); + } catch (InvalidReportingOriginException e) { + // This should never happen due to validations ensuring that the reporting origin is always + // valid. + throw new IllegalStateException( + "Invalid reporting origin found while consuming budget, this should not happen as job" + + " validations ensure the reporting origin is always valid.", + e); + } + } + ImmutableList missingPrivacyBudgetUnits; try { try (Timer t = oTelConfiguration.createDebugTimerStarted("pbs_latency", toJobKeyString(job.jobKey()))) { + final String reportingOrigin = + job.requestInfo().getJobParametersMap().get(JOB_PARAM_ATTRIBUTION_REPORT_TO); missingPrivacyBudgetUnits = privacyBudgetingServiceBridge.consumePrivacyBudget( - budgetsToConsume, /* budgetsToConsume */ - job.requestInfo() /* attributionReportTo */ - .getJobParametersMap() - .get(JOB_PARAM_ATTRIBUTION_REPORT_TO)); + budgetsToConsume, claimedIdentity); } } catch (PrivacyBudgetingServiceBridgeException e) { if (e.getStatusCode() != null) { @@ -500,6 +495,9 @@ private Flowable readData(DataLocation shard) { return Flowable.using( () -> { try { + if (blobStorageClient.getBlobSize(shard) <= 0) { + return InputStream.nullInputStream(); + } return blobStorageClient.getBlob(shard); } catch (BlobStorageClientException e) { throw new ConcurrentShardReadException(e); @@ -542,6 +540,10 @@ private void processReports( decryptAndAggregateReports( encryptedReports, job, aggregationEngine, errorAggregator)), NUM_PROCESS_THREADS) + .takeUntil( + unused -> { + return errorAggregator.countsAboveThreshold(); + }) .blockingSubscribe(); } diff --git a/java/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessor.java b/java/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessor.java index 98b3fd24..a46de36e 100644 --- a/java/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessor.java +++ b/java/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessor.java @@ -16,27 +16,19 @@ package com.google.aggregate.adtech.worker.aggregation.domain; -import static com.google.common.collect.ImmutableList.toImmutableList; - import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; import com.google.aggregate.adtech.worker.Annotations.DomainOptional; import com.google.aggregate.adtech.worker.Annotations.EnableThresholding; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; import com.google.aggregate.adtech.worker.exceptions.DomainReadException; import com.google.aggregate.perf.StopwatchRegistry; -import com.google.aggregate.protocol.avro.AvroOutputDomainReader; import com.google.aggregate.protocol.avro.AvroOutputDomainReaderFactory; import com.google.aggregate.protocol.avro.AvroOutputDomainRecord; -import com.google.common.base.Stopwatch; -import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient; -import 
com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient.BlobStorageClientException; -import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation; import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; -import java.util.UUID; import java.util.stream.Stream; import javax.inject.Inject; import org.apache.avro.AvroRuntimeException; @@ -69,30 +61,11 @@ public AvroOutputDomainProcessor( this.stopwatches = stopwatches; } - @Override - protected ImmutableList readShard(DataLocation outputDomainLocation) { - Stopwatch stopwatch = - stopwatches.createStopwatch(String.format("domain-shard-read-%s", UUID.randomUUID())); - stopwatch.start(); - try (InputStream domainStream = blobStorageClient.getBlob(outputDomainLocation)) { - AvroOutputDomainReader outputDomainReader = avroReaderFactory.create(domainStream); - ImmutableList shard = - outputDomainReader - .streamRecords() - .map(AvroOutputDomainRecord::bucket) - .collect(toImmutableList()); - stopwatch.stop(); - return shard; - } catch (IOException | BlobStorageClientException | AvroRuntimeException e) { - stopwatch.stop(); // stop the stopwatch if an exception occurs - throw new DomainReadException(e); - } - } - @Override public Stream readInputStream(InputStream shardInputStream) { try { - return avroReaderFactory.create(shardInputStream) + return avroReaderFactory + .create(shardInputStream) .streamRecords() .map(AvroOutputDomainRecord::bucket); } catch (IOException | AvroRuntimeException e) { diff --git a/java/com/google/aggregate/adtech/worker/aggregation/domain/OutputDomainProcessor.java b/java/com/google/aggregate/adtech/worker/aggregation/domain/OutputDomainProcessor.java index 3d491ec3..0fc70173 100644 --- a/java/com/google/aggregate/adtech/worker/aggregation/domain/OutputDomainProcessor.java +++ b/java/com/google/aggregate/adtech/worker/aggregation/domain/OutputDomainProcessor.java @@ -26,17 +26,8 @@ import com.google.aggregate.privacy.noise.NoisedAggregationRunner; import com.google.aggregate.privacy.noise.model.NoisedAggregatedResultSet; import com.google.aggregate.privacy.noise.model.NoisedAggregationResult; -import com.google.common.base.Stopwatch; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; -import com.google.common.collect.MapDifference; -import com.google.common.collect.MapDifference.ValueDifference; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.google.common.util.concurrent.Futures; -import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient; import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient.BlobStorageClientException; @@ -48,15 +39,12 @@ import java.io.InputStream; import java.math.BigInteger; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; -import java.util.UUID; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import org.slf4j.Logger; @@ -96,54 +84,6 @@ public abstract class OutputDomainProcessor { this.enableThresholding = enableThresholding; } - /** - * Asynchronously reads output domain from {@link 
DataLocation} shards and returns a deduped set - * of buckets in output domain as {@link BigInteger}. The input data location can contain many - * shards. - * - *

Shards are read asynchronously. If there is an error reading the shards the future will - * complete with an exception. - * - * @return ListenableFuture containing the output domain buckets in a set - * @throws DomainReadException (unchecked) if there is an error listing the shards or the location - * provided has no shards present. - */ - public ListenableFuture> readAndDedupeDomain( - DataLocation outputDomainLocation, ImmutableList shards) { - ImmutableList>> futureShardReads = - shards.stream() - .map(shard -> blockingThreadPool.submit(() -> readShard(shard))) - .collect(ImmutableList.toImmutableList()); - - ListenableFuture>> allFutureShards = - Futures.allAsList(futureShardReads); - - return Futures.transform( - allFutureShards, - readShards -> { - Stopwatch stopwatch = - stopwatches.createStopwatch( - String.format("domain-combine-shards-%s", UUID.randomUUID())); - stopwatch.start(); - ImmutableSet domain = - readShards.stream() - .flatMap(Collection::stream) - .collect(ImmutableSet.toImmutableSet()); - stopwatch.stop(); - if (domain.isEmpty()) { - throw new DomainReadException( - new IllegalArgumentException( - String.format( - "No output domain provided in the location. : %s. Please refer to the API" - + " documentation for output domain parameters at" - + " https://github.com/privacysandbox/aggregation-service/blob/main/docs/api.md", - outputDomainLocation))); - } - return domain; - }, - nonBlockingThreadPool); - } - /** * Read all shards at {@link DataLocation} on the cloud storage provider. * @@ -190,7 +130,8 @@ public NoisedAggregatedResultSet adjustAggregationWithDomainAndNoiseStreaming( Boolean debugRun) throws DomainReadException { Set reportsOnlyKeys = Sets.newConcurrentHashSet(aggregationEngine.getKeySet()); - Set domainOnlyKeys = Sets.newConcurrentHashSet(); + Set overlappingKeys = Sets.newConcurrentHashSet(); + AtomicLong outputDomainTotalCount = new AtomicLong(0); Flowable.fromStream(domainShards.stream()) @@ -210,9 +151,9 @@ public NoisedAggregatedResultSet adjustAggregationWithDomainAndNoiseStreaming( domainKeys -> { domainKeys.forEach( domainKey -> { - // Domain only keys are separately annotated only for debug run. - if (debugRun && !aggregationEngine.containsKey(domainKey)) { - domainOnlyKeys.add(domainKey); + // keys are separately annotated only for debug run. + if (debugRun && reportsOnlyKeys.contains(domainKey)) { + overlappingKeys.add(domainKey); } reportsOnlyKeys.remove(domainKey); @@ -270,11 +211,11 @@ public NoisedAggregatedResultSet adjustAggregationWithDomainAndNoiseStreaming( noisedOverlappingAndDomainResults .noisedAggregatedFacts() .forEach( - (f) -> { - if (domainOnlyKeys.contains(f.bucket())) { - domainOnlyFacts.add(f); + (aggregatedFact) -> { + if (overlappingKeys.contains(aggregatedFact.getBucket())) { + overlappingFacts.add(aggregatedFact); } else { - overlappingFacts.add(f); + domainOnlyFacts.add(aggregatedFact); } }); @@ -322,6 +263,9 @@ private Flowable readShardData(DataLocation shard) { return Flowable.using( () -> { try { + if (blobStorageClient.getBlobSize(shard) <= 0) { + return InputStream.nullInputStream(); + } return blobStorageClient.getBlob(shard); } catch (BlobStorageClientException e) { throw new DomainReadException(e); @@ -331,98 +275,5 @@ private Flowable readShardData(DataLocation shard) { InputStream::close); } - /** - * Conflate aggregated facts with the output domain and noise results using the Maps.Difference - * API. 
- * - * @return NoisedAggregatedResultSet containing the combined and noised Aggregatable reports and - * output domain buckets. - */ - public NoisedAggregatedResultSet adjustAggregationWithDomainAndNoise( - NoisedAggregationRunner noisedAggregationRunner, - ImmutableSet outputDomain, - ImmutableMap reportsAggregatedMap, - Optional debugPrivacyEpsilon, - Boolean debugRun) { - // This pseudo-aggregation has all zeroes for the output domain. If a key is present in the - // output domain, but not in the aggregation itself, a zero is inserted which will later be - // noised to some value. - ImmutableMap outputDomainPseudoAggregation = - outputDomain.stream() - .collect( - ImmutableMap.toImmutableMap( - Function.identity(), key -> AggregatedFact.create(key, /* metric= */ 0))); - - // Difference by key is computed so that the output can be adjusted for the output domain. - // Keys that are in the aggregation data, but not in the output domain, are subject to both - // noising and thresholding. - // Otherwise, the data is subject to noising only. - MapDifference pseudoDiff = - Maps.difference(reportsAggregatedMap, outputDomainPseudoAggregation); - - // The values for common keys should in theory be differing, since the pseudo aggregation will - // have all zeroes, while the 'real' aggregation will have non-zeroes, but just in case to - // cover overlapping zeroes, matching keys are also processed. - // `overlappingZeroes` includes all the keys present in both domain and reports but - // the values are 0. - Iterable overlappingZeroes = pseudoDiff.entriesInCommon().values(); - // `overlappingNonZeroes` includes all the keys present in both domain and reports, and the - // value is non-zero in reports. - Iterable overlappingNonZeroes = - Maps.transformValues(pseudoDiff.entriesDiffering(), ValueDifference::leftValue).values(); - // `domainOutputOnlyZeroes` only includes keys in domain. - Iterable domainOutputOnlyZeroes = pseudoDiff.entriesOnlyOnRight().values(); - - NoisedAggregationResult noisedOverlappingNoThreshold = - noisedAggregationRunner.noise( - Iterables.concat(overlappingZeroes, overlappingNonZeroes), debugPrivacyEpsilon); - - NoisedAggregationResult noisedDomainOnlyNoThreshold = - noisedAggregationRunner.noise(domainOutputOnlyZeroes, debugPrivacyEpsilon); - - NoisedAggregationResult noisedDomainNoThreshold = - NoisedAggregationResult.merge(noisedOverlappingNoThreshold, noisedDomainOnlyNoThreshold); - - NoisedAggregationResult noisedReportsOnlyNoThreshold = null; - if (debugRun || domainOptional) { - noisedReportsOnlyNoThreshold = - noisedAggregationRunner.noise( - pseudoDiff.entriesOnlyOnLeft().values(), debugPrivacyEpsilon); - } - - NoisedAggregatedResultSet.Builder noisedResultSetBuilder = NoisedAggregatedResultSet.builder(); - - if (debugRun) { - noisedResultSetBuilder.setNoisedDebugResult( - getAnnotatedDebugResults( - noisedReportsOnlyNoThreshold, - noisedDomainOnlyNoThreshold, - noisedOverlappingNoThreshold)); - } - - if (domainOptional) { - NoisedAggregationResult noisedReportsDomainOptional = - enableThresholding - ? 
noisedAggregationRunner.threshold( - noisedReportsOnlyNoThreshold.noisedAggregatedFacts(), debugPrivacyEpsilon) - : noisedReportsOnlyNoThreshold; - - return noisedResultSetBuilder - .setNoisedResult( - NoisedAggregationResult.merge(noisedDomainNoThreshold, noisedReportsDomainOptional)) - .build(); - } else { - return noisedResultSetBuilder.setNoisedResult(noisedDomainNoThreshold).build(); - } - } - - /** - * Reads a given shard of the output domain - * - * @param shardLocation the location of the file to read - * @return the contents of the shard as a {@link ImmutableList} - */ - protected abstract ImmutableList readShard(DataLocation shardLocation); - public abstract Stream readInputStream(InputStream shardInputStream); } diff --git a/java/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessor.java b/java/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessor.java index d7f10860..ac9a7df4 100644 --- a/java/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessor.java +++ b/java/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessor.java @@ -25,23 +25,17 @@ import com.google.aggregate.adtech.worker.exceptions.DomainReadException; import com.google.aggregate.adtech.worker.util.NumericConversions; import com.google.aggregate.perf.StopwatchRegistry; -import com.google.common.base.Stopwatch; import com.google.common.collect.ImmutableList; import com.google.common.io.ByteStreams; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient; -import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient.BlobStorageClientException; -import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation; import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; -import java.util.UUID; import java.util.stream.Stream; import javax.inject.Inject; -/** - * Reads output domain from a text file with each aggregation key on a separate line. - */ +/** Reads output domain from a text file with each aggregation key on a separate line. 
*/ public final class TextOutputDomainProcessor extends OutputDomainProcessor { private final BlobStorageClient blobStorageClient; @@ -66,24 +60,6 @@ public TextOutputDomainProcessor( this.stopwatches = stopwatches; } - public ImmutableList readShard(DataLocation outputDomainLocation) { - Stopwatch stopwatch = - stopwatches.createStopwatch(String.format("domain-shard-read-%s", UUID.randomUUID())); - stopwatch.start(); - try (InputStream domainStream = blobStorageClient.getBlob(outputDomainLocation)) { - byte[] bytes = ByteStreams.toByteArray(domainStream); - try (Stream fileLines = NumericConversions.createStringFromByteArray(bytes).lines()) { - ImmutableList shard = - fileLines.map(NumericConversions::createBucketFromString).collect(toImmutableList()); - return shard; - } - } catch (IOException | BlobStorageClientException | IllegalArgumentException e) { - throw new DomainReadException(e); - } finally { - stopwatch.stop(); - } - } - public Stream readInputStream(InputStream shardInputStream) { try { byte[] bytes = ByteStreams.toByteArray(shardInputStream); diff --git a/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngine.java b/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngine.java index 6dbaa80b..f638dbd2 100644 --- a/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngine.java +++ b/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngine.java @@ -30,11 +30,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.common.primitives.UnsignedLong; import java.math.BigInteger; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.LongAdder; import java.util.function.Consumer; import java.util.function.Function; @@ -55,7 +57,7 @@ public final class AggregationEngine implements Consumer { private final PrivacyBudgetKeyGeneratorFactory privacyBudgetKeyGeneratorFactory; // Track aggregations for individual facts, keyed by fact buckets that are 128-bit integers. - private final ConcurrentMap aggregationMap; + private final ConcurrentMap aggregationMap; // Tracks distinct privacy budget unit identifiers for the reports aggregated. private final Set privacyBudgetUnits; @@ -64,7 +66,7 @@ public final class AggregationEngine implements Consumer { private final Set reportIdSet; /** Queried filteringIds to filter payload contributions. */ - private final ImmutableSet filteringIds; + private final ImmutableSet filteringIds; /** * Consumes a report by adding its individual facts to the aggregation Only reports with unique @@ -87,9 +89,9 @@ public void accept(Report report) { } /** Checks if the queried filteringId matches the fact's. */ - private static boolean containsFilteringId(Fact fact, ImmutableSet filteringIds) { + private static boolean containsFilteringId(Fact fact, ImmutableSet filteringIds) { // id = 0 is the default for reports w/o ids. - int factId = fact.id().orElse(0); + UnsignedLong factId = fact.id().orElse(UnsignedLong.ZERO); return filteringIds.contains(factId); } @@ -97,7 +99,7 @@ private static boolean containsFilteringId(Fact fact, ImmutableSet filt * Insert a new key with an empty fact. PBKs are not calculated for keys added using this method. 
*/ public void accept(BigInteger key) { - aggregationMap.computeIfAbsent(key, unused -> new SingleFactAggregation()); + aggregationMap.computeIfAbsent(key, unused -> new LongAdder()); } public boolean containsKey(BigInteger key) { @@ -118,9 +120,15 @@ private static boolean isNullFact(Fact fact) { } /** Calculates Privacy Budget Keys for the report for the filteringId. */ - private void addPrivacyBudgetKey(SharedInfo sharedInfo, int filteringId) { + private void addPrivacyBudgetKey(SharedInfo sharedInfo, UnsignedLong filteringId) { + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .setFilteringId(filteringId) + .build(); + Optional privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput); if (privacyBudgetKeyGenerator.isEmpty()) { // Impossible because validations ensure only the supported reports are allowed. throw new IllegalStateException( @@ -130,16 +138,12 @@ private void addPrivacyBudgetKey(SharedInfo sharedInfo, int filteringId) { sharedInfo.api().get(), sharedInfo.version())); } String privacyBudgetKey = - privacyBudgetKeyGenerator - .get() - .generatePrivacyBudgetKey( - PrivacyBudgetKeyInput.builder() - .setSharedInfo(sharedInfo) - .setFilteringId(filteringId) - .build()); + privacyBudgetKeyGenerator.get().generatePrivacyBudgetKey(privacyBudgetKeyInput); PrivacyBudgetUnit budgetUnitId = PrivacyBudgetUnit.create( - privacyBudgetKey, sharedInfo.scheduledReportTime().truncatedTo(HOURS)); + privacyBudgetKey, + sharedInfo.scheduledReportTime().truncatedTo(HOURS), + sharedInfo.reportingOrigin()); privacyBudgetUnits.add(budgetUnitId); } @@ -149,12 +153,8 @@ private void addPrivacyBudgetKey(SharedInfo sharedInfo, int filteringId) { // TODO: investigate enforcing call of makeAggregation strictly after all accepts. public ImmutableMap makeAggregation() { return aggregationMap.entrySet().stream() - .map( - factAggr -> { - SingleFactAggregation aggregation = factAggr.getValue(); - return AggregatedFact.create(factAggr.getKey(), aggregation.getSum()); - }) - .collect(toImmutableMap(AggregatedFact::bucket, Function.identity())); + .map(factAggr -> AggregatedFact.create(factAggr.getKey(), factAggr.getValue().longValue())) + .collect(toImmutableMap(AggregatedFact::getBucket, Function.identity())); } /** Gets a set of distinct privacy budget units observed during the aggregation */ @@ -170,17 +170,15 @@ public ImmutableList getPrivacyBudgetUnits() { * fact is just updated. 
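*
* Illustrative example (hypothetical values): accepting facts (bucket=5, value=2) and
* (bucket=5, value=3) leaves a single LongAdder for bucket 5 whose accumulated sum is 5.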
*/ private void upsertAggregationForFact(Fact fact) { - aggregationMap - .computeIfAbsent(fact.bucket(), unused -> new SingleFactAggregation()) - .accept(fact); + aggregationMap.computeIfAbsent(fact.bucket(), unused -> new LongAdder()).add(fact.value()); } AggregationEngine( PrivacyBudgetKeyGeneratorFactory privacyBudgetKeyGeneratorFactory, - ConcurrentMap aggregationMap, + ConcurrentMap aggregationMap, Set privacyBudgetUnits, Set reportIdSet, - ImmutableSet filteringIds) { + ImmutableSet filteringIds) { this.privacyBudgetKeyGeneratorFactory = privacyBudgetKeyGeneratorFactory; this.aggregationMap = aggregationMap; this.privacyBudgetUnits = privacyBudgetUnits; diff --git a/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineFactory.java b/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineFactory.java index f6c3f4a4..82789702 100644 --- a/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineFactory.java +++ b/java/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineFactory.java @@ -22,10 +22,12 @@ import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorFactory; import com.google.common.collect.ImmutableSet; import com.google.common.collect.MapMaker; +import com.google.common.primitives.UnsignedLong; import java.math.BigInteger; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.LongAdder; import javax.inject.Inject; /** @@ -43,7 +45,7 @@ public class AggregationEngineFactory { /** * Creates AggregationEngine object with queried filteringId. */ - public AggregationEngine create(ImmutableSet filteringIds) { + public AggregationEngine create(ImmutableSet filteringIds) { // Number of logical cores available to the JVM is used to hint the concurrent map maker. Any // number will work, this is just a hint that is passed to the map maker, but different values // may result in different performance. @@ -53,17 +55,17 @@ public AggregationEngine create(ImmutableSet filteringIds) { // is used, the number obtained here is 2x larger than the number of physical cores. int concurrentMapConcurrencyHint = Runtime.getRuntime().availableProcessors(); - ConcurrentMap aggregationMap = + ConcurrentMap aggregationMap = new MapMaker().concurrencyLevel(concurrentMapConcurrencyHint).makeMap(); Set privacyBudgetUnits = newConcurrentHashSet(); Set reportIdSet = newConcurrentHashSet(); // null and zero are to be treated as the same. 
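Filtering ids are migrated from int to Guava's UnsignedLong, and the comment above notes that null and zero are treated the same: an empty query collapses to the default id 0. A small hedged sketch of that normalization, using only the Guava types referenced in this diff:

import com.google.common.collect.ImmutableSet;
import com.google.common.primitives.UnsignedLong;

final class FilteringIdNormalizationSketch {
  // An empty set of queried ids means "default label only", i.e. filtering id 0.
  static ImmutableSet<UnsignedLong> normalize(ImmutableSet<UnsignedLong> queriedIds) {
    return queriedIds.isEmpty() ? ImmutableSet.of(UnsignedLong.ZERO) : queriedIds;
  }

  public static void main(String[] args) {
    System.out.println(normalize(ImmutableSet.of())); // [0]
    // UnsignedLong covers the full 64-bit id space, which a signed int could not represent.
    System.out.println(normalize(ImmutableSet.of(UnsignedLong.valueOf("18446744073709551615"))));
  }
}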
- ImmutableSet.Builder filteringIdsEnhanced = new ImmutableSet.Builder<>(); + ImmutableSet.Builder filteringIdsEnhanced = new ImmutableSet.Builder<>(); filteringIdsEnhanced.addAll(filteringIds); if (filteringIds.isEmpty()) { - filteringIdsEnhanced.add(0); + filteringIdsEnhanced.add(UnsignedLong.ZERO); } return new AggregationEngine( diff --git a/java/com/google/aggregate/adtech/worker/aggregation/engine/BUILD b/java/com/google/aggregate/adtech/worker/aggregation/engine/BUILD index 34562bb2..25dcb8dd 100644 --- a/java/com/google/aggregate/adtech/worker/aggregation/engine/BUILD +++ b/java/com/google/aggregate/adtech/worker/aggregation/engine/BUILD @@ -24,7 +24,6 @@ java_library( ], javacopts = ["-Xep:Var"], deps = [ - ":single_fact_aggregation", "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/privacy/budgeting/bridge:privacy_budgeting_service_bridge", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", @@ -32,12 +31,3 @@ java_library( "//java/external:javax_inject", ], ) - -java_library( - name = "single_fact_aggregation", - srcs = ["SingleFactAggregation.java"], - javacopts = ["-Xep:Var"], - deps = [ - "//java/com/google/aggregate/adtech/worker/model", - ], -) diff --git a/java/com/google/aggregate/adtech/worker/aggregation/engine/SingleFactAggregation.java b/java/com/google/aggregate/adtech/worker/aggregation/engine/SingleFactAggregation.java deleted file mode 100644 index 14898400..00000000 --- a/java/com/google/aggregate/adtech/worker/aggregation/engine/SingleFactAggregation.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.aggregation.engine; - -import com.google.aggregate.adtech.worker.model.Fact; -import java.util.concurrent.atomic.LongAdder; -import java.util.function.Consumer; - -/** - * Aggregator for a single fact, tracks count and sum - * - *

The aggregator is thread safe, leveraging highly optimized Java 8+ long accumulators. - */ -final class SingleFactAggregation implements Consumer { - - private final LongAdder sum; - - SingleFactAggregation() { - sum = new LongAdder(); - } - - @Override - public void accept(Fact fact) { - sum.add(fact.value()); - } - - /** - * Gets the sum recorded for the aggregation, at the time of call (if other threads are still - * pushing data into the aggregation, it may be inaccurate, but it is expected that sum is - * collected only after everything is pushed). - */ - long getSum() { - return sum.longValue(); - } -} diff --git a/java/com/google/aggregate/adtech/worker/encryption/NoopRecordEncrypter.java b/java/com/google/aggregate/adtech/worker/encryption/NoopRecordEncrypter.java index bf1d724d..1aa47d6a 100644 --- a/java/com/google/aggregate/adtech/worker/encryption/NoopRecordEncrypter.java +++ b/java/com/google/aggregate/adtech/worker/encryption/NoopRecordEncrypter.java @@ -17,7 +17,6 @@ package com.google.aggregate.adtech.worker.encryption; import com.google.aggregate.adtech.worker.model.EncryptedReport; -import com.google.aggregate.adtech.worker.model.Report; import com.google.common.io.ByteSource; public final class NoopRecordEncrypter implements RecordEncrypter { @@ -27,10 +26,4 @@ public EncryptedReport encryptSingleReport( ByteSource report, String sharedInfo, String reportVersion) throws EncryptionException { return null; } - - @Override - public EncryptedReport encryptReport(Report report, String publicKeyUri) - throws EncryptionException { - return null; - } } diff --git a/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypter.java b/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypter.java index 6636c9a6..19fa6c66 100644 --- a/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypter.java +++ b/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypter.java @@ -17,7 +17,6 @@ package com.google.aggregate.adtech.worker.encryption; import com.google.aggregate.adtech.worker.model.EncryptedReport; -import com.google.aggregate.adtech.worker.model.Report; import com.google.common.io.ByteSource; /** Interface that does encryption for any provided encryption algorithm. */ @@ -29,16 +28,6 @@ public interface RecordEncrypter { EncryptedReport encryptSingleReport(ByteSource report, String sharedInfo, String reportVersion) throws EncryptionException; - /** - * Encrypts a deserialized Report with keys provided by the publicKeyUri. 
- * - * @param report - * @param publicKeyUri - * @return EncryptedReport - * @throws EncryptionException - */ - EncryptedReport encryptReport(Report report, String publicKeyUri) throws EncryptionException; - final class EncryptionException extends Exception { public EncryptionException(Throwable cause) { diff --git a/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImpl.java b/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImpl.java index 362f357b..4b591d71 100644 --- a/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImpl.java +++ b/java/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImpl.java @@ -21,37 +21,21 @@ import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKey; import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKeyService; import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKeyService.KeyFetchException; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.ReEncryptionKeyService; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.ReEncryptionKeyService.ReencryptionKeyFetchException; import com.google.aggregate.adtech.worker.model.EncryptedReport; -import com.google.aggregate.adtech.worker.model.Report; -import com.google.aggregate.adtech.worker.model.serdes.PayloadSerdes; -import com.google.aggregate.adtech.worker.model.serdes.SharedInfoSerdes; import com.google.common.io.ByteSource; import com.google.inject.Inject; -import java.util.Optional; /** {@link RecordEncrypter} implementation. */ public final class RecordEncrypterImpl implements RecordEncrypter { private final EncryptionCipherFactory encryptionCipherFactory; private final EncryptionKeyService encryptionKeyService; - private final ReEncryptionKeyService reEncryptionKeyService; - private final PayloadSerdes payloadSerdes; - private final SharedInfoSerdes sharedInfoSerdes; @Inject public RecordEncrypterImpl( - EncryptionCipherFactory encryptionCipherFactory, - EncryptionKeyService encryptionKeyService, - ReEncryptionKeyService reEncryptionKeyService, - PayloadSerdes payloadSerdes, - SharedInfoSerdes sharedInfoSerdes) { + EncryptionCipherFactory encryptionCipherFactory, EncryptionKeyService encryptionKeyService) { this.encryptionCipherFactory = encryptionCipherFactory; this.encryptionKeyService = encryptionKeyService; - this.reEncryptionKeyService = reEncryptionKeyService; - this.payloadSerdes = payloadSerdes; - this.sharedInfoSerdes = sharedInfoSerdes; } @Override @@ -71,30 +55,4 @@ public EncryptedReport encryptSingleReport( throw new EncryptionException(e); } } - - @Override - public EncryptedReport encryptReport(Report report, String cloudEncryptionKeyVendingUri) - throws EncryptionException { - try { - EncryptionKey encryptionKey = - reEncryptionKeyService.getEncryptionPublicKey(cloudEncryptionKeyVendingUri); - EncryptionCipher encryptionCipher = - encryptionCipherFactory.encryptionCipherFor(encryptionKey.key()); - String sharedInfoString = - sharedInfoSerdes.reverse().convert(Optional.of(report.sharedInfo())); - return EncryptedReport.builder() - .setPayload( - encryptionCipher.encryptReport( - payloadSerdes.reverse().convert(Optional.of(report.payload())), - sharedInfoString, - report.sharedInfo().version())) - .setKeyId(encryptionKey.id()) - .setSharedInfo(sharedInfoString) - .build(); - } catch (CipherCreationException | ReencryptionKeyFetchException e) { - throw new EncryptionException(e); - } catch (PayloadEncryptionException e) { - throw new 
EncryptionException("Encountered PayloadEncryptionException."); - } - } } diff --git a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/BUILD b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/BUILD index 88138c69..f3102177 100644 --- a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/BUILD +++ b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/BUILD @@ -21,7 +21,6 @@ java_library( srcs = [ "EncryptionKey.java", "EncryptionKeyService.java", - "ReEncryptionKeyService.java", ], javacopts = ["-Xep:Var"], deps = [ diff --git a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/ReEncryptionKeyService.java b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/ReEncryptionKeyService.java deleted file mode 100644 index 59a726a2..00000000 --- a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/ReEncryptionKeyService.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.encryption.hybrid.key; - -/** Interface for retrieving public encryption keys from provided public key hosting URI */ -public interface ReEncryptionKeyService { - - /** Retrieve a key from the aggregate service KMS */ - EncryptionKey getEncryptionPublicKey(String keyVendingUri) throws ReencryptionKeyFetchException; - - final class ReencryptionKeyFetchException extends Exception { - public ReencryptionKeyFetchException(Throwable cause) { - super(cause); - } - - public ReencryptionKeyFetchException(String message) { - super(message); - } - } -} diff --git a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD index 47f3053f..d32ad8d1 100644 --- a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD +++ b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD @@ -21,12 +21,10 @@ java_library( srcs = [ "CloudEncryptionKeyModule.java", "CloudEncryptionKeyService.java", - "CloudReEncryptionKeyService.java", ], javacopts = ["-Xep:Var"], deps = [ "//java/com/google/aggregate/adtech/worker/encryption/hybrid/key", - "//java/com/google/aggregate/adtech/worker/encryption/publickeyuri:encryption_key_config", "//java/com/google/aggregate/shared/mapper", "//java/external:apache_httpclient", "//java/external:apache_httpcore", diff --git a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/CloudReEncryptionKeyService.java b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/CloudReEncryptionKeyService.java deleted file mode 100644 index d9379b52..00000000 --- a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/CloudReEncryptionKeyService.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.encryption.hybrid.key.cloud; - -import static com.google.aggregate.adtech.worker.encryption.publickeyuri.CloudEncryptionKeyConfig.NUM_ENCRYPTION_KEYS; - -import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKey; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKeyService; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.ReEncryptionKeyService; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.ImmutableList; -import com.google.common.primitives.Ints; -import com.google.inject.Inject; -import com.google.protobuf.util.JsonFormat; -import com.google.scp.coordinator.protos.keymanagement.keyhosting.api.v1.EncodedPublicKeyProto.EncodedPublicKey; -import com.google.scp.coordinator.protos.keymanagement.keyhosting.api.v1.GetActivePublicKeysResponseProto.GetActivePublicKeysResponse; -import com.google.scp.shared.api.util.HttpClientResponse; -import com.google.scp.shared.api.util.HttpClientWrapper; -import com.google.scp.shared.util.PublicKeyConversionUtil; -import java.net.URI; -import java.security.GeneralSecurityException; -import java.time.Duration; -import java.util.Random; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.HttpGet; - -/** - * TODO(b/321088264): Merge CloudReEncryptionKeyService and CloudEncryptionKeyService {@link - * EncryptionKeyService} implementation to get a random encryption key from public key endpoint for - * reencryption. - */ -public final class CloudReEncryptionKeyService implements ReEncryptionKeyService { - - private static final int REQUEST_TIMEOUT_DURATION = - Ints.checkedCast(Duration.ofMinutes(1).toMillis()); - private static final RequestConfig REQUEST_CONFIG = - RequestConfig.custom() - .setConnectionRequestTimeout(REQUEST_TIMEOUT_DURATION) - .setConnectTimeout(REQUEST_TIMEOUT_DURATION) - .setSocketTimeout(REQUEST_TIMEOUT_DURATION) - .build(); - private static final int MAX_CACHE_SIZE = 5; - private static final long CACHE_ENTRY_TTL_SEC = 3600; - private static final Random RANDOM = new Random(); - private final HttpClientWrapper httpClient; - private final LoadingCache> encryptionKeysCache = - CacheBuilder.newBuilder() - .maximumSize(MAX_CACHE_SIZE) - .expireAfterWrite(CACHE_ENTRY_TTL_SEC, TimeUnit.SECONDS) - .concurrencyLevel(Runtime.getRuntime().availableProcessors()) - .build( - new CacheLoader<>() { - @Override - public ImmutableList load(String uri) - throws ReencryptionKeyFetchException { - return getPublicKeysFromService(uri); - } - }); - - @Inject - public CloudReEncryptionKeyService(HttpClientWrapper httpClient) { - this.httpClient = httpClient; - } - - /** Throws ReencryptionKeyFetchException. 
*/ - @Override - public EncryptionKey getEncryptionPublicKey(String keyVendingUri) - throws ReencryptionKeyFetchException { - try { - ImmutableList publicKeys = encryptionKeysCache.get(keyVendingUri); - EncodedPublicKey publicKey = publicKeys.get(randomIndex()); - return EncryptionKey.builder() - .setKey(PublicKeyConversionUtil.getKeysetHandle(publicKey.getKey())) - .setId(publicKey.getId()) - .build(); - } catch (GeneralSecurityException | ExecutionException e) { - throw new ReencryptionKeyFetchException(e); - } - } - - private ImmutableList getPublicKeysFromService(String publicKeyServiceUri) - throws ReencryptionKeyFetchException { - try { - HttpGet request = new HttpGet(URI.create(publicKeyServiceUri)); - request.setConfig(REQUEST_CONFIG); - HttpClientResponse response = httpClient.execute(request); - if (response.statusCode() != 200) { - throw new ReencryptionKeyFetchException(response.responseBody()); - } - GetActivePublicKeysResponse.Builder builder = GetActivePublicKeysResponse.newBuilder(); - JsonFormat.parser().merge(response.responseBody(), builder); - GetActivePublicKeysResponse keys = builder.build(); - return ImmutableList.copyOf(keys.getKeysList()); - } catch (Exception e) { - throw new ReencryptionKeyFetchException(e); - } - } - - private int randomIndex() { - return RANDOM.nextInt(NUM_ENCRYPTION_KEYS); - } -} diff --git a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/testing/BUILD b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/testing/BUILD index 6a7c1c6e..01f8199d 100644 --- a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/testing/BUILD +++ b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/testing/BUILD @@ -20,7 +20,6 @@ java_library( name = "testing", srcs = [ "FakeEncryptionKeyService.java", - "FakeReEncryptionKeyService.java", ], javacopts = ["-Xep:Var"], deps = [ diff --git a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/testing/FakeReEncryptionKeyService.java b/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/testing/FakeReEncryptionKeyService.java deleted file mode 100644 index b0294356..00000000 --- a/java/com/google/aggregate/adtech/worker/encryption/hybrid/key/testing/FakeReEncryptionKeyService.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.encryption.hybrid.key.testing; - -import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKey; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.ReEncryptionKeyService; -import com.google.crypto.tink.KeysetHandle; -import com.google.inject.Inject; -import java.security.GeneralSecurityException; - -/** Fake implementation of {@link ReEncryptionKeyService} for testing. 
*/ -public final class FakeReEncryptionKeyService implements ReEncryptionKeyService { - - private final KeysetHandle keysetHandle; - - private static final String ENCRYPTION_KEY_ID = "00000000-0000-0000-0000-000000000000"; - - @Inject - FakeReEncryptionKeyService(KeysetHandle keysetHandle) { - this.keysetHandle = keysetHandle; - } - - @Override - public EncryptionKey getEncryptionPublicKey(String keyVendingUri) - throws ReencryptionKeyFetchException { - try { - return EncryptionKey.builder() - .setKey(keysetHandle.getPublicKeysetHandle()) - .setId(ENCRYPTION_KEY_ID) - .build(); - } catch (GeneralSecurityException e) { - throw new ReencryptionKeyFetchException(e); - } - } -} diff --git a/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/CloudEncryptionKeyConfig.java b/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/CloudEncryptionKeyConfig.java deleted file mode 100644 index fca6b682..00000000 --- a/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/CloudEncryptionKeyConfig.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.encryption.publickeyuri; - -import com.google.auto.value.AutoValue; - -/** Class for configuring encryption key config. */ -@AutoValue -public abstract class CloudEncryptionKeyConfig { - public static Builder builder() { - return new AutoValue_CloudEncryptionKeyConfig.Builder(); - } - - public static final int NUM_ENCRYPTION_KEYS = 5; - - public abstract String keyVendingServiceUri(); - - @AutoValue.Builder - public abstract static class Builder { - public abstract Builder setKeyVendingServiceUri(String value); - - public abstract CloudEncryptionKeyConfig build(); - } -} diff --git a/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/EncryptionKeyConfigFactory.java b/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/EncryptionKeyConfigFactory.java deleted file mode 100644 index e242ec8f..00000000 --- a/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/EncryptionKeyConfigFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.encryption.publickeyuri; - -import java.util.HashMap; -import java.util.Map; - -/** Factory class to get EncryptionKeyConfig based on public key uri cloud provider type. 
*/ -public final class EncryptionKeyConfigFactory { - public static String CLOUD_PROVIDER_NAME_GCP = "GCP"; - private static final Map cloudEncryptionKeyConfigMap = - new HashMap<>(); - - /** - * Returns EncryptionKeyConfigType for the given cloud provider. - * - * @throws IllegalArgumentException when invalid cloud provider name is provided. - */ - public static CloudEncryptionKeyConfig getCloudEncryptionKeyConfig(String cloudProviderName) { - if (cloudProviderName.isEmpty()) { - throw new IllegalArgumentException("Cloud provider name not set."); - } else if (cloudProviderName.equals(CLOUD_PROVIDER_NAME_GCP)) { - cloudEncryptionKeyConfigMap.putIfAbsent( - CLOUD_PROVIDER_NAME_GCP, - CloudEncryptionKeyConfig.builder() - .setKeyVendingServiceUri( - "https://publickeyservice-a.postsb-a.test.aggregationhelper.com/.well-known/aggregation-service/v1/public-keys") - .build()); - return cloudEncryptionKeyConfigMap.get(CLOUD_PROVIDER_NAME_GCP); - } else { - throw new IllegalArgumentException("Invalid cloud provider."); - } - } -} diff --git a/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerArgs.java b/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerArgs.java index cf823e78..a5c7f20b 100644 --- a/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerArgs.java +++ b/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerArgs.java @@ -35,8 +35,43 @@ import com.google.privacysandbox.otel.OTelExporterSelector; import java.util.Optional; +/** + * Worker args are runtime flags that are set when building an image or as CLI args when running a + * standalone binary and set by the Aggregation Service team. They differ from aggregation job + * params, which are set in the Job Request when requesting an aggregation report. For available job + * parameters see API docs. + * + *
+ * <p>To add a new worker arg: declare a new parameter in this class and its getter function, update
+ * the {@link AggregationWorkerModule} to inject it to the appropriate location, and set the param
+ * in the BUILD rules.
+ *
+ * <p>Use the following convention for naming the new param:
+ *
+ * <ul>
+ *   <li>Use "lower_underscore" style for the 'names' attribute.
+ *   <li>Prefer "long_descriptive_names" over "short_names" and noun phrases.
+ *   <li>For Boolean flags:
+ *       <ul>
+ *         <li>Use positive or neutral terms (--foo_enabled rather than --foo_disabled).
+ *         <li>Param name should be "feature_name_enabled"
+ *         <li>Variable name should be "featureNameEnabled"
+ *         <li>Getter name should be "isFeatureNameEnabled(...)"
+ *       </ul>
+ * </ul>
+ */ public class AggregationWorkerArgs { + private static final int NUM_CPUS = Runtime.getRuntime().availableProcessors(); + @Parameter(names = "--client_config_env", description = "Selects client config environment") private ClientConfigSelector clientConfigSelector = ClientConfigSelector.GCP; @@ -67,13 +102,6 @@ public class AggregationWorkerArgs { @Parameter(names = "--privacy_budgeting", description = "Implementation of privacy budgeting") private PrivacyBudgetingSelector privacyBudgeting = PrivacyBudgetingSelector.UNLIMITED; - @Parameter( - names = "--private_key_service_base_url", - description = - "Full URL (including protocol and api version path fragment) of the private key vending" - + " service. Do not include trailing slash") - private String privateKeyServiceUrl = ""; - @Parameter( names = "--primary_encryption_key_service_base_url", description = @@ -265,12 +293,14 @@ public class AggregationWorkerArgs { @Parameter( names = "--nonblocking_thread_pool_size", description = "Size of the non-blocking thread pool") - private int nonBlockingThreadPoolSize = 16; + private int nonBlockingThreadPoolSize = Math.max(1, NUM_CPUS); @Parameter( names = "--blocking_thread_pool_size", description = "Size of the blocking thread pool") - private int blockingThreadPoolSize = 64; + // Blocking thread is for I/O which is faster than non-IO operation in aggregation service. + // Therefore, the thread pool size default is set to be smaller than nonBlockingThreadPool size. + private int blockingThreadPoolSize = Math.max(1, NUM_CPUS / 2); @Parameter(names = "--benchmark", description = "Set to true to run in benchmark mode.") private boolean benchmark = false; @@ -332,20 +362,42 @@ public class AggregationWorkerArgs { private String testCoordinatorBEncodedKeysetHandle = ""; @Parameter( - names = "--parallel-summary-upload", + names = "--parallel_summary_upload_enabled", description = "Flag to enable parallel upload of the sharded summary reports.") - private boolean enableParallelSummaryUpload = false; + private boolean parallelSummaryUploadEnabled = false; @Parameter( names = "--decrypter_cache_entry_ttl_sec", - description = "Flag to set the private key cache time to live. Used for testing only.") - private long decrypterCacheEntryTtlSec = 3600; + description = + "Flag to set the private key cache time to live. Flag exposed for testing only.") + private long decrypterCacheEntryTtlSec = 28800; // 8 hours. + + @Parameter( + names = "--exception_cache_entry_ttl_sec", + description = "Flag to set the exception cache time to live.") + private long exceptionCacheEntryTtlSec = 10; // 10 seconds. @Parameter( - names = "--streaming-output-domain-processing", - description = "Flag to enable RxJava streaming based output domain processing." - ) - private boolean streamingOutputDomainProcessing = false; + names = "--streaming_output_domain_processing_enabled", + description = "Flag to enable RxJava streaming based output domain processing.") + private boolean streamingOutputDomainProcessingEnabled = false; + + @Parameter( + names = "--labeled_privacy_budget_keys_enabled", + description = + "Flag to allow filtering of labeled payload contributions. 
If enabled, only contributions" + + " corresponding to queried labels/ids are included in aggregation.") + private boolean labeledPrivacyBudgetKeysEnabled = false; + + @Parameter( + names = "--attribution_reporting_debug_api_enabled", + description = "Flag to enable support for Attribution Reporting Debug API.") + private boolean attributionReportingDebugApiEnabled = true; + + @Parameter( + names = "--parallel_fact_noising_enabled", + description = "Flag to enable parallel aggregated fact noising.") + private boolean parallelAggregatedFactNoisingEnabled = false; ResultLoggerModuleSelector resultLoggerModuleSelector() { return resultLoggerModuleSelector; @@ -587,15 +639,31 @@ public long getOutputShardFileSizeBytes() { return outputShardFileSizeBytes; } - public boolean isEnableParallelSummaryUpload() { - return enableParallelSummaryUpload; + public boolean isParallelSummaryUploadEnabled() { + return parallelSummaryUploadEnabled; } public long getDecrypterCacheEntryTtlSec() { return decrypterCacheEntryTtlSec; } - public boolean isStreamingOutputDomainProcessing() { - return streamingOutputDomainProcessing; + public long getExceptionCacheEntryTtlSec() { + return exceptionCacheEntryTtlSec; + } + + public boolean isStreamingOutputDomainProcessingEnabled() { + return streamingOutputDomainProcessingEnabled; + } + + boolean isLabeledPrivacyBudgetKeysEnabled() { + return labeledPrivacyBudgetKeysEnabled; + } + + boolean isAttributionReportingDebugApiEnabled() { + return attributionReportingDebugApiEnabled; + } + + public boolean isParallelAggregatedFactNoisingEnabled() { + return parallelAggregatedFactNoisingEnabled; } } diff --git a/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerModule.java b/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerModule.java index a574dfc7..7b8d5d8d 100644 --- a/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerModule.java +++ b/java/com/google/aggregate/adtech/worker/gcp/AggregationWorkerModule.java @@ -16,18 +16,27 @@ package com.google.aggregate.adtech.worker.gcp; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; + import com.fasterxml.jackson.databind.ObjectMapper; import com.google.aggregate.adtech.worker.Annotations.BenchmarkMode; import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; import com.google.aggregate.adtech.worker.Annotations.DomainOptional; import com.google.aggregate.adtech.worker.Annotations.EnableParallelSummaryUpload; +import com.google.aggregate.adtech.worker.Annotations.EnablePrivacyBudgetKeyFiltering; import com.google.aggregate.adtech.worker.Annotations.EnableStackTraceInResponse; import com.google.aggregate.adtech.worker.Annotations.EnableThresholding; import com.google.aggregate.adtech.worker.Annotations.MaxDepthOfStackTrace; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; import com.google.aggregate.adtech.worker.Annotations.OutputShardFileSizeBytes; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; import com.google.aggregate.adtech.worker.Annotations.ReportErrorThresholdPercentage; import 
com.google.aggregate.adtech.worker.Annotations.StreamingOutputDomainProcessing; +import com.google.aggregate.adtech.worker.Annotations.SupportedApis; import com.google.aggregate.adtech.worker.JobProcessor; import com.google.aggregate.adtech.worker.LocalFileToCloudStorageLogger.ResultWorkingDirectory; import com.google.aggregate.adtech.worker.PrivacyBudgetingSelector; @@ -52,6 +61,7 @@ import com.google.aggregate.privacy.noise.proto.Params.NoiseParameters.Distribution; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.AbstractModule; @@ -70,6 +80,7 @@ import com.google.scp.operator.cpio.cryptoclient.Annotations.CoordinatorAEncryptionKeyServiceBaseUrl; import com.google.scp.operator.cpio.cryptoclient.Annotations.CoordinatorBEncryptionKeyServiceBaseUrl; import com.google.scp.operator.cpio.cryptoclient.Annotations.DecrypterCacheEntryTtlSec; +import com.google.scp.operator.cpio.cryptoclient.Annotations.ExceptionCacheEntryTtlSec; import com.google.scp.operator.cpio.cryptoclient.gcp.GcpKmsDecryptionKeyServiceConfig; import com.google.scp.operator.cpio.distributedprivacybudgetclient.DistributedPrivacyBudgetClientModule.CoordinatorAPrivacyBudgetServiceAuthEndpoint; import com.google.scp.operator.cpio.distributedprivacybudgetclient.DistributedPrivacyBudgetClientModule.CoordinatorAPrivacyBudgetServiceBaseUrl; @@ -100,6 +111,7 @@ import java.nio.file.Paths; import java.util.Optional; import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinPool; import java.util.function.Supplier; import javax.inject.Singleton; @@ -303,12 +315,11 @@ protected void configure() { // processor bind(JobProcessor.class).to(ConcurrentAggregationProcessor.class); - bind(boolean.class) - .annotatedWith(StreamingOutputDomainProcessing.class) - .toInstance(args.isStreamingOutputDomainProcessing()); - // noising install(args.getNoisingSelector().getNoisingModule()); + bind(boolean.class) + .annotatedWith(ParallelAggregatedFactNoising.class) + .toInstance(args.isParallelAggregatedFactNoisingEnabled()); // result logger install(args.resultLoggerModuleSelector().getResultLoggerModule()); @@ -318,15 +329,27 @@ protected void configure() { .toInstance(Paths.get(args.getResultWorkingDirectoryPathString())); } + // Feature flags. bind(boolean.class) .annotatedWith(EnableParallelSummaryUpload.class) - .toInstance(args.isEnableParallelSummaryUpload()); + .toInstance(args.isParallelSummaryUploadEnabled()); + bind(boolean.class) + .annotatedWith(EnablePrivacyBudgetKeyFiltering.class) + .toInstance(args.isLabeledPrivacyBudgetKeysEnabled()); + bind(boolean.class) + .annotatedWith(StreamingOutputDomainProcessing.class) + .toInstance(args.isStreamingOutputDomainProcessingEnabled()); // Parameter to set key cache. This is a test only flag. bind(Long.class) .annotatedWith(DecrypterCacheEntryTtlSec.class) .toInstance(args.getDecrypterCacheEntryTtlSec()); + // Parameter to set exception cache. This is a test only flag. 
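The flag renames in this change (for example --parallel-summary-upload becoming --parallel_summary_upload_enabled) follow the naming convention documented on AggregationWorkerArgs above. A hedged sketch of adding a new boolean worker arg in that style; the flag, class, and annotation names here are hypothetical and not part of this diff:

import com.beust.jcommander.Parameter;

final class ExampleFeatureArgsSketch {
  @Parameter(
      names = "--example_feature_enabled",
      description = "Flag to enable the hypothetical example feature.")
  private boolean exampleFeatureEnabled = false;

  boolean isExampleFeatureEnabled() {
    return exampleFeatureEnabled;
  }

  // In the worker module the getter would back a boolean binding, along the lines of:
  //   bind(boolean.class)
  //       .annotatedWith(ExampleFeatureEnabled.class) // hypothetical annotation
  //       .toInstance(args.isExampleFeatureEnabled());
}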
+ bind(Long.class) + .annotatedWith(ExceptionCacheEntryTtlSec.class) + .toInstance(args.getExceptionCacheEntryTtlSec()); + // Response related flags bind(boolean.class) .annotatedWith(EnableStackTraceInResponse.class) @@ -345,6 +368,20 @@ protected void configure() { install(args.getOTelExporterSelector().getOTelConfigurationModule()); } + @Provides + @SupportedApis + ImmutableSet providesSupportedApis() { + if (args.isAttributionReportingDebugApiEnabled()) { + return ImmutableSet.of( + ATTRIBUTION_REPORTING_API, + ATTRIBUTION_REPORTING_DEBUG_API, + PROTECTED_AUDIENCE_API, + SHARED_STORAGE_API); + } else { + return ImmutableSet.of(ATTRIBUTION_REPORTING_API, PROTECTED_AUDIENCE_API, SHARED_STORAGE_API); + } + } + @Provides @Singleton @NonBlockingThreadPool @@ -378,4 +415,11 @@ String provideGRPCEndpoint(ParameterClient parameterClient) throws ParameterClie return args.getGrpcCollectorEndpoint(); } } + + @Provides + @Singleton + @CustomForkJoinThreadPool + ListeningExecutorService provideCustomForkJoinThreadPool() { + return MoreExecutors.listeningDecorator(new ForkJoinPool(args.getNonBlockingThreadPoolSize())); + } } diff --git a/java/com/google/aggregate/adtech/worker/gcp/BUILD b/java/com/google/aggregate/adtech/worker/gcp/BUILD index fd76d751..264503f5 100644 --- a/java/com/google/aggregate/adtech/worker/gcp/BUILD +++ b/java/com/google/aggregate/adtech/worker/gcp/BUILD @@ -106,8 +106,7 @@ java_binary( main_class = "com.google.aggregate.adtech.worker.gcp.AggregationWorkerRunner", runtime_deps = [ # //telemetry library should be before :worker_runner - # TODO(b/305100313) Re-enable prod library when building prod jar. - "//telemetry/noop/java/com/google/privacysandbox/otel:otel_noop", + "//telemetry/prod/java/com/google/privacysandbox/otel:otel_prod", ":worker_runner", "//java/external:commons_logging", "//java/external:slf4j_simple", diff --git a/java/com/google/aggregate/adtech/worker/model/AggregatedFact.java b/java/com/google/aggregate/adtech/worker/model/AggregatedFact.java index 80ef8871..3a08369d 100644 --- a/java/com/google/aggregate/adtech/worker/model/AggregatedFact.java +++ b/java/com/google/aggregate/adtech/worker/model/AggregatedFact.java @@ -16,41 +16,41 @@ package com.google.aggregate.adtech.worker.model; -import com.google.auto.value.AutoValue; import java.math.BigInteger; import java.util.List; +import java.util.Objects; import java.util.Optional; /** Single aggregated result (for one key) */ -@AutoValue -public abstract class AggregatedFact { +public class AggregatedFact { - public abstract BigInteger bucket(); + private final BigInteger bucket; - public abstract long metric(); + private long metric; /** Set it optional because it is for debug result use. */ - public abstract Optional unnoisedMetric(); + private Optional unnoisedMetric; /** Set it optional because it is for debug result use only. 
*/ - public abstract Optional debugAnnotations(); + private Optional debugAnnotations; - public static Builder builder() { - return new AutoValue_AggregatedFact.Builder(); + private AggregatedFact( + BigInteger bucket, + long metric, + Optional unnoisedMetric, + Optional debugBucketAnnotations) { + this.bucket = bucket; + this.metric = metric; + this.unnoisedMetric = unnoisedMetric; + this.debugAnnotations = debugBucketAnnotations; } public static AggregatedFact create(BigInteger bucket, long metric) { - AggregatedFact.Builder builder = AggregatedFact.builder().setBucket(bucket).setMetric(metric); - return builder.build(); + return new AggregatedFact(bucket, metric, Optional.empty(), Optional.empty()); } public static AggregatedFact create(BigInteger bucket, long metric, Long unnoisedMetric) { - AggregatedFact.Builder builder = - AggregatedFact.builder() - .setBucket(bucket) - .setMetric(metric) - .setUnnoisedMetric(unnoisedMetric); - return builder.build(); + return new AggregatedFact(bucket, metric, Optional.of(unnoisedMetric), Optional.empty()); } public static AggregatedFact create( @@ -58,26 +58,59 @@ public static AggregatedFact create( long metric, Long unnoisedMetric, List debugAnnotations) { - AggregatedFact.Builder builder = - AggregatedFact.builder() - .setBucket(bucket) - .setMetric(metric) - .setUnnoisedMetric(unnoisedMetric) - .setDebugAnnotations(debugAnnotations); - return builder.build(); + return new AggregatedFact( + bucket, metric, Optional.of(unnoisedMetric), Optional.of(debugAnnotations)); } - @AutoValue.Builder - public abstract static class Builder { + public BigInteger getBucket() { + return bucket; + } - public abstract Builder setBucket(BigInteger value); + public long getMetric() { + return metric; + } - public abstract Builder setMetric(long value); + public void setMetric(long metric) { + this.metric = metric; + } - public abstract Builder setUnnoisedMetric(Long value); + public Optional getUnnoisedMetric() { + return unnoisedMetric; + } - public abstract Builder setDebugAnnotations(List value); + public void setUnnoisedMetric(Optional unnoisedMetric) { + this.unnoisedMetric = unnoisedMetric; + } + + public Optional getDebugAnnotations() { + return debugAnnotations; + } + + public void setDebugAnnotations(List debugBucketAnnotations) { + this.debugAnnotations = Optional.of(debugBucketAnnotations); + } + + @Override + public int hashCode() { + return Objects.hash( + this.bucket.hashCode(), + this.metric, + this.unnoisedMetric.hashCode(), + this.debugAnnotations.hashCode()); + } - public abstract AggregatedFact build(); + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (!(o instanceof AggregatedFact)) { + return false; + } else { + AggregatedFact that = (AggregatedFact) o; + return this.bucket.equals(that.getBucket()) + && this.metric == that.getMetric() + && this.unnoisedMetric.equals(that.getUnnoisedMetric()) + && this.debugAnnotations.equals(that.getDebugAnnotations()); + } } } diff --git a/java/com/google/aggregate/adtech/worker/model/ErrorCounter.java b/java/com/google/aggregate/adtech/worker/model/ErrorCounter.java index 4c446ceb..5ae0bfe0 100644 --- a/java/com/google/aggregate/adtech/worker/model/ErrorCounter.java +++ b/java/com/google/aggregate/adtech/worker/model/ErrorCounter.java @@ -50,7 +50,12 @@ public enum ErrorCounter { "Report's shared_info.scheduled_report_time is too old, reports cannot be older than %s" + " days.", SharedInfo.MAX_REPORT_AGE.toDays())), - SERVICE_ERROR("Internal error occurred during 
operation."), + INTERNAL_ERROR("Internal error occurred during operation."), + REPORTING_SITE_MISMATCH( + "Report's shared_info.reporting_origin value does not belong to the reporting_site value set" + + " in the Aggregation job parameters. Aggregation request job parameters must have" + + " reporting_site set to the site which corresponds to the shared_info.reporting_origin" + + " value."), UNSUPPORTED_OPERATION( String.format( "Report's operation is unsupported. Supported operations are %s.", diff --git a/java/com/google/aggregate/adtech/worker/model/ErrorMessage.java b/java/com/google/aggregate/adtech/worker/model/ErrorMessage.java index faf73866..110ea247 100644 --- a/java/com/google/aggregate/adtech/worker/model/ErrorMessage.java +++ b/java/com/google/aggregate/adtech/worker/model/ErrorMessage.java @@ -34,19 +34,11 @@ public static Builder builder() { /** The category of the error message, used for identifying the type of an error message */ public abstract ErrorCounter category(); - /** - * Detailed diagnostic information related to the error that would be useful for debugging or - * error logging. Not intended to be provided in aggregation response. - */ - public abstract String detailedErrorMessage(); - @AutoValue.Builder public abstract static class Builder { public abstract Builder setCategory(ErrorCounter category); - public abstract Builder setDetailedErrorMessage(String detailedErrorMessage); - public abstract ErrorMessage build(); } } diff --git a/java/com/google/aggregate/adtech/worker/model/Fact.java b/java/com/google/aggregate/adtech/worker/model/Fact.java index 7b14d99d..e963ab8b 100644 --- a/java/com/google/aggregate/adtech/worker/model/Fact.java +++ b/java/com/google/aggregate/adtech/worker/model/Fact.java @@ -19,6 +19,7 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.google.auto.value.AutoValue; +import com.google.common.primitives.UnsignedLong; import java.math.BigInteger; import java.util.Optional; @@ -44,7 +45,7 @@ public static Builder builder() { public abstract Long value(); /** Filtering id for the contribution. 
*/ - public abstract Optional id(); + public abstract Optional id(); @AutoValue.Builder public abstract static class Builder { @@ -53,7 +54,7 @@ public abstract static class Builder { public abstract Builder setValue(long value); - public abstract Builder setId(int id); + public abstract Builder setId(UnsignedLong id); public abstract Fact build(); } diff --git a/java/com/google/aggregate/adtech/worker/model/FactDeserializer.java b/java/com/google/aggregate/adtech/worker/model/FactDeserializer.java index 0ade7552..0b6c0efc 100644 --- a/java/com/google/aggregate/adtech/worker/model/FactDeserializer.java +++ b/java/com/google/aggregate/adtech/worker/model/FactDeserializer.java @@ -48,7 +48,7 @@ public Fact deserialize(JsonParser jsonParser, DeserializationContext deserializ Fact.Builder fact = Fact.builder().setBucket(bucket).setValue(value); if (node.has("id")) { - fact.setId(NumericConversions.getInt32FromBytes(node.get("id").binaryValue())); + fact.setId(NumericConversions.getUnsignedLongFromBytes(node.get("id").binaryValue())); } return fact.build(); diff --git a/java/com/google/aggregate/adtech/worker/model/FactSerializer.java b/java/com/google/aggregate/adtech/worker/model/FactSerializer.java index 99dc34ef..b95d66f4 100644 --- a/java/com/google/aggregate/adtech/worker/model/FactSerializer.java +++ b/java/com/google/aggregate/adtech/worker/model/FactSerializer.java @@ -43,7 +43,8 @@ public void serialize(Fact fact, JsonGenerator jsonGenerator, SerializerProvider jsonGenerator.writeBinaryField("bucket", bucketBytes); jsonGenerator.writeBinaryField("value", valueBytes); if (fact.id().isPresent()) { - jsonGenerator.writeBinaryField("id", BigInteger.valueOf(fact.id().get()).toByteArray()); + jsonGenerator.writeBinaryField( + "id", NumericConversions.toUnsignedByteArray(fact.id().get().bigIntegerValue())); } jsonGenerator.writeEndObject(); } diff --git a/java/com/google/aggregate/adtech/worker/model/SharedInfo.java b/java/com/google/aggregate/adtech/worker/model/SharedInfo.java index 6b673ed8..23edd4ea 100644 --- a/java/com/google/aggregate/adtech/worker/model/SharedInfo.java +++ b/java/com/google/aggregate/adtech/worker/model/SharedInfo.java @@ -55,10 +55,18 @@ public abstract class SharedInfo { ImmutableSet.of(MAJOR_VERSION_ZERO, MAJOR_VERSION_ONE); public static final boolean DEFAULT_DEBUG_MODE = false; public static final String ATTRIBUTION_REPORTING_API = "attribution-reporting"; + public static final String ATTRIBUTION_REPORTING_DEBUG_API = "attribution-reporting-debug"; public static final String PROTECTED_AUDIENCE_API = "protected-audience"; public static final String SHARED_STORAGE_API = "shared-storage"; + // Used by {@link ErrorCounter} to form error messages. @SupportedApis is the list used for + // validation at runtime. The two lists would differ temporarily when support for a new API is + // under development. public static final ImmutableSet SUPPORTED_APIS = - ImmutableSet.of(ATTRIBUTION_REPORTING_API, PROTECTED_AUDIENCE_API, SHARED_STORAGE_API); + ImmutableSet.of( + ATTRIBUTION_REPORTING_API, + ATTRIBUTION_REPORTING_DEBUG_API, + PROTECTED_AUDIENCE_API, + SHARED_STORAGE_API); // Max age of reports accepted for aggregation. 
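The new REPORTING_SITE_MISMATCH error above relies on the distinction between a reporting origin and its reporting site (scheme plus eTLD+1). The sketch below illustrates that relationship with Guava's InternetDomainName; it is an approximation for intuition, not the service's actual validation logic.

import com.google.common.net.InternetDomainName;
import java.net.URI;

final class ReportingSiteSketch {
  // Maps an origin such as https://ads.example.com to its site, https://example.com.
  static String siteOf(String reportingOrigin) {
    URI origin = URI.create(reportingOrigin);
    String eTldPlusOne = InternetDomainName.from(origin.getHost()).topPrivateDomain().toString();
    return origin.getScheme() + "://" + eTldPlusOne;
  }

  public static void main(String[] args) {
    // Both origins map to the same reporting site, so their reports can share one job.
    System.out.println(siteOf("https://ads.example.com"));     // https://example.com
    System.out.println(siteOf("https://reports.example.com")); // https://example.com
  }
}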
public static final Duration MAX_REPORT_AGE = Duration.of(90, DAYS); public static final ImmutableSet SUPPORTED_OPERATIONS = @@ -68,6 +76,8 @@ public static Builder builder() { return new AutoValue_SharedInfo.Builder().setReportDebugMode(DEFAULT_DEBUG_MODE); } + public abstract Builder toBuilder(); + // TODO(b/263901045) : consider moving version to api specific code. // Version of the report @JsonProperty("version") diff --git a/java/com/google/aggregate/adtech/worker/model/Version.java b/java/com/google/aggregate/adtech/worker/model/Version.java index 769b115a..b9ede53d 100644 --- a/java/com/google/aggregate/adtech/worker/model/Version.java +++ b/java/com/google/aggregate/adtech/worker/model/Version.java @@ -19,6 +19,7 @@ import com.google.auto.value.AutoValue; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; +import java.util.function.Predicate; import java.util.regex.Pattern; /** Represents the version of the report. */ @@ -67,4 +68,32 @@ public int compareTo(Version other) { } return minor() - other.minor(); } + + /** + * Creates a predicate that checks if the version is between lowerInclusiveVersion and + * higherExclusiveVersion. + * + * @param lowerInclusiveVersion lower version included in the range + * @param higherExclusiveVersion higher version excluded from the range. + */ + public static Predicate getBetweenVersionPredicate( + Version lowerInclusiveVersion, Version higherExclusiveVersion) { + if (higherExclusiveVersion.compareTo(lowerInclusiveVersion) <= 0) { + throw new IllegalArgumentException( + "higherExclusiveVersion should be greater than lowerInclusiveVersion"); + } + return version -> + version.compareTo(lowerInclusiveVersion) >= 0 + && version.compareTo(higherExclusiveVersion) < 0; + } + + /** + * Creates a predicate that checks if the version >= compareToVersion. 
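The two predicate factories added to Version above are easiest to read from a usage example. The sketch below assumes a parse-style factory for constructing Version instances, which is not shown in this diff and is labeled hypothetical:

import com.google.aggregate.adtech.worker.model.Version;
import java.util.function.Predicate;

final class VersionPredicateSketch {
  public static void main(String[] args) {
    // Version.parse(...) is a hypothetical constructor stand-in, used for illustration only.
    Predicate<Version> inZeroDotOneUpToOne =
        Version.getBetweenVersionPredicate(Version.parse("0.1"), Version.parse("1.0"));
    Predicate<Version> atLeastOne =
        Version.getGreaterThanOrEqualToVersionPredicate(Version.parse("1.0"));

    System.out.println(inZeroDotOneUpToOne.test(Version.parse("0.9"))); // true: lower bound inclusive
    System.out.println(inZeroDotOneUpToOne.test(Version.parse("1.0"))); // false: upper bound exclusive
    System.out.println(atLeastOne.test(Version.parse("1.0")));          // true
  }
}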
+ * + * @param compareToVersion + */ + public static Predicate getGreaterThanOrEqualToVersionPredicate( + Version compareToVersion) { + return version -> version.compareTo(compareToVersion) >= 0; + } } diff --git a/java/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdes.java b/java/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdes.java index 82ef0993..f7219be0 100644 --- a/java/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdes.java +++ b/java/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdes.java @@ -56,7 +56,7 @@ protected Optional doForward(ByteSource byteSource) { return Optional.empty(); } return Optional.of(cborMapper.readValue(byteSource.read(), Payload.class)); - } catch (IOException | ClassCastException e) { + } catch (IOException | ClassCastException | NullPointerException e) { // Exception is not included because stack trace includes the decrypted payload which is // private information logger.warn("Failed to deserialize from CBOR bytes to Payload"); diff --git a/java/com/google/aggregate/adtech/worker/selector/DecryptionKeyClientSelector.java b/java/com/google/aggregate/adtech/worker/selector/DecryptionKeyClientSelector.java index eae15ba3..a0c88bf4 100644 --- a/java/com/google/aggregate/adtech/worker/selector/DecryptionKeyClientSelector.java +++ b/java/com/google/aggregate/adtech/worker/selector/DecryptionKeyClientSelector.java @@ -17,24 +17,15 @@ package com.google.aggregate.adtech.worker.selector; import com.google.inject.Module; -import com.google.scp.operator.cpio.cryptoclient.aws.AwsEnclaveDecryptionKeyServiceModule; import com.google.scp.operator.cpio.cryptoclient.aws.AwsEnclaveMultiPartyDecryptionKeyServiceModule; -import com.google.scp.operator.cpio.cryptoclient.aws.AwsKmsDecryptionKeyServiceModule; import com.google.scp.operator.cpio.cryptoclient.aws.AwsKmsMultiPartyDecryptionKeyServiceModule; -import com.google.scp.operator.cpio.cryptoclient.gcp.GcpKmsDecryptionKeyServiceModule; import com.google.scp.operator.cpio.cryptoclient.gcp.GcpKmsMultiPartyDecryptionKeyServiceModule; import com.google.scp.operator.cpio.cryptoclient.local.LocalFileDecryptionKeyServiceModule; public enum DecryptionKeyClientSelector { LOCAL_FILE_DECRYPTION_KEY_SERVICE(new LocalFileDecryptionKeyServiceModule()), - // GCP single party implementation - GCP_KMS_DECRYPTION_KEY_SERVICE(new GcpKmsDecryptionKeyServiceModule()), // GCP multiparty implementation GCP_KMS_MULTI_PARTY_DECRYPTION_KEY_SERVICE(new GcpKmsMultiPartyDecryptionKeyServiceModule()), - // Non-enclave implementation. - AWS_KMS_DECRYPTION_KEY_SERVICE(new AwsKmsDecryptionKeyServiceModule()), - // Enclave implmentation. - AWS_ENCLAVE_CLI_DECRYPTION_KEY_SERVICE(new AwsEnclaveDecryptionKeyServiceModule()), // Multi-party Non-enclave implementation. AWS_KMS_MULTI_PARTY_DECRYPTION_KEY_SERVICE(new AwsKmsMultiPartyDecryptionKeyServiceModule()), // Multi-party enclave implementation. diff --git a/java/com/google/aggregate/adtech/worker/testing/AvroReportsFileReader.java b/java/com/google/aggregate/adtech/worker/testing/AvroReportsFileReader.java deleted file mode 100644 index 6bec0cbb..00000000 --- a/java/com/google/aggregate/adtech/worker/testing/AvroReportsFileReader.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.testing; - -import static com.google.common.collect.ImmutableList.toImmutableList; - -import com.google.aggregate.adtech.worker.model.EncryptedReport; -import com.google.aggregate.protocol.avro.AvroReportsSchemaSupplier; -import com.google.common.collect.ImmutableList; -import com.google.common.io.ByteSource; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Optional; -import java.util.stream.Stream; -import javax.inject.Inject; -import org.apache.avro.file.DataFileStream; -import org.apache.avro.generic.GenericDatumReader; -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.io.DatumReader; - -/** Simple utility to read an Avro reports file, used for testing. */ -public final class AvroReportsFileReader { - - private final AvroReportsSchemaSupplier avroReportsSchemaSupplier; - - @Inject - AvroReportsFileReader(AvroReportsSchemaSupplier avroReportsSchemaSupplier) { - this.avroReportsSchemaSupplier = avroReportsSchemaSupplier; - } - - /** Reads the Avro results file at the path given to a list. */ - public ImmutableList readAvroReportsFile(Path path) throws IOException { - DatumReader datumReader = - new GenericDatumReader<>(avroReportsSchemaSupplier.get()); - DataFileStream streamReader = - new DataFileStream<>(Files.newInputStream(path), datumReader); - - return Stream.generate(() -> readRecordToEncryptedReport(streamReader)) - .takeWhile(Optional::isPresent) - .map(Optional::get) - .collect(toImmutableList()); - } - - private static Optional readRecordToEncryptedReport( - DataFileStream streamReader) { - if (streamReader.hasNext()) { - GenericRecord genericRecord = streamReader.next(); - ByteSource payload = ByteSource.wrap(((ByteBuffer) genericRecord.get("payload")).array()); - String keyId = genericRecord.get("key_id").toString(); - String sharedInfo = genericRecord.get("shared_info").toString(); - return Optional.of( - EncryptedReport.builder() - .setPayload(payload) - .setKeyId(keyId) - .setSharedInfo(sharedInfo) - .build()); - } - - return Optional.empty(); - } -} diff --git a/java/com/google/aggregate/adtech/worker/testing/BUILD b/java/com/google/aggregate/adtech/worker/testing/BUILD index 620bee54..17a3d6cc 100644 --- a/java/com/google/aggregate/adtech/worker/testing/BUILD +++ b/java/com/google/aggregate/adtech/worker/testing/BUILD @@ -151,29 +151,6 @@ java_library( ], ) -java_library( - name = "avro_reports_file_reader", - srcs = ["AvroReportsFileReader.java"], - javacopts = ["-Xep:Var"], - deps = [ - "//java/com/google/aggregate/adtech/worker/model", - "//java/com/google/aggregate/adtech/worker/util", - "//java/com/google/aggregate/protocol/avro:avro_reports_schema_supplier", - "//java/external:avro", - "//java/external:guava", - "//java/external:javax_inject", - ], -) - -java_library( - name = "fake_private_key_fetching_service", - srcs = ["FakePrivateKeyFetchingService.java"], - javacopts = ["-Xep:Var"], - deps = [ - "//java/external:clients_cryptoclient", - ], -) - java_library( name = 
"local_aggregation_worker_runner", srcs = ["LocalAggregationWorkerRunner.java"], diff --git a/java/com/google/aggregate/adtech/worker/testing/FakeRecordDecrypter.java b/java/com/google/aggregate/adtech/worker/testing/FakeRecordDecrypter.java index 40556c84..3a0b312b 100644 --- a/java/com/google/aggregate/adtech/worker/testing/FakeRecordDecrypter.java +++ b/java/com/google/aggregate/adtech/worker/testing/FakeRecordDecrypter.java @@ -56,7 +56,7 @@ public Report decryptSingleReport(EncryptedReport unused) throws DecryptionExcep } } - return FakeReportGenerator.generateWithParam(idToGenerate, LATEST_VERSION); + return FakeReportGenerator.generateWithParam(idToGenerate, LATEST_VERSION, "https://foo.com"); } public void setShouldThrow(boolean shouldThrow, ErrorReason reason) { diff --git a/java/com/google/aggregate/adtech/worker/testing/FakeReportGenerator.java b/java/com/google/aggregate/adtech/worker/testing/FakeReportGenerator.java index 3b3d9993..cbaf7e70 100644 --- a/java/com/google/aggregate/adtech/worker/testing/FakeReportGenerator.java +++ b/java/com/google/aggregate/adtech/worker/testing/FakeReportGenerator.java @@ -27,6 +27,7 @@ import com.google.aggregate.adtech.worker.model.Report; import com.google.aggregate.adtech.worker.model.SharedInfo; import com.google.common.collect.ImmutableList; +import com.google.common.primitives.UnsignedLong; import java.math.BigInteger; import java.time.Instant; import java.util.Optional; @@ -42,7 +43,7 @@ public static Fact generate(int bucket, int value) { return Fact.builder().setBucket(createBucketFromInt(bucket)).setValue(value).build(); } - public static Fact generate(int bucket, int value, int id) { + public static Fact generate(int bucket, int value, UnsignedLong id) { return Fact.builder() .setId(id) .setBucket(createBucketFromInt(bucket)) @@ -67,7 +68,8 @@ public static Report generateWithFactList(ImmutableList facts, String repo Optional.of(facts), /* dummyValue */ Optional.empty(), /* reportId */ Optional.empty(), - reportVersion); + reportVersion, + "https://foo.com"); } /** @@ -84,12 +86,14 @@ public static Report generateWithFactList(ImmutableList facts, String repo * @param reportVersion Version of the report to generate. * @return */ - public static Report generateWithParam(int dummyValue, String reportVersion) { + public static Report generateWithParam( + int dummyValue, String reportVersion, String reportingOrigin) { return generate( /* facts */ Optional.empty(), Optional.of(dummyValue), /* reportId */ Optional.empty(), - reportVersion); + reportVersion, + reportingOrigin); } /** @@ -113,7 +117,8 @@ public static Report generateWithFixedReportId( /* facts */ Optional.empty(), Optional.of(dummyValue), Optional.of(reportId), - reportVersion); + reportVersion, + "https://foo.com"); } /** @@ -127,7 +132,8 @@ public static Report generateNullReport() { Optional.of(ImmutableList.of(nullFact)), Optional.empty(), Optional.empty(), - LATEST_VERSION); + LATEST_VERSION, + "https://foo.com"); } /** @@ -148,7 +154,8 @@ private static Report generate( Optional> facts, Optional dummyValue, Optional reportId, - String reportVersion) { + String reportVersion, + String reportingOrigin) { // Sanity check. 
Evaluates as XNOR to confirm only one of facts or dummyValue is present if (!(facts.isPresent() ^ dummyValue.isPresent())) { throw new IllegalStateException( @@ -181,7 +188,7 @@ private static Report generate( .setDestination(dummyStringActual) .setScheduledReportTime(dummyTime) .setSourceRegistrationTime(dummyTime) - .setReportingOrigin(dummyStringActual) + .setReportingOrigin(reportingOrigin) .setApi(ATTRIBUTION_REPORTING_API) .setReportId(reportId.orElse(String.valueOf(UUID.randomUUID()))) .setVersion(reportVersion) diff --git a/java/com/google/aggregate/adtech/worker/testing/FakeValidator.java b/java/com/google/aggregate/adtech/worker/testing/FakeValidator.java index 55d560f2..45c17915 100644 --- a/java/com/google/aggregate/adtech/worker/testing/FakeValidator.java +++ b/java/com/google/aggregate/adtech/worker/testing/FakeValidator.java @@ -45,19 +45,13 @@ public Optional validate(Report report, Job unused) { if (this.reportIdShouldReturnError.isPresent() && report.sharedInfo().reportId().isPresent()) { if (this.reportIdShouldReturnError.get().contains(report.sharedInfo().reportId().get())) { return Optional.of( - ErrorMessage.builder() - .setCategory(ErrorCounter.DECRYPTION_ERROR) - .setDetailedErrorMessage("") - .build()); + ErrorMessage.builder().setCategory(ErrorCounter.DECRYPTION_ERROR).build()); } } if (this.nextShouldReturnError.isPresent()) { if (this.nextShouldReturnError.get().next()) { return Optional.of( - ErrorMessage.builder() - .setCategory(ErrorCounter.DECRYPTION_ERROR) - .setDetailedErrorMessage("") - .build()); + ErrorMessage.builder().setCategory(ErrorCounter.DECRYPTION_ERROR).build()); } } return Optional.empty(); diff --git a/java/com/google/aggregate/adtech/worker/testing/InMemoryResultLogger.java b/java/com/google/aggregate/adtech/worker/testing/InMemoryResultLogger.java index 96cc8987..d71569dd 100644 --- a/java/com/google/aggregate/adtech/worker/testing/InMemoryResultLogger.java +++ b/java/com/google/aggregate/adtech/worker/testing/InMemoryResultLogger.java @@ -19,10 +19,8 @@ import com.google.aggregate.adtech.worker.ResultLogger; import com.google.aggregate.adtech.worker.exceptions.ResultLogException; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.common.collect.ImmutableList; import com.google.scp.operator.cpio.jobclient.model.Job; -import java.util.Optional; /** * {@link ResultLogger} implementation to materialized and store aggregation results in memory for @@ -32,7 +30,6 @@ public final class InMemoryResultLogger implements ResultLogger { private MaterializedAggregationResults materializedAggregations; private MaterializedAggregationResults materializedDebugAggregations; - private Optional> materializedEncryptedReports = Optional.empty(); private boolean shouldThrow; private volatile boolean hasLogged; @@ -64,17 +61,6 @@ public void logResults(ImmutableList results, Job unused, boolea } } - @Override - public void logReports(ImmutableList reports, Job unused, String shardNumber) - throws ResultLogException { - if (shouldThrow) { - throw new ResultLogException( - new IllegalStateException("Was set to throw while logging reports.")); - } - materializedEncryptedReports = Optional.of(reports); - System.out.println("In memory encrypted reports:" + reports); - } - /** * Gets materialized aggregation results as an ImmutableList of {@link AggregatedFact} * @@ -106,21 +92,6 @@ public MaterializedAggregationResults getMaterializedDebugAggregationResults() return 
materializedDebugAggregations; } - /** - * Gets materialized encrypted reports as an ImmutableList of {@link EncryptedReport} - * - * @throws ResultLogException if results were not logged prior to calling this method. - */ - public ImmutableList getMaterializedEncryptedReports() - throws ResultLogException { - if (materializedEncryptedReports.isEmpty()) { - throw new ResultLogException( - new IllegalStateException( - "MaterializedEncryptionReports is null. Maybe results did not get logged.")); - } - return materializedEncryptedReports.get(); - } - public void setShouldThrow(boolean shouldThrow) { this.shouldThrow = shouldThrow; } diff --git a/java/com/google/aggregate/adtech/worker/testing/LocalAggregationWorkerRunner.java b/java/com/google/aggregate/adtech/worker/testing/LocalAggregationWorkerRunner.java index c3d086c9..27a242ba 100644 --- a/java/com/google/aggregate/adtech/worker/testing/LocalAggregationWorkerRunner.java +++ b/java/com/google/aggregate/adtech/worker/testing/LocalAggregationWorkerRunner.java @@ -36,6 +36,7 @@ public final class LocalAggregationWorkerRunner { private AggregationWorker worker; private ServiceManager serviceManager; + private LocalAggregationWorkerRunner(AggregationWorkerArgs args) { createRunner(args); } @@ -80,9 +81,13 @@ public static LocalAggregationWorkerRunner create(Path rootDir, String[] newArgs "--local_output_domain_path", rootDir.resolve("domain.txt").toAbsolutePath().toString(), "--simulation_inputs", + "--parallel_summary_upload_enabled", + "--streaming_output_domain_processing_enabled", + "--attribution_reporting_debug_api_enabled", }; AggregationWorkerArgs cliArgs = new AggregationWorkerArgs(); - JCommander jCommander = JCommander.newBuilder().allowParameterOverwriting(true).addObject(cliArgs).build(); + JCommander jCommander = + JCommander.newBuilder().allowParameterOverwriting(true).addObject(cliArgs).build(); jCommander.parse(args); jCommander.parse(newArgs); return new LocalAggregationWorkerRunner(cliArgs); diff --git a/java/com/google/aggregate/adtech/worker/util/BUILD b/java/com/google/aggregate/adtech/worker/util/BUILD index aaf362ff..ba210eee 100644 --- a/java/com/google/aggregate/adtech/worker/util/BUILD +++ b/java/com/google/aggregate/adtech/worker/util/BUILD @@ -24,6 +24,7 @@ java_library( "JobUtils.java", "NumericConversions.java", "OutputShardFileHelper.java", + "ReportingOriginUtils.java", ], javacopts = ["-Xep:Var"], deps = [ diff --git a/java/com/google/aggregate/adtech/worker/util/JobUtils.java b/java/com/google/aggregate/adtech/worker/util/JobUtils.java index 6d424ee2..5d123f3b 100644 --- a/java/com/google/aggregate/adtech/worker/util/JobUtils.java +++ b/java/com/google/aggregate/adtech/worker/util/JobUtils.java @@ -26,9 +26,15 @@ public final class JobUtils { public static final String JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE = "report_error_threshold_percentage"; + public static final String JOB_PARAM_INPUT_REPORT_COUNT = "input_report_count"; + public static final String JOB_PARAM_FILTERING_IDS = "filtering_ids"; public static final String JOB_PARAM_FILTERING_IDS_DELIMITER = ","; + public static final String JOB_PARAM_ATTRIBUTION_REPORT_TO = "attribution_report_to"; + + public static final String JOB_PARAM_REPORTING_SITE = "reporting_site"; + private JobUtils() {} } diff --git a/java/com/google/aggregate/adtech/worker/util/NumericConversions.java b/java/com/google/aggregate/adtech/worker/util/NumericConversions.java index c5e63787..3806e1c8 100644 --- 
a/java/com/google/aggregate/adtech/worker/util/NumericConversions.java +++ b/java/com/google/aggregate/adtech/worker/util/NumericConversions.java @@ -20,6 +20,7 @@ import com.google.common.base.Strings; import com.google.common.collect.ImmutableSet; +import com.google.common.primitives.UnsignedLong; import com.google.errorprone.annotations.Var; import java.math.BigInteger; import java.util.Arrays; @@ -62,21 +63,20 @@ public static Long uInt32FromBytes(byte[] bytes) { } /** - * Reads a 32-bit integer from a big-endian byte array. + * Reads a 64-bit UnsignedLong from a big-endian byte array. * - * @param bytes the byte array to read from. Must be 4 bytes or shorter. - * @return the decoded 32-bit integer. + * @param bytes the byte array to read from. Must be 8 bytes or shorter. + * @return the decoded 64-bit UnsignedLong. */ - public static int getInt32FromBytes(byte[] bytes) { - if (bytes.length > 4) { + public static UnsignedLong getUnsignedLongFromBytes(byte[] bytes) { + if (bytes.length > 8) { throw new IllegalArgumentException( - "Byte array provided was too long. Must be 4 bytes or shorter. Length was " + "Byte array provided was too long. Must be 8 bytes or shorter. Length was " + bytes.length); } // Decode from big-endian bytes. BigInteger assumes the byte array is big endian. - BigInteger bigIntegerValue = new BigInteger(bytes); - return bigIntegerValue.intValueExact(); + return UnsignedLong.valueOf(new BigInteger(POSITIVE_SIGN, bytes)); } /** @@ -188,12 +188,13 @@ public static double getPercentageValue(String percentageInString) { } /** - * Gets the list of integer from its string representation separated by the given delimiter. + * Gets the list of unsigned longs from its string representation of unsigned longs + * separated by the given delimiter. * - * @param stringOfNumbers integers separated by delimiter in string. + * @param stringOfNumbers unsigned longs separated by delimiter in string. * @param delimiter the delimiter separating the numbers. */ - public static ImmutableSet getIntegersFromString( + public static ImmutableSet getUnsignedLongsFromString( String stringOfNumbers, String delimiter) { if (Strings.isNullOrEmpty(stringOfNumbers)) { return ImmutableSet.of(); @@ -201,7 +202,7 @@ public static ImmutableSet getIntegersFromString( return Arrays.stream(stringOfNumbers.trim().split(delimiter)) .filter(id -> !id.trim().isEmpty()) - .map(id -> Integer.valueOf(id.trim())) + .map(id -> UnsignedLong.valueOf(id.trim())) .collect(ImmutableSet.toImmutableSet()); } diff --git a/java/com/google/aggregate/adtech/worker/util/ReportingOriginUtils.java b/java/com/google/aggregate/adtech/worker/util/ReportingOriginUtils.java new file mode 100644 index 00000000..0cf52a24 --- /dev/null +++ b/java/com/google/aggregate/adtech/worker/util/ReportingOriginUtils.java @@ -0,0 +1,67 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
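
The `NumericConversions` hunk above widens filtering-ID handling from 32-bit ints to Guava `UnsignedLong`, both when decoding big-endian bytes and when parsing delimited strings. A minimal sketch of the string helper, with made-up ID values; note that the largest value below (2^64 - 1) would not fit in the old `Integer`-based API.

```java
import com.google.aggregate.adtech.worker.util.NumericConversions;
import com.google.common.collect.ImmutableSet;
import com.google.common.primitives.UnsignedLong;

final class FilteringIdsSketch {
  public static void main(String[] args) {
    // Whitespace around entries is trimmed and empty entries are skipped.
    ImmutableSet<UnsignedLong> filteringIds =
        NumericConversions.getUnsignedLongsFromString("0, 3, 18446744073709551615", ",");
    System.out.println(filteringIds); // [0, 3, 18446744073709551615]
  }
}
```
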
+ */ + +package com.google.aggregate.adtech.worker.util; + +import com.google.common.net.InternetDomainName; +import java.net.MalformedURLException; +import java.net.URL; + +/** Utility class providing methods for working with reporting origins URLs. */ +public class ReportingOriginUtils { + /** + * Converts a reporting origin URL to a site URL by extracting the top private domain. The return + * value is always prefixed with the https protocol. + * + * @param reportingOrigin the reporting origin URL + * @return the site URL, prefixed with https:// (e.g., https://example.com) + * @throws InvalidReportingOriginException if the reporting origin is malformed or not under a + * known public suffix + */ + public static String convertReportingOriginToSite(String reportingOrigin) + throws InvalidReportingOriginException { + URL url; + try { + // Strip trailing slash. + if (reportingOrigin.endsWith("/")) { + reportingOrigin = reportingOrigin.substring(0, reportingOrigin.length() - 1); + } + url = new URL(reportingOrigin); + } catch (MalformedURLException e) { + throw new InvalidReportingOriginException(e); + } + // Remove the protocol and port (if any) + String host = url.getHost(); + InternetDomainName domain = InternetDomainName.from(host); + if (!domain.isUnderPublicSuffix()) { + throw new InvalidReportingOriginException( + "Reporting origin is not under a known public suffix."); + } + return "https://" + domain.topPrivateDomain(); + } + + /** Exception thrown when a reporting origin is invalid. */ + public static class InvalidReportingOriginException extends Exception { + + public InvalidReportingOriginException(Throwable e) { + super(e); + } + + public InvalidReportingOriginException(String message) { + super(message); + } + } +} diff --git a/java/com/google/aggregate/adtech/worker/validation/BUILD b/java/com/google/aggregate/adtech/worker/validation/BUILD index ad65058d..5bc5caf9 100644 --- a/java/com/google/aggregate/adtech/worker/validation/BUILD +++ b/java/com/google/aggregate/adtech/worker/validation/BUILD @@ -21,6 +21,7 @@ java_library( srcs = glob(["*.java"]), javacopts = ["-Xep:Var"], deps = [ + "//java/com/google/aggregate/adtech/worker:annotations", "//java/com/google/aggregate/adtech/worker:return_code", "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/adtech/worker/util", diff --git a/java/com/google/aggregate/adtech/worker/validation/JobValidator.java b/java/com/google/aggregate/adtech/worker/validation/JobValidator.java index 3ddf8e0e..6f188e3a 100644 --- a/java/com/google/aggregate/adtech/worker/validation/JobValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/JobValidator.java @@ -16,15 +16,19 @@ package com.google.aggregate.adtech.worker.validation; +import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_ATTRIBUTION_REPORT_TO; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_FILTERING_IDS; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_FILTERING_IDS_DELIMITER; +import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_INPUT_REPORT_COUNT; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_OUTPUT_DOMAIN_BLOB_PREFIX; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_OUTPUT_DOMAIN_BUCKET_NAME; +import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_REPORTING_SITE; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE; import static 
com.google.common.base.Preconditions.checkArgument; import static com.google.scp.operator.shared.model.BackendModelUtil.toJobKeyString; import com.google.aggregate.adtech.worker.util.NumericConversions; +import com.google.common.primitives.Longs; import com.google.scp.operator.cpio.jobclient.model.Job; import java.util.Map; import java.util.Optional; @@ -41,16 +45,7 @@ public final class JobValidator { public static void validate(Optional job, boolean domainOptional) { checkArgument(job.isPresent(), "Job metadata not found."); String jobKey = toJobKeyString(job.get().jobKey()); - checkArgument( - job.get().requestInfo().getJobParametersMap().containsKey("attribution_report_to") - && !job.get() - .requestInfo() - .getJobParametersMap() - .get("attribution_report_to") - .trim() - .isEmpty(), - String.format( - "Job parameters does not have an attribution_report_to field for the Job %s.", jobKey)); + validateReportingOriginAndSite(job.get()); Map jobParams = job.get().requestInfo().getJobParametersMap(); checkArgument( domainOptional @@ -72,6 +67,12 @@ public static void validate(Optional job, boolean domainOptional) { "Job parameters for the job '%s' should have a valid value between 0 and 100 for" + " 'report_error_threshold_percentage' parameter.", jobKey)); + checkArgument( + isAValidCount(jobParams.get(JOB_PARAM_INPUT_REPORT_COUNT)), + String.format( + "Job parameters for the job '%s' should have a valid non-negative value for" + + " 'input_report_count' parameter.", + jobKey)); String filteringIds = jobParams.getOrDefault(JOB_PARAM_FILTERING_IDS, null); checkArgument( @@ -82,10 +83,61 @@ public static void validate(Optional job, boolean domainOptional) { jobKey)); } + /** + * Validates that exactly one of the two fields 'JOB_PARAM_ATTRIBUTION_REPORT_TO' and + * 'reporting_site' is specified and the specified field is non-empty + */ + private static void validateReportingOriginAndSite(Job job) { + Map jobParams = job.requestInfo().getJobParametersMap(); + String jobKey = toJobKeyString(job.jobKey()); + boolean bothSiteAndOriginSpecified = + jobParams.containsKey(JOB_PARAM_ATTRIBUTION_REPORT_TO) + && jobParams.containsKey(JOB_PARAM_REPORTING_SITE); + boolean neitherSiteOrOriginSpecified = + !jobParams.containsKey(JOB_PARAM_ATTRIBUTION_REPORT_TO) + && !jobParams.containsKey(JOB_PARAM_REPORTING_SITE); + if (bothSiteAndOriginSpecified || neitherSiteOrOriginSpecified) { + throw new IllegalArgumentException( + String.format( + "Exactly one of 'attribution_report_to' and 'reporting_site' fields should be" + + " specified for the Job %s. It is recommended to use 'reporting_site'" + + " parameter. Parameter 'attribution_report_to' will be deprecated in the next" + + " major version upgrade of the API", + jobKey)); + } + // Verify that either the field 'JOB_PARAM_ATTRIBUTION_REPORT_TO' is not specified or is + // non-empty. + boolean emptyAttributionReportToSpecified = + jobParams.containsKey(JOB_PARAM_ATTRIBUTION_REPORT_TO) + && jobParams.get(JOB_PARAM_ATTRIBUTION_REPORT_TO).trim().isEmpty(); + checkArgument( + !emptyAttributionReportToSpecified, + String.format( + "The 'attribution_report_to' field in the Job parameters is empty for" + " the Job %s.", + jobKey)); + // Verify that either the field 'reporting_site' is not specified or is non-empty. 
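
The `reporting_site` checks above lean on the new `ReportingOriginUtils.convertReportingOriginToSite` helper introduced earlier in this diff: a job now names either a single `attribution_report_to` origin or a `reporting_site`, and in the latter case each report's origin is reduced to its site before matching. A small usage sketch; the origins below are illustrative only.

```java
import com.google.aggregate.adtech.worker.util.ReportingOriginUtils;
import com.google.aggregate.adtech.worker.util.ReportingOriginUtils.InvalidReportingOriginException;

final class ReportingSiteSketch {
  public static void main(String[] args) throws InvalidReportingOriginException {
    // Different origins under the same registrable domain map to the same site.
    System.out.println(
        ReportingOriginUtils.convertReportingOriginToSite("https://ads.reports.example.com/"));
    // -> https://example.com
    System.out.println(
        ReportingOriginUtils.convertReportingOriginToSite("https://example.com:8443"));
    // -> https://example.com
  }
}
```
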
+ boolean emptyReportingSiteSpecified = + jobParams.containsKey(JOB_PARAM_REPORTING_SITE) + && jobParams.get(JOB_PARAM_REPORTING_SITE).trim().isEmpty(); + checkArgument( + !emptyReportingSiteSpecified, + String.format( + "The 'reporting_site' field in the Job parameters is empty for the Job" + " %s.", + jobKey)); + } + + /** Checks if the string represents a non-negative number or is empty. */ + private static boolean isAValidCount(String countInString) { + return countInString == null + || countInString.trim().isEmpty() + || (Longs.tryParse(countInString.trim()) != null + && Longs.tryParse(countInString.trim()) >= 0); + } + /** Checks if the given string is a list of integers separated by delimiter. */ private static boolean validStringOfIntegers(String stringOfNumbers, String delimiter) { try { - NumericConversions.getIntegersFromString(stringOfNumbers, delimiter); + NumericConversions.getUnsignedLongsFromString(stringOfNumbers, delimiter); return true; } catch (IllegalArgumentException iae) { return false; diff --git a/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidator.java b/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidator.java index 3edf466f..e3f881cd 100644 --- a/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidator.java @@ -25,8 +25,5 @@ * budget key for Reports. */ public interface PrivacyBudgetKeyValidator { - String NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING = - "One or more required fields in report's SharedInfo are null or invalid."; - Optional validatePrivacyBudgetKey(SharedInfo sharedInfo); } diff --git a/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactory.java b/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactory.java index a625c7d9..f6904a53 100644 --- a/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactory.java +++ b/java/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactory.java @@ -17,12 +17,14 @@ package com.google.aggregate.adtech.worker.validation; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.MAJOR_VERSION_ONE; import static com.google.aggregate.adtech.worker.model.SharedInfo.MAJOR_VERSION_ZERO; import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; import com.google.aggregate.adtech.worker.model.Version; +import com.google.aggregate.adtech.worker.validation.v01.AttributionReportingDebugPrivacyBudgetKeyFieldsValidator; import com.google.aggregate.adtech.worker.validation.v01.AttributionReportingPrivacyBudgetKeyFieldsValidator; import com.google.aggregate.adtech.worker.validation.v01.ProtectedAudiencePrivacyBudgetKeyFieldsValidator; import com.google.aggregate.adtech.worker.validation.v01.SharedStoragePrivacyBudgetKeyFieldsValidator; @@ -46,6 +48,16 @@ public final class PrivacyBudgetKeyValidatorFactory { .setApi(ATTRIBUTION_REPORTING_API) .setMajorVersion(MAJOR_VERSION_ONE) .build(); + private static final ApiAndMajorVersion ATTRIBUTION_REPORTING_DEBUG_V0 = + ApiAndMajorVersion.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + 
.setMajorVersion(MAJOR_VERSION_ZERO) + .build(); + private static final ApiAndMajorVersion ATTRIBUTION_REPORTING_DEBUG_V1 = + ApiAndMajorVersion.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setMajorVersion(MAJOR_VERSION_ONE) + .build(); private static final ApiAndMajorVersion PROTECTED_AUDIENCE_API_V0 = ApiAndMajorVersion.builder() .setApi(PROTECTED_AUDIENCE_API) @@ -73,6 +85,10 @@ public final class PrivacyBudgetKeyValidatorFactory { new AttributionReportingPrivacyBudgetKeyFieldsValidator(), ATTRIBUTION_REPORTING_V1, new AttributionReportingPrivacyBudgetKeyFieldsValidator(), + ATTRIBUTION_REPORTING_DEBUG_V0, + new AttributionReportingDebugPrivacyBudgetKeyFieldsValidator(), + ATTRIBUTION_REPORTING_DEBUG_V1, + new AttributionReportingDebugPrivacyBudgetKeyFieldsValidator(), PROTECTED_AUDIENCE_API_V0, new ProtectedAudiencePrivacyBudgetKeyFieldsValidator(), PROTECTED_AUDIENCE_API_V1, diff --git a/java/com/google/aggregate/adtech/worker/validation/ReportForDebugValidator.java b/java/com/google/aggregate/adtech/worker/validation/ReportForDebugValidator.java index 573cb539..42222b93 100644 --- a/java/com/google/aggregate/adtech/worker/validation/ReportForDebugValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/ReportForDebugValidator.java @@ -43,6 +43,6 @@ public Optional validate(Report report, Job job) { return Optional.empty(); } - return createErrorMessage(DEBUG_NOT_ENABLED, "the mode is not enabled"); + return createErrorMessage(DEBUG_NOT_ENABLED); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/ReportNotTooOldValidator.java b/java/com/google/aggregate/adtech/worker/validation/ReportNotTooOldValidator.java index 4f404bf4..38727927 100644 --- a/java/com/google/aggregate/adtech/worker/validation/ReportNotTooOldValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/ReportNotTooOldValidator.java @@ -47,13 +47,6 @@ public Optional validate(Report report, Job unused) { return Optional.empty(); } - return createErrorMessage( - ORIGINAL_REPORT_TIME_TOO_OLD, - String.format( - "Report's originalReportTime is too old, reports cannot be older than %s, must be more" - + " recent than %s but was %s", - SharedInfo.MAX_REPORT_AGE, - oldestAllowedTime, - report.sharedInfo().scheduledReportTime())); + return createErrorMessage(ORIGINAL_REPORT_TIME_TOO_OLD); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/ReportPrivacyBudgetKeyValidator.java b/java/com/google/aggregate/adtech/worker/validation/ReportPrivacyBudgetKeyValidator.java index 4fca7359..f7a0beb5 100644 --- a/java/com/google/aggregate/adtech/worker/validation/ReportPrivacyBudgetKeyValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/ReportPrivacyBudgetKeyValidator.java @@ -28,10 +28,6 @@ /** Validates that the Report's SharedInfo can generate valid Privacy Budget Key. 
*/ public final class ReportPrivacyBudgetKeyValidator implements ReportValidator { - static String MISSING_API_ERROR_STRING = "Empty API in Report's SharedInfo is not supported."; - static String INVALID_API_VERSION_ERROR_STRING = - "Invalid SharedInfo API and Version combination."; - @Override public Optional validate(Report report, Job unused) { if (isFieldNonEmpty(report.sharedInfo().api())) { @@ -39,11 +35,10 @@ public Optional validate(Report report, Job unused) { PrivacyBudgetKeyValidatorFactory.getPrivacyBudgetKeyValidator( report.sharedInfo().api().get(), report.sharedInfo().version()); if (validator.isEmpty()) { - return createErrorMessage( - REQUIRED_SHAREDINFO_FIELD_INVALID, INVALID_API_VERSION_ERROR_STRING); + return createErrorMessage(REQUIRED_SHAREDINFO_FIELD_INVALID); } return validator.get().validatePrivacyBudgetKey(report.sharedInfo()); } - return createErrorMessage(REQUIRED_SHAREDINFO_FIELD_INVALID, MISSING_API_ERROR_STRING); + return createErrorMessage(REQUIRED_SHAREDINFO_FIELD_INVALID); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/ReportVersionValidator.java b/java/com/google/aggregate/adtech/worker/validation/ReportVersionValidator.java index d99b4ed5..19753539 100644 --- a/java/com/google/aggregate/adtech/worker/validation/ReportVersionValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/ReportVersionValidator.java @@ -42,8 +42,7 @@ public Optional validate(Report report, Job unused) { Version version = Version.parse(report.sharedInfo().version()); if (version.isZero()) { // 0.0 is not supported sharedInfo version. - return createErrorMessage( - UNSUPPORTED_SHAREDINFO_VERSION, UNSUPPORTED_SHAREDINFO_VERSION.getDescription()); + return createErrorMessage(UNSUPPORTED_SHAREDINFO_VERSION); } else if (isReportVersionHigherThanLatestVersion(version)) { // throw exception to fail job. throw new ValidationException( @@ -60,12 +59,10 @@ public Optional validate(Report report, Job unused) { return Optional.empty(); } } catch (IllegalArgumentException ex) { - return createErrorMessage( - UNSUPPORTED_SHAREDINFO_VERSION, UNSUPPORTED_SHAREDINFO_VERSION.getDescription()); + return createErrorMessage(UNSUPPORTED_SHAREDINFO_VERSION); } - return createErrorMessage( - UNSUPPORTED_SHAREDINFO_VERSION, UNSUPPORTED_SHAREDINFO_VERSION.getDescription()); + return createErrorMessage(UNSUPPORTED_SHAREDINFO_VERSION); } private boolean isReportVersionHigherThanLatestVersion(Version version) { diff --git a/java/com/google/aggregate/adtech/worker/validation/ReportingOriginIsDomainValidator.java b/java/com/google/aggregate/adtech/worker/validation/ReportingOriginIsDomainValidator.java index 69f7e190..ac332803 100644 --- a/java/com/google/aggregate/adtech/worker/validation/ReportingOriginIsDomainValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/ReportingOriginIsDomainValidator.java @@ -41,11 +41,6 @@ public Optional validate(Report report, Job unused) { return Optional.empty(); } - return createErrorMessage( - ATTRIBUTION_REPORT_TO_MALFORMED, - String.format( - "Report's attributionReportTo to is malformed, must be a domain. 
Report's" - + " attributionReportTo was: %s", - report.sharedInfo().reportingOrigin())); + return createErrorMessage(ATTRIBUTION_REPORT_TO_MALFORMED); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidator.java b/java/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidator.java index 2301ef95..8c8a8aa1 100644 --- a/java/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidator.java @@ -18,11 +18,20 @@ import static com.google.aggregate.adtech.worker.model.ErrorCounter.ATTRIBUTION_REPORT_TO_MISMATCH; import static com.google.aggregate.adtech.worker.validation.ValidatorHelper.createErrorMessage; +import static com.google.aggregate.adtech.worker.model.ErrorCounter.ATTRIBUTION_REPORT_TO_MALFORMED; +import static com.google.aggregate.adtech.worker.model.ErrorCounter.REPORTING_SITE_MISMATCH; import com.google.aggregate.adtech.worker.model.ErrorMessage; import com.google.aggregate.adtech.worker.model.Report; import com.google.scp.operator.cpio.jobclient.model.Job; import java.util.Optional; +import com.google.aggregate.adtech.worker.util.ReportingOriginUtils; +import com.google.aggregate.adtech.worker.util.ReportingOriginUtils.InvalidReportingOriginException; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; /** * Validates that the report's reportingOrigin is the same as the attributionReportTo provided in @@ -30,19 +39,45 @@ */ public final class ReportingOriginMatchesRequestValidator implements ReportValidator { + private static final int MAX_CACHE_SIZE = 100; + private static final long CACHE_ENTRY_TTL_SEC = 3600; + private final LoadingCache originToSiteMap = + CacheBuilder.newBuilder() + .maximumSize(MAX_CACHE_SIZE) + .expireAfterWrite(CACHE_ENTRY_TTL_SEC, TimeUnit.SECONDS) + .concurrencyLevel(Runtime.getRuntime().availableProcessors()) + .build( + new CacheLoader<>() { + @Override + public String load(final String reportingOrigin) + throws InvalidReportingOriginException { + return ReportingOriginUtils.convertReportingOriginToSite(reportingOrigin); + } + }); + @Override public Optional validate(Report report, Job ctx) { - String attributionReportTo = - ctx.requestInfo().getJobParametersMap().get("attribution_report_to"); - if (report.sharedInfo().reportingOrigin().equals(attributionReportTo)) { - return Optional.empty(); - } + Optional optionalSiteValue = + Optional.ofNullable(ctx.requestInfo().getJobParametersMap().get("reporting_site")); + if (optionalSiteValue.isPresent()) { + try { + String reportingSiteParameterValue = optionalSiteValue.get(); + String siteForReportingOrigin = originToSiteMap.get(report.sharedInfo().reportingOrigin()); + if (!reportingSiteParameterValue.equals(siteForReportingOrigin)) { + return createErrorMessage(REPORTING_SITE_MISMATCH); + } + return Optional.empty(); + } catch (ExecutionException e) { + return createErrorMessage(ATTRIBUTION_REPORT_TO_MALFORMED); + } + } else { + String attributionReportTo = + ctx.requestInfo().getJobParametersMap().get("attribution_report_to"); + if (report.sharedInfo().reportingOrigin().equals(attributionReportTo)) { + return Optional.empty(); + } - return createErrorMessage( - ATTRIBUTION_REPORT_TO_MISMATCH, - String.format( - "Report's 
attributionReportTo didn't match the AdTech request. Report's" - + " attributionReportTo: %s, Request's attributionReportTo: %s", - report.sharedInfo().reportingOrigin(), attributionReportTo)); + return createErrorMessage(ATTRIBUTION_REPORT_TO_MISMATCH); + } } } diff --git a/java/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidator.java b/java/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidator.java index ccc7c7f0..32a7bea5 100644 --- a/java/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidator.java @@ -29,9 +29,6 @@ /** Validates that the report ID in SharedInfo is a valid UUID. */ public final class SharedInfoReportIdValidator implements ReportValidator { - static final String INVALID_REPORT_ID_ERROR_STRING = - "Report ID is missing or is invalid in SharedInfo."; - @Override public Optional validate(Report report, Job unused) { if (isFieldNonEmpty(report.sharedInfo().reportId())) { @@ -39,9 +36,9 @@ public Optional validate(Report report, Job unused) { UUID.fromString(report.sharedInfo().reportId().get()); return Optional.empty(); } catch (IllegalArgumentException exception) { - return createErrorMessage(INVALID_REPORT_ID, INVALID_REPORT_ID_ERROR_STRING); + return createErrorMessage(INVALID_REPORT_ID); } } - return createErrorMessage(INVALID_REPORT_ID, INVALID_REPORT_ID_ERROR_STRING); + return createErrorMessage(INVALID_REPORT_ID); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/SupportedOperationValidator.java b/java/com/google/aggregate/adtech/worker/validation/SupportedOperationValidator.java index 4b3ee370..eb38abc8 100644 --- a/java/com/google/aggregate/adtech/worker/validation/SupportedOperationValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/SupportedOperationValidator.java @@ -28,20 +28,12 @@ /** Validates that the report's operation is an accepted value */ public final class SupportedOperationValidator implements ReportValidator { - private static final String DETAILED_ERROR_MESSAGE_TEMPLATE = - "Report's operation is not supported. Operation was '%s'. 
Supported operations are %s."; - @Override public Optional validate(Report report, Job unused) { if (SharedInfo.SUPPORTED_OPERATIONS.contains(report.payload().operation())) { return Optional.empty(); } - return createErrorMessage( - UNSUPPORTED_OPERATION, - String.format( - DETAILED_ERROR_MESSAGE_TEMPLATE, - report.payload().operation(), - SharedInfo.SUPPORTED_OPERATIONS)); + return createErrorMessage(UNSUPPORTED_OPERATION); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidator.java b/java/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidator.java index 5b1af80a..634f0406 100644 --- a/java/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidator.java @@ -19,28 +19,33 @@ import static com.google.aggregate.adtech.worker.model.ErrorCounter.UNSUPPORTED_REPORT_API_TYPE; import static com.google.aggregate.adtech.worker.validation.ValidatorHelper.createErrorMessage; +import com.google.aggregate.adtech.worker.Annotations.SupportedApis; import com.google.aggregate.adtech.worker.model.ErrorMessage; import com.google.aggregate.adtech.worker.model.Report; -import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.common.collect.ImmutableSet; +import com.google.inject.Inject; import com.google.scp.operator.cpio.jobclient.model.Job; import java.util.Optional; /** Validates that the report API type is supported for aggregation. */ public final class SupportedReportApiTypeValidator implements ReportValidator { + private final ImmutableSet supportedApis; + + @Inject + SupportedReportApiTypeValidator(@SupportedApis ImmutableSet supportedApis) { + this.supportedApis = supportedApis; + } @Override public Optional validate(Report report, Job unused) { if (report.sharedInfo().api().isEmpty() - || SharedInfo.SUPPORTED_APIS.contains(report.sharedInfo().api().get())) { - /** + || supportedApis.contains(report.sharedInfo().api().get())) { + /* * attribution-reporting reports with version "" do not have api field present in shared Info */ return Optional.empty(); } - return createErrorMessage( - UNSUPPORTED_REPORT_API_TYPE, - String.format( - "Report's api type %s is not supported.", report.sharedInfo().api().orElse(""))); + return createErrorMessage(UNSUPPORTED_REPORT_API_TYPE); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/ValidatorHelper.java b/java/com/google/aggregate/adtech/worker/validation/ValidatorHelper.java index 94fd8e5c..7dabff18 100644 --- a/java/com/google/aggregate/adtech/worker/validation/ValidatorHelper.java +++ b/java/com/google/aggregate/adtech/worker/validation/ValidatorHelper.java @@ -46,18 +46,12 @@ public static boolean isFieldNonEmpty(Optional field) { } /** - * Returns ErrorMessage from given ErrorCounter and errorMessage String + * Returns ErrorMessage from given ErrorCounter. 
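
`SupportedReportApiTypeValidator` above now receives its accepted API set through the `@SupportedApis` qualifier instead of reading `SharedInfo.SUPPORTED_APIS` directly, making the accepted set configurable rather than hard-coded. Below is a hedged sketch of what a Guice binding for that qualifier could look like in a test-only module; the production binding lives outside this excerpt and may bind a different set of APIs.

```java
import com.google.aggregate.adtech.worker.Annotations.SupportedApis;
import com.google.aggregate.adtech.worker.model.SharedInfo;
import com.google.common.collect.ImmutableSet;
import com.google.inject.AbstractModule;
import com.google.inject.TypeLiteral;

// Hypothetical test-only module; binds the qualifier the validator injects.
final class SupportedApisTestModule extends AbstractModule {
  @Override
  protected void configure() {
    bind(new TypeLiteral<ImmutableSet<String>>() {})
        .annotatedWith(SupportedApis.class)
        .toInstance(
            ImmutableSet.of(
                SharedInfo.ATTRIBUTION_REPORTING_API,
                SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API));
  }
}
```
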
* * @param errorCounter - * @param errorMessage * @return */ - public static Optional createErrorMessage( - ErrorCounter errorCounter, String errorMessage) { - return Optional.of( - ErrorMessage.builder() - .setCategory(errorCounter) - .setDetailedErrorMessage(errorMessage) - .build()); + public static Optional createErrorMessage(ErrorCounter errorCounter) { + return Optional.of(ErrorMessage.builder().setCategory(errorCounter).build()); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingDebugPrivacyBudgetKeyFieldsValidator.java b/java/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingDebugPrivacyBudgetKeyFieldsValidator.java new file mode 100644 index 00000000..7c1651bf --- /dev/null +++ b/java/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingDebugPrivacyBudgetKeyFieldsValidator.java @@ -0,0 +1,45 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.aggregate.adtech.worker.validation.v01; + +import static com.google.aggregate.adtech.worker.model.ErrorCounter.REQUIRED_SHAREDINFO_FIELD_INVALID; +import static com.google.aggregate.adtech.worker.validation.ValidatorHelper.createErrorMessage; +import static com.google.aggregate.adtech.worker.validation.ValidatorHelper.isFieldNonEmpty; + +import com.google.aggregate.adtech.worker.model.ErrorMessage; +import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.aggregate.adtech.worker.validation.PrivacyBudgetKeyValidator; +import java.util.Optional; + +/** + * Validates that the Attribution Reporting Debug API type report's SharedInfo fields used in + * Privacy Budget Key generation are present and not null/empty. 
+ */ +public class AttributionReportingDebugPrivacyBudgetKeyFieldsValidator + implements PrivacyBudgetKeyValidator { + + @Override + public Optional validatePrivacyBudgetKey(SharedInfo sharedInfo) { + if (isFieldNonEmpty(sharedInfo.reportingOrigin()) + && isFieldNonEmpty(sharedInfo.destination()) + && isFieldNonEmpty(sharedInfo.version()) + && sharedInfo.scheduledReportTime() != null) { + return Optional.empty(); + } + return createErrorMessage(REQUIRED_SHAREDINFO_FIELD_INVALID); + } +} diff --git a/java/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidator.java b/java/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidator.java index 38d50572..2428af56 100644 --- a/java/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidator.java @@ -40,7 +40,6 @@ && isFieldNonEmpty(sharedInfo.version()) && sharedInfo.scheduledReportTime() != null) { return Optional.empty(); } - return createErrorMessage( - REQUIRED_SHAREDINFO_FIELD_INVALID, NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); + return createErrorMessage(REQUIRED_SHAREDINFO_FIELD_INVALID); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidator.java b/java/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidator.java index de407511..d5523b47 100644 --- a/java/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidator.java @@ -39,7 +39,6 @@ && isFieldNonEmpty(sharedInfo.version()) return Optional.empty(); } - return createErrorMessage( - REQUIRED_SHAREDINFO_FIELD_INVALID, NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); + return createErrorMessage(REQUIRED_SHAREDINFO_FIELD_INVALID); } } diff --git a/java/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidator.java b/java/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidator.java index 42c069f2..492f321e 100644 --- a/java/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidator.java +++ b/java/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidator.java @@ -40,7 +40,6 @@ && isFieldNonEmpty(sharedInfo.version()) return Optional.empty(); } - return createErrorMessage( - REQUIRED_SHAREDINFO_FIELD_INVALID, NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); + return createErrorMessage(REQUIRED_SHAREDINFO_FIELD_INVALID); } } diff --git a/java/com/google/aggregate/adtech/worker/writer/LocalResultFileWriter.java b/java/com/google/aggregate/adtech/worker/writer/LocalResultFileWriter.java index df49580a..3f60fac3 100644 --- a/java/com/google/aggregate/adtech/worker/writer/LocalResultFileWriter.java +++ b/java/com/google/aggregate/adtech/worker/writer/LocalResultFileWriter.java @@ -17,7 +17,6 @@ package com.google.aggregate.adtech.worker.writer; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import java.nio.file.Path; import java.util.stream.Stream; @@ -27,10 +26,6 @@ public interface LocalResultFileWriter { /** Write the file to the local filesystem */ void 
writeLocalFile(Stream results, Path resultFile) throws FileWriteException; - /** Writes list of encrypted reports to a local file. */ - void writeLocalReportFile(Stream reports, Path resultFilePath) - throws FileWriteException; - /** Returns the file extension for the file type written */ String getFileExtension(); diff --git a/java/com/google/aggregate/adtech/worker/writer/avro/BUILD b/java/com/google/aggregate/adtech/worker/writer/avro/BUILD index 7b2b9095..61119a70 100644 --- a/java/com/google/aggregate/adtech/worker/writer/avro/BUILD +++ b/java/com/google/aggregate/adtech/worker/writer/avro/BUILD @@ -29,8 +29,6 @@ java_library( "//java/com/google/aggregate/adtech/worker/writer", "//java/com/google/aggregate/protocol/avro:avro_debug_results", "//java/com/google/aggregate/protocol/avro:avro_record_writer", - "//java/com/google/aggregate/protocol/avro:avro_report", - "//java/com/google/aggregate/protocol/avro:avro_reports_schema_supplier", "//java/com/google/aggregate/protocol/avro:avro_results_schema_supplier", "//java/external:avro", "//java/external:guava", diff --git a/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroDebugResultFileWriter.java b/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroDebugResultFileWriter.java index ef8bfce3..e383606f 100644 --- a/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroDebugResultFileWriter.java +++ b/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroDebugResultFileWriter.java @@ -20,7 +20,6 @@ import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter; import com.google.aggregate.protocol.avro.AvroDebugResultsRecord; import com.google.aggregate.protocol.avro.AvroDebugResultsWriter; @@ -56,23 +55,16 @@ public void writeLocalFile(Stream results, Path resultFilePath) results.map( (fact -> AvroDebugResultsRecord.create( - fact.bucket(), - fact.metric(), - fact.unnoisedMetric().get(), - fact.debugAnnotations().get()))); + fact.getBucket(), + fact.getMetric(), + fact.getUnnoisedMetric().get(), + fact.getDebugAnnotations().get()))); avroDebugResultsWriter.writeRecords(metaData, resultsRecords.collect(toImmutableList())); } catch (IOException e) { throw new FileWriteException("Failed to write local Avro debug file", e); } } - @Override - public void writeLocalReportFile(Stream reports, Path resultFilePath) - throws UnsupportedOperationException { - throw new UnsupportedOperationException( - "LocalAvroDebugResultFileWriter cannot write Avro report file."); - } - @Override public String getFileExtension() { return ".avro"; diff --git a/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriter.java b/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriter.java index 3c5ed4d3..0b289e3a 100644 --- a/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriter.java +++ b/java/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriter.java @@ -16,22 +16,14 @@ package com.google.aggregate.adtech.worker.writer.avro; -import static com.google.common.collect.ImmutableList.toImmutableList; import static java.nio.file.StandardOpenOption.CREATE; import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import 
com.google.aggregate.adtech.worker.util.NumericConversions; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter; -import com.google.aggregate.protocol.avro.AvroRecordWriter.MetadataElement; -import com.google.aggregate.protocol.avro.AvroReportRecord; -import com.google.aggregate.protocol.avro.AvroReportWriter; -import com.google.aggregate.protocol.avro.AvroReportWriterFactory; import com.google.aggregate.protocol.avro.AvroResultsSchemaSupplier; -import com.google.common.collect.ImmutableList; import java.io.IOException; -import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.file.Files; import java.nio.file.Path; @@ -49,13 +41,10 @@ public final class LocalAvroResultFileWriter implements LocalResultFileWriter { private AvroResultsSchemaSupplier schemaSupplier; - private final AvroReportWriterFactory reportsWriterFactory; @Inject - LocalAvroResultFileWriter( - AvroResultsSchemaSupplier schemaSupplier, AvroReportWriterFactory reportsWriterFactory) { + LocalAvroResultFileWriter(AvroResultsSchemaSupplier schemaSupplier) { this.schemaSupplier = schemaSupplier; - this.reportsWriterFactory = reportsWriterFactory; } /** @@ -94,23 +83,6 @@ public void writeLocalFile(Stream results, Path resultFilePath) } } - @Override - public void writeLocalReportFile(Stream reports, Path resultFilePath) - throws FileWriteException { - try (OutputStream outputAvroStream = - Files.newOutputStream(resultFilePath, CREATE, TRUNCATE_EXISTING); - AvroReportWriter avroReportWriter = reportsWriterFactory.create(outputAvroStream)) { - ImmutableList metaData = ImmutableList.of(); - Stream reportsRecords = - reports.map( - (report -> - AvroReportRecord.create(report.payload(), report.keyId(), report.sharedInfo()))); - avroReportWriter.writeRecords(metaData, reportsRecords.collect(toImmutableList())); - } catch (IOException e) { - throw new FileWriteException("Failed to write local Avro report file.", e); - } - } - @Override public String getFileExtension() { return ".avro"; @@ -119,9 +91,9 @@ public String getFileExtension() { private GenericRecord aggregatedFactToGenericRecord(AggregatedFact aggregatedFact) { GenericRecord genericRecord = new GenericData.Record(schemaSupplier.get()); ByteBuffer bucketBytes = - ByteBuffer.wrap(NumericConversions.toUnsignedByteArray(aggregatedFact.bucket())); + ByteBuffer.wrap(NumericConversions.toUnsignedByteArray(aggregatedFact.getBucket())); genericRecord.put("bucket", bucketBytes); - genericRecord.put("metric", aggregatedFact.metric()); + genericRecord.put("metric", aggregatedFact.getMetric()); return genericRecord; } } diff --git a/java/com/google/aggregate/adtech/worker/writer/json/BUILD b/java/com/google/aggregate/adtech/worker/writer/json/BUILD index 0639e586..6a3e5c63 100644 --- a/java/com/google/aggregate/adtech/worker/writer/json/BUILD +++ b/java/com/google/aggregate/adtech/worker/writer/json/BUILD @@ -24,6 +24,8 @@ java_library( "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/adtech/worker/util", "//java/com/google/aggregate/adtech/worker/writer", + "//java/com/google/aggregate/protocol/avro:avro_results_schema_supplier", + "//java/external:avro", "//java/external:guava", "//java/external:jackson_core", "//java/external:jackson_databind", diff --git a/java/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriter.java b/java/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriter.java index 116df6d5..68450d1e 100644 --- 
a/java/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriter.java +++ b/java/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriter.java @@ -16,22 +16,31 @@ package com.google.aggregate.adtech.worker.writer.json; -import com.fasterxml.jackson.core.JsonGenerator; +import static java.nio.file.StandardOpenOption.CREATE; +import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING; + import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.module.SimpleModule; -import com.fasterxml.jackson.databind.ser.std.StdSerializer; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; +import com.google.aggregate.adtech.worker.util.NumericConversions; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter; +import com.google.aggregate.protocol.avro.AvroResultsSchemaSupplier; import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.nio.ByteBuffer; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.StandardOpenOption; -import java.util.List; -import java.util.stream.Collectors; +import java.util.Iterator; import java.util.stream.Stream; import javax.inject.Inject; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericDatumWriter; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.io.JsonEncoder; /** * Local writer result implementation in json format. This helps standalone library to be in @@ -39,40 +48,46 @@ */ public final class LocalJsonResultFileWriter implements LocalResultFileWriter { + private AvroResultsSchemaSupplier schemaSupplier; private final ObjectMapper mapper = new ObjectMapper(); private final SimpleModule module = new SimpleModule(); @Inject - LocalJsonResultFileWriter() { - module.addSerializer(EncryptedReport.class, new EncryptedReportSerializer()); - module.addSerializer(AggregatedFact.class, new AggregatedFactSerializer()); + LocalJsonResultFileWriter(AvroResultsSchemaSupplier schemaSupplier) { + this.schemaSupplier = schemaSupplier; mapper.registerModule(module); } @Override - public void writeLocalFile(Stream results, Path resultFile) + public void writeLocalFile(Stream results, Path resultFilePath) throws FileWriteException { + Schema schema = schemaSupplier.get(); + DatumWriter writer = new GenericDatumWriter<>(schema); try { - List aggregatedFactList = results.collect(Collectors.toList()); - String prettyJson = - mapper.writerWithDefaultPrettyPrinter().writeValueAsString(aggregatedFactList); - Files.writeString(resultFile, prettyJson, StandardOpenOption.CREATE); - } catch (Exception e) { - throw new FileWriteException("Failed to write local Json file", e); - } - } - - @Override - public void writeLocalReportFile(Stream reports, Path resultFilePath) - throws FileWriteException { - try { - List encryptedReportsList = reports.collect(Collectors.toList()); - String prettyJson = - mapper.writerWithDefaultPrettyPrinter().writeValueAsString(encryptedReportsList); - Files.writeString(resultFilePath, prettyJson, StandardOpenOption.CREATE); - } catch (Exception e) { - throw new FileWriteException("Failed to write reports to local Json file", e); + OutputStream outStream = Files.newOutputStream(resultFilePath, CREATE, TRUNCATE_EXISTING); + 
PrintWriter printWriter = new PrintWriter(outStream); + JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, outStream, true); + + Iterator resultsIterator = results.iterator(); + printWriter.print('['); + printWriter.flush(); + while (resultsIterator.hasNext()) { + AggregatedFact aggregatedFact = resultsIterator.next(); + GenericRecord aggregatedFactRecord = aggregatedFactToGenericRecord(aggregatedFact); + writer.write(aggregatedFactRecord, jsonEncoder); + jsonEncoder.flush(); + outStream.flush(); + if (resultsIterator.hasNext()) { + printWriter.print(','); + printWriter.flush(); + } + } + printWriter.print(']'); + printWriter.flush(); + printWriter.close(); + } catch (IOException e) { + throw new FileWriteException("Failed to write local JSON file", e); } } @@ -81,46 +96,12 @@ public String getFileExtension() { return ".json"; } - private static class AggregatedFactSerializer extends StdSerializer { - - AggregatedFactSerializer() { - this(null); - } - - AggregatedFactSerializer(Class t) { - super(t); - } - - @Override - public void serialize( - AggregatedFact aggregatedFact, JsonGenerator jgen, SerializerProvider serializerProvider) - throws IOException { - jgen.writeStartObject(); - jgen.writeBinaryField("bucket", aggregatedFact.bucket().toByteArray()); - jgen.writeNumberField("metric", aggregatedFact.metric()); - jgen.writeEndObject(); - } - } - - private static class EncryptedReportSerializer extends StdSerializer { - - EncryptedReportSerializer() { - super(EncryptedReport.class); - } - - EncryptedReportSerializer(Class t) { - super(t); - } - - @Override - public void serialize( - EncryptedReport encryptedReport, JsonGenerator jgen, SerializerProvider serializerProvider) - throws IOException { - jgen.writeStartObject(); - jgen.writeStringField("key_id", encryptedReport.keyId()); - jgen.writeBinaryField("payload", encryptedReport.payload().read()); - jgen.writeStringField("shared_info", encryptedReport.sharedInfo()); - jgen.writeEndObject(); - } + private GenericRecord aggregatedFactToGenericRecord(AggregatedFact aggregatedFact) { + GenericRecord genericRecord = new GenericData.Record(schemaSupplier.get()); + ByteBuffer bucketBytes = + ByteBuffer.wrap(NumericConversions.toUnsignedByteArray(aggregatedFact.getBucket())); + genericRecord.put("bucket", bucketBytes); + genericRecord.put("metric", aggregatedFact.getMetric()); + return genericRecord; } } diff --git a/java/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridge.java b/java/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridge.java index 880f6c51..765079cf 100644 --- a/java/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridge.java +++ b/java/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridge.java @@ -64,14 +64,14 @@ public void setException(PrivacyBudgetingServiceBridgeException e) { @Override public ImmutableList consumePrivacyBudget( - ImmutableList budgetsToConsume, String attributionReportTo) + ImmutableList budgetsToConsume, String claimedIdentity) throws PrivacyBudgetingServiceBridgeException { if (exception != null) { throw exception; } lastBudgetsToConsumeSent = Optional.of(budgetsToConsume); - lastAttributionReportToSent = Optional.of(attributionReportTo); + lastAttributionReportToSent = Optional.of(claimedIdentity); ImmutableList insufficientPrivacyBudgetUnits = budgetsToConsume.stream() diff --git a/java/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridge.java 
b/java/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridge.java index d9da9ee4..db3abb12 100644 --- a/java/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridge.java +++ b/java/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridge.java @@ -21,9 +21,14 @@ import com.google.common.collect.ImmutableList; import com.google.scp.coordinator.privacy.budgeting.model.ConsumePrivacyBudgetRequest; import com.google.scp.coordinator.privacy.budgeting.model.ConsumePrivacyBudgetResponse; +import com.google.scp.coordinator.privacy.budgeting.model.ReportingOriginToPrivacyBudgetUnits; import com.google.scp.operator.cpio.distributedprivacybudgetclient.DistributedPrivacyBudgetClient; import com.google.scp.operator.cpio.distributedprivacybudgetclient.DistributedPrivacyBudgetClient.DistributedPrivacyBudgetClientException; import com.google.scp.operator.cpio.distributedprivacybudgetclient.DistributedPrivacyBudgetClient.DistributedPrivacyBudgetServiceException; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; import javax.inject.Inject; /** HTTP privacy budgeting bridge which consumes privacy budget from an external HTTP service. */ @@ -41,23 +46,34 @@ public HttpPrivacyBudgetingServiceBridge( @Override public ImmutableList consumePrivacyBudget( - ImmutableList budgetsToConsume, String attributionReportTo) + ImmutableList budgetsToConsume, String claimedIdentity) throws PrivacyBudgetingServiceBridgeException { + Map> + originToApiBudgetUnits = new LinkedHashMap<>(); + for (PrivacyBudgetUnit workerBudgetUnit : budgetsToConsume) { + updateOriginToApiBudgetUnitsMap(workerBudgetUnit, originToApiBudgetUnits); + } + ImmutableList reportingOriginToPrivacyBudgetUnits = + originToApiBudgetUnits.entrySet().stream() + .map( + entry -> + ReportingOriginToPrivacyBudgetUnits.builder() + .setReportingOrigin(entry.getKey()) + .setPrivacyBudgetUnits(ImmutableList.copyOf(entry.getValue())) + .build()) + .collect(toImmutableList()); ConsumePrivacyBudgetRequest consumePrivacyBudgetRequest = ConsumePrivacyBudgetRequest.builder() - .attributionReportTo(attributionReportTo) - .privacyBudgetUnits( - budgetsToConsume.stream() - .map(HttpPrivacyBudgetingServiceBridge::scpBudgetUnit) - .collect(toImmutableList())) + .reportingOriginToPrivacyBudgetUnitsList(reportingOriginToPrivacyBudgetUnits) + .claimedIdentity(claimedIdentity) .privacyBudgetLimit(DEFAULT_PRIVACY_BUDGET_LIMIT) .build(); try { ConsumePrivacyBudgetResponse budgetResponse = distributedPrivacyBudgetClient.consumePrivacyBudget(consumePrivacyBudgetRequest); - return budgetResponse.exhaustedPrivacyBudgetUnits().stream() - .map(HttpPrivacyBudgetingServiceBridge::workerBudgetUnit) + return budgetResponse.exhaustedPrivacyBudgetUnitsByOrigin().stream() + .flatMap(budgetUnitsByOrigin -> buildWorkerBudgetUnits(budgetUnitsByOrigin).stream()) .collect(toImmutableList()); } catch (DistributedPrivacyBudgetServiceException e) { throw new PrivacyBudgetingServiceBridgeException(e.getStatusCode(), e); @@ -66,18 +82,35 @@ public ImmutableList consumePrivacyBudget( } } - /** Converts worker's privacy budget unit ID to coordinator's representation */ - private static com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit scpBudgetUnit( - PrivacyBudgetUnit budgetUnit) { - return com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit.builder() - .privacyBudgetKey(budgetUnit.privacyBudgetKey()) - 
.reportingWindow(budgetUnit.scheduledReportTime()) - .build(); + private void updateOriginToApiBudgetUnitsMap( + PrivacyBudgetUnit workerBudgetUnit, + Map> + originToApiBudgetUnits) { + String reportingOrigin = workerBudgetUnit.reportingOrigin(); + // The ordering does not matter from code logic point of view. It simply makes it easier to + // assert on during unit tests. + Set apiBudgetUnits = + originToApiBudgetUnits.getOrDefault(reportingOrigin, new LinkedHashSet<>()); + com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit apiBudgetUnit = + com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit.builder() + .privacyBudgetKey(workerBudgetUnit.privacyBudgetKey()) + .reportingWindow(workerBudgetUnit.scheduledReportTime()) + .build(); + apiBudgetUnits.add(apiBudgetUnit); + originToApiBudgetUnits.put(reportingOrigin, apiBudgetUnits); } /** Converts coordinator's privacy budget unit ID to worker's representation */ - private static PrivacyBudgetUnit workerBudgetUnit( - com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit budgetUnit) { - return PrivacyBudgetUnit.create(budgetUnit.privacyBudgetKey(), budgetUnit.reportingWindow()); + private static ImmutableList buildWorkerBudgetUnits( + ReportingOriginToPrivacyBudgetUnits reportingOriginToPrivacyBudgetUnits) { + String reportingOrigin = reportingOriginToPrivacyBudgetUnits.reportingOrigin(); + return reportingOriginToPrivacyBudgetUnits.privacyBudgetUnits().stream() + .map( + apiBudgetUnit -> + PrivacyBudgetUnit.create( + apiBudgetUnit.privacyBudgetKey(), + apiBudgetUnit.reportingWindow(), + reportingOrigin)) + .collect(toImmutableList()); } } diff --git a/java/com/google/aggregate/privacy/budgeting/bridge/PrivacyBudgetingServiceBridge.java b/java/com/google/aggregate/privacy/budgeting/bridge/PrivacyBudgetingServiceBridge.java index dd5c0953..a08b71dc 100644 --- a/java/com/google/aggregate/privacy/budgeting/bridge/PrivacyBudgetingServiceBridge.java +++ b/java/com/google/aggregate/privacy/budgeting/bridge/PrivacyBudgetingServiceBridge.java @@ -31,26 +31,31 @@ public interface PrivacyBudgetingServiceBridge { * budgets are consumed and the first few units for which the budget was not available are * returned. * + * @param budgetsToConsume - List of PrivacyBudgetUnits to consume budget against. + * @param claimedIdentity - Adtech site value to be used for authorization. * @return Empty list if budgets were consumed successfully. Otherwise, first few privacy budget * units for which the privacy budget was not available. */ ImmutableList consumePrivacyBudget( - ImmutableList budgetsToConsume, String attributionReportTo) + ImmutableList budgetsToConsume, String claimedIdentity) throws PrivacyBudgetingServiceBridgeException; /** Identifier for an individual key of the privacy budget to be consumed. */ @AutoValue abstract class PrivacyBudgetUnit { - public static PrivacyBudgetUnit create(String privacyBudgetKey, Instant scheduledReportTime) { + public static PrivacyBudgetUnit create( + String privacyBudgetKey, Instant scheduledReportTime, String reportingOrigin) { return new com.google.aggregate.privacy.budgeting.bridge .AutoValue_PrivacyBudgetingServiceBridge_PrivacyBudgetUnit( - privacyBudgetKey, scheduledReportTime); + privacyBudgetKey, scheduledReportTime, reportingOrigin); } public abstract String privacyBudgetKey(); public abstract Instant scheduledReportTime(); + + public abstract String reportingOrigin(); } /** Exception that may happen when consuming the privacy budget. 
*/ diff --git a/java/com/google/aggregate/privacy/budgeting/bridge/UnlimitedPrivacyBudgetingServiceBridge.java b/java/com/google/aggregate/privacy/budgeting/bridge/UnlimitedPrivacyBudgetingServiceBridge.java index 6b11c9b1..262b56db 100644 --- a/java/com/google/aggregate/privacy/budgeting/bridge/UnlimitedPrivacyBudgetingServiceBridge.java +++ b/java/com/google/aggregate/privacy/budgeting/bridge/UnlimitedPrivacyBudgetingServiceBridge.java @@ -28,7 +28,8 @@ public final class UnlimitedPrivacyBudgetingServiceBridge implements PrivacyBudg @Override public ImmutableList consumePrivacyBudget( - ImmutableList budgetsToConsume, String attributionReportTo) { + ImmutableList originToPrivacyBudgetUnits, String claimedIdentity) + throws PrivacyBudgetingServiceBridgeException { return ImmutableList.of(); } } diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD index f20b4c1e..527be7c6 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD @@ -20,6 +20,7 @@ java_library( name = "privacy_budget_key_generator_module", srcs = [ "PrivacyBudgetKeyGenerator.java", + "PrivacyBudgetKeyGeneratorUtil.java", "VersionedPrivacyBudgetKeyGeneratorProvider.java", ], javacopts = ["-Xep:Var"], @@ -45,6 +46,7 @@ java_library( ":privacy_budget_key_generator_module", "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreporting:pbk_generator", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug:pbk_generator", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/protectedaudience:pbk_generator", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/sharedstorage:pbk_generator", "//java/external:autovalue", diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGenerator.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGenerator.java index c5710f17..e5ccd9ad 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGenerator.java +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGenerator.java @@ -18,6 +18,7 @@ import com.google.aggregate.adtech.worker.model.SharedInfo; import com.google.auto.value.AutoValue; +import com.google.common.primitives.UnsignedLong; import java.util.Optional; /** PrivacyBudgetKeyGenerator is used to generate privacy budget key for Reports */ @@ -26,21 +27,6 @@ public interface PrivacyBudgetKeyGenerator { String generatePrivacyBudgetKey(PrivacyBudgetKeyInput privacyBudgetKeyInput); - /** - * Generates Privacy Budget Key for the report. - * - * @deprecated This method is deprecated in favor of - * generatePrivacyBudgetKey(PrivacyBudgetKeyInput). - *

TODO(b/292494729): Remove this method with Privacy Budget Labels implementation. - * @param sharedInfo - */ - @Deprecated(forRemoval = true) - default Optional generatePrivacyBudgetKey(SharedInfo sharedInfo) { - return Optional.of( - generatePrivacyBudgetKey( - PrivacyBudgetKeyInput.builder().setSharedInfo(sharedInfo).build())); - } - /** An input object containing values for generating Privacy Budget Key. */ @AutoValue abstract class PrivacyBudgetKeyInput { @@ -51,7 +37,7 @@ public static Builder builder() { public abstract SharedInfo sharedInfo(); /** Queried filteringId to be included in the budget key calculation for reports > V1.0. */ - public abstract Optional filteringId(); + public abstract Optional filteringId(); @AutoValue.Builder public abstract static class Builder { @@ -59,7 +45,7 @@ public abstract static class Builder { public abstract Builder setSharedInfo(SharedInfo sharedInfo); - public abstract Builder setFilteringId(Integer filteringId); + public abstract Builder setFilteringId(UnsignedLong filteringId); } } } diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactory.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactory.java index b5f7be6d..dc38c39f 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactory.java +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactory.java @@ -16,13 +16,9 @@ package com.google.aggregate.privacy.budgeting.budgetkeygenerator; -import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; -import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; -import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; - import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; import com.google.common.collect.ImmutableMap; -import java.util.HashMap; import java.util.Optional; import javax.inject.Inject; @@ -30,54 +26,18 @@ * Factory class to supply PrivacyBudgetKeyGenerator implementation based on SharedInfo API type. */ public final class PrivacyBudgetKeyGeneratorFactory { - private static HashMap privacyBudgetKeyGeneratorMap = - new HashMap<>(); private final ImmutableMap versionedPrivacyBudgetKeyGeneratorMap; - /** - * Returns PrivacyBudgetKeyGenerator instance corresponding the API type. If api field is not - * present in report then report is of API type ATTRIBUTION_REPORTING_API. - * - * @deprecated This method is deprecated in favor of the instance method - * getPrivacyBudgetKeyGenerator(sharedInfo). - *

TODO(b/292494729): Remove this method with Privacy Budget Labels implementation. - */ - @Deprecated(forRemoval = true) - public static Optional getPrivacyBudgetKeyGenerator( - Optional api) { - Optional privacyBudgetKeyGenerator = Optional.empty(); - if (api.get().equals(ATTRIBUTION_REPORTING_API)) { - privacyBudgetKeyGeneratorMap.putIfAbsent( - ATTRIBUTION_REPORTING_API, - new com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreporting - .V1PrivacyBudgetKeyGenerator()); - privacyBudgetKeyGenerator = - Optional.of(privacyBudgetKeyGeneratorMap.get(ATTRIBUTION_REPORTING_API)); - } else if (api.get().equals(PROTECTED_AUDIENCE_API)) { - privacyBudgetKeyGeneratorMap.putIfAbsent( - PROTECTED_AUDIENCE_API, - new com.google.aggregate.privacy.budgeting.budgetkeygenerator.protectedaudience - .V1PrivacyBudgetKeyGenerator()); - privacyBudgetKeyGenerator = - Optional.of(privacyBudgetKeyGeneratorMap.get(PROTECTED_AUDIENCE_API)); - } else if (api.get().equals(SHARED_STORAGE_API)) { - privacyBudgetKeyGeneratorMap.putIfAbsent( - SHARED_STORAGE_API, - new com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage - .V1PrivacyBudgetKeyGenerator()); - privacyBudgetKeyGenerator = Optional.of(privacyBudgetKeyGeneratorMap.get(SHARED_STORAGE_API)); - } - return privacyBudgetKeyGenerator; - } - - /** Returns PrivacyBudgetKeyGenerator instance corresponding the report's SharedInfo. */ - public Optional getPrivacyBudgetKeyGenerator(SharedInfo sharedInfo) { + /** Returns PrivacyBudgetKeyGenerator instance corresponding to the privacyBudgetKeyInput. */ + public Optional getPrivacyBudgetKeyGenerator( + PrivacyBudgetKeyInput privacyBudgetKeyInput) { + SharedInfo sharedInfo = privacyBudgetKeyInput.sharedInfo(); Optional provider = Optional.ofNullable(versionedPrivacyBudgetKeyGeneratorMap.get(sharedInfo.api().get())); if (provider.isPresent()) { - return provider.get().getPrivacyBudgetKeyGenerator(sharedInfo.version()); + return provider.get().getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput); } throw new IllegalArgumentException( diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorModule.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorModule.java index eed42fd7..c4209ac7 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorModule.java +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorModule.java @@ -17,9 +17,11 @@ package com.google.aggregate.privacy.budgeting.budgetkeygenerator; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; import static com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreporting.PrivacyBudgetKeyGeneratorModule.AttributionReportingPrivacyBudgetKeyGenerators; +import static com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug.PrivacyBudgetKeyGeneratorModule.AttributionReportingDebugPrivacyBudgetKeyGenerators; import static com.google.aggregate.privacy.budgeting.budgetkeygenerator.protectedaudience.PrivacyBudgetKeyGeneratorModule.ProtectedAudiencePrivacyBudgetKeyGenerators; import static 
com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage.PrivacyBudgetKeyGeneratorModule.SharedStoragePrivacyBudgetKeyGenerators; @@ -35,6 +37,9 @@ protected void configure() { install( new com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreporting .PrivacyBudgetKeyGeneratorModule()); + install( + new com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug + .PrivacyBudgetKeyGeneratorModule()); install( new com.google.aggregate.privacy.budgeting.budgetkeygenerator.protectedaudience .PrivacyBudgetKeyGeneratorModule()); @@ -49,6 +54,9 @@ protected void configure() { @AttributionReportingPrivacyBudgetKeyGenerators VersionedPrivacyBudgetKeyGeneratorProvider attributionReportingPrivacyBudgetKeyGenerators, + @AttributionReportingDebugPrivacyBudgetKeyGenerators + VersionedPrivacyBudgetKeyGeneratorProvider + attributionReportingDebugPrivacyBudgetKeyGenerators, @ProtectedAudiencePrivacyBudgetKeyGenerators VersionedPrivacyBudgetKeyGeneratorProvider protectedAudiencePrivacyBudgetKeyGenerators, @@ -57,6 +65,8 @@ protected void configure() { return ImmutableMap.of( ATTRIBUTION_REPORTING_API, attributionReportingPrivacyBudgetKeyGenerators, + ATTRIBUTION_REPORTING_DEBUG_API, + attributionReportingDebugPrivacyBudgetKeyGenerators, PROTECTED_AUDIENCE_API, protectedAudiencePrivacyBudgetKeyGenerators, SHARED_STORAGE_API, diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorUtil.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorUtil.java new file mode 100644 index 00000000..9b5d2adf --- /dev/null +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorUtil.java @@ -0,0 +1,66 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.aggregate.privacy.budgeting.budgetkeygenerator; + +import com.google.aggregate.adtech.worker.model.Version; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.common.primitives.UnsignedLong; +import java.util.Optional; +import java.util.function.Predicate; + +/** Static utilities related to PrivacyBudgetKeyGenerators. */ +public final class PrivacyBudgetKeyGeneratorUtil { + + /** Returns the predicate of PrivacyBudgetKeyInput for V1 version of PrivacyBudgetKeyGenerator. 
*/ + public static Predicate<PrivacyBudgetKeyInput> + getPrivacyBudgetKeyGeneratorV1Predicate() { + Version version1_0 = + Version.create( + /** majorVersion = */ 1, + /** minorVersion = */ 0); + Predicate<Version> versionsLessThan1_0 = + Version.getBetweenVersionPredicate( + /* lowerInclusiveVersion= */ Version.create(/* major= */ 0, /* minor= */ 1), + /* higherExclusiveVersion= */ version1_0); + Predicate<Optional<UnsignedLong>> filteringIdIsZero = + filteringId -> filteringId.isEmpty() || filteringId.get().equals(UnsignedLong.ZERO); + Predicate<PrivacyBudgetKeyInput> versionV1Predicate = + pbkInput -> + versionsLessThan1_0.test(Version.parse(pbkInput.sharedInfo().version())) + && filteringIdIsZero.test(pbkInput.filteringId()); + return versionV1Predicate; + } + + /** Returns the predicate of PrivacyBudgetKeyInput for V2 version of PrivacyBudgetKeyGenerator. */ + public static Predicate<PrivacyBudgetKeyInput> + getPrivacyBudgetKeyGeneratorV2Predicate() { + Version version1_0 = + Version.create( + /** majorVersion = */ 1, + /** minorVersion = */ 0); + Predicate<Version> versionsGreaterThanOrEqualTo1_0 = + Version.getGreaterThanOrEqualToVersionPredicate(version1_0); + Predicate<Optional<UnsignedLong>> filteringIdIsNotZero = + filteringId -> filteringId.isPresent() && !filteringId.get().equals(UnsignedLong.ZERO); + + Predicate<PrivacyBudgetKeyInput> versionV2Predicate = + pbkInput -> + versionsGreaterThanOrEqualTo1_0.test(Version.parse(pbkInput.sharedInfo().version())) + || filteringIdIsNotZero.test(pbkInput.filteringId()); + return versionV2Predicate; + } +} diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/VersionedPrivacyBudgetKeyGeneratorProvider.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/VersionedPrivacyBudgetKeyGeneratorProvider.java index 8558923f..d7d1b2ad 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/VersionedPrivacyBudgetKeyGeneratorProvider.java +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/VersionedPrivacyBudgetKeyGeneratorProvider.java @@ -16,7 +16,7 @@ package com.google.aggregate.privacy.budgeting.budgetkeygenerator; -import com.google.aggregate.adtech.worker.model.Version; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; import com.google.auto.value.AutoValue; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; @@ -35,10 +35,10 @@ public static Builder builder() { } /** Returns the PrivacyBudgetKeyGenerator for the given version of the report. */ - protected Optional<PrivacyBudgetKeyGenerator> getPrivacyBudgetKeyGenerator(String reportVersion) { - Version versionObj = getReportVersion(reportVersion); + protected Optional<PrivacyBudgetKeyGenerator> getPrivacyBudgetKeyGenerator( + PrivacyBudgetKeyInput privacyBudgetKeyInput) { return versionedPrivacyBudgetKeyGeneratorList().stream() - .filter(privacyGenerator -> privacyGenerator.isMappedVersion(versionObj)) + .filter(privacyGenerator -> privacyGenerator.isMappedVersion(privacyBudgetKeyInput)) .map(privacyGenerator -> privacyGenerator.privacyBudgetKeyGenerator()) .findAny(); } @@ -48,25 +48,16 @@ protected Optional<PrivacyBudgetKeyGenerator> getPrivacyBudgetKeyGenerator(Strin * testing.
*/ @VisibleForTesting - boolean doesExactlyOneCorrespondingPBKGeneratorExist(String reportVersion) { - Version versionObj = getReportVersion(reportVersion); + boolean doesExactlyOneCorrespondingPBKGeneratorExist( + PrivacyBudgetKeyInput privacyBudgetKeyInput) { ImmutableList privacyBudgetKeyGeneratorsList = versionedPrivacyBudgetKeyGeneratorList().stream() - .filter(privacyGenerator -> privacyGenerator.isMappedVersion(versionObj)) + .filter(privacyGenerator -> privacyGenerator.isMappedVersion(privacyBudgetKeyInput)) .map(privacyGenerator -> privacyGenerator.privacyBudgetKeyGenerator()) .collect(ImmutableList.toImmutableList()); return privacyBudgetKeyGeneratorsList.size() == 1; } - private static Version getReportVersion(String reportVersion) { - try { - return Version.parse(reportVersion); - } catch (IllegalArgumentException iae) { - // Impossible since the validation layer verifies the format. - throw new AssertionError("Invalid report version format.", iae); - } - } - @AutoValue.Builder public abstract static class Builder { @@ -74,7 +65,8 @@ public abstract static class Builder { versionedPrivacyBudgetKeyGeneratorListBuilder(); public Builder add( - Predicate versionPredicate, PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator) { + Predicate versionPredicate, + PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator) { versionedPrivacyBudgetKeyGeneratorListBuilder() .add( VersionedPrivacyBudgetKeyGenerator.create( @@ -88,23 +80,24 @@ public Builder add( /** Maps the version with PrivacyBudgetKeyGenerator. */ @AutoValue abstract static class VersionedPrivacyBudgetKeyGenerator { - abstract Predicate versionPredicate(); + abstract Predicate privacyBudgetKeyInputPredicate(); abstract PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator(); static VersionedPrivacyBudgetKeyGenerator create( - Predicate versionPredicate, PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator) { + Predicate privacyBudgetKeyInputPredicate, + PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator) { return new AutoValue_VersionedPrivacyBudgetKeyGeneratorProvider_VersionedPrivacyBudgetKeyGenerator( - versionPredicate, privacyBudgetKeyGenerator); + privacyBudgetKeyInputPredicate, privacyBudgetKeyGenerator); } /** * Checks if the given version corresponds to the privacy budget calculation. * - * @param version report version. + * @param privacyBudgetKeyInput input needed for privacy budget generation. 
*/ - boolean isMappedVersion(Version version) { - return versionPredicate().test(version); + boolean isMappedVersion(PrivacyBudgetKeyInput privacyBudgetKeyInput) { + return privacyBudgetKeyInputPredicate().test(privacyBudgetKeyInput); } } } diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreporting/PrivacyBudgetKeyGeneratorModule.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreporting/PrivacyBudgetKeyGeneratorModule.java index f7a27c99..405e7742 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreporting/PrivacyBudgetKeyGeneratorModule.java +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreporting/PrivacyBudgetKeyGeneratorModule.java @@ -16,7 +16,8 @@ package com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreporting; -import com.google.aggregate.adtech.worker.model.Version; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorUtil; import com.google.aggregate.privacy.budgeting.budgetkeygenerator.VersionedPrivacyBudgetKeyGeneratorProvider; import com.google.inject.AbstractModule; import com.google.inject.Provides; @@ -31,20 +32,15 @@ public class PrivacyBudgetKeyGeneratorModule extends AbstractModule { @Provides @AttributionReportingPrivacyBudgetKeyGenerators VersionedPrivacyBudgetKeyGeneratorProvider providePrivacyBudgetKeyGenerators() { - Version version1_0 = - Version.create( - /** majorVersion = */ - 1, - /** minorVersion = */ - 0); - Predicate versionsLessThan1_0 = version -> version.compareTo(version1_0) < 0; - Predicate versionsGreaterThanOrEqualTo1_0 = - version -> version.compareTo(version1_0) >= 0; + Predicate versionV1Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV1Predicate(); + Predicate versionV2Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV2Predicate(); // A version should map to only one Privacy Budget Key generator. return VersionedPrivacyBudgetKeyGeneratorProvider.builder() - .add(versionsLessThan1_0, new V1PrivacyBudgetKeyGenerator()) - .add(versionsGreaterThanOrEqualTo1_0, new V2PrivacyBudgetKeyGenerator()) + .add(versionV1Predicate, new V1PrivacyBudgetKeyGenerator()) + .add(versionV2Predicate, new V2PrivacyBudgetKeyGenerator()) .build(); } diff --git a/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/BUILD b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/BUILD similarity index 52% rename from java/com/google/aggregate/adtech/worker/encryption/publickeyuri/BUILD rename to java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/BUILD index 11ac4bb3..12ff6eed 100644 --- a/java/com/google/aggregate/adtech/worker/encryption/publickeyuri/BUILD +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/BUILD @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,18 +14,20 @@ load("@rules_java//java:defs.bzl", "java_library") -package(default_visibility = ["//visibility:public"]) +package(default_visibility = [ + "//java/com/google/aggregate/privacy/budgeting:__subpackages__", + "//javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator:__subpackages__", +]) java_library( - name = "encryption_key_config", - srcs = [ - "CloudEncryptionKeyConfig.java", - "EncryptionKeyConfigFactory.java", - ], + name = "pbk_generator", + srcs = glob(["*.java"]), javacopts = ["-Xep:Var"], deps = [ - "//java/external:autovalue", - "//java/external:autovalue_annotations", - "//protocol/proto:encryption_key_config_java_proto", + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator_module", + "//java/external:guava", + "//java/external:guice", + "//java/external:javax_inject", ], ) diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/PrivacyBudgetKeyGeneratorModule.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/PrivacyBudgetKeyGeneratorModule.java new file mode 100644 index 00000000..b413336a --- /dev/null +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/PrivacyBudgetKeyGeneratorModule.java @@ -0,0 +1,50 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug; + +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorUtil; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.VersionedPrivacyBudgetKeyGeneratorProvider; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.util.function.Predicate; +import javax.inject.Qualifier; + +/** Provides Attribution Reporting Debug PrivacyBudgetKeyGenerators mapped to versions. */ +public class PrivacyBudgetKeyGeneratorModule extends AbstractModule { + + @Provides + @AttributionReportingDebugPrivacyBudgetKeyGenerators + VersionedPrivacyBudgetKeyGeneratorProvider providePrivacyBudgetKeyGenerators() { + Predicate versionV1Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV1Predicate(); + Predicate versionV2Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV2Predicate(); + + // A version should map to only one Privacy Budget Key generator. 
+ return VersionedPrivacyBudgetKeyGeneratorProvider.builder() + .add(versionV1Predicate, new V1PrivacyBudgetKeyGenerator()) + .add(versionV2Predicate, new V2PrivacyBudgetKeyGenerator()) + .build(); + } + + @Qualifier + @Retention(RetentionPolicy.RUNTIME) + public @interface AttributionReportingDebugPrivacyBudgetKeyGenerators {} +} diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/V1PrivacyBudgetKeyGenerator.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/V1PrivacyBudgetKeyGenerator.java new file mode 100644 index 00000000..73a1bb06 --- /dev/null +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/V1PrivacyBudgetKeyGenerator.java @@ -0,0 +1,57 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug; + +import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator; +import com.google.common.hash.Hashing; +import java.nio.charset.StandardCharsets; +import java.util.LinkedList; +import java.util.List; + +/** + * Generates V1 PrivacyBudgetKey. This version of Budget Key is internal to the service. It is + * mapped to the report versions in {@link PrivacyBudgetKeyGeneratorModule}. + * + *

TODO(b/321719045): Deprecate V1 version of Privacy Budget Key when the corresponding report + * versions are phased out. + */ +public class V1PrivacyBudgetKeyGenerator implements PrivacyBudgetKeyGenerator { + + @Override + public String generatePrivacyBudgetKey(PrivacyBudgetKeyInput privacyBudgetKeyInput) { + SharedInfo sharedInfo = privacyBudgetKeyInput.sharedInfo(); + List privacyBudgetKeyInputElements = new LinkedList<>(); + privacyBudgetKeyInputElements.add(sharedInfo.api().get()); + privacyBudgetKeyInputElements.add(sharedInfo.version()); + privacyBudgetKeyInputElements.add(sharedInfo.reportingOrigin()); + privacyBudgetKeyInputElements.add(sharedInfo.destination().get()); + // Debug reports may omit the source registration time. + sharedInfo + .sourceRegistrationTime() + .ifPresent(time -> privacyBudgetKeyInputElements.add(time.toString())); + + String privacyBudgetKeyHashInput = + String.join(PRIVACY_BUDGET_KEY_DELIMITER, privacyBudgetKeyInputElements); + + return Hashing.sha256() + .newHasher() + .putBytes(privacyBudgetKeyHashInput.getBytes(StandardCharsets.UTF_8)) + .hash() + .toString(); + } +} diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/V2PrivacyBudgetKeyGenerator.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/V2PrivacyBudgetKeyGenerator.java new file mode 100644 index 00000000..969b533d --- /dev/null +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug/V2PrivacyBudgetKeyGenerator.java @@ -0,0 +1,56 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug; + +import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator; +import com.google.common.collect.ImmutableList; +import com.google.common.hash.Hashing; +import java.nio.charset.StandardCharsets; + +/** + * Generates V2 PrivacyBudgetKey. This version includes filteringId in addition to other fields in + * V1 in privacy budget key calculations. This version of Budget Key is internal to the service. It + * is mapped to the report versions in {@link PrivacyBudgetKeyGeneratorModule}. + */ +public class V2PrivacyBudgetKeyGenerator implements PrivacyBudgetKeyGenerator { + + @Override + public String generatePrivacyBudgetKey(PrivacyBudgetKeyInput privacyBudgetKeyInput) { + SharedInfo sharedInfo = privacyBudgetKeyInput.sharedInfo(); + ImmutableList.Builder privacyBudgetKeyInputElements = ImmutableList.builder(); + privacyBudgetKeyInputElements.add(sharedInfo.api().get()); + privacyBudgetKeyInputElements.add(sharedInfo.version()); + privacyBudgetKeyInputElements.add(sharedInfo.reportingOrigin()); + privacyBudgetKeyInputElements.add(sharedInfo.destination().get()); + // Debug reports may omit the source registration time. 
+ sharedInfo + .sourceRegistrationTime() + .ifPresent(time -> privacyBudgetKeyInputElements.add(time.toString())); + // Filtering ID will always be present. + privacyBudgetKeyInputElements.add(String.valueOf(privacyBudgetKeyInput.filteringId().get())); + + String privacyBudgetKeyHashInput = + String.join(PRIVACY_BUDGET_KEY_DELIMITER, privacyBudgetKeyInputElements.build()); + + return Hashing.sha256() + .newHasher() + .putBytes(privacyBudgetKeyHashInput.getBytes(StandardCharsets.UTF_8)) + .hash() + .toString(); + } +} diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/protectedaudience/PrivacyBudgetKeyGeneratorModule.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/protectedaudience/PrivacyBudgetKeyGeneratorModule.java index 3684a183..dacf2e1c 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/protectedaudience/PrivacyBudgetKeyGeneratorModule.java +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/protectedaudience/PrivacyBudgetKeyGeneratorModule.java @@ -16,7 +16,8 @@ package com.google.aggregate.privacy.budgeting.budgetkeygenerator.protectedaudience; -import com.google.aggregate.adtech.worker.model.Version; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorUtil; import com.google.aggregate.privacy.budgeting.budgetkeygenerator.VersionedPrivacyBudgetKeyGeneratorProvider; import com.google.inject.AbstractModule; import com.google.inject.Provides; @@ -31,20 +32,15 @@ public class PrivacyBudgetKeyGeneratorModule extends AbstractModule { @Provides @ProtectedAudiencePrivacyBudgetKeyGenerators VersionedPrivacyBudgetKeyGeneratorProvider providePrivacyBudgetKeyGenerators() { - Version version1_0 = - Version.create( - /** majorVersion = */ - 1, - /** minorVersion = */ - 0); - Predicate versionsLessThan1_0 = version -> version.compareTo(version1_0) < 0; - Predicate versionsGreaterThanOrEqualTo1_0 = - version -> version.compareTo(version1_0) >= 0; + Predicate versionV1Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV1Predicate(); + Predicate versionV2Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV2Predicate(); // A version should map to only one Privacy Budget Key generator. 
return VersionedPrivacyBudgetKeyGeneratorProvider.builder() - .add(versionsLessThan1_0, new V1PrivacyBudgetKeyGenerator()) - .add(versionsGreaterThanOrEqualTo1_0, new V2PrivacyBudgetKeyGenerator()) + .add(versionV1Predicate, new V1PrivacyBudgetKeyGenerator()) + .add(versionV2Predicate, new V2PrivacyBudgetKeyGenerator()) .build(); } diff --git a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/sharedstorage/PrivacyBudgetKeyGeneratorModule.java b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/sharedstorage/PrivacyBudgetKeyGeneratorModule.java index 5559812b..4873bfda 100644 --- a/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/sharedstorage/PrivacyBudgetKeyGeneratorModule.java +++ b/java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/sharedstorage/PrivacyBudgetKeyGeneratorModule.java @@ -16,7 +16,8 @@ package com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage; -import com.google.aggregate.adtech.worker.model.Version; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorUtil; import com.google.aggregate.privacy.budgeting.budgetkeygenerator.VersionedPrivacyBudgetKeyGeneratorProvider; import com.google.inject.AbstractModule; import com.google.inject.Provides; @@ -31,20 +32,15 @@ public class PrivacyBudgetKeyGeneratorModule extends AbstractModule { @Provides @SharedStoragePrivacyBudgetKeyGenerators VersionedPrivacyBudgetKeyGeneratorProvider providePrivacyBudgetKeyGenerators() { - Version version1_0 = - Version.create( - /** majorVersion = */ - 1, - /** minorVersion = */ - 0); - Predicate versionsLessThan1_0 = version -> version.compareTo(version1_0) < 0; - Predicate versionsGreaterThanOrEqualTo1_0 = - version -> version.compareTo(version1_0) >= 0; + Predicate versionV1Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV1Predicate(); + Predicate versionV2Predicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV2Predicate(); // A version should map to only one Privacy Budget Key generator. 
return VersionedPrivacyBudgetKeyGeneratorProvider.builder() - .add(versionsLessThan1_0, new V1PrivacyBudgetKeyGenerator()) - .add(versionsGreaterThanOrEqualTo1_0, new V2PrivacyBudgetKeyGenerator()) + .add(versionV1Predicate, new V1PrivacyBudgetKeyGenerator()) + .add(versionV2Predicate, new V2PrivacyBudgetKeyGenerator()) .build(); } diff --git a/java/com/google/aggregate/privacy/noise/BUILD b/java/com/google/aggregate/privacy/noise/BUILD index 83028559..4478cf34 100644 --- a/java/com/google/aggregate/privacy/noise/BUILD +++ b/java/com/google/aggregate/privacy/noise/BUILD @@ -34,12 +34,14 @@ java_library( ], javacopts = ["-Xep:Var"], deps = [ + "//java/com/google/aggregate/adtech/worker:annotations", "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/privacy/noise/model", "//java/com/google/aggregate/privacy/noise/proto:privacy_parameters_java_proto", "//java/external:autovalue", "//java/external:autovalue_annotations", "//java/external:differential_privacy", + "//java/external:error_prone_anntotations", "//java/external:guava", "//java/external:guice", "//java/external:javax_inject", diff --git a/java/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImpl.java b/java/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImpl.java index cbfdd588..11bfe70b 100644 --- a/java/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImpl.java +++ b/java/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImpl.java @@ -18,6 +18,8 @@ import static com.google.common.collect.ImmutableList.toImmutableList; +import com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; import com.google.aggregate.adtech.worker.model.AggregatedFact; import com.google.aggregate.privacy.noise.Annotations.Threshold; import com.google.aggregate.privacy.noise.model.NoisedAggregationResult; @@ -25,30 +27,35 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Streams; import com.google.common.math.DoubleMath; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.errorprone.annotations.Var; import com.google.inject.Inject; import java.util.Optional; +import java.util.concurrent.ExecutionException; import java.util.function.Supplier; -import java.util.stream.Stream; /** * Implementation of {@code NoisedAggregationRunner} that uses Google's differential privacy library * through {@code NoiseApplier} to apply noise. */ public final class NoisedAggregationRunnerImpl implements NoisedAggregationRunner { - private static final double TOLERANCE = 0.0001; private final Supplier noiseApplierSupplier; private final Supplier privacyParams; private final Supplier thresholdSupplier; + private final Optional noisingForkJoinPool; @Inject NoisedAggregationRunnerImpl( Supplier noiseApplierSupplier, Supplier privacyParams, - @Threshold Supplier thresholdSupplier) { + @Threshold Supplier thresholdSupplier, + @ParallelAggregatedFactNoising boolean parallelNoising, + @CustomForkJoinThreadPool ListeningExecutorService forkJoinPool) { this.noiseApplierSupplier = noiseApplierSupplier; this.privacyParams = privacyParams; this.thresholdSupplier = thresholdSupplier; + this.noisingForkJoinPool = parallelNoising ? 
Optional.of(forkJoinPool) : Optional.empty(); } @Override @@ -64,7 +71,7 @@ public NoisedAggregationResult threshold( Streams.stream(aggregatedFacts) .filter( aggregatedFactItem -> - (DoubleMath.fuzzyCompare(aggregatedFactItem.metric(), threshold, TOLERANCE) + (DoubleMath.fuzzyCompare(aggregatedFactItem.getMetric(), threshold, TOLERANCE) >= 0)) .collect(toImmutableList()); @@ -72,6 +79,11 @@ public NoisedAggregationResult threshold( requestScopedPrivacyParamsSupplier.get(), thresholdedFacts); } + /* + * Noises AggregatedFact#metric using Google's DP library. AggregatedFact#metric is interpreted as + * unnoised data and copied to the AggregatedFact#unnoisedMetric field, and the noised value is + * set in the AggregatedFact#metric field. + */ @Override public NoisedAggregationResult noise( Iterable aggregatedFact, Optional debugPrivacyEpsilon) { @@ -80,14 +92,30 @@ public NoisedAggregationResult noise( final Supplier requestScopedNoiseApplier = getScopedNoiseApplier(debugPrivacyEpsilon, requestScopedPrivacyParamsSupplier); - Stream noisedFacts = - Streams.stream(aggregatedFact) - .map((fact -> noiseSingleFact(fact, requestScopedNoiseApplier))); + @Var ImmutableList noisedFacts; + if (this.noisingForkJoinPool.isPresent()) { + try { + noisedFacts = + this.noisingForkJoinPool + .get() + .submit( + () -> + Streams.stream(aggregatedFact) + .parallel() + .map(fact -> noiseSingleFact(fact, requestScopedNoiseApplier)) + .collect(toImmutableList())) + .get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException("Exception while noising aggregated data."); + } + } else { + noisedFacts = + Streams.stream(aggregatedFact) + .map((fact -> noiseSingleFact(fact, requestScopedNoiseApplier))) + .collect(toImmutableList()); + } - ImmutableList noisedAndThresholdedFacts = - noisedFacts.collect(toImmutableList()); - return NoisedAggregationResult.create( - requestScopedPrivacyParamsSupplier.get(), noisedAndThresholdedFacts); + return NoisedAggregationResult.create(requestScopedPrivacyParamsSupplier.get(), noisedFacts); } private AggregatedFact noiseSingleFact( @@ -96,10 +124,10 @@ private AggregatedFact noiseSingleFact( } private AggregatedFact noiseSingleFact(AggregatedFact aggregatedFact, NoiseApplier noiseApplier) { - return AggregatedFact.create( - aggregatedFact.bucket(), - noiseApplier.noiseMetric(aggregatedFact.metric()), - aggregatedFact.metric()); + long unnoisedMetric = aggregatedFact.getMetric(); + aggregatedFact.setUnnoisedMetric(Optional.of(unnoisedMetric)); + aggregatedFact.setMetric(noiseApplier.noiseMetric(unnoisedMetric)); + return aggregatedFact; } private Supplier getScopedPrivacyParamSupplier( diff --git a/java/com/google/aggregate/privacy/noise/model/NoisedAggregationResult.java b/java/com/google/aggregate/privacy/noise/model/NoisedAggregationResult.java index 035c80d7..65761bab 100644 --- a/java/com/google/aggregate/privacy/noise/model/NoisedAggregationResult.java +++ b/java/com/google/aggregate/privacy/noise/model/NoisedAggregationResult.java @@ -58,9 +58,9 @@ public static NoisedAggregationResult addDebugAnnotations( .map( aggregatedFact -> AggregatedFact.create( - aggregatedFact.bucket(), - aggregatedFact.metric(), - aggregatedFact.unnoisedMetric().get(), + aggregatedFact.getBucket(), + aggregatedFact.getMetric(), + aggregatedFact.getUnnoisedMetric().get(), debugAnnotations)) .collect(toImmutableList())); } diff --git a/java/com/google/aggregate/protocol/avro/AvroResultsDeserializerRunner.java 
b/java/com/google/aggregate/protocol/avro/AvroResultsDeserializerRunner.java index bec216e3..0e8c987b 100644 --- a/java/com/google/aggregate/protocol/avro/AvroResultsDeserializerRunner.java +++ b/java/com/google/aggregate/protocol/avro/AvroResultsDeserializerRunner.java @@ -62,12 +62,12 @@ private void runResultDeserialization(AvroResultsDeserializerRunnerArgs args) th } private static String formatOutput(AggregatedFact fact, boolean outputAsHex) { - String bucketString = fact.bucket().toString(); + String bucketString = fact.getBucket().toString(); if (outputAsHex) { - byte[] bucketBytes = NumericConversions.toUnsignedByteArray(fact.bucket()); + byte[] bucketBytes = NumericConversions.toUnsignedByteArray(fact.getBucket()); bucketString = "0x" + BaseEncoding.base16().encode(bucketBytes); } - return bucketString + "," + fact.metric(); + return bucketString + "," + fact.getMetric(); } static final class AvroResultsDeserializerRunnerArgs { diff --git a/java/com/google/aggregate/tools/diff/DiffRunner.java b/java/com/google/aggregate/tools/diff/DiffRunner.java index d44eafc4..a217073c 100644 --- a/java/com/google/aggregate/tools/diff/DiffRunner.java +++ b/java/com/google/aggregate/tools/diff/DiffRunner.java @@ -140,7 +140,7 @@ private static ImmutableList runAndGetResult( ImmutableList resultAggregatedFacts = localAggregationWorker.waitForAggregation(); return resultAggregatedFacts.stream() - .map(fact -> AggregatedFact.create(fact.bucket(), fact.metric())) + .map(fact -> AggregatedFact.create(fact.getBucket(), fact.getMetric())) .collect(toImmutableList()); } catch (ResultLogException | TimeoutException e) { diff --git a/java/com/google/aggregate/tools/diff/ResultDiffer.java b/java/com/google/aggregate/tools/diff/ResultDiffer.java index 84c0214a..58a7b651 100644 --- a/java/com/google/aggregate/tools/diff/ResultDiffer.java +++ b/java/com/google/aggregate/tools/diff/ResultDiffer.java @@ -117,7 +117,7 @@ public static MapDifference diffResults( private static ImmutableMap keyAggregatedFacts( Stream facts) { - return facts.collect(toImmutableMap(AggregatedFact::bucket, Function.identity())); + return facts.collect(toImmutableMap(AggregatedFact::getBucket, Function.identity())); } private static final class Env extends AbstractModule {} diff --git a/java/com/google/aggregate/tools/privacybudgetutil/aws/AwsPrivacyBudgetUnitExtraction.java b/java/com/google/aggregate/tools/privacybudgetutil/aws/AwsPrivacyBudgetUnitExtraction.java new file mode 100644 index 00000000..0ea9e192 --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/aws/AwsPrivacyBudgetUnitExtraction.java @@ -0,0 +1,189 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.tools.privacybudgetutil.aws; + +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorFactory; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorModule; +import com.google.aggregate.tools.privacybudgetutil.aws.AwsPrivacyBudgetUnitExtractionModule.Client; +import com.google.aggregate.tools.privacybudgetutil.common.ExtractionUtils; +import com.google.aggregate.tools.privacybudgetutil.common.PrivacyBudgetUnitExtractionConfig; +import com.google.inject.AbstractModule; +import com.google.inject.Guice; +import com.google.inject.Inject; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.time.format.DateTimeFormatter; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; +import software.amazon.awssdk.services.s3.model.ListObjectsV2Request; +import software.amazon.awssdk.services.s3.model.ListObjectsV2Response; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import software.amazon.awssdk.services.s3.model.S3Object; +import software.amazon.awssdk.services.s3.paginators.ListObjectsV2Iterable; + +/** Extracts budget keys from avros in AWS */ +final class AwsPrivacyBudgetUnitExtraction { + + private static final Logger LOGGER = + LoggerFactory.getLogger(AwsPrivacyBudgetUnitExtraction.class); + + private final PrivacyBudgetUnitExtractionConfig config; + private final S3Client client; + private final PrivacyBudgetKeyGeneratorFactory generatorFactory; + + @Inject + AwsPrivacyBudgetUnitExtraction( + PrivacyBudgetUnitExtractionConfig config, + PrivacyBudgetKeyGeneratorFactory generatorFactory, + @Client S3Client client) { + this.config = config; + this.client = client; + this.generatorFactory = generatorFactory; + } + + public static void main(String[] args) throws Exception { + PrivacyBudgetUnitExtractionConfig config = + new PrivacyBudgetUnitExtractionConfig( + PrivacyBudgetUnitExtractionConfig.CloudPlatform.AWS, args); + if (config.printHelp()) { + return; + } + Guice.createInjector( + new AwsPrivacyBudgetUnitExtractionModule(), + new PrivacyBudgetKeyGeneratorModule(), + new AbstractModule() { + @Override + protected void configure() { + bind(PrivacyBudgetUnitExtractionConfig.class).toInstance(config); + } + }) + .getInstance(AwsPrivacyBudgetUnitExtraction.class) + .run(); + } + + public void run() throws Exception { + + if (this.config.getFunction().equals("generate_keys")) { + ListObjectsV2Iterable response = listObjects(); + if (!this.config.getSingleFile()) { + for (ListObjectsV2Response page : response) { + page.contents().stream() + .filter(s -> s.key().endsWith(".avro")) + .map(this::accept) + .forEach(System.out::println); + } + } else { + for (ListObjectsV2Response page : response) { + String builder = + page.contents().stream() + .filter(s -> s.key().endsWith(".avro")) + .map(this::accept) + .filter(s -> s != null) + .map(ExtractionUtils.KeyFile::body) + .collect(Collectors.joining(",", "[", "]")); + LOGGER.info(builder); + } + } + } + + if (this.config.getFunction().equals("write_keys")) { + ListObjectsV2Iterable response = listObjects(); + if 
(!this.config.getSingleFile()) { + for (ListObjectsV2Response page : response) { + page.contents().stream() + .filter(s -> s.key().endsWith(".avro")) + .map(this::accept) + .forEach(this::writeKeyFile); + } + } else { + for (ListObjectsV2Response page : response) { + String builder = + page.contents().stream() + .filter(s -> s.key().endsWith(".avro")) + .map(this::accept) + .filter(s -> s != null) + .map(ExtractionUtils.KeyFile::body) + .collect(Collectors.joining(",", "[", "]")); + String filename = this.config.getOutputPrefix(); + ExtractionUtils.KeyFile keyFile = ExtractionUtils.KeyFile.create(filename, builder); + writeKeyFile(keyFile); + } + } + } + } + + private ListObjectsV2Iterable listObjects() { + ListObjectsV2Request request = + ListObjectsV2Request.builder() + .bucket(this.config.getBucket()) + .prefix(this.config.getInputPrefix()) + .build(); + + return client.listObjectsV2Paginator(request); + } + + private ExtractionUtils.KeyFile readBucketObject(String key) { + GetObjectRequest getObjectRequest = + GetObjectRequest.builder().bucket(this.config.getBucket()).key(key).build(); + try { + ResponseInputStream responseInputStream = + client.getObject(getObjectRequest); + + InputStream stream = new ByteArrayInputStream(responseInputStream.readAllBytes()); + ExtractionUtils.KeyFile keyfile = + ExtractionUtils.processAvro( + stream, this.generatorFactory, key, this.config.getFilteringIds()); + return keyfile; + } catch (IOException e) { + throw new AssertionError("Failed to get credentials", e); + } + } + + /** Writes keys to destination */ + private void writeKeyFile(ExtractionUtils.KeyFile keyFile) { + if (keyFile == null) { + return; + } + String filename = + String.format( + "%s/%s_%s.json", + this.config.getOutputPrefix(), + keyFile.key(), + java.time.LocalDateTime.now().format(DateTimeFormatter.ofPattern("MM_dd_yy"))); + LOGGER.info( + String.format( + "Writing shared_ids from %s/%s to %s", + this.config.getBucket(), this.config.getInputPrefix(), filename)); + if (keyFile != null) { + PutObjectRequest putOb = + PutObjectRequest.builder().bucket(this.config.getBucket()).key(filename).build(); + + client.putObject(putOb, RequestBody.fromString(keyFile.body())); + } + } + + private ExtractionUtils.KeyFile accept(S3Object object) { + return readBucketObject(object.key()); + } +} diff --git a/java/com/google/aggregate/tools/privacybudgetutil/aws/AwsPrivacyBudgetUnitExtractionModule.java b/java/com/google/aggregate/tools/privacybudgetutil/aws/AwsPrivacyBudgetUnitExtractionModule.java new file mode 100644 index 00000000..78da343a --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/aws/AwsPrivacyBudgetUnitExtractionModule.java @@ -0,0 +1,52 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.tools.privacybudgetutil.aws; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +import com.google.aggregate.tools.privacybudgetutil.common.PrivacyBudgetUnitExtractionConfig; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; +import javax.inject.Qualifier; +import software.amazon.awssdk.http.urlconnection.UrlConnectionHttpClient; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.s3.S3Client; + +/** Provides bindings for {@link AwsPrivacyBudgetUnitExtraction}. */ +final class AwsPrivacyBudgetUnitExtractionModule extends AbstractModule { + + @Provides + @Client + S3Client provideS3Client(PrivacyBudgetUnitExtractionConfig config) { + S3Client client = + S3Client.builder() + .httpClient(UrlConnectionHttpClient.builder().build()) + .region(Region.of(config.getRegion())) + .build(); + return client; + } + + @Qualifier + @Target({FIELD, PARAMETER, METHOD}) + @Retention(RUNTIME) + @interface Client {} +} diff --git a/java/com/google/aggregate/tools/privacybudgetutil/aws/BUILD b/java/com/google/aggregate/tools/privacybudgetutil/aws/BUILD new file mode 100644 index 00000000..a98ad4b5 --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/aws/BUILD @@ -0,0 +1,91 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
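The module above binds the S3 client under a @Client qualifier, which is what the tool's constructor parameter @Client S3Client resolves against. As a rough illustration only (assuming a throwaway class placed in the same package, since the module and its qualifier are package-private, and placeholder flag values), the qualified binding could be looked up directly from an injector:

import com.google.aggregate.tools.privacybudgetutil.common.PrivacyBudgetUnitExtractionConfig;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import software.amazon.awssdk.services.s3.S3Client;

final class QualifiedClientLookupSketch {
  public static void main(String[] args) {
    PrivacyBudgetUnitExtractionConfig config =
        new PrivacyBudgetUnitExtractionConfig(
            PrivacyBudgetUnitExtractionConfig.CloudPlatform.AWS,
            new String[] {"generate_keys", "--bucket", "example-bucket"});
    Injector injector =
        Guice.createInjector(
            new AwsPrivacyBudgetUnitExtractionModule(),
            binder -> binder.bind(PrivacyBudgetUnitExtractionConfig.class).toInstance(config));
    // Key.get pairs the type with the qualifier, mirroring the @Client S3Client parameter.
    S3Client s3 =
        injector.getInstance(
            Key.get(S3Client.class, AwsPrivacyBudgetUnitExtractionModule.Client.class));
    System.out.println(s3.serviceName());
  }
}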
+ +load("@rules_java//java:defs.bzl", "java_binary", "java_library") + +package(default_visibility = ["//visibility:public"]) + +java_library( + name = "lib", + srcs = glob(["*.java"]), + deps = [ + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/bridge:privacy_budgeting_service_bridge", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/protocol/avro:avro_record_reader", + "//java/com/google/aggregate/tools/privacybudgetutil/common:config", + "//java/com/google/aggregate/tools/privacybudgetutil/common:extraction_utils", + "//java/external:acai", + "//java/external:autovalue", + "//java/external:autovalue_annotations", + "//java/external:avro", + "//java/external:aws_auth", + "//java/external:aws_core", + "//java/external:aws_regions", + "//java/external:aws_s3", + "//java/external:aws_url_connection_client", + "//java/external:guava", + "//java/external:guice", + "//java/external:jackson_annotations", + "//java/external:jackson_core", + "//java/external:jackson_databind", + "//java/external:jackson_datatype_jsr310", + "//java/external:javax_inject", + "//java/external:jcommander", + "//java/external:jimfs", + "//java/external:slf4j", + "//java/external:slf4j_simple", + ], +) + +java_binary( + name = "AwsPrivacyBudgetUnitExtraction", + srcs = glob(["*.java"]), + main_class = "com.google.aggregate.tools.privacybudgetutil.aws.AwsPrivacyBudgetUnitExtraction", + deps = [ + ":lib", + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/bridge:privacy_budgeting_service_bridge", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/protocol/avro:avro_record_reader", + "//java/com/google/aggregate/tools/privacybudgetutil/common:extraction_utils", + "//java/external:acai", + "//java/external:autovalue", + "//java/external:autovalue_annotations", + "//java/external:avro", + "//java/external:aws_auth", + "//java/external:aws_core", + "//java/external:aws_regions", + "//java/external:aws_s3", + "//java/external:aws_sts", + "//java/external:aws_url_connection_client", + "//java/external:guava", + "//java/external:guice", + "//java/external:jackson_annotations", + "//java/external:jackson_core", + "//java/external:jackson_databind", + "//java/external:jackson_datatype_jsr310", + "//java/external:javax_inject", + "//java/external:jcommander", + "//java/external:jimfs", + "//java/external:slf4j", + "//java/external:slf4j_simple", + ], +) + +# The deploy JAR for the PrivacyBudgetExtractionTool. +alias( + name = "AwsPrivacyBudgetUnitExtractionDeploy", + actual = ":AwsPrivacyBudgetUnitExtraction_deploy.jar", +) diff --git a/java/com/google/aggregate/tools/privacybudgetutil/common/BUILD b/java/com/google/aggregate/tools/privacybudgetutil/common/BUILD new file mode 100644 index 00000000..4846307d --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/common/BUILD @@ -0,0 +1,53 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@rules_java//java:defs.bzl", "java_library") + +package(default_visibility = ["//visibility:public"]) + +java_library( + name = "extraction_utils", + srcs = glob(["*.java"]), + deps = [ + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/bridge:privacy_budgeting_service_bridge", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/protocol/avro:avro_record_reader", + "//java/external:acai", + "//java/external:autovalue", + "//java/external:autovalue_annotations", + "//java/external:avro", + "//java/external:guava", + "//java/external:guice", + "//java/external:jackson_annotations", + "//java/external:jackson_core", + "//java/external:jackson_databind", + "//java/external:jackson_datatype_jsr310", + "//java/external:javax_inject", + "//java/external:jcommander", + "//java/external:jimfs", + "//java/external:slf4j", + "//java/external:slf4j_simple", + ], +) + +java_library( + name = "config", + srcs = ["PrivacyBudgetUnitExtractionConfig.java"], + deps = [ + "//java/external:guava", + "//java/external:jcommander", + "//java/external:slf4j", + ], +) diff --git a/java/com/google/aggregate/tools/privacybudgetutil/common/ExtractionUtils.java b/java/com/google/aggregate/tools/privacybudgetutil/common/ExtractionUtils.java new file mode 100644 index 00000000..aa98b0f4 --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/common/ExtractionUtils.java @@ -0,0 +1,158 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.tools.privacybudgetutil.common; + +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; +import static java.time.temporal.ChronoUnit.HOURS; + +import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.aggregate.privacy.budgeting.bridge.PrivacyBudgetingServiceBridge.PrivacyBudgetUnit; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorFactory; +import com.google.auto.value.AutoValue; +import com.google.common.primitives.UnsignedLong; +import java.io.IOException; +import java.io.InputStream; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.time.Instant; +import java.util.HashSet; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Optional; +import java.util.Set; +import org.apache.avro.file.DataFileStream; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ExtractionUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(ExtractionUtils.class); + + /** Takes a .avro file as an input and generates the correct key for the API. */ + public static ExtractionUtils.KeyFile processAvro( + InputStream stream, + PrivacyBudgetKeyGeneratorFactory generatorFactory, + String key, + List filteringIds) + throws IOException { + GenericRecord avroRecord = null; + SharedInfo info = null; + Set keyset = new HashSet(); + final DataFileStream dfStream = + new DataFileStream(stream, new GenericDatumReader()); + while (dfStream.hasNext()) { + avroRecord = dfStream.next(avroRecord); + if (!avroRecord.hasField("shared_info")) { + LOGGER.warn(String.format("No shared info in %s, skipping.%n", key)); + continue; + } + Object sharedInfo = avroRecord.get("shared_info"); + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode root = objectMapper.readTree(sharedInfo.toString()); + BigDecimal reportTime = + new BigDecimal(root.get("scheduled_report_time").toString().replace("\"", "")); + + SharedInfo.Builder builder = + SharedInfo.builder() + .setApi(root.get("api").asText()) + .setVersion(root.get("version").asText()) + .setScheduledReportTime( + Instant.ofEpochSecond(reportTime.setScale(0, RoundingMode.DOWN).longValue())) + .setReportingOrigin(root.get("reporting_origin").asText()); + if (!root.has("attribution_destination") + && root.get("api").toString().equals(ATTRIBUTION_REPORTING_API) + || root.get("api").toString().equals(ATTRIBUTION_REPORTING_DEBUG_API)) { + LOGGER.error("shared info has no attribution destination. 
skipping"); + continue; + } + if (root.has("attribution_destination")) { + builder.setDestination(root.get("attribution_destination").asText()); + } + + if (root.has("source_registration_time")) { + BigDecimal sourceRegistrationTime = + new BigDecimal(root.get("source_registration_time").toString().replace("\"", "")); + + builder.setSourceRegistrationTime( + Instant.ofEpochSecond( + sourceRegistrationTime.setScale(0, RoundingMode.DOWN).longValue())); + } + if (root.has("debug_mode")) { + builder.setReportDebugModeString(root.get("debug_mode").asText()); + } + info = builder.build(); + + for (UnsignedLong filteringId : filteringIds) { + keyset.add( + PrivacyBudgetUnit.create( + getPrivacyBudgetKey(info, filteringId, generatorFactory), + info.scheduledReportTime().truncatedTo(HOURS), + info.reportingOrigin())); + } + } + if (keyset.isEmpty()) { + return null; + } + ObjectMapper om = new ObjectMapper(); + om.registerModule(new JavaTimeModule()); + om.setVisibility(PropertyAccessor.FIELD, Visibility.ANY); + ExtractionUtils.KeyFile keyFile = + ExtractionUtils.KeyFile.create(key, om.writeValueAsString(keyset)); + return keyFile; + } + + /** Calculates Privacy Budget Keys */ + private static String getPrivacyBudgetKey( + SharedInfo sharedInfo, + UnsignedLong filteringId, + PrivacyBudgetKeyGeneratorFactory generatorFactory) { + try { + PrivacyBudgetKeyInput input = + PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .setFilteringId(filteringId) + .build(); + Optional privacyBudgetKeyGenerator = + generatorFactory.getPrivacyBudgetKeyGenerator(input); + return privacyBudgetKeyGenerator.get().generatePrivacyBudgetKey(input); + } catch (IllegalArgumentException | NoSuchElementException e) { + throw new AssertionError("failed to get generator", e); + } + } + + @AutoValue + public abstract static class KeyFile { + + public static ExtractionUtils.KeyFile create(String key, String body) { + return new AutoValue_ExtractionUtils_KeyFile(key, body); + } + + public abstract String key(); + + public abstract String body(); + } +} diff --git a/java/com/google/aggregate/tools/privacybudgetutil/common/PrivacyBudgetUnitExtractionConfig.java b/java/com/google/aggregate/tools/privacybudgetutil/common/PrivacyBudgetUnitExtractionConfig.java new file mode 100644 index 00000000..d30a4b37 --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/common/PrivacyBudgetUnitExtractionConfig.java @@ -0,0 +1,159 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.tools.privacybudgetutil.common; + +import com.beust.jcommander.IStringConverter; +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; +import com.google.common.collect.ImmutableList; +import com.google.common.primitives.UnsignedLong; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/* + * This tool is for generating lists of privacy budget units in an adtech environment + * and this config controls what is extracted. + * Given the correct credentials the lists can be generated with: + * bazel run //java/com/google/aggregate/tools/privacybudgetunit/extraction/(aws:AwsPrivacyBudgetUnitExtraction | gcp:GcpPrivacyBudgetUnitExtraction) \ + * (generate_keys | write_keys) \ + * --bucket \ + * --input_prefix + */ +public final class PrivacyBudgetUnitExtractionConfig { + + private static final Logger LOGGER = + LoggerFactory.getLogger(PrivacyBudgetUnitExtractionConfig.class); + + @Parameter(description = "The main function of the tool, generate_keys or write_keys.") + private String function = "generate_keys"; + + @Parameter(names = "--help", description = "Prints usage.", help = true) + private boolean help = false; + + @Parameter(names = "--bucket", description = "Bucket where the avro files reside.") + private String bucket; + + @Parameter(names = "--input_prefix", description = "Prefix for file selection.") + private String inputPrefix; + + @Parameter( + names = "--dry_run", + description = "Run in dry run mode. This will not write any files.") + private boolean dryRun = false; + + @Parameter( + names = "--output_prefix", + description = "The output file prefix to write the file to.") + private String outputPrefix = "shared_ids"; + + @Parameter(names = "--single_file", description = "Create a single file for the output.") + private boolean singleFile = true; + + @Parameter( + names = "--filtering_ids", + description = + "The filtering IDs to use. 
This option can be used multiple times, and may be" + + " comma-separated.", + converter = StringToUnsignedLongConverter.class) + private List filteringIds = ImmutableList.of(UnsignedLong.ZERO); + + private String groups; + private final JCommander commander; + private final GCPCommands gcp; + private final AWSCommands aws; + + public PrivacyBudgetUnitExtractionConfig(CloudPlatform cloudPlatform, String[] args) { + aws = new AWSCommands(); + gcp = new GCPCommands(); + JCommander.Builder builder = JCommander.newBuilder().addObject(this); + if (cloudPlatform == CloudPlatform.AWS) { + builder.addObject(aws); + } else { + builder.addObject(gcp); + } + commander = builder.build(); + commander.parse(args); + } + + public boolean printHelp() { + if (help) { + commander.usage(); + } + return this.help; + } + + public String getBucket() { + return this.bucket; + } + + public String getRegion() { + return this.aws.region; + } + + public String getFunction() { + return this.function; + } + + public String getInputPrefix() { + return this.inputPrefix; + } + + public Boolean getDryRun() { + return this.dryRun; + } + + public String getOutputPrefix() { + return this.outputPrefix; + } + + public Boolean getSingleFile() { + return this.singleFile; + } + + public String getProjectId() { + return this.gcp.projectId; + } + + public List getFilteringIds() { + return this.filteringIds; + } + + private class AWSCommands { + + @Parameter(names = "--region", description = "Region for the request") + private String region = "us-east-1"; + } + + private class GCPCommands { + + @Parameter(names = "--project_id", description = "GCP project Id.") + private String projectId; + } + + public enum CloudPlatform { + GCP, + AWS + } + + public static class StringToUnsignedLongConverter implements IStringConverter { + @Override + public UnsignedLong convert(String value) { + return UnsignedLong.valueOf(value); + } + } +} diff --git a/java/com/google/aggregate/tools/privacybudgetutil/gcp/BUILD b/java/com/google/aggregate/tools/privacybudgetutil/gcp/BUILD new file mode 100644 index 00000000..399540ba --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/gcp/BUILD @@ -0,0 +1,85 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
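The config class above is a thin JCommander wrapper: the first positional argument selects the function, and the cloud-specific flag object (AWS or GCP) is attached based on the CloudPlatform passed to the constructor. A rough usage sketch, with placeholder flag values, could be:

import com.google.aggregate.tools.privacybudgetutil.common.PrivacyBudgetUnitExtractionConfig;
import com.google.aggregate.tools.privacybudgetutil.common.PrivacyBudgetUnitExtractionConfig.CloudPlatform;

public final class ConfigParsingSketch {
  public static void main(String[] args) {
    PrivacyBudgetUnitExtractionConfig config =
        new PrivacyBudgetUnitExtractionConfig(
            CloudPlatform.AWS,
            new String[] {
              "write_keys", "--bucket", "example-bucket", "--input_prefix", "reports/2024/"
            });

    // The positional argument becomes the function; unset flags keep their defaults.
    System.out.println(config.getFunction());     // write_keys
    System.out.println(config.getBucket());       // example-bucket
    System.out.println(config.getInputPrefix());  // reports/2024/
    System.out.println(config.getRegion());       // us-east-1, the AWS-only default
    System.out.println(config.getSingleFile());   // true by default
  }
}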
+ +load("@rules_java//java:defs.bzl", "java_binary", "java_library") + +package(default_visibility = ["//visibility:public"]) + +java_library( + name = "lib", + srcs = glob(["*.java"]), + deps = [ + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/protocol/avro:avro_record_reader", + "//java/com/google/aggregate/tools/privacybudgetutil/common:config", + "//java/com/google/aggregate/tools/privacybudgetutil/common:extraction_utils", + "//java/external:acai", + "//java/external:autovalue", + "//java/external:autovalue_annotations", + "//java/external:avro", + "//java/external:gcp_api_gax", + "//java/external:gcp_core", + "//java/external:gcp_storage", + "//java/external:guava", + "//java/external:guice", + "//java/external:jackson_annotations", + "//java/external:jackson_core", + "//java/external:jackson_databind", + "//java/external:jackson_datatype_jsr310", + "//java/external:javax_inject", + "//java/external:jcommander", + "//java/external:jimfs", + "//java/external:slf4j", + "//java/external:slf4j_simple", + ], +) + +java_binary( + name = "GcpPrivacyBudgetUnitExtraction", + srcs = glob(["*.java"]), + main_class = "com.google.aggregate.tools.privacybudgetutil.gcp.GcpPrivacyBudgetUnitExtraction", + deps = [ + ":lib", + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/bridge:privacy_budgeting_service_bridge", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/protocol/avro:avro_record_reader", + "//java/com/google/aggregate/tools/privacybudgetutil/common:config", + "//java/com/google/aggregate/tools/privacybudgetutil/common:extraction_utils", + "//java/external:acai", + "//java/external:autovalue", + "//java/external:autovalue_annotations", + "//java/external:avro", + "//java/external:gcp_api_gax", + "//java/external:gcp_core", + "//java/external:gcp_storage", + "//java/external:guava", + "//java/external:guice", + "//java/external:jackson_annotations", + "//java/external:jackson_core", + "//java/external:jackson_databind", + "//java/external:jackson_datatype_jsr310", + "//java/external:javax_inject", + "//java/external:jcommander", + "//java/external:jimfs", + "//java/external:slf4j", + "//java/external:slf4j_simple", + ], +) + +# The deploy JAR for the PrivacyBudgetUnitExtractionTool. +alias( + name = "GcpPrivacyBudgetUnitExtractionDeploy", + actual = ":GcpPrivacyBudgetUnitExtraction_deploy.jar", +) diff --git a/java/com/google/aggregate/tools/privacybudgetutil/gcp/GcpPrivacyBudgetUnitExtraction.java b/java/com/google/aggregate/tools/privacybudgetutil/gcp/GcpPrivacyBudgetUnitExtraction.java new file mode 100644 index 00000000..d0da0b0a --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/gcp/GcpPrivacyBudgetUnitExtraction.java @@ -0,0 +1,183 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.tools.privacybudgetutil.gcp; + +import static com.google.cloud.storage.Storage.BlobListOption.*; + +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorFactory; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorModule; +import com.google.aggregate.tools.privacybudgetutil.common.ExtractionUtils; +import com.google.aggregate.tools.privacybudgetutil.common.PrivacyBudgetUnitExtractionConfig; +import com.google.aggregate.tools.privacybudgetutil.gcp.GcpPrivacyBudgetUnitExtractionModule.StorageClient; +import com.google.api.gax.paging.Page; +import com.google.cloud.ReadChannel; +import com.google.cloud.WriteChannel; +import com.google.cloud.storage.Blob; +import com.google.cloud.storage.BlobId; +import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.Storage; +import com.google.inject.AbstractModule; +import com.google.inject.Guice; +import com.google.inject.Inject; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.charset.StandardCharsets; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Extracts privacy budget units from avros in GCP. */ +final class GcpPrivacyBudgetUnitExtraction { + + private static final Logger LOGGER = + LoggerFactory.getLogger(GcpPrivacyBudgetUnitExtraction.class); + + private final PrivacyBudgetUnitExtractionConfig config; + private final Storage storage; + private final PrivacyBudgetKeyGeneratorFactory generatorFactory; + private final List keys; + + @Inject + GcpPrivacyBudgetUnitExtraction( + PrivacyBudgetUnitExtractionConfig config, + PrivacyBudgetKeyGeneratorFactory generatorFactory, + @StorageClient Storage storage) { + this.config = config; + this.storage = storage; + this.generatorFactory = generatorFactory; + this.keys = new ArrayList(); + } + + public static void main(String[] args) throws Exception { + PrivacyBudgetUnitExtractionConfig config = + new PrivacyBudgetUnitExtractionConfig( + PrivacyBudgetUnitExtractionConfig.CloudPlatform.GCP, args); + if (config.printHelp()) { + return; + } + Guice.createInjector( + new GcpPrivacyBudgetUnitExtractionModule(), + new PrivacyBudgetKeyGeneratorModule(), + new AbstractModule() { + @Override + protected void configure() { + bind(PrivacyBudgetUnitExtractionConfig.class).toInstance(config); + } + }) + .getInstance(GcpPrivacyBudgetUnitExtraction.class) + .run(); + } + + public void run() throws Exception { + + if (this.config.getFunction().equals("generate_keys")) { + + Page blobs = + storage.list(this.config.getBucket(), prefix(this.config.getInputPrefix())); + if (!this.config.getSingleFile()) { + blobs + .streamAll() + .filter(blob -> blob.getName().endsWith(".avro")) + .map(this::accept) + .forEach(System.out::println); + } else { + String builder = + blobs + .streamAll() + .filter(blob -> blob.getName().endsWith(".avro")) + .map(this::accept) + .filter(s -> s != null) + .map(ExtractionUtils.KeyFile::body) + .collect(Collectors.joining(",", "[", "]")); + LOGGER.info(builder); + } + } + + if (this.config.getFunction().equals("write_keys")) { + Page blobs = + storage.list(this.config.getBucket(), prefix(this.config.getInputPrefix())); + if (!this.config.getSingleFile()) { + blobs + .streamAll() + .filter(blob -> 
blob.getName().endsWith(".avro")) + .map(this::accept) + .forEach(this::writeKeyFile); + } else { + String builder = + blobs + .streamAll() + .filter(blob -> blob.getName().endsWith(".avro")) + .map(this::accept) + .filter(s -> s != null) + .map(ExtractionUtils.KeyFile::body) + .collect(Collectors.joining(",", "[", "]")); + String filename = storage.getOptions().getProjectId(); + ExtractionUtils.KeyFile keyFile = ExtractionUtils.KeyFile.create(filename, builder); + writeKeyFile(keyFile); + } + } + } + + private ExtractionUtils.KeyFile readBucketObject(BlobId blobId, String key) { + ReadChannel reader = storage.reader(blobId); + InputStream inputStream = Channels.newInputStream(reader); + try { + return ExtractionUtils.processAvro( + inputStream, this.generatorFactory, key, this.config.getFilteringIds()); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** Writes keys to destination */ + private void writeKeyFile(ExtractionUtils.KeyFile keyFile) { + String filename = + String.format( + "%s/%s_%s.json", + this.config.getOutputPrefix(), + keyFile.key(), + java.time.LocalDateTime.now().format(DateTimeFormatter.ofPattern("MM_dd_yy"))); + LOGGER.info( + String.format( + "writing %s/%s to %s", config.getBucket(), config.getInputPrefix(), filename)); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + if (!this.config.getDryRun()) { + BlobId blobId = BlobId.of(this.config.getBucket(), filename); + BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build(); + try { + stream.write(keyFile.body().getBytes(StandardCharsets.UTF_8)); + } catch (IOException e) { + throw new RuntimeException(e); + } + try (WriteChannel writer = storage.writer(blobInfo)) { + writer.write(ByteBuffer.wrap(stream.toByteArray())); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + private ExtractionUtils.KeyFile accept(Blob blob) { + return readBucketObject(blob.getBlobId(), blob.getName()); + } +} diff --git a/java/com/google/aggregate/tools/privacybudgetutil/gcp/GcpPrivacyBudgetUnitExtractionModule.java b/java/com/google/aggregate/tools/privacybudgetutil/gcp/GcpPrivacyBudgetUnitExtractionModule.java new file mode 100644 index 00000000..5f6e7f3b --- /dev/null +++ b/java/com/google/aggregate/tools/privacybudgetutil/gcp/GcpPrivacyBudgetUnitExtractionModule.java @@ -0,0 +1,48 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.tools.privacybudgetutil.gcp; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +import com.google.aggregate.tools.privacybudgetutil.common.PrivacyBudgetUnitExtractionConfig; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; +import javax.inject.Qualifier; + +/** Provides bindings for {@link GcpPrivacyBudgetUnitExtraction}. */ +final class GcpPrivacyBudgetUnitExtractionModule extends AbstractModule { + + @Provides + @StorageClient + Storage provideStorage(PrivacyBudgetUnitExtractionConfig config) { + Storage storage = + StorageOptions.newBuilder().setProjectId(config.getProjectId()).build().getService(); + return storage; + } + + @Qualifier + @Target({FIELD, PARAMETER, METHOD}) + @Retention(RUNTIME) + @interface StorageClient {} +} diff --git a/java/external/BUILD b/java/external/BUILD index 65200eea..d33dfab3 100644 --- a/java/external/BUILD +++ b/java/external/BUILD @@ -793,7 +793,11 @@ alias( alias( name = "mockito", actual = "@maven//:org_mockito_mockito_core", - visibility = ["//javatests:__subpackages__"], + visibility = [ + "//javatests:__subpackages__", + "//telemetry/debug/javatests:__subpackages__", + "//telemetry/prod/javatests:__subpackages__", + ], ) alias( @@ -821,6 +825,11 @@ alias( actual = "@maven//:com_google_api_grpc_proto_google_cloud_compute_v1", ) +alias( + name = "proto_gcp_common", + actual = "@maven//:com_google_api_grpc_proto_google_common_protos", +) + alias( name = "gcp_pubsub_v1", actual = "@maven//:com_google_api_grpc_proto_google_cloud_pubsub_v1", @@ -846,6 +855,16 @@ alias( actual = "@maven//:com_google_api_gax_grpc", ) +alias( + name = "gcp_monitoring", + actual = "@maven//:com_google_cloud_google_cloud_monitoring", +) + +alias( + name = "gcp_proto_cloud_monitoring", + actual = "@maven//:com_google_api_grpc_proto_google_cloud_monitoring_v3", +) + alias( name = "protobuf_java", actual = "@maven//:com_google_protobuf_protobuf_java", @@ -976,3 +995,13 @@ alias( name = "scp_credsprovider", actual = "@shared_libraries//java/com/google/scp/shared/aws/credsprovider:credsprovider", ) + +alias( + name = "gcp_trace", + actual = "@maven//:com_google_cloud_google_cloud_trace", +) + +alias( + name = "gcp_trace_proto", + actual = "@maven//:com_google_api_grpc_proto_google_cloud_trace_v1", +) diff --git a/javatests/com/google/aggregate/adtech/worker/AggregationWorkerDiffTest.java b/javatests/com/google/aggregate/adtech/worker/AggregationWorkerDiffTest.java index 314a247b..f8dc74f5 100644 --- a/javatests/com/google/aggregate/adtech/worker/AggregationWorkerDiffTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AggregationWorkerDiffTest.java @@ -157,7 +157,7 @@ private MapDifference diff() throws Exception { // Create AggregatedFact with only two fields: bucket and metric Stream testFactsTwoFields = - testFacts.stream().map(fact -> AggregatedFact.create(fact.bucket(), fact.metric())); + testFacts.stream().map(fact -> AggregatedFact.create(fact.getBucket(), fact.getMetric())); return ResultDiffer.diffResults(testFactsTwoFields, goldenFacts.stream()); } diff --git a/javatests/com/google/aggregate/adtech/worker/AggregationWorkerHermeticTest.java 
b/javatests/com/google/aggregate/adtech/worker/AggregationWorkerHermeticTest.java index db9d73c5..980ea559 100644 --- a/javatests/com/google/aggregate/adtech/worker/AggregationWorkerHermeticTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AggregationWorkerHermeticTest.java @@ -36,8 +36,10 @@ import com.google.aggregate.protocol.avro.AvroReportRecord; import com.google.aggregate.protocol.avro.AvroReportWriter; import com.google.aggregate.protocol.avro.AvroReportWriterFactory; +import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.io.ByteSource; +import com.google.common.primitives.UnsignedLong; import com.google.common.util.concurrent.ServiceManager; import com.google.crypto.tink.hybrid.HybridConfig; import com.google.inject.AbstractModule; @@ -51,6 +53,7 @@ import com.google.scp.operator.shared.testing.SimulationTestParams; import java.io.IOException; import java.io.OutputStream; +import java.math.BigInteger; import java.nio.file.Files; import java.nio.file.Path; import java.time.Instant; @@ -949,7 +952,116 @@ public void aggregate_withNoQueriedFilteringId_aggregatesDefaultOrIdContribution .setVersion(VERSION_1_0) .build()); Files.copy(reportsAvro, reportShardsDir.resolve("shard_2.avro")); - setupLocalAggregationWorker(args); + setupLocalAggregationWorker( + getLocalAggregationWorkerArgs( + /* noEncryption= */ false, + /* outputDomain= */ false, + /* domainOptional= */ true, + /* reportErrorThresholdPercentage= */ "10", + /* enablePrivacyBudgetKeyFiltering= */ true, + /* filteringIds= */ "")); + + runWorker(); + ImmutableList factList = waitForAggregation(); + + // Aggregates all the facts without id or has id = 0. + AggregatedFact expectedFact1 = + AggregatedFact.create( + /* key= */ createBucketFromInt(1), /* metric= */ 40, /* unnoisedMetric= */ 40L); + AggregatedFact expectedFact2 = + AggregatedFact.create( + /* key= */ createBucketFromInt(5), /* metric= */ 50, /* unnoisedMetric= */ 50L); + assertThat(factList).containsExactly(expectedFact1, expectedFact2); + } + + @Test + public void aggregate_withQueriedFilteringId_aggregatesCorrespondingContributions() + throws Exception { + ImmutableList inputWithoutIds = ImmutableList.of("1:10,1:20,0:0", "5:50"); + AwsHermeticTestHelper.generateAvroReportsFromTextList( + SimulationTestParams.builder() + .setHybridKey(hybridKey) + .setReportsAvro(reportsAvro) + .setSimulationInputFileLines(inputWithoutIds) + .setVersion(VERSION_0_1) + .build()); + Files.copy(reportsAvro, reportShardsDir.resolve("shard_1.avro")); + UnsignedLong queriedFilteringId1 = + UnsignedLong.valueOf( + BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.TWO).add(BigInteger.ONE)); + UnsignedLong queriedFilteringId2 = UnsignedLong.valueOf((1L << 32) + 5); + // In this input with ids, only facts corresponding to id = 18446744073709551615 and 4294967301 + // are considered for aggregation. 
+ ImmutableList inputWithIds = + ImmutableList.of( + String.format("1:10:0,1:20:%s,0:0:0", queriedFilteringId1), + String.format("5:51:%s", queriedFilteringId2)); + AwsHermeticTestHelper.generateAvroReportsFromTextList( + SimulationTestParams.builder() + .setHybridKey(hybridKey) + .setReportsAvro(reportsAvro) + .setSimulationInputFileLines(inputWithIds) + .setSourceRegistrationTime(Instant.EPOCH.minusSeconds(600)) + .setVersion(VERSION_1_0) + .build()); + Files.copy(reportsAvro, reportShardsDir.resolve("shard_2.avro")); + setupLocalAggregationWorker( + getLocalAggregationWorkerArgs( + /* noEncryption= */ false, + /* outputDomain= */ false, + /* domainOptional= */ true, + /* reportErrorThresholdPercentage= */ "10", + /* enablePrivacyBudgetKeyFiltering= */ true, + /* filteringIds= */ queriedFilteringId1.toString() + + "," + + queriedFilteringId2.toString())); + + runWorker(); + ImmutableList factList = waitForAggregation(); + + AggregatedFact expectedFact1 = + AggregatedFact.create( + /* key= */ createBucketFromInt(1), /* metric= */ 20, /* unnoisedMetric= */ 20L); + AggregatedFact expectedFact2 = + AggregatedFact.create( + /* key= */ createBucketFromInt(5), /* metric= */ 51, /* unnoisedMetric= */ 51L); + assertThat(factList).containsExactly(expectedFact1, expectedFact2); + } + + @Test + public void + aggregate_withFilteringNotEnabled_ignoresQueriedIds_aggregatesDefaultOrIdContributions() + throws Exception { + // All facts without ids, so considered for aggregation. + ImmutableList inputWithoutIds = ImmutableList.of("1:10,1:20,0:0", "5:50"); + AwsHermeticTestHelper.generateAvroReportsFromTextList( + SimulationTestParams.builder() + .setHybridKey(hybridKey) + .setReportsAvro(reportsAvro) + .setSimulationInputFileLines(inputWithoutIds) + .setVersion(VERSION_0_1) + .build()); + Files.copy(reportsAvro, reportShardsDir.resolve("shard_1.avro")); + // In this input with ids, only facts corresponding to id = 0 will be considered for + // aggregation. 
+ ImmutableList inputWithIds = ImmutableList.of("1:10:0,1:20:10,0:0:0", "5:51:4"); + AwsHermeticTestHelper.generateAvroReportsFromTextList( + SimulationTestParams.builder() + .setHybridKey(hybridKey) + .setReportsAvro(reportsAvro) + .setSimulationInputFileLines(inputWithIds) + .setSourceRegistrationTime(Instant.EPOCH.minusSeconds(600)) + .setVersion(VERSION_1_0) + .build()); + Files.copy(reportsAvro, reportShardsDir.resolve("shard_2.avro")); + setupLocalAggregationWorker( + getLocalAggregationWorkerArgs( + /* noEncryption= */ false, + /* outputDomain= */ false, + /* domainOptional= */ true, + /* reportErrorThresholdPercentage= */ "10", + /* enablePrivacyBudgetKeyFiltering= */ false, + /* filteringIds= */ "4,10")); runWorker(); ImmutableList factList = waitForAggregation(); @@ -964,6 +1076,31 @@ public void aggregate_withNoQueriedFilteringId_aggregatesDefaultOrIdContribution assertThat(factList).containsExactly(expectedFact1, expectedFact2); } + @Test + public void aggregate_withInvalidFilteringIds_throwsValidation() throws Exception { + setupLocalAggregationWorker( + getLocalAggregationWorkerArgs( + /* noEncryption= */ false, + /* outputDomain= */ false, + /* domainOptional= */ true, + /* reportErrorThresholdPercentage= */ "10", + /* enablePrivacyBudgetKeyFiltering= */ false, + /* filteringIds= */ "invalid,not a number,1,2,3")); + + runWorker(); + + String actualResultSerialized = Files.readString(resultFile); + ResultInfo.Builder builder = ResultInfo.newBuilder(); + JSON_PARSER.merge(actualResultSerialized, builder); + ResultInfo actualResult = builder.build(); + assertThat(actualResult.getReturnCode()) + .isEqualTo(AggregationWorkerReturnCode.INVALID_JOB.name()); + assertThat(actualResult.getReturnMessage()) + .containsMatch( + "Job parameters for the job 'request' should have comma separated integers for" + + " 'filtering_ids' parameter."); + } + @Test public void aggregate_withDecryptionErrors_withThreshold10Percent_failsEarly() throws Exception { // 8 reports with non-deserializable SharedInfo and empty payload. 
@@ -1130,7 +1267,15 @@ public void aggregate_withErrorsWithinThreshold_completesTheJob() throws Excepti private String[] getLocalAggregationWorkerArgs( boolean noEncryption, boolean outputDomain, boolean domainOptional) throws Exception { - return getLocalAggregationWorkerArgs(noEncryption, outputDomain, domainOptional, "100"); + return getLocalAggregationWorkerArgs( + noEncryption, + outputDomain, + domainOptional, + "100", + /** enablePrivacyBudgetKeyFiltering = */ + true, + /** filteringIds = */ + null); } private String[] getLocalAggregationWorkerArgs( @@ -1139,6 +1284,25 @@ private String[] getLocalAggregationWorkerArgs( boolean domainOptional, String reportErrorThresholdPercentage) throws Exception { + return getLocalAggregationWorkerArgs( + noEncryption, + outputDomain, + domainOptional, + reportErrorThresholdPercentage, + /** enablePrivacyBudgetKeyFiltering = */ + true, + /** filteringIds = */ + null); + } + + private String[] getLocalAggregationWorkerArgs( + boolean noEncryption, + boolean outputDomain, + boolean domainOptional, + String reportErrorThresholdPercentage, + boolean enablePrivacyBudgetKeyFiltering, + String filteringIds) + throws Exception { // Create the local key HybridConfig.register(); ImmutableList.Builder argsBuilder = @@ -1184,6 +1348,12 @@ private String[] getLocalAggregationWorkerArgs( if (domainOptional) { argsBuilder.add("--domain_optional"); } + if (enablePrivacyBudgetKeyFiltering) { + argsBuilder.add("--labeled_privacy_budget_keys_enabled"); + } + if (!Strings.isNullOrEmpty(filteringIds)) { + argsBuilder.add("--local_job_params_input_filtering_ids").add(filteringIds); + } return argsBuilder.build().toArray(String[]::new); } diff --git a/javatests/com/google/aggregate/adtech/worker/AwsOTelTest.java b/javatests/com/google/aggregate/adtech/worker/AwsOTelTest.java index 2f154cc8..a6092857 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsOTelTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsOTelTest.java @@ -68,9 +68,8 @@ /** * In AwsOTeltest, one job would be sent first to trigger metric/trace generation. And the following * tests are testing if OTel metrics and traces exist in Cloudwatch and AWS Xray. In continuous - * environment, prod binary is used for OTel which would only export prod metrics. The current prod - * metrics are no-op due to b/305100313; therefore, no metrics and traces should be found in AWS. - * Use FixMethodOrder for this class to ensure the job will be running first to generate metrics and + * environment, prod binary is used for OTel which would only export prod metrics. Use + * FixMethodOrder for this class to ensure the job will be running first to generate metrics and * traces. */ @RunWith(JUnit4.class) @@ -117,7 +116,7 @@ public void createJobE2ETest() throws Exception { TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( testDataBucket, inputKey, testDataBucket, @@ -150,8 +149,10 @@ public void e2eCPUMetricTest() { List metrics = amazonCloudWatch.getMetricData(request).getMetricDataResults(); MetricDataResult metricResult = metrics.get(0); - // TODO(b/305100313): CPU metrics should be larger than 0 after launch. 
- assertThat(metricResult.getValues().size()).isEqualTo(0); + assertThat(metricResult.getValues().size()).isGreaterThan(0); + for (int i = 0; i < metricResult.getValues().size(); i++) { + assertThat(metricResult.getValues().get(i) % 1).isEqualTo(0); + } } @Test @@ -162,14 +163,16 @@ public void e2eMemoryMetricTest() { List metrics = amazonCloudWatch.getMetricData(request).getMetricDataResults(); MetricDataResult metricResult = metrics.get(0); - // TODO(b/305100313): Memory metrics should be larger than 0 after launch. - assertThat(metricResult.getValues().size()).isEqualTo(0); + assertThat(metricResult.getValues().size()).isGreaterThan(0); + for (int i = 0; i < metricResult.getValues().size(); i++) { + assertThat(metricResult.getValues().get(i) % 10).isEqualTo(0); + } } @Test public void e2eTracesTest() throws InterruptedException { - // Adding 2 mins sleep here to make sure all traces are uploaded. - Thread.sleep(120000); + // Adding 3 mins sleep here to make sure all traces are uploaded. + Thread.sleep(180000); AtomicInteger prodTraceCount = new AtomicInteger(0); AtomicInteger debugTraceCount = new AtomicInteger(0); AWSXRay awsxRay = AWSXRayClientBuilder.standard().withRegion("us-east-1").build(); @@ -197,8 +200,7 @@ public void e2eTracesTest() throws InterruptedException { } }); - // TODO(b/305100313): Prod traces should be larger than 0 after launch. - assertThat(prodTraceCount.get()).isEqualTo(0); + assertThat(prodTraceCount.get()).isEqualTo(1); assertThat(debugTraceCount.get()).isEqualTo(0); } @@ -211,7 +213,7 @@ private GetMetricDataRequest generateGetMetricDataRequest(String metricName) { Date startTime = new Date(endTime.getTime() - 600000); // query for the last 10 mins. Metric metric = new Metric().withMetricName(metricName).withNamespace(ENVIRONMENT_NAME).withDimensions(dim); - MetricStat metricStat = new MetricStat().withMetric(metric).withStat("Average").withPeriod(1); + MetricStat metricStat = new MetricStat().withMetric(metric).withStat("Sum").withPeriod(1); MetricDataQuery query = new MetricDataQuery().withId("test").withMetricStat(metricStat); return new GetMetricDataRequest() .withMetricDataQueries(query) diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerAutoScalingTest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerAutoScalingTest.java index 46980b2e..231d91bd 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerAutoScalingTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerAutoScalingTest.java @@ -17,7 +17,6 @@ package com.google.aggregate.adtech.worker; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.KOKORO_BUILD_ID; -import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.createJobRequest; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.submitJob; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.waitForJobCompletions; @@ -110,7 +109,7 @@ private CreateJobRequest createE2EJob(Integer jobCount) throws Exception { String.format( "%s/test-outputs/%s/%s", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID, outputFile); CreateJobRequest createJobRequest = - createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), INPUT_DATA_PATH, getTestDataBucket(), diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousDiffTest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousDiffTest.java index 8203339a..c2bec4f3 100644 --- 
a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousDiffTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousDiffTest.java @@ -18,7 +18,6 @@ import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.AWS_S3_BUCKET_REGION; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.KOKORO_BUILD_ID; -import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.createJobRequest; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.getOutputFileName; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.readResultsFromS3; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.submitJobAndWaitForResult; @@ -111,7 +110,7 @@ public void e2eDiffTest() throws Exception { String goldenLocation = "testdata/golden/2022_10_18/10k_diff_test.avro.golden"; CreateJobRequest createJobRequest = - createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -165,9 +164,7 @@ protected void configure() { .httpClient(UrlConnectionHttpClient.builder().build()) .build()); bind(S3AsyncClient.class) - .toInstance( - S3AsyncClient.builder() - .region(AWS_S3_BUCKET_REGION).build()); + .toInstance(S3AsyncClient.builder().region(AWS_S3_BUCKET_REGION).build()); bind(Boolean.class).annotatedWith(S3UsePartialRequests.class).toInstance(false); bind(Integer.class).annotatedWith(PartialRequestBufferSize.class).toInstance(20); } diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousInvalidCredentialsTest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousInvalidCredentialsTest.java index c6396df6..b1744d02 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousInvalidCredentialsTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousInvalidCredentialsTest.java @@ -18,7 +18,6 @@ import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.AWS_S3_BUCKET_REGION; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.KOKORO_BUILD_ID; -import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.createJobRequest; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.submitJob; import static com.google.common.truth.Truth.assertThat; import static com.google.scp.operator.protos.frontend.api.v1.ReturnCodeProto.ReturnCode.RETRIES_EXHAUSTED; @@ -82,7 +81,7 @@ public void e2ePerfTest() throws Exception { // TODO(b/228085828): Modify e2e tests to use output domain CreateJobRequest createJobRequest = - createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( TESTING_BUCKET, INPUT_DATA_PATH, TESTING_BUCKET, diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousOutOfMemoryTest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousOutOfMemoryTest.java index 639f495b..1cea1170 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousOutOfMemoryTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousOutOfMemoryTest.java @@ -95,7 +95,7 @@ public void createJobE2ETest() throws Exception { "%s/%s/test-outputs/OOM_test_output.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest1 = - AwsWorkerContinuousTestHelper.createJobRequest( + 
AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -119,7 +119,7 @@ public void createJobE2ETest() throws Exception { "%s/%s/test-outputs/OOM_test_output.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest2 = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -156,9 +156,7 @@ protected void configure() { .httpClient(UrlConnectionHttpClient.builder().build()) .build()); bind(S3AsyncClient.class) - .toInstance( - S3AsyncClient.builder() - .region(AWS_S3_BUCKET_REGION).build()); + .toInstance(S3AsyncClient.builder().region(AWS_S3_BUCKET_REGION).build()); bind(Boolean.class).annotatedWith(S3UsePartialRequests.class).toInstance(false); bind(Integer.class).annotatedWith(PartialRequestBufferSize.class).toInstance(20); } diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousPerfTest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousPerfTest.java index b8dc6d3b..49d92ac7 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousPerfTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousPerfTest.java @@ -18,7 +18,6 @@ import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.AWS_S3_BUCKET_REGION; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.KOKORO_BUILD_ID; -import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.createJobRequest; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.getAndWriteStopwatchesFromS3; import static com.google.aggregate.adtech.worker.AwsWorkerContinuousTestHelper.submitJobAndWaitForResult; import static com.google.common.truth.Truth.assertThat; @@ -48,10 +47,8 @@ @RunWith(JUnit4.class) public class AwsWorkerContinuousPerfTest { - @Rule - public final Acai acai = new Acai(TestEnv.class); - @Rule - public final TestName name = new TestName(); + @Rule public final Acai acai = new Acai(TestEnv.class); + @Rule public final TestName name = new TestName(); private static final Duration completionTimeout = Duration.of(60, ChronoUnit.MINUTES); private static final String TESTING_BUCKET = "aggregation-service-testing"; @@ -88,8 +85,7 @@ public class AwsWorkerContinuousPerfTest { private static final String OUTPUT_DOMAIN_PREFIX = "testdata/1m_staging_2022_08_08_sharded_domain/shard"; - @Inject - S3BlobStorageClient s3BlobStorageClient; + @Inject S3BlobStorageClient s3BlobStorageClient; @Test public void e2ePerfTest() throws Exception { @@ -104,7 +100,7 @@ public void e2ePerfTest() throws Exception { "e2e_test_outputs/%s/%s", KOKORO_BUILD_ID, "createJobE2EperfTest-reports1m.avro"); CreateJobRequest createJobRequest = - createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( TESTING_BUCKET, INPUT_REPORTS_PREFIX, TESTING_BUCKET, @@ -138,9 +134,7 @@ protected void configure() { .httpClient(UrlConnectionHttpClient.builder().build()) .build()); bind(S3AsyncClient.class) - .toInstance( - S3AsyncClient.builder() - .region(AWS_S3_BUCKET_REGION).build()); + .toInstance(S3AsyncClient.builder().region(AWS_S3_BUCKET_REGION).build()); bind(Boolean.class).annotatedWith(S3UsePartialRequests.class).toInstance(false); bind(Integer.class).annotatedWith(PartialRequestBufferSize.class).toInstance(20); } diff --git 
a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTest.java index 9c976019..ef114c29 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTest.java @@ -34,15 +34,20 @@ import com.google.aggregate.adtech.worker.testing.AvroResultsFileReader; import com.google.aggregate.protocol.avro.AvroDebugResultsReaderFactory; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.primitives.UnsignedLong; +import com.google.errorprone.annotations.Var; import com.google.inject.AbstractModule; import com.google.inject.Inject; import com.google.scp.operator.cpio.blobstorageclient.aws.S3BlobStorageClient; import com.google.scp.operator.cpio.blobstorageclient.aws.S3BlobStorageClientModule.PartialRequestBufferSize; import com.google.scp.operator.cpio.blobstorageclient.aws.S3BlobStorageClientModule.S3UsePartialRequests; import com.google.scp.operator.protos.frontend.api.v1.CreateJobRequestProto.CreateJobRequest; +import java.io.IOException; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.Optional; +import java.util.Set; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -59,55 +64,17 @@ * Integration test which runs against an AWS deployment and verifies that a job accessing an * existing encrypted payload can be processed by the system and produce an output avro file. * - *

<p>The expected input files are of 3 types as detailed below and are located at the S3 prefix - * "s3://aggregation-service-testing/$KOKORO_BUILD_ID/test-inputs". Currently, there are multiple - * tests that rely on reports with version 0.1 and hence we generate multiple input report files for - * version 0.1. This is necessary because everytime a report file is used, the privacy budget - * associated with it is consumed and it cannot be reused. Similar technique can be followed to - * generate multiple report files in other versions as per need. Please see shared_e2e.sh for - * details: - * - * <ul>
- *   <li>Multiple report input files with version 0.1 of reports. Each file follows the naming - * format 10k_test_input_${number}.avro. Each test below that relies on version 0.1 reports is - * supposed to use one of the above input files each
- *   <li>One report input file with version 0.1 of reports and debug mode enabled on generated - * reports. The file follows the naming format 10k_attribution_report_test_input_debug.avro
- * </ul> - * - * The expected domain files are of 3 types as detailed below and are located at the S3 prefix - * "s3://aggregation-service-testing/$KOKORO_BUILD_ID/test-inputs": - * - * <ul>
- *   <li>Multiple domain files for version 0.1 of reports. Each file follows the naming format - * 10k_test_domain_${number}.avro. Each test below that relies on version 0.1 reports is - * supposed to use one of the above domain files each that matches the report input.
- *   <li>One domain file for reports in version 0.1 with debug mode enabled on the reports. The file - * follows the naming format 10k_test_domain_debug.avro
- * </ul> - * - * The expected distribution file used can be found at - * "s3://aggregation-service-testing/testdata/1m_staging_1_integer_buckets.txt" - * - * <p>The resulting output files are: - * - * <ul>
- *   <li>"s3://aggregation-service-testing/$KOKORO_BUILD_ID/test-outputs/10k_test_output_${number}.avro"
- *   <li>"s3://aggregation-service-testing/$KOKORO_BUILD_ID/test-outputs/10k_test_output_debug_nodebug.avro"
- *   <li>"s3://aggregation-service-testing/$KOKORO_BUILD_ID/test-outputs/10k_test_output_nodebug_nodebug.avro"
- *   <li>"s3://aggregation-service-testing/$KOKORO_BUILD_ID/test-outputs/10k_test_output_debug_debug.avro"
- * </ul> - * - * See the definition of continuous_smoke_test() in //kokoro/gcp_ubuntu/shared_e2e.sh for how these - * are generated. + *
The expected input files are located at the S3 prefix + * "s3://aggregation-service-testing/$KOKORO_BUILD_ID/test-inputs". We generate multiple input + * report files because everytime a report file is used, the privacy budget associated with it is + * consumed, and it cannot be reused. Please see the definition of continuous_smoke_test() in + * //kokoro/gcp_ubuntu/shared_e2e.sh for how these are generated. */ @RunWith(JUnit4.class) public class AwsWorkerContinuousSmokeTest { - @Rule - public final Acai acai = new Acai(TestEnv.class); - @Rule - public final TestName name = new TestName(); + @Rule public final Acai acai = new Acai(TestEnv.class); + @Rule public final TestName name = new TestName(); private static final Duration COMPLETION_TIMEOUT = Duration.of(10, ChronoUnit.MINUTES); @@ -119,12 +86,9 @@ public class AwsWorkerContinuousSmokeTest { private static final Logger logger = LoggerFactory.getLogger(AwsWorkerContinuousSmokeTest.class); - @Inject - S3BlobStorageClient s3BlobStorageClient; - @Inject - AvroResultsFileReader avroResultsFileReader; - @Inject - private AvroDebugResultsReaderFactory readerFactory; + @Inject S3BlobStorageClient s3BlobStorageClient; + @Inject AvroResultsFileReader avroResultsFileReader; + @Inject private AvroDebugResultsReaderFactory readerFactory; @Before public void checkBuildEnv() { @@ -149,10 +113,10 @@ private static boolean runMultiOutputShardTest() { } /* - Starts with a createJob request to API gateway with the test inputs pre-uploaded in s3 - bucket. Ends by calling getJob API to retrieve result information. Assertions are made on - result status (SUCCESS) and result avro (not empty) which sits in the testing bucket. - */ + Starts with a createJob request to API gateway with the test inputs pre-uploaded in s3 + bucket. Ends by calling getJob API to retrieve result information. Assertions are made on + result status (SUCCESS) and result avro (not empty) which sits in the testing bucket. + */ @Test public void createJobE2ETest() throws Exception { var inputKey = @@ -166,7 +130,7 @@ public void createJobE2ETest() throws Exception { "%s/%s/test-outputs/10k_test_output.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -174,12 +138,7 @@ public void createJobE2ETest() throws Exception { /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); - JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); - - assertThat(result.get("result_info").get("return_code").asText()) - .isEqualTo(AggregationWorkerReturnCode.SUCCESS.name()); - assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) - .isTrue(); + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); // Read output avro from s3. ImmutableList aggregatedFacts = @@ -195,30 +154,30 @@ public void createJobE2ETest() throws Exception { } /** - * This test includes sending a non-debug job and aggregatable reports with debug mode enabled. + * This test includes sending a job with reporting site only. Verifies that jobs with only + * reporting site are successful. 
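+ *
+ * <p>As a rough sketch (bucket, key, and jobId values here are placeholders), the request is
+ * built with the reporting-site helper introduced further down in this change:
+ *
+ * <pre>{@code
+ * CreateJobRequest request =
+ *     AwsWorkerContinuousTestHelper.createJobRequestWithReportingSite(
+ *         inputBucket, inputKey, outputBucket, outputKey, jobId,
+ *         Optional.of(domainBucket), Optional.of(domainKey));
+ * // The job parameters are then expected to carry "reporting_site" (the REPORTING_SITE env var,
+ * // falling back to "https://fakeurl.com") instead of "attribution_report_to".
+ * }</pre>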
*/ @Test - public void createNotDebugJobE2EReportDebugEnabledTest() throws Exception { + public void createJobE2ETestWithReportingSite() throws Exception { var inputKey = String.format( - "%s/%s/test-inputs/10k_attribution_report_test_input_debug.avro", + "%s/%s/test-inputs/10k_test_input_reporting_site.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); var domainKey = String.format( - "%s/%s/test-inputs/10k_attribution_report_test_domain_debug.avro", + "%s/%s/test-inputs/10k_test_domain_reporting_site.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); var outputKey = String.format( - "%s/%s/test-outputs/10k_attribution_report_test_output_notDebugJob_debugEnabled.avro", + "%s/%s/test-outputs/10k_test_output_reporting_site.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithReportingSite( getTestDataBucket(), inputKey, getTestDataBucket(), outputKey, - /* debugRun= */ false, /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); @@ -237,42 +196,32 @@ public void createNotDebugJobE2EReportDebugEnabledTest() throws Exception { getTestDataBucket(), getOutputFileName(outputKey)); - // If the domainOptional is true, the aggregatedFact keys would be more than domain keys - // Otherwise, aggregatedFact keys would be equal to domain keys - // The "isAtLeast" assert is set here to accommodate both conditions. - assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); - // The debug file shouldn't exist because it's not debug run - assertThat( - AwsWorkerContinuousTestHelper.checkS3FileExists( - s3BlobStorageClient, getTestDataBucket(), getDebugFilePrefix(outputKey))) - .isFalse(); + assertThat(aggregatedFacts.size()).isGreaterThan(10); } /** - * This test includes sending a debug job and aggregatable reports with debug mode enabled. + * This test includes sending a job with reports from multiple reporting origins belonging to the + * same reporting site. Verifies that all the reports are processed successfully. 
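+ *
+ * <p>(Sketch of the intent: the reports under the "same-site/" input prefix are expected to carry
+ * different attribution_report_to origins, e.g. different subdomains of "https://fakeurl.com",
+ * all of which map to the single "reporting_site" job parameter, so one job can aggregate them
+ * together. The concrete origins used in the generated test data are not spelled out in this
+ * change.)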
*/ @Test - public void createDebugJobE2EReportDebugModeEnabledTest() throws Exception { + public void createJobE2ETestWithMultipleReportingOrigins() throws Exception { var inputKey = - String.format( - "%s/%s/test-inputs/10k_attribution_report_test_input_debug_enabled_nondebug_run.avro", - TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + String.format("%s/%s/test-inputs/same-site/", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); var domainKey = String.format( - "%s/%s/test-inputs/10k_attribution_report_test_domain_debug_enabled_nondebug_run.avro", + "%s/%s/test-inputs/10k_test_domain_multiple_origins_same_site.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); var outputKey = String.format( - "%s/%s/test-outputs/10k_attribution_report_test_output_DebugJob_debugEnabled.avro", + "%s/%s/test-outputs/10k_test_output_multiple_origins_same_site.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithReportingSite( getTestDataBucket(), inputKey, getTestDataBucket(), outputKey, - /* debugRun= */ true, /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); @@ -291,46 +240,34 @@ public void createDebugJobE2EReportDebugModeEnabledTest() throws Exception { getTestDataBucket(), getOutputFileName(outputKey)); - // The "isAtLeast" assert is set here to accommodate domainOptional(True/False) conditions. - assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); - - // Read debug results avro from s3. - ImmutableList aggregatedDebugFacts = - readDebugResultsFromS3( - s3BlobStorageClient, - readerFactory, - getTestDataBucket(), - getOutputFileName(getDebugFilePrefix(outputKey))); - - // Debug facts count should be greater than or equal to the summary facts count because some - // keys are filtered out due to thresholding or not in domain. - assertThat(aggregatedDebugFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); + assertThat(aggregatedFacts.size()).isGreaterThan(10); } /** - * This test includes sending a debug job and aggregatable reports with debug mode disabled. Uses - * the same data as the normal e2e test. + * This test includes sending a job with reports from multiple reporting origins belonging to + * different reporting sites. It is expected that the 5k reports with a different reporting site + * will fail and come up in the error counts. 
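+ *
+ * <p>Concretely, the assertions below expect return code SUCCESS_WITH_ERRORS and a single error
+ * summary entry of category REPORTING_SITE_MISMATCH with a count of 5000, i.e. only the reports
+ * whose origin maps to a different reporting site are rejected while the rest aggregate normally.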
*/ @Test - public void createDebugJobE2EReportDebugModeDisabledTest() throws Exception { + public void createJobE2ETestWithSomeReportsHavingDifferentReportingOrigins() throws Exception { var inputKey = String.format( - "%s/%s/test-inputs/10k_test_input_2.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + "%s/%s/test-inputs/different-site/", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); var domainKey = String.format( - "%s/%s/test-inputs/10k_test_domain_2.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + "%s/%s/test-inputs/10k_test_domain_multiple_origins_different_site.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); var outputKey = String.format( - "%s/%s/test-outputs/10k_test_output_DebugJob_debugDisabled.avro", + "%s/%s/test-outputs/10k_test_output_multiple_origins_different_site.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithReportingSite( getTestDataBucket(), inputKey, getTestDataBucket(), outputKey, - /* debugRun= */ true, /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); @@ -339,32 +276,111 @@ public void createDebugJobE2EReportDebugModeDisabledTest() throws Exception { assertThat(result.get("result_info").get("return_code").asText()) .isEqualTo(AggregationWorkerReturnCode.SUCCESS_WITH_ERRORS.name()); assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("count") - .asInt()) - .isEqualTo(10000); + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("count") + .asInt()) + .isEqualTo(5000); assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("category") - .asText()) - .isEqualTo(ErrorCounter.DEBUG_NOT_ENABLED.name()); + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("category") + .asText()) + .isEqualTo(ErrorCounter.REPORTING_SITE_MISMATCH.name()); + + // Read output avro from s3. + ImmutableList aggregatedFacts = + readResultsFromS3( + s3BlobStorageClient, + avroResultsFileReader, + getTestDataBucket(), + getOutputFileName(outputKey)); + + assertThat(aggregatedFacts.size()).isGreaterThan(10); + } + + /** + * This test includes sending a non-debug job and aggregatable reports with debug mode enabled. 
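+ *
+ * <p>Because the job itself is not a debug run, no debug output is expected: besides checking the
+ * summary facts, the test asserts that nothing exists at the debug output prefix in S3.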
+ */ + @Test + public void createNotDebugJobE2EReportDebugEnabledTest() throws Exception { + var inputKey = + String.format( + "%s/%s/test-inputs/10k_attribution_report_test_input_debug.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var domainKey = + String.format( + "%s/%s/test-inputs/10k_attribution_report_test_domain_debug.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var outputKey = + String.format( + "%s/%s/test-outputs/10k_attribution_report_test_output_notDebugJob_debugEnabled.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + + CreateJobRequest createJobRequest = + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputKey, + getTestDataBucket(), + outputKey, + /* debugRun= */ false, + /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainKey)); + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); + + // Read output avro from s3. + ImmutableList aggregatedFacts = + readResultsFromS3( + s3BlobStorageClient, + avroResultsFileReader, + getTestDataBucket(), + getOutputFileName(outputKey)); + + // If the domainOptional is true, the aggregatedFact keys would be more than domain keys + // Otherwise, aggregatedFact keys would be equal to domain keys + // The "isAtLeast" assert is set here to accommodate both conditions. + assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); + // The debug file shouldn't exist because it's not debug run assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("description") - .asText()) - .isEqualTo(ErrorCounter.DEBUG_NOT_ENABLED.getDescription()); + AwsWorkerContinuousTestHelper.checkS3FileExists( + s3BlobStorageClient, getTestDataBucket(), getDebugFilePrefix(outputKey))) + .isFalse(); + } + + /** This test includes sending a debug job and aggregatable reports with debug mode enabled. */ + @Test + public void createDebugJobE2EReportDebugModeEnabledTest() throws Exception { + var inputKey = + String.format( + "%s/%s/test-inputs/10k_attribution_report_test_input_debug_enabled_nondebug_run.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var domainKey = + String.format( + "%s/%s/test-inputs/10k_attribution_report_test_domain_debug_enabled_nondebug_run.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var outputKey = + String.format( + "%s/%s/test-outputs/10k_attribution_report_test_output_DebugJob_debugEnabled.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + + CreateJobRequest createJobRequest = + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputKey, + getTestDataBucket(), + outputKey, + /* debugRun= */ true, + /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainKey)); + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); // Read output avro from s3. ImmutableList aggregatedFacts = @@ -374,9 +390,10 @@ public void createDebugJobE2EReportDebugModeDisabledTest() throws Exception { getTestDataBucket(), getOutputFileName(outputKey)); - assertThat(aggregatedFacts.size()).isEqualTo(DEBUG_DOMAIN_KEY_SIZE); + // The "isAtLeast" assert is set here to accommodate domainOptional(True/False) conditions. 
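+ // (In other words: with domainOptional enabled, keys seen only in reports may be added on top
+ // of the output domain, so the fact count can exceed DEBUG_DOMAIN_KEY_SIZE; without it, the
+ // count should equal the domain size.)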
+ assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); - // Read debug result from s3. + // Read debug results avro from s3. ImmutableList aggregatedDebugFacts = readDebugResultsFromS3( s3BlobStorageClient, @@ -384,12 +401,9 @@ public void createDebugJobE2EReportDebugModeDisabledTest() throws Exception { getTestDataBucket(), getOutputFileName(getDebugFilePrefix(outputKey))); - // Only contains keys in domain because all reports are filtered out. - assertThat(aggregatedDebugFacts.size()).isEqualTo(DEBUG_DOMAIN_KEY_SIZE); - // The unnoisedMetric of aggregatedDebugFacts should be 0 for all keys because - // all reports are filtered out. - // Noised metric in both debug reports and summary reports should be noise value instead of 0. - aggregatedDebugFacts.forEach(fact -> assertThat(fact.unnoisedMetric().get()).isEqualTo(0)); + // Debug facts count should be greater than or equal to the summary facts count because some + // keys are filtered out due to thresholding or not in domain. + assertThat(aggregatedDebugFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); } @Test @@ -407,7 +421,7 @@ public void aggregate_withDebugReportsInNonDebugMode_errorsExceedsThreshold_quit TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -416,44 +430,92 @@ public void aggregate_withDebugReportsInNonDebugMode_errorsExceedsThreshold_quit /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey), - /* reportErrorThresholdPercentage= */ 10); + /* reportErrorThresholdPercentage= */ 10, + /* inputReportCount= */ Optional.of(10000L)); JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); assertThat(result.get("result_info").get("return_code").asText()) .isEqualTo(AggregationWorkerReturnCode.REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD.name()); - assertThat( + // Due to parallel aggregation, the processing may stop a little over the threshold. + // So, asserting below that the processing stopped somewhere above the threshold but before all + // the 10K reports are processed. + int erroringReportCount = result .get("result_info") .get("error_summary") .get("error_counts") .get(0) .get("count") - .asInt()) - .isAtLeast(1000); + .asInt(); + assertThat(erroringReportCount).isAtLeast(1000); + assertThat(erroringReportCount).isLessThan(10000); assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("category") - .asText()) + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("category") + .asText()) .isEqualTo(ErrorCounter.DEBUG_NOT_ENABLED.name()); assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("description") - .asText()) + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("description") + .asText()) .isEqualTo(ErrorCounter.DEBUG_NOT_ENABLED.getDescription()); assertThat( - AwsWorkerContinuousTestHelper.checkS3FileExists( - s3BlobStorageClient, getTestDataBucket(), outputKey)) + AwsWorkerContinuousTestHelper.checkS3FileExists( + s3BlobStorageClient, getTestDataBucket(), outputKey)) .isFalse(); } + /** + * End to end test for the Aggregate Reporting Debug API. 
10k attribution-reporting-debug + * type reports are provided for aggregation. + */ + @Test + public void createJobE2EAggregateReportingDebugTest() throws Exception { + var inputKey = + String.format( + "%s/%s/test-inputs/10k_test_input_attribution_debug.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var domainKey = + String.format( + "%s/%s/test-inputs/10k_test_domain_attribution_debug.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var outputKey = + String.format( + "%s/%s/test-outputs/10k_test_output_attribution_debug.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + + CreateJobRequest createJobRequest = + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputKey, + getTestDataBucket(), + outputKey, + /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainKey)); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + ImmutableList aggregatedFacts = + readResultsFromS3( + s3BlobStorageClient, + avroResultsFileReader, + getTestDataBucket(), + getOutputFileName(outputKey)); + + assertThat(result.get("result_info").get("return_code").asText()) + .isEqualTo(AggregationWorkerReturnCode.SUCCESS.name()); + assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) + .isTrue(); + assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); + } + @Test public void createJobE2ETestPrivacyBudgetExhausted() throws Exception { // End to end testing: @@ -474,7 +536,7 @@ public void createJobE2ETestPrivacyBudgetExhausted() throws Exception { "%s/%s/test-outputs/10k_test_output.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest1 = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -482,12 +544,7 @@ public void createJobE2ETestPrivacyBudgetExhausted() throws Exception { /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName() + "_request_1", /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); - JsonNode result = submitJobAndWaitForResult(createJobRequest1, COMPLETION_TIMEOUT); - - assertThat(result.get("result_info").get("return_code").asText()) - .isEqualTo(AggregationWorkerReturnCode.SUCCESS.name()); - assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) - .isTrue(); + assertResponseForCode(createJobRequest1, AggregationWorkerReturnCode.SUCCESS); CreateJobRequest createJobRequest2 = createJobRequest1.toBuilder() @@ -495,7 +552,7 @@ public void createJobE2ETestPrivacyBudgetExhausted() throws Exception { getClass().getSimpleName() + "::" + name.getMethodName() + "_request_2") .build(); - result = submitJobAndWaitForResult(createJobRequest2, COMPLETION_TIMEOUT); + JsonNode result = submitJobAndWaitForResult(createJobRequest2, COMPLETION_TIMEOUT); assertThat(result.get("result_info").get("return_code").asText()) .isEqualTo(PRIVACY_BUDGET_EXHAUSTED.name()); @@ -530,7 +587,7 @@ public void createJobE2EFledgeTest() throws Exception { TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ 
-538,12 +595,7 @@ public void createJobE2EFledgeTest() throws Exception { /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); - JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); - - assertThat(result.get("result_info").get("return_code").asText()) - .isEqualTo(AggregationWorkerReturnCode.SUCCESS.name()); - assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) - .isTrue(); + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); // Read output avro from s3. ImmutableList aggregatedFacts = @@ -584,7 +636,7 @@ public void createJobE2ESharedStorageTest() throws Exception { TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -592,12 +644,7 @@ public void createJobE2ESharedStorageTest() throws Exception { /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); - JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); - - assertThat(result.get("result_info").get("return_code").asText()) - .isEqualTo(AggregationWorkerReturnCode.SUCCESS.name()); - assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) - .isTrue(); + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); // Read output avro from s3. ImmutableList aggregatedFacts = @@ -627,7 +674,7 @@ public void createDebugJobE2ETestPrivacyBudgetExhausted() throws Exception { TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest1 = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -636,19 +683,14 @@ public void createDebugJobE2ETestPrivacyBudgetExhausted() throws Exception { /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName() + "_request_1", /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); - JsonNode result = submitJobAndWaitForResult(createJobRequest1, COMPLETION_TIMEOUT); - - assertThat(result.get("result_info").get("return_code").asText()) - .isEqualTo(DEBUG_SUCCESS_WITH_PRIVACY_BUDGET_EXHAUSTED.name()); - assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) - .isTrue(); + assertResponseForCode(createJobRequest1, DEBUG_SUCCESS_WITH_PRIVACY_BUDGET_EXHAUSTED); CreateJobRequest createJobRequest2 = createJobRequest1.toBuilder() .setJobRequestId( getClass().getSimpleName() + "::" + name.getMethodName() + "_request_2") .build(); - result = submitJobAndWaitForResult(createJobRequest2, COMPLETION_TIMEOUT); + JsonNode result = submitJobAndWaitForResult(createJobRequest2, COMPLETION_TIMEOUT); assertThat(result.get("result_info").get("return_code").asText()) .isEqualTo(DEBUG_SUCCESS_WITH_PRIVACY_BUDGET_EXHAUSTED.name()); @@ -657,11 +699,11 @@ public void createDebugJobE2ETestPrivacyBudgetExhausted() throws Exception { } /* - Starts with a createJob request to API gateway with the test inputs pre-uploaded to s3 - bucket. 
Ends by calling getJob API to retrieve result information. Assertions are made on - result status (SUCCESS) and result avro (not empty) which sits in the testing bucket. - The output files must be sharded into 3 separate files. - */ + Starts with a createJob request to API gateway with the test inputs pre-uploaded to s3 + bucket. Ends by calling getJob API to retrieve result information. Assertions are made on + result status (SUCCESS) and result avro (not empty) which sits in the testing bucket. + The output files must be sharded into 3 separate files. + */ @Test public void createJobE2ETestWithMultiOutputShard() throws Exception { // Skip this test for the case where build parameter cannot be set (ex> release image) @@ -681,7 +723,7 @@ public void createJobE2ETestWithMultiOutputShard() throws Exception { "%s/%s/test-outputs/30k_test_output.avro", TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -689,12 +731,7 @@ public void createJobE2ETestWithMultiOutputShard() throws Exception { /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), /* outputDomainPrefix= */ Optional.of(domainKey)); - JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); - - assertThat(result.get("result_info").get("return_code").asText()) - .isEqualTo(AggregationWorkerReturnCode.SUCCESS.name()); - assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) - .isTrue(); + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); // Read output avro from s3. ImmutableList aggregatedFactsInShard1 = @@ -714,6 +751,151 @@ public void createJobE2ETestWithMultiOutputShard() throws Exception { assertThat(aggregatedFactsInShard2.size()).isGreaterThan(14000); } + /** + * End to end test for aggregating 10k reports with invalid key. Tests if the exception caching + * works. This test would fail without exception caching due to timeout. + */ + @Test + public void createJobE2ETestWithInvalidReports() throws Exception { + var inputKey = + String.format( + "%s/%s/test-inputs/10k_test_input_invalid_key.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var domainKey = + String.format( + "%s/%s/test-inputs/10k_test_domain_invalid_key.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + var outputKey = + String.format( + "%s/%s/test-outputs/10k_test_output_invalid_key.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + + CreateJobRequest createJobRequest = + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputKey, + getTestDataBucket(), + outputKey, + /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName(), + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainKey)); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + + // The job should be completed before the completion timeout. 
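+ // (Rationale, per the javadoc above: with exception caching, repeated decryption failures for
+ // the invalid key are served from the cache rather than being recomputed for each of the 10k
+ // reports, so the job is expected to finish well within COMPLETION_TIMEOUT.)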
+ assertThat(result.get("job_status").asText()).isEqualTo("FINISHED"); + assertThat(result.get("result_info").get("return_code").asText()) + .isEqualTo(AggregationWorkerReturnCode.REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD.name()); + } + + @Test + public void createJob_withFilteringId() throws Exception { + // This tests depends on the continued usage of CONSTANT_NOISING when building the worker image. + // The Constant Noising adds 0 noise enabling the testing of the contribution filtering. + + // The source data from which the input reports are generated has 50k reports with 50k unique + // contribution ids. These are divided equally among 5 ids [0, 5, 65536, 4294967296, 18446744073709551615]. + // Filtering on any one of these ids should have all except 10k contribution keys filtered out. + + String inputKey = + String.format( + "%s/%s/test-inputs/50k_test_input_filtering_ids.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + String domainKey = + String.format( + "%s/%s/test-inputs/50k_test_domain_filtering_ids.avro", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + String outputKeyPrefix = + String.format( + "%s/%s/test-outputs/50k_test_output_filtering_ids", + TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); + String outputKey = outputKeyPrefix + ".avro"; + + @Var Set filteringIds = ImmutableSet.of(); + @Var + CreateJobRequest createJobRequest = + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputKey, + getTestDataBucket(), + outputKey, + /* debugRun= */ false, + /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName() + "::1", + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainKey), + 2, + Optional.of(50000L), + filteringIds); + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); + @Var + ImmutableList aggregatedFacts = + AwsWorkerContinuousTestHelper.readResultsFromMultipleFiles( + s3BlobStorageClient, avroResultsFileReader, getTestDataBucket(), outputKeyPrefix); + // assert that aggregated facts count is at least equal to number of domain keys + assertThat(aggregatedFacts.size()).isAtLeast(50000); + // Filtering Id = 0 filters out all contributions except 10000 keys. + assertThat( + aggregatedFacts.stream() + .filter(aggregatedFact -> aggregatedFact.getMetric() > 0) + .count()) + .isAtLeast(10000); + + filteringIds = + ImmutableSet.of(UnsignedLong.valueOf("18446744073709551615"), UnsignedLong.valueOf(65536)); + createJobRequest = + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputKey, + getTestDataBucket(), + outputKey, + /* debugRun= */ false, + /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName() + "::2", + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainKey), + 2, + Optional.of(50000L), + filteringIds); + // Privacy Budget is not exhausted for the same data because different filtering Ids are used. + assertResponseForCode(createJobRequest, AggregationWorkerReturnCode.SUCCESS); + aggregatedFacts = + AwsWorkerContinuousTestHelper.readResultsFromMultipleFiles( + s3BlobStorageClient, avroResultsFileReader, getTestDataBucket(), outputKeyPrefix); + // assert that aggregated facts count is at least equal to number of domain keys + assertThat(aggregatedFacts.size()).isAtLeast(50000); + // Filtering Id = 65536 & 18446744073709551615 filters out all contributions except 20000 keys. 
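+ // (Arithmetic: 50k contributions split evenly across 5 filtering ids gives 10k keys per id, so
+ // filtering on the two ids above should leave roughly 2 * 10,000 = 20,000 keys with non-zero
+ // metrics, which the isAtLeast(20000) assertion below checks.)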
+ assertThat( + aggregatedFacts.stream() + .filter(aggregatedFact -> aggregatedFact.getMetric() > 0) + .count()) + .isAtLeast(20000); + + filteringIds = ImmutableSet.of(UnsignedLong.valueOf(5), UnsignedLong.ZERO); + createJobRequest = + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputKey, + getTestDataBucket(), + outputKey, + /* debugRun= */ false, + /* jobId= */ getClass().getSimpleName() + "::" + name.getMethodName() + "::3", + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainKey), + 2, + Optional.of(50000L), + filteringIds); + // Privacy Budget is exhausted for the same data and the same filtering ids. + assertResponseForCode(createJobRequest, PRIVACY_BUDGET_EXHAUSTED); + } + + private static void assertResponseForCode( + CreateJobRequest createJobRequest, AggregationWorkerReturnCode returnCode) + throws IOException, InterruptedException { + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + + assertThat(result.get("result_info").get("return_code").asText()).isEqualTo(returnCode.name()); + assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) + .isTrue(); + } + private static class TestEnv extends AbstractModule { @Override @@ -725,9 +907,7 @@ protected void configure() { .httpClient(UrlConnectionHttpClient.builder().build()) .build()); bind(S3AsyncClient.class) - .toInstance( - S3AsyncClient.builder() - .region(AWS_S3_BUCKET_REGION).build()); + .toInstance(S3AsyncClient.builder().region(AWS_S3_BUCKET_REGION).build()); bind(Boolean.class).annotatedWith(S3UsePartialRequests.class).toInstance(false); bind(Integer.class).annotatedWith(PartialRequestBufferSize.class).toInstance(20); } diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTestChromeReports.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTestChromeReports.java index 9cbdaf28..35e7c764 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTestChromeReports.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousSmokeTestChromeReports.java @@ -100,7 +100,7 @@ public void createJobE2ETest() throws Exception { // Create the job and wait for the result CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( inputBucket, inputKey, outputBucket, @@ -148,7 +148,7 @@ public void createDebugJobE2ETest() throws Exception { // Create the job and wait for the result CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( inputBucket, inputKey, outputBucket, @@ -212,9 +212,7 @@ protected void configure() { .httpClient(UrlConnectionHttpClient.builder().build()) .build()); bind(S3AsyncClient.class) - .toInstance( - S3AsyncClient.builder() - .region(AWS_S3_BUCKET_REGION).build()); + .toInstance(S3AsyncClient.builder().region(AWS_S3_BUCKET_REGION).build()); bind(Boolean.class).annotatedWith(S3UsePartialRequests.class).toInstance(false); bind(Integer.class).annotatedWith(PartialRequestBufferSize.class).toInstance(20); } diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousTestHelper.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousTestHelper.java index e7f730ea..44b8a004 100644 --- 
a/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousTestHelper.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerContinuousTestHelper.java @@ -22,12 +22,15 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.aggregate.adtech.worker.model.AggregatedFact; import com.google.aggregate.adtech.worker.testing.AvroResultsFileReader; +import com.google.aggregate.adtech.worker.util.JobUtils; import com.google.aggregate.protocol.avro.AvroDebugResultsReader; import com.google.aggregate.protocol.avro.AvroDebugResultsReaderFactory; import com.google.aggregate.protocol.avro.AvroDebugResultsRecord; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.io.ByteStreams; +import com.google.common.primitives.UnsignedLong; import com.google.protobuf.util.JsonFormat; import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient.BlobStorageClientException; import com.google.scp.operator.cpio.blobstorageclient.aws.S3BlobStorageClient; @@ -47,6 +50,7 @@ import java.util.Iterator; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -73,13 +77,15 @@ public class AwsWorkerContinuousTestHelper { public static final Region AWS_API_GATEWAY_REGION = Region.US_EAST_1; public static final Region AWS_S3_BUCKET_REGION = Region.US_EAST_1; + + public static final String DEFAULT_ATTRIBUTION_REPORT_TO = "https://subdomain.fakeurl.com"; + public static final String DEFAULT_REPORTING_SITE = "https://fakeurl.com"; + + public static final String ENV_ATTRIBUTION_REPORT_TO = System.getenv("ATTRIBUTION_REPORT_TO"); + public static final String ENV_REPORTING_SITE = System.getenv("REPORTING_SITE"); public static final String FRONTEND_API = System.getenv("FRONTEND_API"); public static final String KOKORO_BUILD_ID = System.getenv("KOKORO_BUILD_ID"); - // The attribution_report_to in job params should be configurable because this needs to match - // allowed_principals_map in coordinator setting which would be different in different test - // environments. - public static final String ENV_ATTRIBUTION_REPORT_TO = System.getenv("ATTRIBUTION_REPORT_TO"); - public static final String DEFAULT_ATTRIBUTION_REPORT_TO = "foo.com"; + public static final String CREATE_JOB_URI_PATTERN = "https://%s.execute-api.us-east-1.amazonaws.com/%s/%s/createJob"; public static final String GET_JOB_URI_PATTERN = @@ -110,6 +116,13 @@ private static String getAttributionReportTo() { return DEFAULT_ATTRIBUTION_REPORT_TO; } + private static String getReportingSite() { + if (ENV_REPORTING_SITE != null) { + return ENV_REPORTING_SITE; + } + return DEFAULT_REPORTING_SITE; + } + /** Helper for extracting a bucket name from an S3 URI. 
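 * For example, for "s3://aggregation-service-testing/some/key.avro" this is expected to return
 * "aggregation-service-testing".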
*/ public static String getS3Bucket(String s3Uri) { return parseS3Uri(s3Uri).group("bucket"); @@ -133,7 +146,7 @@ public static String getOutputFileName(String outputKey, int shardId, int numSha : outputKey + outputSuffix; } - public static CreateJobRequest createJobRequest( + public static CreateJobRequest createJobRequestWithAttributionReportTo( String inputDataBlobBucket, String inputDataBlobPrefix, String outputDataBlobBucket, @@ -147,11 +160,43 @@ public static CreateJobRequest createJobRequest( outputDataBlobBucket, outputDataBlobPrefix, jobId) - .putAllJobParameters(getJobParams(false, outputDomainBucketName, outputDomainPrefix, 100)) + .putAllJobParameters( + getJobParamsWithAttributionReportTo( + false, + outputDomainBucketName, + outputDomainPrefix, + /* reportErrorThresholdPercentage= */ 0, + /* inputReportCount= */ Optional.empty(), + /* filteringIds= */ Optional.empty())) .build(); } - public static CreateJobRequest createJobRequest( + public static CreateJobRequest createJobRequestWithReportingSite( + String inputDataBlobBucket, + String inputDataBlobPrefix, + String outputDataBlobBucket, + String outputDataBlobPrefix, + String jobId, + Optional outputDomainBucketName, + Optional outputDomainPrefix) { + ImmutableMap jobParams = + getJobParamsWithReportingSite( + false, + outputDomainBucketName, + outputDomainPrefix, + /* reportErrorThresholdPercentage= */ 100, + /* inputReportCount= */ Optional.empty()); + return createDefaultJobRequestBuilder( + inputDataBlobBucket, + inputDataBlobPrefix, + outputDataBlobBucket, + outputDataBlobPrefix, + jobId) + .putAllJobParameters(jobParams) + .build(); + } + + public static CreateJobRequest createJobRequestWithAttributionReportTo( String inputDataBlobBucket, String inputDataBlobPrefix, String outputDataBlobBucket, @@ -167,11 +212,46 @@ public static CreateJobRequest createJobRequest( outputDataBlobPrefix, jobId) .putAllJobParameters( - getJobParams(debugRun, outputDomainBucketName, outputDomainPrefix, 100)) + getJobParamsWithAttributionReportTo( + debugRun, + outputDomainBucketName, + outputDomainPrefix, + /* reportErrorThresholdPercentage= */ 0, + /* inputReportCount= */ Optional.empty(), + /* filteringIds= */ Optional.empty())) + .build(); + } + + public static CreateJobRequest createJobRequestWithAttributionReportTo( + String inputDataBlobBucket, + String inputDataBlobPrefix, + String outputDataBlobBucket, + String outputDataBlobPrefix, + Boolean debugRun, + String jobId, + Optional outputDomainBucketName, + Optional outputDomainPrefix, + int reportErrorThresholdPercentage, + Optional inputReportCount, + Set filteringIds) { + return createDefaultJobRequestBuilder( + inputDataBlobBucket, + inputDataBlobPrefix, + outputDataBlobBucket, + outputDataBlobPrefix, + jobId) + .putAllJobParameters( + getJobParamsWithAttributionReportTo( + debugRun, + outputDomainBucketName, + outputDomainPrefix, + reportErrorThresholdPercentage, + inputReportCount, + Optional.of(filteringIds))) .build(); } - public static CreateJobRequest createJobRequest( + public static CreateJobRequest createJobRequestWithAttributionReportTo( String inputDataBlobBucket, String inputDataBlobPrefix, String outputDataBlobBucket, @@ -180,7 +260,8 @@ public static CreateJobRequest createJobRequest( String jobId, Optional outputDomainBucketName, Optional outputDomainPrefix, - int reportErrorThresholdPercentage) { + int reportErrorThresholdPercentage, + Optional inputReportCount) { return createDefaultJobRequestBuilder( inputDataBlobBucket, inputDataBlobPrefix, @@ -188,11 +269,13 
@@ public static CreateJobRequest createJobRequest( outputDataBlobPrefix, jobId) .putAllJobParameters( - getJobParams( + getJobParamsWithAttributionReportTo( debugRun, outputDomainBucketName, outputDomainPrefix, - reportErrorThresholdPercentage)) + reportErrorThresholdPercentage, + inputReportCount, + /* filteringIds= */ Optional.empty())) .build(); } @@ -219,17 +302,60 @@ private static CreateJobRequest.Builder createDefaultJobRequestBuilder( .putAllJobParameters(ImmutableMap.of()); } - private static ImmutableMap getJobParams( + private static ImmutableMap getJobParamsWithAttributionReportTo( Boolean debugRun, Optional outputDomainBucketName, Optional outputDomainPrefix, - int reportErrorThresholdPercentage) { + int reportErrorThresholdPercentage, + Optional inputReportCountOptional, + Optional> filteringIdsOptional) { ImmutableMap.Builder jobParams = ImmutableMap.builder(); jobParams.put("attribution_report_to", getAttributionReportTo()); if (debugRun) { jobParams.put("debug_run", "true"); } + inputReportCountOptional.ifPresent( + inputReportCount -> + jobParams.put(JobUtils.JOB_PARAM_INPUT_REPORT_COUNT, String.valueOf(inputReportCount))); + jobParams.put( + "report_error_threshold_percentage", String.valueOf(reportErrorThresholdPercentage)); + if (outputDomainPrefix.isPresent() && outputDomainBucketName.isPresent()) { + jobParams.put("output_domain_blob_prefix", outputDomainPrefix.get()); + jobParams.put("output_domain_bucket_name", outputDomainBucketName.get()); + } else if (!(outputDomainPrefix.isEmpty() && outputDomainBucketName.isEmpty())) { + throw new IllegalStateException( + "outputDomainPrefix and outputDomainBucketName must both be provided or both be empty."); + } + + if (filteringIdsOptional.isPresent()) { + Set filteringIds = filteringIdsOptional.get(); + jobParams.put( + JobUtils.JOB_PARAM_FILTERING_IDS, + String.join( + ",", + filteringIds.stream() + .map(id -> id.toString()) + .collect(ImmutableSet.toImmutableSet()))); + } + + return jobParams.build(); + } + + private static ImmutableMap getJobParamsWithReportingSite( + Boolean debugRun, + Optional outputDomainBucketName, + Optional outputDomainPrefix, + int reportErrorThresholdPercentage, + Optional inputReportCountOptional) { + ImmutableMap.Builder jobParams = ImmutableMap.builder(); + jobParams.put("reporting_site", getReportingSite()); + if (debugRun) { + jobParams.put("debug_run", "true"); + } + inputReportCountOptional.ifPresent( + inputReportCount -> + jobParams.put(JobUtils.JOB_PARAM_INPUT_REPORT_COUNT, String.valueOf(inputReportCount))); jobParams.put( "report_error_threshold_percentage", String.valueOf(reportErrorThresholdPercentage)); if (outputDomainPrefix.isPresent() && outputDomainBucketName.isPresent()) { @@ -342,19 +468,19 @@ public static JsonNode getJobResult(String uri) throws IOException { return callGetJobAPI(uri); } + /** Returns a list of AggregatedFact from the file matching the bucket and key. 
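+ * The blob is copied to a local temporary Avro file and parsed with the supplied
+ * AvroResultsFileReader, as the body below shows.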
*/ public static ImmutableList readResultsFromS3( S3BlobStorageClient s3BlobStorageClient, AvroResultsFileReader avroResultsFileReader, - String outputBucket, - String outputPrefix) - throws Exception { + String bucket, + String key) + throws BlobStorageClientException, IOException { Path tempResultFile = Files.createTempFile(/* prefix= */ "results", /* suffix= */ "avro"); try (InputStream resultStream = s3BlobStorageClient.getBlob( - DataLocation.ofBlobStoreDataLocation( - BlobStoreDataLocation.create(outputBucket, outputPrefix))); + DataLocation.ofBlobStoreDataLocation(BlobStoreDataLocation.create(bucket, key))); OutputStream outputStream = Files.newOutputStream(tempResultFile)) { ByteStreams.copy(resultStream, outputStream); outputStream.flush(); @@ -365,6 +491,26 @@ public static ImmutableList readResultsFromS3( return facts; } + /** Returns a list of AggregatedFacts from a list of files matching the bucket and prefix. */ + public static ImmutableList readResultsFromMultipleFiles( + S3BlobStorageClient s3BlobStorageClient, + AvroResultsFileReader avroResultsFileReader, + String bucket, + String prefix) + throws BlobStorageClientException, IOException { + BlobStoreDataLocation blobsPrefixLocation = BlobStoreDataLocation.create(bucket, prefix); + DataLocation prefixLocation = DataLocation.ofBlobStoreDataLocation(blobsPrefixLocation); + ImmutableList shardBlobs = s3BlobStorageClient.listBlobs(prefixLocation); + + ImmutableList.Builder aggregatedFactBuilder = ImmutableList.builder(); + for (String shard : shardBlobs) { + aggregatedFactBuilder.addAll( + readResultsFromS3( + s3BlobStorageClient, avroResultsFileReader, blobsPrefixLocation.bucket(), shard)); + } + return aggregatedFactBuilder.build(); + } + private static AvroDebugResultsReader getReader( AvroDebugResultsReaderFactory readerFactory, Path avroFile) throws Exception { return readerFactory.create(Files.newInputStream(avroFile)); diff --git a/javatests/com/google/aggregate/adtech/worker/AwsWorkerPerformanceRegressionTest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerPerformanceRegressionTest.java index 6b997b75..6b477aec 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerPerformanceRegressionTest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerPerformanceRegressionTest.java @@ -98,7 +98,7 @@ public void aggregateARA500kTransient() throws Exception { "test-data/%s/test-outputs/500k_report_%s_500k_domain_output.avro", KOKORO_BUILD_ID, i); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -141,7 +141,7 @@ public void aggregateARA500kReports500kDomainWarmup() throws Exception { "test-data/%s/test-outputs/500k_report_%s_500k_domain_warmup_output.avro", KOKORO_BUILD_ID, i); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -194,7 +194,7 @@ public void aggregateARA500kReports500kDomainTransient() throws Exception { "test-data/%s/test-outputs/500k_report_%s_500k_domain_transient_output.avro", KOKORO_BUILD_ID, i); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), diff --git 
a/javatests/com/google/aggregate/adtech/worker/AwsWorkerPrivateAggregationAPITest.java b/javatests/com/google/aggregate/adtech/worker/AwsWorkerPrivateAggregationAPITest.java index 9169e9a7..8aa6fdff 100644 --- a/javatests/com/google/aggregate/adtech/worker/AwsWorkerPrivateAggregationAPITest.java +++ b/javatests/com/google/aggregate/adtech/worker/AwsWorkerPrivateAggregationAPITest.java @@ -53,10 +53,8 @@ @RunWith(JUnit4.class) public class AwsWorkerPrivateAggregationAPITest { - @Rule - public final Acai acai = new Acai(TestEnv.class); - @Rule - public final TestName name = new TestName(); + @Rule public final Acai acai = new Acai(TestEnv.class); + @Rule public final TestName name = new TestName(); private static final Duration COMPLETION_TIMEOUT = Duration.of(10, ChronoUnit.MINUTES); @@ -64,10 +62,8 @@ public class AwsWorkerPrivateAggregationAPITest { private static final String TEST_DATA_S3_KEY_PREFIX = "generated-test-data"; - @Inject - S3BlobStorageClient s3BlobStorageClient; - @Inject - AvroResultsFileReader avroResultsFileReader; + @Inject S3BlobStorageClient s3BlobStorageClient; + @Inject AvroResultsFileReader avroResultsFileReader; private static String getTestDataBucket() { if (System.getenv("TEST_DATA_BUCKET") != null) { @@ -111,7 +107,7 @@ public void createJobE2EProtectedAudienceTest() throws Exception { TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -123,7 +119,7 @@ public void createJobE2EProtectedAudienceTest() throws Exception { /* Debug job */ CreateJobRequest createDebugJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKeyDebug, getTestDataBucket(), @@ -204,7 +200,7 @@ public void createJobE2ESharedStorageTest() throws Exception { TEST_DATA_S3_KEY_PREFIX, KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -216,7 +212,7 @@ public void createJobE2ESharedStorageTest() throws Exception { /* Debug job */ CreateJobRequest createDebugJobRequest = - AwsWorkerContinuousTestHelper.createJobRequest( + AwsWorkerContinuousTestHelper.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKeyDebug, getTestDataBucket(), @@ -273,9 +269,7 @@ protected void configure() { .httpClient(UrlConnectionHttpClient.builder().build()) .build()); bind(S3AsyncClient.class) - .toInstance( - S3AsyncClient.builder() - .region(AWS_S3_BUCKET_REGION).build()); + .toInstance(S3AsyncClient.builder().region(AWS_S3_BUCKET_REGION).build()); bind(Boolean.class).annotatedWith(S3UsePartialRequests.class).toInstance(false); bind(Integer.class).annotatedWith(PartialRequestBufferSize.class).toInstance(20); } diff --git a/javatests/com/google/aggregate/adtech/worker/BUILD b/javatests/com/google/aggregate/adtech/worker/BUILD index 0bae4339..1898e2d4 100644 --- a/javatests/com/google/aggregate/adtech/worker/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/BUILD @@ -348,8 +348,10 @@ java_library( srcs = ["AwsWorkerContinuousTestHelper.java"], javacopts = ["-Xep:Var"], deps = [ + "//java/com/google/aggregate/adtech/worker/exceptions", "//java/com/google/aggregate/adtech/worker/model", 
"//java/com/google/aggregate/adtech/worker/testing:avro_results_file_reader", + "//java/com/google/aggregate/adtech/worker/util", "//java/com/google/aggregate/protocol/avro:avro_debug_results", "//java/com/google/aggregate/protocol/avro:avro_debug_results_schema_supplier", "//java/com/google/aggregate/protocol/avro:avro_results_schema_supplier", @@ -446,14 +448,12 @@ java_test( "//java/com/google/aggregate/adtech/worker", "//java/com/google/aggregate/adtech/worker/exceptions", "//java/com/google/aggregate/adtech/worker/model", - "//java/com/google/aggregate/adtech/worker/testing:avro_reports_file_reader", "//java/com/google/aggregate/adtech/worker/testing:avro_results_file_reader", "//java/com/google/aggregate/adtech/worker/util", "//java/com/google/aggregate/adtech/worker/writer", "//java/com/google/aggregate/adtech/worker/writer/avro", "//java/com/google/aggregate/protocol/avro:avro_debug_results", "//java/com/google/aggregate/protocol/avro:avro_debug_results_schema_supplier", - "//java/com/google/aggregate/protocol/avro:avro_report", "//java/com/google/aggregate/protocol/avro:avro_results_schema_supplier", "//java/external:acai", "//java/external:clients_blobstorageclient_aws", @@ -568,6 +568,7 @@ java_library( deps = [ "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/adtech/worker/testing:avro_results_file_reader", + "//java/com/google/aggregate/adtech/worker/util", "//java/com/google/aggregate/protocol/avro:avro_debug_results", "//java/com/google/aggregate/protocol/avro:avro_debug_results_schema_supplier", "//java/com/google/aggregate/protocol/avro:avro_results_schema_supplier", @@ -739,6 +740,47 @@ java_test( ], ) +java_test( + name = "GcpOTelTest", + timeout = "eternal", + srcs = ["GcpOTelTest.java"], + tags = ["manual"], + runtime_deps = [ + "//java/external:slf4j_simple", + ], + deps = [ + ":SmokeTestBase", + "//java/com/google/aggregate/adtech/worker", + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/adtech/worker/testing:avro_results_file_reader", + "//java/com/google/aggregate/adtech/worker/util", + "//java/com/google/aggregate/protocol/avro:avro_debug_results", + "//java/com/google/aggregate/protocol/avro:avro_results_schema_supplier", + "//java/external:acai", + "//java/external:apache_httpclient", + "//java/external:apache_httpcore", + "//java/external:clients_blobstorageclient", + "//java/external:clients_blobstorageclient_gcp", + "//java/external:clients_blobstorageclient_model", + "//java/external:frontend_java_proto", + "//java/external:gcp_monitoring", + "//java/external:gcp_proto_cloud_monitoring", + "//java/external:gcp_storage", + "//java/external:gcp_trace", + "//java/external:gcp_trace_proto", + "//java/external:google_auth_library_oauth2_http", + "//java/external:google_truth", + "//java/external:guava", + "//java/external:guice", + "//java/external:jackson_databind", + "//java/external:jackson_datatype_jdk8", + "//java/external:jackson_datatype_jsr310", + "//java/external:proto_gcp_common", + "//java/external:protobuf_java_util", + "//java/external:shared_model", + ], +) + java_test( name = "WorkerPullWorkServiceTest", srcs = ["WorkerPullWorkServiceTest.java"], @@ -750,6 +792,7 @@ java_test( "//java/com/google/aggregate/adtech/worker/selector", "//java/com/google/aggregate/adtech/worker/testing:fake_job_result_generator", "//java/com/google/aggregate/adtech/worker/testing:noop_job_processor", + "//java/com/google/aggregate/adtech/worker/util", "//java/com/google/aggregate/perf", 
"//java/com/google/aggregate/perf/export:no_op_exporter", "//java/external:acai", diff --git a/javatests/com/google/aggregate/adtech/worker/ErrorSummaryAggregatorTest.java b/javatests/com/google/aggregate/adtech/worker/ErrorSummaryAggregatorTest.java index e60988b1..d7c378b7 100644 --- a/javatests/com/google/aggregate/adtech/worker/ErrorSummaryAggregatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/ErrorSummaryAggregatorTest.java @@ -47,7 +47,9 @@ public class ErrorSummaryAggregatorTest { private static final DecryptionValidationResult NO_ERROR_RESULTS = DecryptionValidationResult.builder() - .setReport(FakeReportGenerator.generateWithParam(0, /* reportVersion */ LATEST_VERSION)) + .setReport( + FakeReportGenerator.generateWithParam( + 0, /* reportVersion */ LATEST_VERSION, "https://foo.com")) .build(); private static final ImmutableList DECRYPTION_VALIDATION_RESULTS = @@ -173,6 +175,23 @@ public void countsAboveThreshold_withCountProvided_withinThreshold() { assertThat(aggregator.countsAboveThreshold()).isFalse(); } + @Test + public void countsAboveThreshold_withNoCountProvided_withThresholdZero_exceedsThreshold() { + ImmutableList decryptionValidationResults = + ImmutableList.of( + generateResult(ErrorCounter.DECRYPTION_ERROR), + generateResult(ErrorCounter.ATTRIBUTION_REPORT_TO_MISMATCH), + generateResult(ErrorCounter.ATTRIBUTION_REPORT_TO_MISMATCH), + NO_ERROR_RESULTS, + NO_ERROR_RESULTS, + NO_ERROR_RESULTS); + ErrorSummaryAggregator aggregator = + ErrorSummaryAggregator.createErrorSummaryAggregator(Optional.empty(), 0); + decryptionValidationResults.forEach(aggregator::add); + + assertThat(aggregator.countsAboveThreshold()).isTrue(); + } + @Test public void countsAboveThreshold_withNoCountProvided_exceedsThreshold() { ImmutableList decryptionValidationResults = @@ -217,8 +236,7 @@ private static ErrorCount generateExpectedResult(ErrorCounter error, Long count) private static DecryptionValidationResult generateResult(ErrorCounter error) { return DecryptionValidationResult.builder() - .addErrorMessage( - ErrorMessage.builder().setCategory(error).setDetailedErrorMessage("foo").build()) + .addErrorMessage(ErrorMessage.builder().setCategory(error).build()) .build(); } diff --git a/javatests/com/google/aggregate/adtech/worker/GcpOTelTest.java b/javatests/com/google/aggregate/adtech/worker/GcpOTelTest.java new file mode 100644 index 00000000..d3082a47 --- /dev/null +++ b/javatests/com/google/aggregate/adtech/worker/GcpOTelTest.java @@ -0,0 +1,258 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.adtech.worker; + +import static com.google.aggregate.adtech.worker.SmokeTestBase.KOKORO_BUILD_ID; +import static com.google.aggregate.adtech.worker.SmokeTestBase.checkJobExecutionResult; +import static com.google.aggregate.adtech.worker.SmokeTestBase.getTestDataBucket; +import static com.google.aggregate.adtech.worker.SmokeTestBase.getTestProjectId; +import static com.google.aggregate.adtech.worker.SmokeTestBase.getTestServiceAccount; +import static com.google.aggregate.adtech.worker.SmokeTestBase.readResultsFromCloud; +import static com.google.aggregate.adtech.worker.SmokeTestBase.submitJobAndWaitForResult; +import static com.google.common.truth.Truth.assertThat; +import static com.google.scp.operator.protos.frontend.api.v1.ReturnCodeProto.ReturnCode.SUCCESS; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.acai.Acai; +import com.google.aggregate.adtech.worker.model.AggregatedFact; +import com.google.aggregate.adtech.worker.testing.AvroResultsFileReader; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ImpersonatedCredentials; +import com.google.cloud.monitoring.v3.MetricServiceClient; +import com.google.cloud.monitoring.v3.MetricServiceClient.ListTimeSeriesPagedResponse; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; +import com.google.cloud.trace.v1.TraceServiceClient; +import com.google.common.collect.ImmutableList; +import com.google.devtools.cloudtrace.v1.ListTracesRequest; +import com.google.devtools.cloudtrace.v1.Trace; +import com.google.inject.AbstractModule; +import com.google.inject.Inject; +import com.google.monitoring.v3.ListTimeSeriesRequest; +import com.google.monitoring.v3.ListTimeSeriesRequest.TimeSeriesView; +import com.google.monitoring.v3.ProjectName; +import com.google.monitoring.v3.TimeInterval; +import com.google.monitoring.v3.TimeSeries; +import com.google.protobuf.util.Timestamps; +import com.google.scp.operator.cpio.blobstorageclient.gcp.GcsBlobStorageClient; +import com.google.scp.operator.protos.frontend.api.v1.CreateJobRequestProto.CreateJobRequest; +import java.io.IOException; +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Optional; +import org.junit.Before; +import org.junit.FixMethodOrder; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; +import org.junit.runners.MethodSorters; + +/** + * In GcpOTelTest, one job is submitted first to trigger metric and trace generation; the remaining + * tests then verify that the expected OTel metrics and traces appear in Cloud Monitoring and Cloud + * Trace. In the continuous environment, the prod binary is used for OTel, so only prod metrics are + * exported. FixMethodOrder is used on this class to ensure the job-submitting test runs first and + * generates the metrics and traces.
+ */ +@RunWith(JUnit4.class) +@FixMethodOrder(MethodSorters.NAME_ASCENDING) +public final class GcpOTelTest { + + @Rule public final Acai acai = new Acai(TestEnv.class); + + @Inject GcsBlobStorageClient gcsBlobStorageClient; + @Inject AvroResultsFileReader avroResultsFileReader; + public static final String OUTPUT_DATA_PREFIX_NAME = "-1-of-1"; + private static final Integer DEBUG_DOMAIN_KEY_SIZE = 1000; + private final String jobId = getClass().getSimpleName(); + + private static final ProjectName projectName = ProjectName.of("ps-msmt-aggserv-test"); + private static final String ENVIRONMENT_NAME = "continuous-mp"; + private static final Duration COMPLETION_TIMEOUT = Duration.of(10, ChronoUnit.MINUTES); + + @Before + public void checkBuildEnv() { + if (KOKORO_BUILD_ID == null) { + throw new IllegalStateException("KOKORO_BUILD_ID env var must be set."); + } + } + + /* + End-to-end test. Creates a job request for data already created in GCS. Asserts the job status and + the size of summary report facts. + */ + @Test + public void createJobE2ETest() throws Exception { + String inputDataPrefix = String.format("%s/test-inputs/otel_test_input.avro", KOKORO_BUILD_ID); + String domainDataPrefix = + String.format("%s/test-inputs/otel_test_domain.avro", KOKORO_BUILD_ID); + String outputDataPrefix = + String.format("%s/test-outputs/otel_test.avro.result", KOKORO_BUILD_ID); + + CreateJobRequest createJobRequest = + SmokeTestBase.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputDataPrefix, + getTestDataBucket(), + outputDataPrefix, + jobId, + Optional.of(getTestDataBucket()), + Optional.of(domainDataPrefix)); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + checkJobExecutionResult(result, SUCCESS.name(), 0); + + ImmutableList aggregatedFacts = + readResultsFromCloud( + gcsBlobStorageClient, + avroResultsFileReader, + getTestDataBucket(), + outputDataPrefix + OUTPUT_DATA_PREFIX_NAME); + assertThat(aggregatedFacts.size()).isEqualTo(DEBUG_DOMAIN_KEY_SIZE); + } + + @Test + public void e2eCPUMetricTest() throws IOException { + String metricName = "workload.googleapis.com/process.runtime.jvm.CPU.utilization"; + + ListTimeSeriesPagedResponse response = listAllMetrics(metricName); + int count = 0; + + for (TimeSeries ts : response.iterateAll()) { + for (int i = 0; i < ts.getPointsCount(); i++) { + assertThat(ts.getPoints(i).getValue().getDoubleValue() % 1).isEqualTo(0); + } + count += 1; + } + assertThat(count).isGreaterThan(0); + } + + @Test + public void e2eMemoryMetricTest() throws IOException { + String metricName = "workload.googleapis.com/process.runtime.jvm.memory.utilization_ratio"; + + ListTimeSeriesPagedResponse response = listAllMetrics(metricName); + int count = 0; + + for (TimeSeries ts : response.iterateAll()) { + for (int i = 0; i < ts.getPointsCount(); i++) { + assertThat(ts.getPoints(i).getValue().getDoubleValue() % 10).isEqualTo(0); + } + count += 1; + } + assertThat(count).isGreaterThan(0); + } + + @Test + public void e2eTracesTest() throws InterruptedException, IOException { + // Wait for 3 mins for uploading traces + Thread.sleep(180000); + int prodTraceCount = 0; + int debugTraceCount = 0; + // Restrict time to last 15 minutes + long startMillis = System.currentTimeMillis() - ((60 * 15) * 1000); + + try (TraceServiceClient traceServiceClient = TraceServiceClient.create()) { + ListTracesRequest request = + ListTracesRequest.newBuilder() + .setProjectId(projectName.getProject()) + .setStartTime(Timestamps.fromMillis(startMillis)) + 
.setEndTime(Timestamps.fromMillis(System.currentTimeMillis())) + .setFilter("+root:total_execution_time" + " " + "job-id:" + jobId) + .build(); + + for (Trace element : traceServiceClient.listTraces(request).iterateAll()) { + prodTraceCount += 1; + } + } + + // decryption_time_per_report is a debug trace which won't be generated when using prod binary. + try (TraceServiceClient traceServiceClient = TraceServiceClient.create()) { + ListTracesRequest request = + ListTracesRequest.newBuilder() + .setProjectId(projectName.getProject()) + .setStartTime(Timestamps.fromMillis(startMillis)) + .setEndTime(Timestamps.fromMillis(System.currentTimeMillis())) + .setFilter("+root:decryption_time_per_report" + " " + "job-id:" + jobId) + .build(); + + for (Trace element : traceServiceClient.listTraces(request).iterateAll()) { + debugTraceCount += 1; + } + } + + assertThat(prodTraceCount).isEqualTo(1); + assertThat(debugTraceCount).isEqualTo(0); + } + + private ListTimeSeriesPagedResponse listAllMetrics(String metricType) throws IOException { + // Restrict time to last 10 minutes + long startMillis = System.currentTimeMillis() - ((60 * 10) * 1000); + TimeInterval interval = + TimeInterval.newBuilder() + .setStartTime(Timestamps.fromMillis(startMillis)) + .setEndTime(Timestamps.fromMillis(System.currentTimeMillis())) + .build(); + ListTimeSeriesRequest.Builder requestBuilder = + ListTimeSeriesRequest.newBuilder() + .setName(projectName.toString()) + .setFilter( + "metric.type=\"" + + metricType + + "\"" + + " AND " + + "metric.label.custom_namespace=\"" + + ENVIRONMENT_NAME + + "\"" + + " AND " + + "resource.type=\"generic_node\"") + .setInterval(interval) + .setView(TimeSeriesView.FULL); + ListTimeSeriesRequest request = requestBuilder.build(); + ListTimeSeriesPagedResponse response; + try (final MetricServiceClient client = MetricServiceClient.create(); ) { + response = client.listTimeSeries(request); + } + return response; + } + + private static class TestEnv extends AbstractModule { + + @Override + protected void configure() { + ImpersonatedCredentials credentials; + try { + credentials = + ImpersonatedCredentials.newBuilder() + .setSourceCredentials(GoogleCredentials.getApplicationDefault()) + .setTargetPrincipal(getTestServiceAccount()) + .setScopes(Arrays.asList("https://www.googleapis.com/auth/devstorage.read_write")) + .build(); + } catch (IOException e) { + throw new RuntimeException("Invalid credentials", e); + } + bind(Storage.class) + .toInstance( + StorageOptions.newBuilder() + .setProjectId(getTestProjectId()) + .setCredentials(credentials) + .build() + .getService()); + } + } +} diff --git a/javatests/com/google/aggregate/adtech/worker/GcpWorkerAutoScalingTest.java b/javatests/com/google/aggregate/adtech/worker/GcpWorkerAutoScalingTest.java index 902eb0ac..b7d1eac7 100644 --- a/javatests/com/google/aggregate/adtech/worker/GcpWorkerAutoScalingTest.java +++ b/javatests/com/google/aggregate/adtech/worker/GcpWorkerAutoScalingTest.java @@ -49,18 +49,16 @@ @RunWith(JUnit4.class) public class GcpWorkerAutoScalingTest { - @Rule - public final Acai acai = new Acai(TestEnv.class); + @Rule public final Acai acai = new Acai(TestEnv.class); private static final Duration SUBMIT_JOB_TIMEOUT = Duration.of(1, ChronoUnit.SECONDS); - private static final Duration SCALE_ACTION_COMPLETION_TIMEOUT = Duration.of(20, - ChronoUnit.MINUTES); + private static final Duration SCALE_ACTION_COMPLETION_TIMEOUT = + Duration.of(20, ChronoUnit.MINUTES); private static final Duration COMPLETION_TIMEOUT = Duration.of(15, 
ChronoUnit.MINUTES); private static final Integer MIN_INSTANCES = 1; public static final int CONCURRENT_JOBS = 5; - @Inject - InstancesClient gcpInstancesClient; + @Inject InstancesClient gcpInstancesClient; @Test public void autoscalingE2ETest() throws Exception { @@ -74,13 +72,14 @@ public void autoscalingE2ETest() throws Exception { String outputFile = String.format("100k_auto_scale_job_%d.avro.test", jobNum); String outputDataPrefix = String.format("%s/test-outputs/%s", KOKORO_BUILD_ID, outputFile); - CreateJobRequest jobRequest = SmokeTestBase.createJobRequest( - getTestDataBucket(), - inputDataPrefix, - getTestDataBucket(), - outputDataPrefix, - Optional.of(getTestDataBucket()), - Optional.of(domainDataPrefix)); + CreateJobRequest jobRequest = + SmokeTestBase.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputDataPrefix, + getTestDataBucket(), + outputDataPrefix, + Optional.of(getTestDataBucket()), + Optional.of(domainDataPrefix)); SmokeTestBase.submitJob(jobRequest, SUBMIT_JOB_TIMEOUT, false); @@ -104,7 +103,8 @@ private void waitForInstanceScaleAction(boolean isScaleOut) throws InterruptedEx while (!scaleSuccessful && Instant.now().isBefore(waitMax)) { instanceCount = getInstanceCount(); System.out.println( - "Verifying instance count. Is scale out: " + isScaleOut + "Verifying instance count. Is scale out: " + + isScaleOut + ". Current instance count: " + instanceCount); if ((!isScaleOut && instanceCount == MIN_INSTANCES) @@ -130,13 +130,15 @@ private void waitForInstanceScaleAction(boolean isScaleOut) throws InterruptedEx } private int getInstanceCount() { - AggregatedListPagedResponse pagedResponse = gcpInstancesClient.aggregatedList( - getTestProjectId()); + AggregatedListPagedResponse pagedResponse = + gcpInstancesClient.aggregatedList(getTestProjectId()); int instancesCount = 0; for (Entry entry : pagedResponse.iterateAll()) { - instancesCount += entry.getValue().getInstancesList().stream() - .filter(i -> i.getName().contains(getEnvironmentName())).count(); + instancesCount += + entry.getValue().getInstancesList().stream() + .filter(i -> i.getName().contains(getEnvironmentName())) + .count(); } return instancesCount; } @@ -165,8 +167,7 @@ protected void configure() { .getService()); try { - bind(InstancesClient.class) - .toInstance(InstancesClient.create()); + bind(InstancesClient.class).toInstance(InstancesClient.create()); } catch (IOException e) { throw new RuntimeException("Unable to instantiate GCP Instances client: ", e); } diff --git a/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousDiffTest.java b/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousDiffTest.java index 9ab1422a..d3db481b 100644 --- a/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousDiffTest.java +++ b/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousDiffTest.java @@ -55,15 +55,12 @@ @RunWith(JUnit4.class) public class GcpWorkerContinuousDiffTest { - @Rule - public final Acai acai = new Acai(TestEnv.class); + @Rule public final Acai acai = new Acai(TestEnv.class); private static final Duration COMPLETION_TIMEOUT = Duration.of(10, ChronoUnit.MINUTES); - @Inject - GcsBlobStorageClient gcsBlobStorageClient; - @Inject - AvroResultsFileReader avroResultsFileReader; + @Inject GcsBlobStorageClient gcsBlobStorageClient; + @Inject AvroResultsFileReader avroResultsFileReader; @Before public void checkBuildEnv() { @@ -86,7 +83,7 @@ public void e2eDiffTest() throws Exception { "%s/test-outputs/10k_diff_test_output.avro.result", 
SmokeTestBase.KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -114,12 +111,12 @@ public void e2eDiffTest() throws Exception { MapDifference diffs = ResultDiffer.diffResults(aggregatedFacts.stream(), goldenAggregatedFacts.stream()); assertWithMessage( - String.format( - "Found (%s) diffs between left(test) and right(golden). Found (%s) entries only on" - + " left(test) and (%s) entries only on right(golden).", - diffs.entriesDiffering().size(), - diffs.entriesOnlyOnLeft().size(), - diffs.entriesOnlyOnRight().size())) + String.format( + "Found (%s) diffs between left(test) and right(golden). Found (%s) entries only on" + + " left(test) and (%s) entries only on right(golden).", + diffs.entriesDiffering().size(), + diffs.entriesOnlyOnLeft().size(), + diffs.entriesOnlyOnRight().size())) .that(diffs.areEqual()) .isTrue(); diff --git a/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousOutOfMemoryTest.java b/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousOutOfMemoryTest.java index 85adcb3f..823436cb 100644 --- a/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousOutOfMemoryTest.java +++ b/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousOutOfMemoryTest.java @@ -77,7 +77,7 @@ public void createJobE2EOOMTest() throws Exception { "%s/test-outputs/OOM_test_output_1.avro.result", SmokeTestBase.KOKORO_BUILD_ID); CreateJobRequest createJobRequest1 = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), @@ -101,7 +101,7 @@ public void createJobE2EOOMTest() throws Exception { "%s/test-outputs/OOM_test_output_2.avro.result", SmokeTestBase.KOKORO_BUILD_ID); CreateJobRequest createJobRequest2 = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputKey, getTestDataBucket(), diff --git a/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousSmokeTest.java b/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousSmokeTest.java index 7470049c..8df9b654 100644 --- a/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousSmokeTest.java +++ b/javatests/com/google/aggregate/adtech/worker/GcpWorkerContinuousSmokeTest.java @@ -57,21 +57,15 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -/** - * GCP integration tests. - */ +/** GCP integration tests. 
*/ @RunWith(JUnit4.class) public final class GcpWorkerContinuousSmokeTest { - @Rule - public final Acai acai = new Acai(TestEnv.class); + @Rule public final Acai acai = new Acai(TestEnv.class); - @Inject - GcsBlobStorageClient gcsBlobStorageClient; - @Inject - AvroResultsFileReader avroResultsFileReader; - @Inject - private AvroDebugResultsReaderFactory readerFactory; + @Inject GcsBlobStorageClient gcsBlobStorageClient; + @Inject AvroResultsFileReader avroResultsFileReader; + @Inject private AvroDebugResultsReaderFactory readerFactory; public static final String OUTPUT_DATA_PREFIX_NAME = "-1-of-1"; private static final Integer DEBUG_DOMAIN_KEY_SIZE = 10000; private static final Duration COMPLETION_TIMEOUT = Duration.of(30, ChronoUnit.MINUTES); @@ -96,7 +90,7 @@ public void createJobE2ETest() throws Exception { String.format("%s/test-outputs/10k_test_domain_1.avro.result", KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputDataPrefix, getTestDataBucket(), @@ -128,7 +122,7 @@ public void createNotDebugJobE2EReportDebugEnabledTest() throws Exception { String.format("%s/test-outputs/10k_test_input_non_debug.avro.result", KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputDataPrefix, getTestDataBucket(), @@ -136,7 +130,7 @@ public void createNotDebugJobE2EReportDebugEnabledTest() throws Exception { false, Optional.of(getTestDataBucket()), Optional.of(domainDataPrefix)); - JsonNode result = SmokeTestBase.submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); checkJobExecutionResult(result, SUCCESS.name(), 0); // Read output avro from GCS. @@ -152,10 +146,10 @@ public void createNotDebugJobE2EReportDebugEnabledTest() throws Exception { assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); // The debug file shouldn't exist because it's not debug run assertThat( - checkFileExists( - gcsBlobStorageClient, - getTestDataBucket(), - getDebugFilePrefix(outputDataPrefix + OUTPUT_DATA_PREFIX_NAME))) + checkFileExists( + gcsBlobStorageClient, + getTestDataBucket(), + getDebugFilePrefix(outputDataPrefix + OUTPUT_DATA_PREFIX_NAME))) .isFalse(); } @@ -175,7 +169,7 @@ public void createDebugJobE2EReportDebugModeEnabledTest() throws Exception { "%s/test-outputs/10k_test_input_debug_for_debug_disabled.avro.result", KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputDataPrefix, getTestDataBucket(), @@ -183,7 +177,7 @@ public void createDebugJobE2EReportDebugModeEnabledTest() throws Exception { true, Optional.of(getTestDataBucket()), Optional.of(domainDataPrefix)); - JsonNode result = SmokeTestBase.submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); checkJobExecutionResult(result, SUCCESS.name(), 0); // Read output avro from GCS. 
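The call sites above switch from the old createJobRequest overloads to createJobRequestWithAttributionReportTo, and the reporting-site variant introduced later in this patch is used by the new reporting-site tests. As orientation only, here is a minimal sketch of the job-parameter maps the two helper variants end up populating; the keys and default values mirror the SmokeTestBase changes further down in this patch, while the bucket and domain-prefix values are placeholders for illustration, not real test data.

import com.google.common.collect.ImmutableMap;

/** Illustrative sketch only: job parameters produced by the two renamed SmokeTestBase helpers. */
final class JobParamsSketch {

  // Variant used by most smoke tests: identifies the adtech by attribution_report_to.
  static final ImmutableMap<String, String> ATTRIBUTION_REPORT_TO_PARAMS =
      ImmutableMap.of(
          "attribution_report_to", "https://subdomain.fakeurl.com",
          "report_error_threshold_percentage", "0",
          "output_domain_bucket_name", "test_reports_data",
          "output_domain_blob_prefix", "some-build-id/test-inputs/10k_test_domain_1.avro");

  // Variant used by the new reporting-site tests: identifies the adtech by reporting_site,
  // which covers all reporting origins under that site.
  static final ImmutableMap<String, String> REPORTING_SITE_PARAMS =
      ImmutableMap.of(
          "reporting_site", "https://fakeurl.com",
          "report_error_threshold_percentage", "100",
          "output_domain_bucket_name", "test_reports_data",
          "output_domain_blob_prefix",
          "some-build-id/test-inputs/10k_test_domain_reporting_site.avro");

  private JobParamsSketch() {}
}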
@@ -221,70 +215,213 @@ public void createDebugJobE2EReportDebugModeDisabledTest() throws Exception { String.format("%s/test-outputs/10k_test_input_2.avro.result", KOKORO_BUILD_ID); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputDataPrefix, getTestDataBucket(), outputDataPrefix, true, Optional.of(getTestDataBucket()), - Optional.of(domainDataPrefix)); + Optional.of(domainDataPrefix), + /* totalReportsCount= */ 10000, + /* reportErrorThreshold= */ 10); JsonNode result = SmokeTestBase.submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); assertThat(result.get("result_info").get("return_code").asText()) - .isEqualTo(AggregationWorkerReturnCode.SUCCESS_WITH_ERRORS.name()); - assertThat( + .isEqualTo(AggregationWorkerReturnCode.REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD.name()); + // Due to parallel aggregation, the processing may stop a little over the threshold. + // So, asserting below that the processing stopped somewhere above the threshold but before all + // the 10K reports are processed. + int erroringReportCount = result .get("result_info") .get("error_summary") .get("error_counts") .get(0) .get("count") - .asInt()) - .isEqualTo(10000); + .asInt(); + assertThat(erroringReportCount).isAtLeast(1000); + assertThat(erroringReportCount).isLessThan(10000); assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("category") - .asText()) + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("category") + .asText()) .isEqualTo(ErrorCounter.DEBUG_NOT_ENABLED.name()); assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("description") - .asText()) + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("description") + .asText()) .isEqualTo(ErrorCounter.DEBUG_NOT_ENABLED.getDescription()); + } + + /** + * End-to-end test for the Aggregate Reporting Debug API. 10k attribution-reporting-debug + * type reports are provided for aggregation. Verifies job status and the size of summary report + * facts. + */ + @Test + public void createJobE2EAggregateReportingDebugTest() throws Exception { + String inputDataPrefix = + String.format("%s/test-inputs/10k_test_input_attribution_debug.avro", KOKORO_BUILD_ID); + String domainDataPrefix = + String.format("%s/test-inputs/10k_test_domain_attribution_debug.avro", KOKORO_BUILD_ID); + String outputDataPrefix = + String.format( + "%s/test-outputs/10k_test_output_attribution_debug.avro.result", KOKORO_BUILD_ID); + + CreateJobRequest createJobRequest = + SmokeTestBase.createJobRequestWithAttributionReportTo( + getTestDataBucket(), + inputDataPrefix, + getTestDataBucket(), + outputDataPrefix, + Optional.of(getTestDataBucket()), + Optional.of(domainDataPrefix)); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); - // Read output avro from s3. + checkJobExecutionResult(result, SUCCESS.name(), 0); ImmutableList aggregatedFacts = readResultsFromCloud( gcsBlobStorageClient, avroResultsFileReader, getTestDataBucket(), outputDataPrefix + OUTPUT_DATA_PREFIX_NAME); + assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); + } - assertThat(aggregatedFacts.size()).isEqualTo(DEBUG_DOMAIN_KEY_SIZE); + /** + * This test includes sending a job with reporting site only. Verifies that jobs with only + * reporting site are successful. 
+ */ + @Test + public void createJobE2ETestWithReportingSite() throws Exception { + var inputDataPrefix = + String.format("%s/test-inputs/10k_test_input_reporting_site.avro", KOKORO_BUILD_ID); + var domainDataPrefix = + String.format("%s/test-inputs/10k_test_domain_reporting_site.avro", KOKORO_BUILD_ID); + var outputDataPrefix = + String.format( + "%s/test-outputs/10k_test_output_reporting_site.avro.result", KOKORO_BUILD_ID); - // Read debug result from s3. - ImmutableList aggregatedDebugFacts = - readDebugResultsFromCloud( + CreateJobRequest createJobRequest = + SmokeTestBase.createJobRequestWithReportingSite( + getTestDataBucket(), + inputDataPrefix, + getTestDataBucket(), + outputDataPrefix, + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainDataPrefix)); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + + checkJobExecutionResult(result, SUCCESS.name(), 0); + + ImmutableList aggregatedFacts = + readResultsFromCloud( gcsBlobStorageClient, - readerFactory, + avroResultsFileReader, getTestDataBucket(), - getDebugFilePrefix(outputDataPrefix + OUTPUT_DATA_PREFIX_NAME)); + outputDataPrefix + OUTPUT_DATA_PREFIX_NAME); + assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); + } + + /** + * This test includes sending a job with reports from multiple reporting origins belonging to the + * same reporting site. Verifies that all the reports are processed successfully. + */ + @Test + public void createJobE2ETestWithMultipleReportingOrigins() throws Exception { + var inputDataPrefix = String.format("%s/test-inputs/same-site/", KOKORO_BUILD_ID); + var domainDataPrefix = + String.format( + "%s/test-inputs/10k_test_domain_multiple_origins_same_site.avro", KOKORO_BUILD_ID); + var outputDataPrefix = + String.format( + "%s/test-outputs/10k_test_output_multiple_origins_same_site.avro.result", + KOKORO_BUILD_ID); + + CreateJobRequest createJobRequest = + SmokeTestBase.createJobRequestWithReportingSite( + getTestDataBucket(), + inputDataPrefix, + getTestDataBucket(), + outputDataPrefix, + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainDataPrefix)); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + + checkJobExecutionResult(result, SUCCESS.name(), 0); + + ImmutableList aggregatedFacts = + readResultsFromCloud( + gcsBlobStorageClient, + avroResultsFileReader, + getTestDataBucket(), + outputDataPrefix + OUTPUT_DATA_PREFIX_NAME); + assertThat(aggregatedFacts.size()).isAtLeast(DEBUG_DOMAIN_KEY_SIZE); + } + + /** + * This test includes sending a job with reports from multiple reporting origins belonging to + * different reporting sites. It is expected that the 5k reports with a different reporting site + * will fail and come up in the error counts. + */ + @Test + public void createJobE2ETestWithSomeReportsHavingDifferentReportingOrigins() throws Exception { + var inputDataPrefix = String.format("%s/test-inputs/different-site/", KOKORO_BUILD_ID); + var domainDataPrefix = + String.format( + "%s/test-inputs/10k_test_domain_multiple_origins_different_site.avro", KOKORO_BUILD_ID); + var outputDataPrefix = + String.format( + "%s/test-outputs/10k_test_output_multiple_origins_different_site.avro.result", + KOKORO_BUILD_ID); - // Only contains keys in domain because all reports are filtered out. 
- assertThat(aggregatedDebugFacts.size()).isEqualTo(DEBUG_DOMAIN_KEY_SIZE); - // The unnoisedMetric of aggregatedDebugFacts should be 0 for all keys because - // all reports are filtered out. - // Noised metric in both debug reports and summary reports should be noise value instead of 0. - aggregatedDebugFacts.forEach(fact -> assertThat(fact.unnoisedMetric().get()).isEqualTo(0)); + CreateJobRequest createJobRequest = + SmokeTestBase.createJobRequestWithReportingSite( + getTestDataBucket(), + inputDataPrefix, + getTestDataBucket(), + outputDataPrefix, + /* outputDomainBucketName= */ Optional.of(getTestDataBucket()), + /* outputDomainPrefix= */ Optional.of(domainDataPrefix)); + JsonNode result = submitJobAndWaitForResult(createJobRequest, COMPLETION_TIMEOUT); + + assertThat(result.get("result_info").get("return_code").asText()) + .isEqualTo(AggregationWorkerReturnCode.SUCCESS_WITH_ERRORS.name()); + assertThat( + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("count") + .asInt()) + .isEqualTo(5000); + assertThat( + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("category") + .asText()) + .isEqualTo(ErrorCounter.REPORTING_SITE_MISMATCH.name()); + + ImmutableList aggregatedFacts = + readResultsFromCloud( + gcsBlobStorageClient, + avroResultsFileReader, + getTestDataBucket(), + outputDataPrefix + OUTPUT_DATA_PREFIX_NAME); + assertThat(aggregatedFacts.size()).isAtLeast(5000); } /* @@ -300,15 +437,14 @@ public void createJobE2ETestPrivacyBudgetExhausted() throws Exception { String.format("%s/test-outputs/10k_test_input_3.avro.result", KOKORO_BUILD_ID); CreateJobRequest createJobRequest1 = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(), inputDataPrefix, getTestDataBucket(), outputDataPrefix, Optional.of(getTestDataBucket()), Optional.of(domainDataPrefix)); - JsonNode result = - SmokeTestBase.submitJobAndWaitForResult(createJobRequest1, COMPLETION_TIMEOUT); + JsonNode result = submitJobAndWaitForResult(createJobRequest1, COMPLETION_TIMEOUT); assertThat(result.get("result_info").get("return_code").asText()) .isEqualTo(AggregationWorkerReturnCode.SUCCESS.name()); assertThat(result.get("result_info").get("error_summary").get("error_counts").isEmpty()) diff --git a/javatests/com/google/aggregate/adtech/worker/GcpWorkerKhsLoadtest.java b/javatests/com/google/aggregate/adtech/worker/GcpWorkerKhsLoadtest.java index e9c74f86..328b515c 100644 --- a/javatests/com/google/aggregate/adtech/worker/GcpWorkerKhsLoadtest.java +++ b/javatests/com/google/aggregate/adtech/worker/GcpWorkerKhsLoadtest.java @@ -39,6 +39,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Optional; +import java.util.UUID; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -46,19 +47,14 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -/** - * GCP KHS loadtest implementation - */ +/** GCP KHS loadtest implementation */ @RunWith(JUnit4.class) public final class GcpWorkerKhsLoadtest { - @Rule - public final Acai acai = new Acai(TestEnv.class); - @Rule - public TestName name = new TestName(); + @Rule public final Acai acai = new Acai(TestEnv.class); + @Rule public TestName name = new TestName(); - private static final String KHS_LOADTEST_DATA_BUCKET = - "loadtest_data"; + private static final String KHS_LOADTEST_DATA_BUCKET = "loadtest_data"; private static final int NUM_RUNS = 5; private static final Duration 
COMPLETION_TIMEOUT = Duration.of(30, ChronoUnit.MINUTES); @@ -70,35 +66,29 @@ public void checkBuildEnv() { } } - /** - * Run Aggregation job for KHS loadtest. - */ + /** Run Aggregation job for KHS loadtest. */ @Test public void aggregateKhsLoadTest() throws Exception { ArrayList jobRequests = new ArrayList<>(NUM_RUNS); ArrayList jobRequestsDeepCopy = new ArrayList<>(NUM_RUNS); for (int i = 1; i <= NUM_RUNS; i++) { - var inputKey = String.format("test-data/%s/test-inputs/loadtest_report.avro", KOKORO_BUILD_ID); - var domainKey = String.format("test-data/%s/test-inputs/loadtest_domain.avro", KOKORO_BUILD_ID); + var inputKey = + String.format("test-data/%s/test-inputs/loadtest_report.avro", KOKORO_BUILD_ID); + var domainKey = + String.format("test-data/%s/test-inputs/loadtest_domain.avro", KOKORO_BUILD_ID); var outputKey = - String.format( - "test-data/%s/test-outputs/loadtest_%s_output.avro", - KOKORO_BUILD_ID, i); + String.format("test-data/%s/test-outputs/loadtest_%s_output.avro", KOKORO_BUILD_ID, i); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(KHS_LOADTEST_DATA_BUCKET), inputKey, getTestDataBucket(KHS_LOADTEST_DATA_BUCKET), outputKey, /* debugRun= */ true, - /* jobId= */ getClass().getSimpleName() - + "::" - + name.getMethodName() - + "-" - + i, - /* outputDomainBucketName= */ - Optional.of(getTestDataBucket(KHS_LOADTEST_DATA_BUCKET)), + /* jobId= */ UUID.randomUUID().toString(), + /* outputDomainBucketName= */ Optional.of( + getTestDataBucket(KHS_LOADTEST_DATA_BUCKET)), /* outputDomainPrefix= */ Optional.of(domainKey)); createJob(createJobRequest); diff --git a/javatests/com/google/aggregate/adtech/worker/GcpWorkerPerformanceRegressionTest.java b/javatests/com/google/aggregate/adtech/worker/GcpWorkerPerformanceRegressionTest.java index 494e6c35..b442a6e2 100644 --- a/javatests/com/google/aggregate/adtech/worker/GcpWorkerPerformanceRegressionTest.java +++ b/javatests/com/google/aggregate/adtech/worker/GcpWorkerPerformanceRegressionTest.java @@ -50,23 +50,16 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -/** - * GCP performance regression test implementation - */ +/** GCP performance regression test implementation */ @RunWith(JUnit4.class) public final class GcpWorkerPerformanceRegressionTest { - @Rule - public final Acai acai = new Acai(TestEnv.class); - @Rule - public TestName name = new TestName(); - - @Inject - GcsBlobStorageClient gcsBlobStorageClient; - @Inject - AvroResultsFileReader avroResultsFileReader; - @Inject - private AvroDebugResultsReaderFactory readerFactory; + @Rule public final Acai acai = new Acai(TestEnv.class); + @Rule public TestName name = new TestName(); + + @Inject GcsBlobStorageClient gcsBlobStorageClient; + @Inject AvroResultsFileReader avroResultsFileReader; + @Inject private AvroDebugResultsReaderFactory readerFactory; private static final String PERFORMANCE_REGRESSION_DATA_BUCKET = "gcp_performance_regression_test_data"; private static final int NUM_WARMUP_RUNS = 5; @@ -99,7 +92,7 @@ public void aggregateARA500kReports500kDomainWarmup() throws Exception { "test-data/%s/test-outputs/500k_report_%s_500k_domain_warmup_output.avro", KOKORO_BUILD_ID, i); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET), inputKey, getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET), @@ -110,8 +103,8 @@ public void 
aggregateARA500kReports500kDomainWarmup() throws Exception { + name.getMethodName() + "_warmup-" + i, - /* outputDomainBucketName= */ - Optional.of(getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET)), + /* outputDomainBucketName= */ Optional.of( + getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET)), /* outputDomainPrefix= */ Optional.of(domainKey)); createJob(createJobRequest); @@ -119,7 +112,7 @@ public void aggregateARA500kReports500kDomainWarmup() throws Exception { warmUpJobRequestsDeepCopy.add(createJobRequest); } - waitForJobCompletions(warmUpJobRequestsDeepCopy, COMPLETION_TIMEOUT); + waitForJobCompletions(warmUpJobRequestsDeepCopy, COMPLETION_TIMEOUT, true); for (int i = 1; i <= NUM_WARMUP_RUNS; i++) { String outputKey = @@ -153,7 +146,7 @@ public void aggregateARA500kReports500kDomainTransient() throws Exception { "test-data/%s/test-outputs/500k_report_%s_500k_domain_transient_output.avro", KOKORO_BUILD_ID, i); CreateJobRequest createJobRequest = - SmokeTestBase.createJobRequest( + SmokeTestBase.createJobRequestWithAttributionReportTo( getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET), inputKey, getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET), @@ -164,8 +157,8 @@ public void aggregateARA500kReports500kDomainTransient() throws Exception { + name.getMethodName() + "_transient-" + i, - /* outputDomainBucketName= */ - Optional.of(getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET)), + /* outputDomainBucketName= */ Optional.of( + getTestDataBucket(PERFORMANCE_REGRESSION_DATA_BUCKET)), /* outputDomainPrefix= */ Optional.of(domainKey)); createJob(createJobRequest); transientJobRequests.add(createJobRequest); @@ -173,7 +166,7 @@ public void aggregateARA500kReports500kDomainTransient() throws Exception { } waitForJobCompletions( - transientJobRequestsDeepCopy, COMPLETION_TIMEOUT, false); + transientJobRequestsDeepCopy, COMPLETION_TIMEOUT, true); for (int i = 1; i <= NUM_TRANSIENT_RUNS; i++) { var outputKey = diff --git a/javatests/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLoggerTest.java b/javatests/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLoggerTest.java index 93dd25ec..bd99ac8e 100644 --- a/javatests/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLoggerTest.java +++ b/javatests/com/google/aggregate/adtech/worker/LocalFileToCloudStorageLoggerTest.java @@ -18,7 +18,6 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; @@ -32,7 +31,6 @@ import com.google.aggregate.adtech.worker.exceptions.ResultLogException; import com.google.aggregate.adtech.worker.model.AggregatedFact; import com.google.aggregate.adtech.worker.model.DebugBucketAnnotation; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.aggregate.adtech.worker.testing.AvroResultsFileReader; import com.google.aggregate.adtech.worker.util.OutputShardFileHelper; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter; @@ -41,11 +39,7 @@ import com.google.aggregate.protocol.avro.AvroDebugResultsReader; import com.google.aggregate.protocol.avro.AvroDebugResultsReaderFactory; import com.google.aggregate.protocol.avro.AvroDebugResultsRecord; -import com.google.aggregate.protocol.avro.AvroReportRecord; -import com.google.aggregate.protocol.avro.AvroReportsReader; -import 
com.google.aggregate.protocol.avro.AvroReportsReaderFactory; import com.google.common.collect.ImmutableList; -import com.google.common.io.ByteSource; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; import com.google.common.util.concurrent.ListeningExecutorService; @@ -66,7 +60,6 @@ import java.util.ArrayList; import java.util.Comparator; import java.util.List; -import java.util.UUID; import java.util.concurrent.Executors; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -119,37 +112,12 @@ public class LocalFileToCloudStorageLoggerTest { AggregatedFact.create(BigInteger.valueOf(3789), 90L, 80L, annotationReportOnly), AggregatedFact.create(BigInteger.valueOf(4123), 100L, 70L, annotationReportOnly)); - private static final ByteSource encryptedReport1Payload = - ByteSource.wrap(new byte[] {0x00, 0x01}); - private static final ByteSource encryptedReport2Payload = - ByteSource.wrap(new byte[] {0x01, 0x02}); - private static final String encryptedReport1KeyId = UUID.randomUUID().toString(); - private static final String encryptedReport2KeyId = UUID.randomUUID().toString(); - private static final String encryptedReport1SharedInfo = "foo"; - private static final String encryptedReport2SharedInfo = "bar"; - private static final EncryptedReport report1 = - EncryptedReport.builder() - .setPayload(encryptedReport1Payload) - .setKeyId(encryptedReport1KeyId) - .setSharedInfo(encryptedReport1SharedInfo) - .build(); - - private static final EncryptedReport report2 = - EncryptedReport.builder() - .setPayload(encryptedReport2Payload) - .setKeyId(encryptedReport2KeyId) - .setSharedInfo(encryptedReport2SharedInfo) - .build(); - - private static final ImmutableList reportsList = - ImmutableList.of(report1, report2); // Under test @Inject private Provider localFileToCloudStorageLogger; @Inject private FSBlobStorageClient blobStorageClient; @Inject private AvroResultsFileReader avroResultsFileReader; @Inject private AvroDebugResultsReaderFactory readerFactory; - @Inject private AvroReportsReaderFactory reportReaderFactory; @Inject private ParallelUploadFlagHelper uploadFlagHelper; @Inject private FileSystem testFS; @Inject @ResultWorkingDirectory private Path workingDirectory; @@ -178,43 +146,6 @@ public void logResultsTest_singleThreaded() throws Exception { logResultsTest(); } - @Test - public void logReports_writesReports() throws Exception { - localFileToCloudStorageLogger.get().logReports(reportsList, ctx, "1"); - - Path reportsFilePath = blobStorageClient.getLastWrittenFile(); - Stream writtenFile; - try (AvroReportsReader reader = getReportsReader(reportsFilePath)) { - writtenFile = reader.streamRecords(); - } - Stream writtenFileEncryptedReports = - writtenFile.map( - report -> - EncryptedReport.builder() - .setKeyId(report.keyId()) - .setPayload(report.payload()) - .setSharedInfo(report.sharedInfo()) - .build()); - - List encryptedReportsList = - writtenFileEncryptedReports.collect(toImmutableList()); - - // check reencrypted reports file name - assertThat(reportsFilePath.toString()).isEqualTo("/bucket/abc123-reencrypted-1.avro"); - // Check the output reports - assertThat(encryptedReportsList.get(0).keyId()).isEqualTo(encryptedReport1KeyId); - assertTrue(encryptedReportsList.get(0).payload().contentEquals(encryptedReport1Payload)); - assertThat(encryptedReportsList.get(0).sharedInfo()).isEqualTo(encryptedReport1SharedInfo); - - assertThat(encryptedReportsList.get(1).keyId()).isEqualTo(encryptedReport2KeyId); - 
assertTrue(encryptedReportsList.get(1).payload().contentEquals(encryptedReport2Payload)); - assertThat(encryptedReportsList.get(1).sharedInfo()).isEqualTo(encryptedReport2SharedInfo); - } - - private AvroReportsReader getReportsReader(Path avroFile) throws Exception { - return reportReaderFactory.create(Files.newInputStream(avroFile)); - } - private void logResultsTest() throws Exception { OutputShardFileHelper.setOutputShardFileSizeBytes(100_000_000L); diff --git a/javatests/com/google/aggregate/adtech/worker/LocalRunnerTest.java b/javatests/com/google/aggregate/adtech/worker/LocalRunnerTest.java index 69c3797a..09d6545b 100644 --- a/javatests/com/google/aggregate/adtech/worker/LocalRunnerTest.java +++ b/javatests/com/google/aggregate/adtech/worker/LocalRunnerTest.java @@ -16,15 +16,14 @@ package com.google.aggregate.adtech.worker; +import static com.google.aggregate.adtech.worker.util.NumericConversions.createBucketFromString; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; -import static org.junit.Assert.fail; import com.beust.jcommander.ParameterException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.util.NumericConversions; import com.google.common.math.Stats; import com.google.common.util.concurrent.ServiceManager; import com.google.privacysandbox.otel.OTelConfiguration; @@ -474,7 +473,7 @@ public void testMainMethodJsonOutputConstantNoise_Set2_no_noise() convertToAggregatedFact(objectMapper.readTree(Files.newInputStream(outputJson))); List nonZeroFacts = output.stream() - .filter((aggregatedFact) -> aggregatedFact.metric() != 0) + .filter((aggregatedFact) -> aggregatedFact.getMetric() != 0) .collect(Collectors.toList()); assertThat(nonZeroFacts).hasSize(0); @@ -505,7 +504,7 @@ public void testMainMethodJsonOutputConstantNoise_Set2_noise_epsilon64() Path outputJson = outputDirectoryPath.resolve("output.json"); List output = convertToAggregatedFact(objectMapper.readTree(Files.newInputStream(outputJson))); - Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.metric()); + Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.getMetric()); Stats statsAccumulator = Stats.of(allMetrics.collect(Collectors.toList())); assertThat(statsAccumulator.count()).isEqualTo(10000); @@ -540,7 +539,7 @@ public void testMainMethodJsonOutputConstantNoise_Set2_noise_epsilon1() Path outputJson = outputDirectoryPath.resolve("output.json"); List output = convertToAggregatedFact(objectMapper.readTree(Files.newInputStream(outputJson))); - Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.metric()); + Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.getMetric()); Stats statsAccumulator = Stats.of(allMetrics.collect(Collectors.toList())); assertThat(statsAccumulator.count()).isEqualTo(10000); @@ -573,7 +572,7 @@ public void testMainMethodJsonOutputConstantNoise_Set2_noise_default_epsilon10() Path outputJson = outputDirectoryPath.resolve("output.json"); List output = convertToAggregatedFact(objectMapper.readTree(Files.newInputStream(outputJson))); - Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.metric()); + Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.getMetric()); Stats statsAccumulator = Stats.of(allMetrics.collect(Collectors.toList())); 
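The LocalRunnerTest changes above add a static import of NumericConversions.createBucketFromString and, further down in this patch, read each bucket key from its JSON string form instead of decoding binary bytes. A minimal, self-contained sketch of that parsing direction follows; the entry shape (bucket as a decimal string, metric as a number) is an assumption for illustration, and plain BigInteger parsing stands in for the NumericConversions helper.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.math.BigInteger;

/** Illustrative sketch only: parsing one local-runner JSON result entry into (bucket, metric). */
final class JsonFactParsingSketch {

  public static void main(String[] args) throws Exception {
    // Assumed entry shape: the bucket key serialized as a decimal string, the metric as a number.
    String entryJson = "{\"bucket\":\"123456789\",\"metric\":42}";
    JsonNode entry = new ObjectMapper().readTree(entryJson);

    // Mirrors the direction of the test change: read the bucket from its string representation
    // (the patch uses NumericConversions.createBucketFromString; BigInteger parsing stands in
    // for it here) instead of decoding binary bytes.
    BigInteger bucket = new BigInteger(entry.get("bucket").asText());
    long metric = entry.get("metric").asLong();

    System.out.println(bucket + " -> " + metric);
  }
}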
assertThat(statsAccumulator.count()).isEqualTo(10000); @@ -701,7 +700,7 @@ public void testMainMethodJsonOutput_fledge_set2_withNoise_epsilon64() Path outputJson = outputDirectoryPath.resolve("output.json"); List output = convertToAggregatedFact(objectMapper.readTree(Files.newInputStream(outputJson))); - Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.metric()); + Stream allMetrics = output.stream().map(aggregatedFact -> aggregatedFact.getMetric()); Stats statsAccumulator = Stats.of(allMetrics.collect(Collectors.toList())); assertThat(statsAccumulator.count()).isEqualTo(10000); @@ -738,7 +737,7 @@ public void testMainMethodJsonOutput_fledge_set2_constantNoise() convertToAggregatedFact(objectMapper.readTree(Files.newInputStream(outputJson))); List nonZeroFacts = output.stream() - .filter((aggregatedFact) -> aggregatedFact.metric() != 0) + .filter((aggregatedFact) -> aggregatedFact.getMetric() != 0) .collect(Collectors.toList()); assertThat(nonZeroFacts).isEmpty(); @@ -750,14 +749,10 @@ private List convertToAggregatedFact(JsonNode jsonNode) { .iterator() .forEachRemaining( entry -> { - try { - writtenResults.add( - AggregatedFact.create( - NumericConversions.uInt128FromBytes(entry.get("bucket").binaryValue()), - entry.get("metric").asLong())); - } catch (IOException e) { - fail(e.getMessage()); - } + writtenResults.add( + AggregatedFact.create( + createBucketFromString(entry.get("bucket").asText()), + entry.get("metric").asLong())); }); return writtenResults; } diff --git a/javatests/com/google/aggregate/adtech/worker/ReportDecrypterAndValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/ReportDecrypterAndValidatorTest.java index 5133f5d0..8f6b1cc2 100644 --- a/javatests/com/google/aggregate/adtech/worker/ReportDecrypterAndValidatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/ReportDecrypterAndValidatorTest.java @@ -126,7 +126,7 @@ public void testDecryptionKeyServiceError_INTERNAL() { assertThat(decryptionValidationResult.report()).isEmpty(); assertThat(decryptionValidationResult.errorMessages().stream().map(ErrorMessage::category)) - .containsExactly(ErrorCounter.SERVICE_ERROR); + .containsExactly(ErrorCounter.INTERNAL_ERROR); } @Test @@ -159,7 +159,7 @@ public void testDecryptionKeyServiceError_DEFAULT() { assertThat(decryptionValidationResult.report()).isEmpty(); assertThat(decryptionValidationResult.errorMessages().stream().map(ErrorMessage::category)) - .containsExactly(ErrorCounter.SERVICE_ERROR); + .containsExactly(ErrorCounter.INTERNAL_ERROR); } public static final class TestEnv extends AbstractModule { diff --git a/javatests/com/google/aggregate/adtech/worker/SmokeTestBase.java b/javatests/com/google/aggregate/adtech/worker/SmokeTestBase.java index ce6eee37..bea3248a 100644 --- a/javatests/com/google/aggregate/adtech/worker/SmokeTestBase.java +++ b/javatests/com/google/aggregate/adtech/worker/SmokeTestBase.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.aggregate.adtech.worker.model.AggregatedFact; import com.google.aggregate.adtech.worker.testing.AvroResultsFileReader; +import com.google.aggregate.adtech.worker.util.JobUtils; import com.google.aggregate.protocol.avro.AvroDebugResultsReader; import com.google.aggregate.protocol.avro.AvroDebugResultsReaderFactory; import com.google.aggregate.protocol.avro.AvroDebugResultsRecord; @@ -58,13 +59,14 @@ import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; -/** - * Smoke test base class - */ +/** Smoke 
test base class */ public abstract class SmokeTestBase { public static final String ENV_ATTRIBUTION_REPORT_TO = System.getenv("ATTRIBUTION_REPORT_TO"); - public static final String DEFAULT_ATTRIBUTION_REPORT_TO = "foo.com"; + public static final String ENV_REPORTING_SITE = System.getenv("REPORTING_SITE"); + public static final String DEFAULT_ATTRIBUTION_REPORT_TO = "https://subdomain.fakeurl.com"; + public static final String DEFAULT_REPORTING_SITE = "https://fakeurl.com"; + public static final String FRONTEND_CLOUDFUNCTION_URL = System.getenv("FRONTEND_CLOUDFUNCTION_URL"); public static final String KOKORO_BUILD_ID = System.getenv("KOKORO_BUILD_ID"); @@ -75,56 +77,104 @@ public abstract class SmokeTestBase { public static final String FRONTEND_API = System.getenv("FRONTEND_API"); public static final String API_GATEWAY_STAGE = "stage"; public static final String GCP_ACCESS_TOKEN = System.getenv("GCP_ACCESS_TOKEN"); - public static final String DEFAULT_DEPLOY_SA = "deploy-sa@ps-msmt-aggserv-test.iam.gserviceaccount.com"; + public static final String DEFAULT_DEPLOY_SA = + "deploy-sa@ps-msmt-aggserv-test.iam.gserviceaccount.com"; public static final String DEFAULT_TEST_DATA_BUCKET = "test_reports_data"; public static final String DEFAULT_PROJECT_ID = "ps-msmt-aggserv-test"; public static final String DEFAULT_ENVIRONMENT_NAME = "continuous_mp"; protected CreateJobRequest createJobRequest; - public static CreateJobRequest createJobRequest( + public static CreateJobRequest createJobRequestWithAttributionReportTo( String inputDataBlobBucket, String inputDataBlobPrefix, String outputDataBlobBucket, String outputDataBlobPrefix, String jobId, Optional outputDomainBucketName, + Optional outputDomainPrefix, + long totalReportsCount, + int reportErrorThreshold) { + return createDefaultJobRequestBuilder( + inputDataBlobBucket, + inputDataBlobPrefix, + outputDataBlobBucket, + outputDataBlobPrefix, + jobId) + .putAllJobParameters( + getJobParamsWithAttributionReportTo( + false, outputDomainBucketName, outputDomainPrefix, Optional.of(totalReportsCount), reportErrorThreshold)) + .build(); + } + + public static CreateJobRequest createJobRequestWithAttributionReportTo( + String inputDataBlobBucket, + String inputDataBlobPrefix, + String outputDataBlobBucket, + String outputDataBlobPrefix, + Boolean debugRun, + Optional outputDomainBucketName, Optional outputDomainPrefix) { return createDefaultJobRequestBuilder( - inputDataBlobBucket, - inputDataBlobPrefix, - outputDataBlobBucket, - outputDataBlobPrefix, - jobId) - .putAllJobParameters(getJobParams(false, outputDomainBucketName, outputDomainPrefix, 100)) + inputDataBlobBucket, inputDataBlobPrefix, outputDataBlobBucket, outputDataBlobPrefix) + .putAllJobParameters( + getJobParamsWithAttributionReportTo( + debugRun, + outputDomainBucketName, + outputDomainPrefix, + /* inputReportCount= */ Optional.empty(), + /* reportErrorThreshold= */ 0)) .build(); } - public static CreateJobRequest createJobRequest( + public static CreateJobRequest createJobRequestWithAttributionReportTo( String inputDataBlobBucket, String inputDataBlobPrefix, String outputDataBlobBucket, String outputDataBlobPrefix, Boolean debugRun, + String jobId, Optional outputDomainBucketName, Optional outputDomainPrefix) { + return createDefaultJobRequestBuilder( + inputDataBlobBucket, + inputDataBlobPrefix, + outputDataBlobBucket, + outputDataBlobPrefix, + jobId) + .putAllJobParameters( + getJobParamsWithAttributionReportTo( + debugRun, outputDomainBucketName, outputDomainPrefix, Optional.empty(), 0)) 
+ .build(); + } + + public static CreateJobRequest createJobRequestWithAttributionReportTo( + String inputDataBlobBucket, + String inputDataBlobPrefix, + String outputDataBlobBucket, + String outputDataBlobPrefix, + Boolean debugRun, + Optional outputDomainBucketName, + Optional outputDomainPrefix, + long totalReportsCount, + int reportErrorThreshold) { return createDefaultJobRequestBuilder( inputDataBlobBucket, inputDataBlobPrefix, outputDataBlobBucket, outputDataBlobPrefix) .putAllJobParameters( - getJobParams( + getJobParamsWithAttributionReportTo( debugRun, outputDomainBucketName, outputDomainPrefix, - /* reportErrorThreshold= */ 100)) + Optional.of(totalReportsCount), + reportErrorThreshold)) .build(); } - public static CreateJobRequest createJobRequest( + public static CreateJobRequest createJobRequestWithAttributionReportTo( String inputDataBlobBucket, String inputDataBlobPrefix, String outputDataBlobBucket, String outputDataBlobPrefix, - Boolean debugRun, String jobId, Optional outputDomainBucketName, Optional outputDomainPrefix) { @@ -135,7 +185,12 @@ public static CreateJobRequest createJobRequest( outputDataBlobPrefix, jobId) .putAllJobParameters( - getJobParams(debugRun, outputDomainBucketName, outputDomainPrefix, 100)) + getJobParamsWithAttributionReportTo( + false, + outputDomainBucketName, + outputDomainPrefix, + /* inputReportCount= */ Optional.empty(), + /* reportErrorThreshold= */ 0)) .build(); } @@ -159,7 +214,7 @@ private static CreateJobRequest.Builder createDefaultJobRequestBuilder( .putAllJobParameters(ImmutableMap.of()); } - public static CreateJobRequest createJobRequest( + public static CreateJobRequest createJobRequestWithAttributionReportTo( String inputDataBlobBucket, String inputDataBlobPrefix, String outputDataBlobBucket, @@ -167,9 +222,24 @@ public static CreateJobRequest createJobRequest( Optional outputDomainBucketName, Optional outputDomainPrefix) { return createDefaultJobRequestBuilder( - inputDataBlobBucket, inputDataBlobPrefix, outputDataBlobBucket, outputDataBlobPrefix) + inputDataBlobBucket, inputDataBlobPrefix, outputDataBlobBucket, outputDataBlobPrefix) + .putAllJobParameters( + getJobParamsWithAttributionReportTo( + false, outputDomainBucketName, outputDomainPrefix, /* reportErrorThreshold= */ Optional.empty(), 0)) + .build(); + } + + public static CreateJobRequest createJobRequestWithReportingSite( + String inputDataBlobBucket, + String inputDataBlobPrefix, + String outputDataBlobBucket, + String outputDataBlobPrefix, + Optional outputDomainBucketName, + Optional outputDomainPrefix) { + return createDefaultJobRequestBuilder( + inputDataBlobBucket, inputDataBlobPrefix, outputDataBlobBucket, outputDataBlobPrefix) .putAllJobParameters( - getJobParams( + getJobParamsWithReportingSite( false, outputDomainBucketName, outputDomainPrefix, /* reportErrorThreshold= */ 100)) .build(); } @@ -404,9 +474,9 @@ protected static ImmutableList rea throws Exception { Path tempResultFile = Files.createTempFile(/* prefix= */ "results", /* suffix= */ "avro"); try (InputStream resultStream = - blobStorageClient.getBlob( - DataLocation.ofBlobStoreDataLocation( - BlobStoreDataLocation.create(outputBucket, outputPrefix))); + blobStorageClient.getBlob( + DataLocation.ofBlobStoreDataLocation( + BlobStoreDataLocation.create(outputBucket, outputPrefix))); OutputStream outputStream = Files.newOutputStream(tempResultFile)) { ByteStreams.copy(resultStream, outputStream); outputStream.flush(); @@ -419,18 +489,18 @@ protected static ImmutableList rea } protected static - 
ImmutableList readDebugResultsFromCloud( - T blobStorageClient, - AvroDebugResultsReaderFactory readerFactory, - String outputBucket, - String outputPrefix) - throws Exception { + ImmutableList readDebugResultsFromCloud( + T blobStorageClient, + AvroDebugResultsReaderFactory readerFactory, + String outputBucket, + String outputPrefix) + throws Exception { Stream writtenResults; Path tempResultFile = Files.createTempFile(/* prefix= */ "debug_results", /* suffix= */ "avro"); try (InputStream resultStream = - blobStorageClient.getBlob( - DataLocation.ofBlobStoreDataLocation( - BlobStoreDataLocation.create(outputBucket, outputPrefix))); + blobStorageClient.getBlob( + DataLocation.ofBlobStoreDataLocation( + BlobStoreDataLocation.create(outputBucket, outputPrefix))); OutputStream outputStream = Files.newOutputStream(tempResultFile)) { ByteStreams.copy(resultStream, outputStream); outputStream.flush(); @@ -465,17 +535,45 @@ protected static boolean checkFileExists( } } - private static ImmutableMap getJobParams( + private static ImmutableMap getJobParamsWithReportingSite( Boolean debugRun, Optional outputDomainBucketName, Optional outputDomainPrefix, int reportErrorThresholdPercentage) { ImmutableMap.Builder jobParams = ImmutableMap.builder(); + jobParams.put("reporting_site", getReportingSite()); + if (debugRun) { + jobParams.put("debug_run", "true"); + } + jobParams.put( + "report_error_threshold_percentage", String.valueOf(reportErrorThresholdPercentage)); + if (outputDomainPrefix.isPresent() && outputDomainBucketName.isPresent()) { + jobParams.put("output_domain_blob_prefix", outputDomainPrefix.get()); + jobParams.put("output_domain_bucket_name", outputDomainBucketName.get()); + return jobParams.build(); + } else if (outputDomainPrefix.isEmpty() && outputDomainBucketName.isEmpty()) { + return jobParams.build(); + } else { + throw new IllegalStateException( + "outputDomainPrefix and outputDomainBucketName must both be provided or both be empty."); + } + } + + private static ImmutableMap getJobParamsWithAttributionReportTo( + Boolean debugRun, + Optional outputDomainBucketName, + Optional outputDomainPrefix, + Optional inputReportCountOptional, + int reportErrorThresholdPercentage) { + ImmutableMap.Builder jobParams = ImmutableMap.builder(); jobParams.put("attribution_report_to", getAttributionReportTo()); if (debugRun) { jobParams.put("debug_run", "true"); } + inputReportCountOptional.ifPresent( + inputReportCount -> + jobParams.put(JobUtils.JOB_PARAM_INPUT_REPORT_COUNT, String.valueOf(inputReportCount))); jobParams.put( "report_error_threshold_percentage", String.valueOf(reportErrorThresholdPercentage)); if (outputDomainPrefix.isPresent() && outputDomainBucketName.isPresent()) { @@ -497,6 +595,13 @@ private static String getAttributionReportTo() { return DEFAULT_ATTRIBUTION_REPORT_TO; } + private static String getReportingSite() { + if (ENV_REPORTING_SITE != null) { + return ENV_REPORTING_SITE; + } + return DEFAULT_REPORTING_SITE; + } + protected static void checkJobExecutionResult( JsonNode result, String returnCode, int errorCount) { assertThat(result.get("result_info").get("return_code").asText()).isEqualTo(returnCode); @@ -505,13 +610,13 @@ protected static void checkJobExecutionResult( .isTrue(); } else { assertThat( - result - .get("result_info") - .get("error_summary") - .get("error_counts") - .get(0) - .get("count") - .asInt()) + result + .get("result_info") + .get("error_summary") + .get("error_counts") + .get(0) + .get("count") + .asInt()) .isEqualTo(errorCount); } } diff --git 
a/javatests/com/google/aggregate/adtech/worker/WorkerPullWorkServiceTest.java b/javatests/com/google/aggregate/adtech/worker/WorkerPullWorkServiceTest.java index 77c2b773..c1c4b26a 100644 --- a/javatests/com/google/aggregate/adtech/worker/WorkerPullWorkServiceTest.java +++ b/javatests/com/google/aggregate/adtech/worker/WorkerPullWorkServiceTest.java @@ -38,6 +38,7 @@ import com.google.aggregate.adtech.worker.selector.MetricClientSelector; import com.google.aggregate.adtech.worker.testing.NoopJobProcessor; import com.google.aggregate.adtech.worker.testing.NoopJobProcessor.ExceptionToThrow; +import com.google.aggregate.adtech.worker.util.JobUtils; import com.google.aggregate.perf.StopwatchExporter; import com.google.aggregate.perf.export.NoOpStopwatchExporter; import com.google.common.base.Ticker; @@ -103,6 +104,43 @@ public void pullJob() throws Exception { .isEqualTo(RETURN_CODE_SUCCESS); } + @Test + public void withInvalidFilteringIds_returnsInvalidJobCode() throws Exception { + RequestInfo requestInfo = + RequestInfo.getDefaultInstance().toBuilder() + .putJobParameters("attribution_report_to", "https://foo.com") + .putJobParameters(JobUtils.JOB_PARAM_FILTERING_IDS, "5,6,null") + .build(); + Job job = createJob("test job").toBuilder().setRequestInfo(requestInfo).build(); + jobClient.setReturnConstant(job); + + service.run(); + + verify(jobClient).markJobCompleted(jobResultCaptor.capture()); + assertThat(jobResultCaptor.getAllValues()).hasSize(1); + assertThat(jobResultCaptor.getValue().resultInfo().getReturnCode()) + .isEqualTo(AggregationWorkerReturnCode.INVALID_JOB.name()); + } + + @Test + public void withValidFilteringIds_processingSucceeds() throws Exception { + RequestInfo requestInfo = + RequestInfo.getDefaultInstance().toBuilder() + .putJobParameters("attribution_report_to", "https://foo.com") + .putJobParameters(JobUtils.JOB_PARAM_FILTERING_IDS, " ,5,6, ,, 67, ") + .build(); + Job job = createJob("test job").toBuilder().setRequestInfo(requestInfo).build(); + jobClient.setReturnConstant(job); + jobProcessor.setJobResultToReturn(createJobResult(job, RETURN_CODE_SUCCESS)); + + service.run(); + + verify(jobClient).markJobCompleted(jobResultCaptor.capture()); + assertThat(jobResultCaptor.getAllValues()).hasSize(1); + assertThat(jobResultCaptor.getValue().resultInfo().getReturnCode()) + .isEqualTo(RETURN_CODE_SUCCESS); + } + @Test public void pullJob_invalid() throws Exception { Job invalidJob = createInvalidJob("test job"); diff --git a/javatests/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessorTest.java b/javatests/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessorTest.java index 9999cb31..4c73929f 100644 --- a/javatests/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/aggregation/concurrent/ConcurrentAggregationProcessorTest.java @@ -28,6 +28,7 @@ import static com.google.aggregate.adtech.worker.aggregation.concurrent.ConcurrentAggregationProcessor.JOB_PARAM_ATTRIBUTION_REPORT_TO; import static com.google.aggregate.adtech.worker.aggregation.concurrent.ConcurrentAggregationProcessor.JOB_PARAM_DEBUG_PRIVACY_EPSILON; import static com.google.aggregate.adtech.worker.aggregation.concurrent.ConcurrentAggregationProcessor.JOB_PARAM_DEBUG_RUN; +import static com.google.aggregate.adtech.worker.aggregation.concurrent.ConcurrentAggregationProcessor.JOB_PARAM_REPORTING_SITE; import static 
com.google.aggregate.adtech.worker.model.ErrorCounter.NUM_REPORTS_WITH_ERRORS; import static com.google.aggregate.adtech.worker.model.SharedInfo.LATEST_VERSION; import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; @@ -35,6 +36,8 @@ import static com.google.aggregate.adtech.worker.util.JobResultHelper.RESULT_REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD_MESSAGE; import static com.google.aggregate.adtech.worker.util.JobResultHelper.RESULT_SUCCESS_MESSAGE; import static com.google.aggregate.adtech.worker.util.JobResultHelper.RESULT_SUCCESS_WITH_ERRORS_MESSAGE; +import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_FILTERING_IDS; +import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_INPUT_REPORT_COUNT; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_OUTPUT_DOMAIN_BLOB_PREFIX; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_OUTPUT_DOMAIN_BUCKET_NAME; import static com.google.aggregate.adtech.worker.util.JobUtils.JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE; @@ -55,11 +58,14 @@ import com.google.acai.TestScoped; import com.google.aggregate.adtech.worker.AggregationWorkerReturnCode; import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; import com.google.aggregate.adtech.worker.Annotations.DomainOptional; +import com.google.aggregate.adtech.worker.Annotations.EnablePrivacyBudgetKeyFiltering; import com.google.aggregate.adtech.worker.Annotations.EnableStackTraceInResponse; import com.google.aggregate.adtech.worker.Annotations.EnableThresholding; import com.google.aggregate.adtech.worker.Annotations.MaxDepthOfStackTrace; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; import com.google.aggregate.adtech.worker.Annotations.ReportErrorThresholdPercentage; import com.google.aggregate.adtech.worker.Annotations.StreamingOutputDomainProcessing; import com.google.aggregate.adtech.worker.ResultLogger; @@ -94,6 +100,7 @@ import com.google.aggregate.adtech.worker.testing.FakeValidator; import com.google.aggregate.adtech.worker.testing.InMemoryResultLogger; import com.google.aggregate.adtech.worker.util.NumericConversions; +import com.google.aggregate.adtech.worker.util.ReportingOriginUtils; import com.google.aggregate.adtech.worker.validation.ReportValidator; import com.google.aggregate.adtech.worker.validation.ReportVersionValidator; import com.google.aggregate.perf.StopwatchExporter; @@ -105,6 +112,7 @@ import com.google.aggregate.privacy.budgeting.bridge.PrivacyBudgetingServiceBridge.PrivacyBudgetingServiceBridgeException; import com.google.aggregate.privacy.budgeting.bridge.UnlimitedPrivacyBudgetingServiceBridge; import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorFactory; import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorModule; import com.google.aggregate.privacy.noise.Annotations.Threshold; @@ -129,11 +137,13 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.common.io.ByteSource; +import com.google.common.primitives.UnsignedLong; import 
com.google.common.util.concurrent.ListeningExecutorService; import com.google.inject.AbstractModule; import com.google.inject.Inject; import com.google.inject.Provider; import com.google.inject.Provides; +import com.google.inject.Singleton; import com.google.inject.multibindings.Multibinder; import com.google.privacysandbox.otel.OtlpJsonLoggingOTelConfigurationModule; import com.google.scp.operator.cpio.blobstorageclient.BlobStorageClient; @@ -145,9 +155,10 @@ import com.google.scp.operator.cpio.distributedprivacybudgetclient.StatusCode; import com.google.scp.operator.cpio.jobclient.model.Job; import com.google.scp.operator.cpio.jobclient.model.JobResult; -import com.google.scp.operator.cpio.jobclient.testing.FakeJobGenerator; import com.google.scp.operator.protos.shared.backend.ErrorCountProto.ErrorCount; import com.google.scp.operator.protos.shared.backend.ErrorSummaryProto.ErrorSummary; +import com.google.scp.operator.protos.shared.backend.JobKeyProto.JobKey; +import com.google.scp.operator.protos.shared.backend.JobStatusProto.JobStatus; import com.google.scp.operator.protos.shared.backend.RequestInfoProto.RequestInfo; import com.google.scp.operator.protos.shared.backend.ResultInfoProto.ResultInfo; import com.google.scp.shared.proto.ProtoUtil; @@ -161,6 +172,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.time.Clock; +import java.time.Duration; import java.time.Instant; import java.time.ZoneId; import java.util.Arrays; @@ -172,6 +184,7 @@ import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -182,6 +195,10 @@ public class ConcurrentAggregationProcessorTest { private static final Instant FIXED_TIME = Instant.parse("2021-01-01T00:00:00Z"); + private static final Instant REQUEST_RECEIVED_AT = Instant.parse("2019-10-01T08:25:24.00Z"); + private static final Instant REQUEST_PROCESSING_STARTED_AT = + Instant.parse("2019-10-01T08:29:24.00Z"); + private static final Instant REQUEST_UPDATED_AT = Instant.parse("2019-10-01T08:29:24.00Z"); @Rule public final Acai acai = new Acai(TestEnv.class); @Rule public final TemporaryFolder testWorkingDir = new TemporaryFolder(); @@ -197,6 +214,8 @@ public class ConcurrentAggregationProcessorTest { @Inject SharedInfoSerdes sharedInfoSerdes; @Inject AvroOutputDomainWriterFactory domainWriterFactory; @Inject OutputDomainProcessorHelper outputDomainProcessorHelper; + @Inject private PrivacyBudgetKeyGeneratorFactory privacyBudgetKeyGeneratorFactory; + @Inject private FeatureFlagHelper featureFlagHelper; private Path outputDomainDirectory; private Path reportsDirectory; private Path invalidReportsDirectory; @@ -218,8 +237,6 @@ public class ConcurrentAggregationProcessorTest { // Under test. 
@Inject private Provider processor; - @Inject private PrivacyBudgetKeyGeneratorFactory privacyBudgetKeyGeneratorFactory; - @Before public void setUpFlags() { outputDomainProcessorHelper.setAvroOutputDomainProcessor(true); @@ -280,7 +297,7 @@ public void setUpInputData() throws Exception { Files.createDirectory(reportsDirectory); Files.createDirectory(invalidReportsDirectory); - ctx = FakeJobGenerator.generateBuilder("foo").build(); + ctx = generateJob("foo", Optional.of("https://example.foo.com"), Optional.empty()); ctx = ctx.toBuilder() .setRequestInfo( @@ -295,7 +312,7 @@ public void setUpInputData() throws Exception { expectedJobResult = makeExpectedJobResult(); // Job context for job with invalid version input report. - ctxInvalidReport = FakeJobGenerator.generateBuilder("bar").build(); + ctxInvalidReport = generateJob("bar", Optional.of("https://example.foo.com"), Optional.empty()); ctxInvalidReport = ctxInvalidReport.toBuilder() .setRequestInfo( @@ -324,18 +341,6 @@ public void setUpInputData() throws Exception { ImmutableList.of(invalidEncryptedReport)); } - private RequestInfo getRequestInfoWithInputDataBucketName( - RequestInfo requestInfo, Path inputReportDirectory) { - Map jobParameters = new HashMap<>(requestInfo.getJobParametersMap()); - jobParameters.put("report_error_threshold_percentage", "100"); - return requestInfo.toBuilder() - .putAllJobParameters(jobParameters) - // Simulating shards of input. - .setInputDataBucketName(inputReportDirectory.toAbsolutePath().toString()) - .setInputDataBlobPrefix("") - .build(); - } - @Test public void aggregate_domainOptionalAndNoOutputDomain() throws Exception { JobResult jobResultProcessor = processor.get().process(ctx); @@ -347,6 +352,27 @@ public void aggregate_domainOptionalAndNoOutputDomain() throws Exception { AggregatedFact.create(/* bucket= */ createBucketFromInt(2), /* metric= */ 8, 8L)); } + @Test + public void aggregate_skipZeroSizedBlobs() throws Exception { + // Write an empty report. + writeReports(reportsDirectory.resolve("reports_6.avro"), ImmutableList.of()); + // Followed by two additional non-empty reports. + EncryptedReport testReport1 = generateEncryptedReport(3, String.valueOf(UUID.randomUUID())); + EncryptedReport testReport2 = generateEncryptedReport(4, String.valueOf(UUID.randomUUID())); + writeReports( + reportsDirectory.resolve("reports_7.avro"), ImmutableList.of(testReport1, testReport2)); + JobResult jobResultProcessor = processor.get().process(ctx); + + // Check if empty report is skipped and output is processed without errors. 
+ assertThat(jobResultProcessor).isEqualTo(expectedJobResult); + assertThat(resultLogger.getMaterializedAggregationResults().getMaterializedAggregations()) + .containsExactly( + AggregatedFact.create(/* bucket= */ createBucketFromInt(1), /* metric= */ 2, 2L), + AggregatedFact.create(/* bucket= */ createBucketFromInt(2), /* metric= */ 8, 8L), + AggregatedFact.create(/* bucket= */ createBucketFromInt(3), /* metric= */ 9, 9L), + AggregatedFact.create(/* bucket= */ createBucketFromInt(4), /* metric= */ 16, 16L)); + } + @Test public void aggregate_invalidVersionReport() { AggregationJobProcessException ex = @@ -371,6 +397,23 @@ public void aggregate_noOutputDomain_thresholding() throws Exception { AggregatedFact.create(/* bucket= */ createBucketFromInt(2), /* metric= */ 5, 8L)); } + @Test + public void aggregate_reportingSiteProvided() throws Exception { + ctx = generateJob("foo", Optional.empty(), Optional.of("https://foo.com")); + ctx = + ctx.toBuilder() + .setRequestInfo( + getRequestInfoWithInputDataBucketName(ctx.requestInfo(), reportsDirectory)) + .build(); + JobResult jobResultProcessor = processor.get().process(ctx); + + assertThat(jobResultProcessor).isEqualTo(expectedJobResult); + assertThat(resultLogger.getMaterializedAggregationResults().getMaterializedAggregations()) + .containsExactly( + AggregatedFact.create(/* bucket= */ createBucketFromInt(1), /* metric= */ 2, 2L), + AggregatedFact.create(/* bucket= */ createBucketFromInt(2), /* metric= */ 8, 8L)); + } + @Test public void aggregate_withOutputDomain_overlappingDomainKeysInResults() throws Exception { outputDomainProcessorHelper.setDomainOptional(false); @@ -577,14 +620,14 @@ public void aggregate_debugRunDomainOptional_resultsInSameDebugFacts() throws Ex assertThat(jobResultProcessor).isEqualTo(makeExpectedJobResult()); Map resultFacts = resultLogger.getMaterializedAggregationResults().getMaterializedAggregations().stream() - .collect(Collectors.toMap(AggregatedFact::bucket, Function.identity())); + .collect(Collectors.toMap(AggregatedFact::getBucket, Function.identity())); assertThat(resultFacts).hasSize(3); assertThat(resultLogger.getMaterializedDebugAggregationResults().getMaterializedAggregations()) .hasSize(3); Map debugFacts = resultLogger.getMaterializedDebugAggregationResults().getMaterializedAggregations().stream() - .collect(Collectors.toMap(AggregatedFact::bucket, Function.identity())); + .collect(Collectors.toMap(AggregatedFact::getBucket, Function.identity())); assertThat(debugFacts).hasSize(3); // Key 1 is in the report only, key 2 overlaps both sets, and key 3 is in the domain only. @@ -633,12 +676,12 @@ public void aggregate_debugRunWithOutputDomain_resultsInSameDebugFacts() throws Map resultFacts = resultLogger.getMaterializedAggregationResults().getMaterializedAggregations().stream() - .collect(Collectors.toMap(AggregatedFact::bucket, Function.identity())); + .collect(Collectors.toMap(AggregatedFact::getBucket, Function.identity())); assertThat(resultFacts).hasSize(2); Map debugFacts = resultLogger.getMaterializedDebugAggregationResults().getMaterializedAggregations().stream() - .collect(Collectors.toMap(AggregatedFact::bucket, Function.identity())); + .collect(Collectors.toMap(AggregatedFact::getBucket, Function.identity())); assertThat(debugFacts).hasSize(3); // Key 2 is in both domain and reports; key 3 is in the domain only. @@ -653,8 +696,8 @@ public void aggregate_debugRunWithOutputDomain_resultsInSameDebugFacts() throws // Key 1 is in the report only but should be present in the debug facts. 
assertThat(resultFacts).doesNotContainKey(createBucketFromInt(1)); assertThat(debugFacts).containsKey(createBucketFromInt(1)); - assertThat(debugFacts.get(createBucketFromInt(1)).debugAnnotations()).isPresent(); - assertThat(debugFacts.get(createBucketFromInt(1)).debugAnnotations().get()) + assertThat(debugFacts.get(createBucketFromInt(1)).getDebugAnnotations()).isPresent(); + assertThat(debugFacts.get(createBucketFromInt(1)).getDebugAnnotations().get()) .containsExactly(DebugBucketAnnotation.IN_REPORTS); } @@ -1011,6 +1054,53 @@ public void process_inputReadFailedCodeWhenBadShardThrows() throws Exception { assertThat(ex.getMessage()).contains("Exception while reading reports input data."); } + @Test + public void process_withEmptyReportsAndDomainOptional_returnsSuccess() throws Exception { + outputDomainProcessorHelper.setDomainOptional(true); + Path pathToEmptyReports = testWorkingDir.getRoot().toPath().resolve("empty_reports_dir"); + Files.createDirectory(pathToEmptyReports); + writeReports(pathToEmptyReports.resolve("reports_1.avro"), ImmutableList.of()); + + Job emptyReportsCtx = + ctx.toBuilder() + .setRequestInfo( + getRequestInfoWithInputDataBucketName(ctx.requestInfo(), pathToEmptyReports)) + .build(); + + JobResult result = processor.get().process(emptyReportsCtx); + assertThat(result.resultInfo().getReturnCode()).contains("SUCCESS"); + assertThat(resultLogger.getMaterializedAggregationResults().getMaterializedAggregations()) + .isEmpty(); + } + + @Test + public void process_withEmptyReportsWithDomain_returnsNoisedDomain() throws Exception { + outputDomainProcessorHelper.setDomainOptional(false); + fakeNoiseApplierSupplier.setFakeNoiseApplier(new ConstantNoiseApplier(10)); + + Path dirToEmptyReports = testWorkingDir.getRoot().toPath().resolve("empty_reports_dir"); + Files.createDirectory(dirToEmptyReports); + + writeReports(dirToEmptyReports.resolve("reports_1.avro"), ImmutableList.of()); + writeOutputDomainAvroFile(outputDomainDirectory.resolve("output_domain_1.avro"), "3"); + + ctx = addOutputDomainToJob(); + Job emptyReportsCtx = + ctx.toBuilder() + .setRequestInfo( + getRequestInfoWithInputDataBucketName(ctx.requestInfo(), dirToEmptyReports)) + .build(); + + JobResult result = processor.get().process(emptyReportsCtx); + assertThat(result.resultInfo().getReturnCode()).contains("SUCCESS"); + + // No reports and one key specified in the domain so a single aggregated fact is expected. 
+ assertThat(resultLogger.getMaterializedAggregationResults().getMaterializedAggregations()) + .containsExactly( + AggregatedFact.create( + /* bucket= */ createBucketFromInt(3), /* metric= */ 10, /* unnoisedMetric= */ 0L)); + } + @Test public void process_outputWriteFailedCodeWhenResultLoggerThrows() { resultLogger.setShouldThrow(true); @@ -1058,8 +1148,8 @@ public void process_decryptionKeyFetchFailedOtherReasons() throws Exception { .addAllErrorCounts( ImmutableList.of( ErrorCount.newBuilder() - .setCategory(ErrorCounter.SERVICE_ERROR.name()) - .setDescription(ErrorCounter.SERVICE_ERROR.getDescription()) + .setCategory(ErrorCounter.INTERNAL_ERROR.name()) + .setDescription(ErrorCounter.INTERNAL_ERROR.getDescription()) .setCount(4L) .build(), ErrorCount.newBuilder() @@ -1141,6 +1231,76 @@ public void process_errorCountExceedsThreshold_quitsEarly() throws Exception { assertFalse(resultLogger.hasLogged()); } + @Test + public void process_withInputReportCountInRequest_errorCountExceedsThreshold_quitsEarly() + throws Exception { + ImmutableList encryptedReports1 = + ImmutableList.of( + generateEncryptedReport(1, reportId1), + generateEncryptedReport(2, reportId2), + generateEncryptedReport(3, String.valueOf(UUID.randomUUID())), + generateEncryptedReport(4, String.valueOf(UUID.randomUUID())), + generateEncryptedReport(5, reportId3)); + ImmutableList encryptedReports2 = + ImmutableList.of( + generateEncryptedReport(6, reportId4), + generateEncryptedReport(7, String.valueOf(UUID.randomUUID())), + generateEncryptedReport(8, String.valueOf(UUID.randomUUID())), + generateEncryptedReport(9, String.valueOf(UUID.randomUUID())), + generateEncryptedReport(10, String.valueOf(UUID.randomUUID()))); + writeReports(reportsDirectory.resolve("reports_1.avro"), encryptedReports1); + writeReports(reportsDirectory.resolve("reports_2.avro"), encryptedReports2); + FakePrivacyBudgetingServiceBridge fakePrivacyBudgetingServiceBridge = + new FakePrivacyBudgetingServiceBridge(); + privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( + fakePrivacyBudgetingServiceBridge); + fakeValidator.setReportIdShouldReturnError( + ImmutableSet.of(reportId1, reportId2, reportId3, reportId4)); + ImmutableMap jobParams = + ImmutableMap.of( + JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE, "20", JOB_PARAM_INPUT_REPORT_COUNT, "10"); + ctx = + ctx.toBuilder() + .setRequestInfo( + ctx.requestInfo().toBuilder() + .putAllJobParameters( + combineJobParams(ctx.requestInfo().getJobParametersMap(), jobParams)) + .build()) + .build(); + + JobResult actualJobResult = processor.get().process(ctx); + + // Job quits on error count 4 > threshold 2 (20% threshold of 10 reports) + JobResult expectedJobResult = + this.expectedJobResult.toBuilder() + .setResultInfo( + resultInfoBuilder + .setReturnCode( + AggregationWorkerReturnCode.REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD.name()) + .setReturnMessage(RESULT_REPORTS_WITH_ERRORS_EXCEEDED_THRESHOLD_MESSAGE) + .setErrorSummary( + ErrorSummary.newBuilder() + .addAllErrorCounts( + ImmutableList.of( + ErrorCount.newBuilder() + .setCategory(ErrorCounter.DECRYPTION_ERROR.name()) + .setDescription( + ErrorCounter.DECRYPTION_ERROR.getDescription()) + .setCount(4L) + .build(), + ErrorCount.newBuilder() + .setCategory(ErrorCounter.NUM_REPORTS_WITH_ERRORS.name()) + .setDescription(NUM_REPORTS_WITH_ERRORS.getDescription()) + .setCount(4L) + .build())) + .build()) + .build()) + .build(); + assertThat(actualJobResult).isEqualTo(expectedJobResult); + 
assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent()).isEmpty(); + assertFalse(resultLogger.hasLogged()); + } + @Test public void process_errorCountWithinThreshold_succeedsWithErrors() throws Exception { ImmutableList encryptedReports1 = @@ -1163,7 +1323,8 @@ public void process_errorCountWithinThreshold_succeedsWithErrors() throws Except fakeValidator.setReportIdShouldReturnError( ImmutableSet.of(reportId1, reportId2, reportId3, reportId4)); ImmutableMap jobParams = - ImmutableMap.of(JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE, "50.0"); + ImmutableMap.of( + JOB_PARAM_REPORT_ERROR_THRESHOLD_PERCENTAGE, "50.0", JOB_PARAM_INPUT_REPORT_COUNT, ""); ctx = ctx.toBuilder() .setRequestInfo( @@ -1220,7 +1381,9 @@ public void process_errorCountWithinThreshold_succeedsWithErrors() throws Except } @Test - public void process_withNoQueriedFilteringId() throws Exception { + public void process_withNoQueriedFilteringId_filteringNotEnabled_queries0OrNullIds() + throws Exception { + featureFlagHelper.setEnablePrivacyBudgetKeyFiltering(false); Fact factWithoutId = Fact.builder().setBucket(new BigInteger("11111")).setValue(11).build(); Fact nullFact1 = Fact.builder().setBucket(new BigInteger("0")).setValue(0).build(); EncryptedReport reportWithoutId = @@ -1229,10 +1392,19 @@ public void process_withNoQueriedFilteringId() throws Exception { ImmutableList.of(factWithoutId, nullFact1), VERSION_0_1)); Fact factWithDefaultId = - Fact.builder().setBucket(new BigInteger("11111")).setValue(11).setId(0).build(); + Fact.builder() + .setBucket(new BigInteger("11111")) + .setValue(11) + .setId(UnsignedLong.ZERO) + .build(); Fact factWithId = - Fact.builder().setBucket(new BigInteger("33333")).setValue(33).setId(12).build(); - Fact nullFact2 = Fact.builder().setBucket(new BigInteger("0")).setValue(0).setId(0).build(); + Fact.builder() + .setBucket(new BigInteger("33333")) + .setValue(33) + .setId(UnsignedLong.valueOf(12)) + .build(); + Fact nullFact2 = + Fact.builder().setBucket(new BigInteger("0")).setValue(0).setId(UnsignedLong.ZERO).build(); EncryptedReport reportWithIds = getEncryptedReport( FakeReportGenerator.generateWithFactList( @@ -1251,10 +1423,249 @@ public void process_withNoQueriedFilteringId() throws Exception { /* unnoisedMetric= */ 22L)); } + @Test + public void process_withQueriedFilteringId_filteringNotEnabled_queries0OrNullIds() + throws Exception { + featureFlagHelper.setEnablePrivacyBudgetKeyFiltering(false); + Fact factWithoutId = Fact.builder().setBucket(new BigInteger("11111")).setValue(11).build(); + Fact nullFact1 = Fact.builder().setBucket(new BigInteger("0")).setValue(0).build(); + EncryptedReport reportWithoutId = + getEncryptedReport( + FakeReportGenerator.generateWithFactList( + ImmutableList.of(factWithoutId, nullFact1), VERSION_0_1)); + + Fact factWithDefaultId = + Fact.builder() + .setBucket(new BigInteger("11111")) + .setValue(11) + .setId(UnsignedLong.ZERO) + .build(); + Fact factWithId = + Fact.builder() + .setBucket(new BigInteger("33333")) + .setValue(33) + .setId(UnsignedLong.valueOf(12)) + .build(); + Fact nullFact2 = + Fact.builder().setBucket(new BigInteger("0")).setValue(0).setId(UnsignedLong.ZERO).build(); + EncryptedReport reportWithIds = + getEncryptedReport( + FakeReportGenerator.generateWithFactList( + ImmutableList.of(factWithDefaultId, factWithId, nullFact2), "1.0")); + + writeReports(reportsDirectory.resolve("reports_1.avro"), ImmutableList.of(reportWithoutId)); + writeReports(reportsDirectory.resolve("reports_2.avro"), 
ImmutableList.of(reportWithIds)); + + ImmutableMap jobParams = ImmutableMap.of(JOB_PARAM_FILTERING_IDS, "12"); + ctx = + ctx.toBuilder() + .setRequestInfo( + ctx.requestInfo().toBuilder() + .putAllJobParameters( + combineJobParams(ctx.requestInfo().getJobParametersMap(), jobParams)) + .build()) + .build(); + processor.get().process(ctx); + + // Even though the job queries for the id = 12, the aggregation is done for id = 0 or null since + // the feature flag is not enabled. + assertThat(resultLogger.getMaterializedAggregationResults().getMaterializedAggregations()) + .containsExactly( + AggregatedFact.create( + /* bucket= */ new BigInteger("11111"), + /* metric= */ 22, + /* unnoisedMetric= */ 22L)); + } + + @Test + public void process_withQueriedFilteringId_filteringEnabled_filtersForTheGivenIds() + throws Exception { + featureFlagHelper.setEnablePrivacyBudgetKeyFiltering(true); + Fact factWithoutId = Fact.builder().setBucket(new BigInteger("11111")).setValue(11).build(); + Fact nullFact1 = Fact.builder().setBucket(new BigInteger("0")).setValue(0).build(); + EncryptedReport reportWithoutId = + getEncryptedReport( + FakeReportGenerator.generateWithFactList( + ImmutableList.of(factWithoutId, nullFact1), VERSION_0_1)); + // Aggregation is done only for contributions corresponding to ids = 12, 13. + Fact factWithDefaultId = + Fact.builder() + .setBucket(new BigInteger("11111")) + .setValue(11) + .setId(UnsignedLong.ZERO) + .build(); + Fact factWithId = + Fact.builder() + .setBucket(new BigInteger("33333")) + .setValue(33) + .setId(UnsignedLong.valueOf(12)) + .build(); + Fact nullFact2 = + Fact.builder().setBucket(new BigInteger("0")).setValue(0).setId(UnsignedLong.ZERO).build(); + EncryptedReport reportWithIds = + getEncryptedReport( + FakeReportGenerator.generateWithFactList( + ImmutableList.of(factWithDefaultId, factWithId, nullFact2), "1.0")); + + writeReports(reportsDirectory.resolve("reports_1.avro"), ImmutableList.of(reportWithoutId)); + writeReports(reportsDirectory.resolve("reports_2.avro"), ImmutableList.of(reportWithIds)); + // Privacy budget is consumed for 13 as well even though there are no contributions with this + // id. 
+ ImmutableSet expectedPrivacyBudgetUnits = + ImmutableSet.of( + getPrivacyBudgetUnit(reportWithoutId, /* filteringIds= */ UnsignedLong.valueOf(12)), + getPrivacyBudgetUnit(reportWithIds, /* filteringIds= */ UnsignedLong.valueOf(12)), + getPrivacyBudgetUnit(reportWithoutId, /* filteringIds= */ UnsignedLong.valueOf(13)), + getPrivacyBudgetUnit(reportWithIds, /* filteringIds= */ UnsignedLong.valueOf(13))); + FakePrivacyBudgetingServiceBridge fakePrivacyBudgetingServiceBridge = + new FakePrivacyBudgetingServiceBridge(); + expectedPrivacyBudgetUnits.forEach( + pbu -> fakePrivacyBudgetingServiceBridge.setPrivacyBudget(pbu, /* budget= */ 1)); + privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( + fakePrivacyBudgetingServiceBridge); + + ImmutableMap jobParams = ImmutableMap.of(JOB_PARAM_FILTERING_IDS, "12,13"); + ctx = + ctx.toBuilder() + .setRequestInfo( + ctx.requestInfo().toBuilder() + .putAllJobParameters( + combineJobParams(ctx.requestInfo().getJobParametersMap(), jobParams)) + .build()) + .build(); + processor.get().process(ctx); + + assertThat(resultLogger.getMaterializedAggregationResults().getMaterializedAggregations()) + .containsExactly( + AggregatedFact.create( + /* bucket= */ new BigInteger("33333"), + /* metric= */ 33, + /* unnoisedMetric= */ 33L)); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent()).isPresent(); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent().get()) + .containsExactlyElementsIn(expectedPrivacyBudgetUnits); + } + + @Test + public void process_withConsecutiveJobsAndSameFilteringIds_throwsPrivacyExhausted() + throws Exception { + featureFlagHelper.setEnablePrivacyBudgetKeyFiltering(true); + Fact factWithoutId1 = Fact.builder().setBucket(new BigInteger("11111")).setValue(11).build(); + EncryptedReport reportWithoutId = + getEncryptedReport( + FakeReportGenerator.generateWithFactList( + ImmutableList.of(factWithoutId1), VERSION_0_1)); + + Fact factWithId = + Fact.builder() + .setBucket(new BigInteger("22222")) + .setValue(11) + .setId(UnsignedLong.valueOf(12)) + .build(); + EncryptedReport reportWithIds = + getEncryptedReport( + FakeReportGenerator.generateWithFactList(ImmutableList.of(factWithId), "1.0")); + writeReports(reportsDirectory.resolve("reports_1.avro"), ImmutableList.of(reportWithoutId)); + writeReports(reportsDirectory.resolve("reports_2.avro"), ImmutableList.of(reportWithIds)); + + UnsignedLong filteringIdJob = UnsignedLong.valueOf(1963698); + + ImmutableSet expectedPrivacyBudgetUnitsJobs = + ImmutableSet.of( + getPrivacyBudgetUnit(reportWithoutId, filteringIdJob), + getPrivacyBudgetUnit(reportWithIds, filteringIdJob)); + FakePrivacyBudgetingServiceBridge fakePrivacyBudgetingServiceBridge = + new FakePrivacyBudgetingServiceBridge(); + expectedPrivacyBudgetUnitsJobs.stream() + .forEach(pbu -> fakePrivacyBudgetingServiceBridge.setPrivacyBudget(pbu, /* budget= */ 1)); + privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( + fakePrivacyBudgetingServiceBridge); + + Job job = + getJobWithGivenJobParams( + /* jobParams= */ ImmutableMap.of(JOB_PARAM_FILTERING_IDS, filteringIdJob.toString())); + processor.get().process(job); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent()).isPresent(); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent().get()) + .containsExactlyElementsIn(expectedPrivacyBudgetUnitsJobs); + + AggregationJobProcessException ex = + assertThrows(AggregationJobProcessException.class, () -> 
processor.get().process(job)); + assertThat(ex.getCode()).isEqualTo(PRIVACY_BUDGET_EXHAUSTED); + } + + @Test + public void process_withConsecutiveJobsAndDifferentFilteringIds_budgetingSucceeds() + throws Exception { + featureFlagHelper.setEnablePrivacyBudgetKeyFiltering(true); + Fact factWithoutId1 = Fact.builder().setBucket(new BigInteger("11111")).setValue(11).build(); + Fact factWithoutId2 = Fact.builder().setBucket(new BigInteger("11111")).setValue(22).build(); + Fact nullFact1 = Fact.builder().setBucket(new BigInteger("0")).setValue(0).build(); + EncryptedReport reportWithoutId = + getEncryptedReport( + FakeReportGenerator.generateWithFactList( + ImmutableList.of(factWithoutId1, factWithoutId2, nullFact1), VERSION_0_1)); + + Fact factWithDefaultId = + Fact.builder() + .setBucket(new BigInteger("22222")) + .setValue(11) + .setId(UnsignedLong.ZERO) + .build(); + Fact factWithId = + Fact.builder() + .setBucket(new BigInteger("22222")) + .setValue(11) + .setId(UnsignedLong.valueOf(12)) + .build(); + EncryptedReport reportWithIds = + getEncryptedReport( + FakeReportGenerator.generateWithFactList( + ImmutableList.of(factWithDefaultId, factWithId), "1.0")); + writeReports(reportsDirectory.resolve("reports_1.avro"), ImmutableList.of(reportWithoutId)); + writeReports(reportsDirectory.resolve("reports_2.avro"), ImmutableList.of(reportWithIds)); + + UnsignedLong filteringIdJob1 = UnsignedLong.ZERO; + UnsignedLong filteringIdJob2 = UnsignedLong.valueOf(12); + + ImmutableSet expectedPrivacyBudgetUnitsJob1 = + ImmutableSet.of( + getPrivacyBudgetUnit(reportWithoutId, filteringIdJob1), + getPrivacyBudgetUnit(reportWithIds, filteringIdJob1)); + ImmutableSet expectedPrivacyBudgetUnitsJob2 = + ImmutableSet.of( + getPrivacyBudgetUnit(reportWithoutId, filteringIdJob2), + getPrivacyBudgetUnit(reportWithIds, filteringIdJob2)); + FakePrivacyBudgetingServiceBridge fakePrivacyBudgetingServiceBridge = + new FakePrivacyBudgetingServiceBridge(); + Stream.concat(expectedPrivacyBudgetUnitsJob1.stream(), expectedPrivacyBudgetUnitsJob2.stream()) + .forEach(pbu -> fakePrivacyBudgetingServiceBridge.setPrivacyBudget(pbu, /* budget= */ 1)); + privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( + fakePrivacyBudgetingServiceBridge); + + Job job1 = + getJobWithGivenJobParams( + /* jobParams= */ ImmutableMap.of(JOB_PARAM_FILTERING_IDS, filteringIdJob1.toString())); + processor.get().process(job1); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent()).isPresent(); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent().get()) + .containsExactlyElementsIn(expectedPrivacyBudgetUnitsJob1); + + Job job2 = + getJobWithGivenJobParams( + /* jobParams= */ ImmutableMap.of(JOB_PARAM_FILTERING_IDS, filteringIdJob2.toString())); + processor.get().process(job2); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent()).isPresent(); + assertThat(fakePrivacyBudgetingServiceBridge.getLastBudgetsToConsumeSent().get()) + .containsExactlyElementsIn(expectedPrivacyBudgetUnitsJob2); + } + @Test public void processingWithWrongSharedInfo() throws Exception { String keyId = UUID.randomUUID().toString(); - Report report = FakeReportGenerator.generateWithParam(1, /* reportVersion */ LATEST_VERSION); + Report report = + FakeReportGenerator.generateWithParam( + 1, /* reportVersion */ LATEST_VERSION, "https://example.foo.com"); // Encrypt with a different sharedInfo than what is provided with the report so that decryption // fails String sharedInfoForEncryption = 
"foobarbaz"; @@ -1333,8 +1744,11 @@ public void aggregate_withPrivacyBudgeting() throws Exception { AggregatedFact.create( /* bucket= */ createBucketFromInt(2), /* metric= */ 8, /* unnoisedMetric= */ 8L)); // Check that the right attributionReportTo and debugPrivacyBudgetLimit were sent to the bridge + String claimedIdentity = + ReportingOriginUtils.convertReportingOriginToSite( + ctx.requestInfo().getJobParametersMap().get(JOB_PARAM_ATTRIBUTION_REPORT_TO)); assertThat(fakePrivacyBudgetingServiceBridge.getLastAttributionReportToSent()) - .hasValue(ctx.requestInfo().getJobParametersMap().get(JOB_PARAM_ATTRIBUTION_REPORT_TO)); + .hasValue(claimedIdentity); } @Test @@ -1369,12 +1783,38 @@ public void aggregate_withPrivacyBudgeting_unauthorizedException_failJob() { assertThat(ex.getCode()).isEqualTo(PRIVACY_BUDGET_AUTHORIZATION_ERROR); } + @Test + public void aggregate_withPrivacyBudgeting_invalidReportingOriginException_failJob() { + FakePrivacyBudgetingServiceBridge fakePrivacyBudgetingServiceBridge = + new FakePrivacyBudgetingServiceBridge(); + + Map jobParameters = new HashMap<>(ctx.requestInfo().getJobParametersMap()); + jobParameters.put(JOB_PARAM_ATTRIBUTION_REPORT_TO, "https://subdomain.coordinator.test"); + ctx = + ctx.toBuilder() + .setRequestInfo( + ctx.requestInfo().toBuilder() + .putAllJobParameters( + combineJobParams(ctx.requestInfo().getJobParametersMap(), jobParameters)) + .build()) + .build(); + privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( + fakePrivacyBudgetingServiceBridge); + + IllegalStateException ex = + assertThrows(IllegalStateException.class, () -> processor.get().process(ctx)); + assertThat(ex.getMessage()) + .isEqualTo( + "Invalid reporting origin found while consuming budget, this should not happen as job" + + " validations ensure the reporting origin is always valid."); + } + @Test public void aggregate_withPrivacyBudgeting_oneBudgetMissing() { FakePrivacyBudgetingServiceBridge fakePrivacyBudgetingServiceBridge = new FakePrivacyBudgetingServiceBridge(); fakePrivacyBudgetingServiceBridge.setPrivacyBudget( - PrivacyBudgetUnit.create("1", Instant.ofEpochMilli(0)), 1); + PrivacyBudgetUnit.create("1", Instant.ofEpochMilli(0), "https://example.foo.com"), 1); // Missing budget for the second report. privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( fakePrivacyBudgetingServiceBridge); @@ -1445,7 +1885,7 @@ public void aggregate_withDebugRunAndPrivacyBudgetFailure_succeedsWithErrorCode( // Privacy Budget failure via thrown exception fakePrivacyBudgetingServiceBridge.setShouldThrow(); fakePrivacyBudgetingServiceBridge.setPrivacyBudget( - PrivacyBudgetUnit.create("1", Instant.ofEpochMilli(0)), 1); + PrivacyBudgetUnit.create("1", Instant.ofEpochMilli(0), "https://example.foo.com"), 1); // Missing budget for the second report. privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( fakePrivacyBudgetingServiceBridge); @@ -1475,7 +1915,7 @@ public void aggregateDebug_withPrivacyBudgetExhausted() throws Exception { FakePrivacyBudgetingServiceBridge fakePrivacyBudgetingServiceBridge = new FakePrivacyBudgetingServiceBridge(); fakePrivacyBudgetingServiceBridge.setPrivacyBudget( - PrivacyBudgetUnit.create("1", Instant.ofEpochMilli(0)), 1); + PrivacyBudgetUnit.create("1", Instant.ofEpochMilli(0), "https://example.foo.com"), 1); // Missing budget for the second report. 
privacyBudgetingServiceBridge.setPrivacyBudgetingServiceBridgeImpl( fakePrivacyBudgetingServiceBridge); @@ -1488,6 +1928,18 @@ public void aggregateDebug_withPrivacyBudgetExhausted() throws Exception { .isEqualTo(AggregationWorkerReturnCode.DEBUG_SUCCESS_WITH_PRIVACY_BUDGET_EXHAUSTED.name()); } + private RequestInfo getRequestInfoWithInputDataBucketName( + RequestInfo requestInfo, Path inputReportDirectory) { + Map jobParameters = new HashMap<>(requestInfo.getJobParametersMap()); + jobParameters.put("report_error_threshold_percentage", "100"); + return requestInfo.toBuilder() + .putAllJobParameters(jobParameters) + // Simulating shards of input. + .setInputDataBucketName(inputReportDirectory.toAbsolutePath().toString()) + .setInputDataBlobPrefix("") + .build(); + } + private void compareDebugFactByKey( Map resultFacts, Map debugFacts, @@ -1496,18 +1948,18 @@ private void compareDebugFactByKey( assertThat(resultFacts).containsKey(key); assertThat(debugFacts).containsKey(key); compareDebugFact(resultFacts.get(key), debugFacts.get(key)); - assertThat(debugFacts.get(key).debugAnnotations()).isPresent(); - assertThat(debugFacts.get(key).debugAnnotations().get()) + assertThat(debugFacts.get(key).getDebugAnnotations()).isPresent(); + assertThat(debugFacts.get(key).getDebugAnnotations().get()) .containsExactlyElementsIn(expectedAnnotation); } private void compareDebugFact(AggregatedFact resultFact, AggregatedFact debugFact) { - assertEquals(resultFact.bucket(), debugFact.bucket()); - assertEquals(resultFact.metric(), debugFact.metric()); + assertEquals(resultFact.getBucket(), debugFact.getBucket()); + assertEquals(resultFact.getMetric(), debugFact.getMetric()); - assertThat(resultFact.unnoisedMetric()).isPresent(); - assertThat(debugFact.unnoisedMetric()).isPresent(); - assertEquals(resultFact.unnoisedMetric().get(), debugFact.unnoisedMetric().get()); + assertThat(resultFact.getUnnoisedMetric()).isPresent(); + assertThat(debugFact.getUnnoisedMetric()).isPresent(); + assertEquals(resultFact.getUnnoisedMetric().get(), debugFact.getUnnoisedMetric().get()); } private void writeOutputDomainTextFile(Path outputDomainPath, String... 
keys) throws IOException { @@ -1535,17 +1987,31 @@ private JobResult makeExpectedJobResult() { } private PrivacyBudgetUnit getPrivacyBudgetUnit(EncryptedReport encryptedReport) { + return getPrivacyBudgetUnit( + encryptedReport, + /** filteringId = */ + UnsignedLong.ZERO); + } + + private PrivacyBudgetUnit getPrivacyBudgetUnit( + EncryptedReport encryptedReport, UnsignedLong filteringId) { SharedInfo sharedInfo = sharedInfoSerdes.convert(encryptedReport.sharedInfo()).get(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder() + .setFilteringId(filteringId) + .setSharedInfo(sharedInfo) + .build(); PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo).get(); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput).get(); PrivacyBudgetUnit privacyBudgetUnit = PrivacyBudgetUnit.create( privacyBudgetKeyGenerator.generatePrivacyBudgetKey( PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder() - .setFilteringId(0) + .setFilteringId(filteringId) .setSharedInfo(sharedInfo) .build()), - Instant.ofEpochMilli(0)); + Instant.ofEpochMilli(0), + sharedInfo.reportingOrigin()); return privacyBudgetUnit; } @@ -1582,6 +2048,16 @@ private Job addOutputDomainToJob() { .build(); } + private Job getJobWithGivenJobParams(ImmutableMap jobParams) { + return ctx.toBuilder() + .setRequestInfo( + ctx.requestInfo().toBuilder() + .putAllJobParameters( + combineJobParams(ctx.requestInfo().getJobParametersMap(), jobParams)) + .build()) + .build(); + } + /** * Proxy implementation for the privacy budgeting service that passes the call to the wrapped * budgeting bridge: this enables the testing to dynamically swap out implementations instead of @@ -1642,7 +2118,54 @@ boolean isAvroOutputDomainProcessor() { } } - // TODO: these setup steps could be consolidated with the SimpleAggregationProcessorTest TestEnv. 
+ private static class FeatureFlagHelper { + + boolean enablePrivacyBudgetKeyFiltering = true; + + void setEnablePrivacyBudgetKeyFiltering(boolean enablePrivacyBudgetKeyFiltering) { + this.enablePrivacyBudgetKeyFiltering = enablePrivacyBudgetKeyFiltering; + } + } + + public static Job generateJob( + String id, Optional attributionReportTo, Optional reportingSite) { + if (attributionReportTo.isEmpty() && reportingSite.isEmpty()) { + throw new RuntimeException( + "At least one of attributionReportTo and reportingSite should be provided"); + } + RequestInfo.Builder requestInfoBuilder = + RequestInfo.newBuilder() + .setJobRequestId(id) + .setInputDataBlobPrefix("dataHandle") + .setInputDataBucketName("bucket") + .setOutputDataBlobPrefix("dataHandle") + .setOutputDataBucketName("bucket") + .setPostbackUrl("http://postback.com"); + RequestInfo requestInfo; + if (attributionReportTo.isPresent()) { + requestInfo = + requestInfoBuilder + .putAllJobParameters( + ImmutableMap.of(JOB_PARAM_ATTRIBUTION_REPORT_TO, attributionReportTo.get())) + .build(); + } else { + requestInfo = + requestInfoBuilder + .putAllJobParameters(ImmutableMap.of(JOB_PARAM_REPORTING_SITE, reportingSite.get())) + .build(); + } + return Job.builder() + .setJobKey(JobKey.newBuilder().setJobRequestId(id).build()) + .setJobProcessingTimeout(Duration.ofSeconds(3600)) + .setRequestInfo(requestInfo) + .setCreateTime(REQUEST_RECEIVED_AT) + .setUpdateTime(REQUEST_UPDATED_AT) + .setProcessingStartTime(Optional.of(REQUEST_PROCESSING_STARTED_AT)) + .setJobStatus(JobStatus.IN_PROGRESS) + .setNumAttempts(0) + .build(); + } + private static final class TestEnv extends AbstractModule { OutputDomainProcessorHelper helper = new OutputDomainProcessorHelper(); @@ -1672,6 +2195,7 @@ protected void configure() { // noising bind(FakeNoiseApplierSupplier.class).in(TestScoped.class); bind(NoisedAggregationRunner.class).to(NoisedAggregationRunnerImpl.class); + bind(boolean.class).annotatedWith(ParallelAggregatedFactNoising.class).toInstance(true); // loggers. bind(InMemoryResultLogger.class).in(TestScoped.class); @@ -1699,6 +2223,8 @@ protected void configure() { bind(Integer.class).annotatedWith(MaxDepthOfStackTrace.class).toInstance(3); bind(double.class).annotatedWith(ReportErrorThresholdPercentage.class).toInstance(10.0); bind(OutputDomainProcessorHelper.class).toInstance(helper); + + bind(FeatureFlagHelper.class).toInstance(new FeatureFlagHelper()); } @Provides @@ -1730,13 +2256,13 @@ OutputDomainProcessor provideDomainProcess( @DomainOptional Boolean domainOptional) { return helper.isAvroOutputDomainProcessor() ? 
new AvroOutputDomainProcessor( - blockingThreadPool, - nonBlockingThreadPool, - blobStorageClient, - avroOutputDomainReaderFactory, - stopwatchRegistry, - domainOptional, - enableThresholding) + blockingThreadPool, + nonBlockingThreadPool, + blobStorageClient, + avroOutputDomainReaderFactory, + stopwatchRegistry, + domainOptional, + enableThresholding) : new TextOutputDomainProcessor( blockingThreadPool, nonBlockingThreadPool, @@ -1774,20 +2300,35 @@ Ticker provideTimingTicker() { } @Provides + @Singleton @NonBlockingThreadPool ListeningExecutorService provideNonBlockingThreadPool() { return newDirectExecutorService(); } @Provides + @Singleton @BlockingThreadPool ListeningExecutorService provideBlockingThreadPool() { return newDirectExecutorService(); } + @Provides + @Singleton + @CustomForkJoinThreadPool + ListeningExecutorService provideCustomForkJoinThreadPool() { + return newDirectExecutorService(); + } + @Provides AggregationEngine provideAggregationEngine(AggregationEngineFactory aggregationEngineFactory) { return aggregationEngineFactory.create(); } + + @Provides + @EnablePrivacyBudgetKeyFiltering + Boolean provideEnableBudgetKeyFiltering(FeatureFlagHelper featureFlagHelper) { + return featureFlagHelper.enablePrivacyBudgetKeyFiltering; + } } } diff --git a/javatests/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessorTest.java b/javatests/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessorTest.java index 6b3caa86..7f44dbc7 100644 --- a/javatests/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/aggregation/domain/AvroOutputDomainProcessorTest.java @@ -25,11 +25,32 @@ import static org.junit.Assert.assertThrows; import com.google.acai.Acai; +import com.google.acai.TestScoped; import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; import com.google.aggregate.adtech.worker.Annotations.DomainOptional; import com.google.aggregate.adtech.worker.Annotations.EnableThresholding; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; +import com.google.aggregate.adtech.worker.aggregation.engine.AggregationEngine; +import com.google.aggregate.adtech.worker.aggregation.engine.AggregationEngineFactory; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingDelta; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingDistribution; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingEpsilon; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingL1Sensitivity; import com.google.aggregate.adtech.worker.exceptions.DomainReadException; +import com.google.aggregate.adtech.worker.model.AggregatedFact; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorModule; +import com.google.aggregate.privacy.noise.Annotations.Threshold; +import com.google.aggregate.privacy.noise.NoiseApplier; +import com.google.aggregate.privacy.noise.NoisedAggregationRunner; +import com.google.aggregate.privacy.noise.NoisedAggregationRunnerImpl; +import com.google.aggregate.privacy.noise.model.NoisedAggregatedResultSet; +import 
com.google.aggregate.privacy.noise.proto.Params.NoiseParameters.Distribution; +import com.google.aggregate.privacy.noise.proto.Params.PrivacyParameters; +import com.google.aggregate.privacy.noise.testing.ConstantNoiseModule.ConstantNoiseApplier; +import com.google.aggregate.privacy.noise.testing.FakeNoiseApplierSupplier; import com.google.aggregate.protocol.avro.AvroOutputDomainRecord; import com.google.aggregate.protocol.avro.AvroOutputDomainWriter; import com.google.aggregate.protocol.avro.AvroOutputDomainWriterFactory; @@ -39,6 +60,7 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.inject.AbstractModule; import com.google.inject.Provides; +import com.google.inject.Singleton; import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation; import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation.BlobStoreDataLocation; import com.google.scp.operator.cpio.blobstorageclient.testing.FSBlobStorageClientModule; @@ -50,7 +72,8 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.List; -import java.util.concurrent.ExecutionException; +import java.util.Optional; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.inject.Inject; @@ -69,8 +92,12 @@ public class AvroOutputDomainProcessorTest { @Inject AvroOutputDomainWriterFactory avroOutputDomainWriterFactory; // Under test @Inject AvroOutputDomainProcessor outputDomainProcessor; + @Inject AggregationEngineFactory aggregationEngineFactory; + @Inject FakeNoiseApplierSupplier fakeNoiseApplierSupplier; + @Inject NoisedAggregationRunnerImpl noisedAggregationRunner; private Path outputDomainDirectory; private DataLocation outputDomainLocation; + private AggregationEngine aggregationEngine; @Before public void setUp() throws Exception { @@ -80,6 +107,8 @@ public void setUp() throws Exception { DataLocation.ofBlobStoreDataLocation( BlobStoreDataLocation.create( /* bucket= */ outputDomainDirectory.toAbsolutePath().toString(), /* key= */ "")); + aggregationEngine = aggregationEngineFactory.create(ImmutableSet.of()); + fakeNoiseApplierSupplier.setFakeNoiseApplier(new ConstantNoiseApplier(0)); } @Test @@ -137,12 +166,22 @@ public void readShardedDeduplicate() throws Exception { .containsExactly(BigInteger.valueOf(11), BigInteger.valueOf(22), BigInteger.valueOf(33)); } + @Test + public void skipsZeroByteDomains() throws Exception { + writeOutputDomain(outputDomainDirectory.resolve("domain_1.avro"), Stream.of()); + writeOutputDomain( + outputDomainDirectory.resolve("domain_2.avro"), Stream.of(11, 22, 11, 11, 22, 33)); + + ImmutableSet keys = readOutputDomain(); + + assertThat(keys) + .containsExactly(BigInteger.valueOf(11), BigInteger.valueOf(22), BigInteger.valueOf(33)); + } + @Test public void ioProblem() { // No file written, path pointing to a non-existing file, this should be an IO exception. 
- ExecutionException error = assertThrows(ExecutionException.class, this::readOutputDomain); - - assertThat(error).hasCauseThat().isInstanceOf(DomainReadException.class); + assertThrows(DomainReadException.class, this::readOutputDomain); } @Test @@ -150,10 +189,9 @@ public void readOutputDomain_emptyOutputDomain_throwsException() throws Exceptio writeOutputDomain(outputDomainDirectory.resolve("domain_1.avro"), Stream.of()); writeOutputDomain(outputDomainDirectory.resolve("domain_2.avro"), Stream.of()); - ExecutionException error = assertThrows(ExecutionException.class, this::readOutputDomain); + DomainReadException error = assertThrows(DomainReadException.class, this::readOutputDomain); - assertThat(error).hasCauseThat().isInstanceOf(DomainReadException.class); - assertThat(error.getCause()).hasCauseThat().isInstanceOf(IllegalArgumentException.class); + assertThat(error).hasCauseThat().isInstanceOf(IllegalArgumentException.class); assertThat(error.getCause()) .hasMessageThat() .containsMatch("No output domain provided in the location.*"); @@ -163,17 +201,22 @@ public void readOutputDomain_emptyOutputDomain_throwsException() throws Exceptio public void readOutputDomain_notReadableOutputDomain_throwsException() throws Exception { writeOutputDomainTextFile(outputDomainDirectory.resolve("domain_1.avro"), "bad domain"); - ExecutionException error = assertThrows(ExecutionException.class, this::readOutputDomain); - - assertThat(error).hasCauseThat().isInstanceOf(DomainReadException.class); + assertThrows(DomainReadException.class, this::readOutputDomain); } - private ImmutableSet readOutputDomain() - throws ExecutionException, InterruptedException { - return outputDomainProcessor - .readAndDedupeDomain( - outputDomainLocation, outputDomainProcessor.listShards(outputDomainLocation)) - .get(); + private ImmutableSet readOutputDomain() { + NoisedAggregatedResultSet noisedResultset = + outputDomainProcessor.adjustAggregationWithDomainAndNoiseStreaming( + aggregationEngine, + Optional.of(outputDomainLocation), + outputDomainProcessor.listShards(outputDomainLocation), + noisedAggregationRunner, + Optional.empty(), + false); + + return noisedResultset.noisedResult().noisedAggregatedFacts().stream() + .map(AggregatedFact::getBucket) + .collect(ImmutableSet.toImmutableSet()); } private void writeOutputDomain(Path path, Stream keys) throws IOException { @@ -195,10 +238,22 @@ private static final class TestEnv extends AbstractModule { @Override protected void configure() { install(new FSBlobStorageClientModule()); + install(new PrivacyBudgetKeyGeneratorModule()); + bind(FileSystem.class).toInstance(FileSystems.getDefault()); bind(OutputDomainProcessor.class).to(AvroOutputDomainProcessor.class); bind(Boolean.class).annotatedWith(DomainOptional.class).toInstance(true); bind(Boolean.class).annotatedWith(EnableThresholding.class).toInstance(true); + + bind(FakeNoiseApplierSupplier.class).in(TestScoped.class); + bind(NoisedAggregationRunner.class).to(NoisedAggregationRunnerImpl.class); + bind(boolean.class).annotatedWith(ParallelAggregatedFactNoising.class).toInstance(true); + bind(Distribution.class) + .annotatedWith(NoisingDistribution.class) + .toInstance(Distribution.LAPLACE); + bind(double.class).annotatedWith(NoisingEpsilon.class).toInstance(0.1); + bind(long.class).annotatedWith(NoisingL1Sensitivity.class).toInstance(4L); + bind(double.class).annotatedWith(NoisingDelta.class).toInstance(5.00); } @Provides @@ -213,6 +268,30 @@ ListeningExecutorService provideBlockingThreadPool() { return 
newDirectExecutorService(); } + @Provides + @Threshold + Supplier provideThreshold() { + return () -> 0.0; + } + + @Provides + Supplier provideNoiseApplierSupplier( + FakeNoiseApplierSupplier fakeNoiseApplierSupplier) { + return fakeNoiseApplierSupplier; + } + + @Provides + @Singleton + @CustomForkJoinThreadPool + ListeningExecutorService provideCustomForkJoinThreadPool() { + return newDirectExecutorService(); + } + + @Provides + Supplier providePrivacyParamConfig(PrivacyParametersSupplier supplier) { + return () -> supplier.get().toBuilder().setDelta(1e-5).build(); + } + @Provides Ticker provideTimingTicker() { return Ticker.systemTicker(); diff --git a/javatests/com/google/aggregate/adtech/worker/aggregation/domain/BUILD b/javatests/com/google/aggregate/adtech/worker/aggregation/domain/BUILD index 516a9ce8..029c5fce 100644 --- a/javatests/com/google/aggregate/adtech/worker/aggregation/domain/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/aggregation/domain/BUILD @@ -23,9 +23,16 @@ java_test( "//java/com/google/aggregate/adtech/worker", "//java/com/google/aggregate/adtech/worker/aggregation/domain", "//java/com/google/aggregate/adtech/worker/aggregation/domain:text_domain", + "//java/com/google/aggregate/adtech/worker/aggregation/engine", + "//java/com/google/aggregate/adtech/worker/configs", "//java/com/google/aggregate/adtech/worker/exceptions", "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/adtech/worker/util", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/privacy/noise", + "//java/com/google/aggregate/privacy/noise/model", + "//java/com/google/aggregate/privacy/noise/proto:privacy_parameters_java_proto", + "//java/com/google/aggregate/privacy/noise/testing", "//java/external:acai", "//java/external:clients_blobstorageclient_model", "//java/external:google_truth", @@ -43,8 +50,15 @@ java_test( "//java/com/google/aggregate/adtech/worker", "//java/com/google/aggregate/adtech/worker/aggregation/domain", "//java/com/google/aggregate/adtech/worker/aggregation/domain:avro_domain", + "//java/com/google/aggregate/adtech/worker/aggregation/engine", + "//java/com/google/aggregate/adtech/worker/configs", "//java/com/google/aggregate/adtech/worker/exceptions", "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/privacy/noise", + "//java/com/google/aggregate/privacy/noise/model", + "//java/com/google/aggregate/privacy/noise/proto:privacy_parameters_java_proto", + "//java/com/google/aggregate/privacy/noise/testing", "//java/com/google/aggregate/protocol/avro:avro_output_domain", "//java/external:acai", "//java/external:clients_blobstorageclient_model", diff --git a/javatests/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessorTest.java b/javatests/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessorTest.java index 63c8fe8a..711dd92d 100644 --- a/javatests/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/aggregation/domain/TextOutputDomainProcessorTest.java @@ -26,17 +26,39 @@ import static org.junit.Assert.assertThrows; import com.google.acai.Acai; +import com.google.acai.TestScoped; import com.google.aggregate.adtech.worker.Annotations.BlockingThreadPool; +import 
com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; import com.google.aggregate.adtech.worker.Annotations.DomainOptional; import com.google.aggregate.adtech.worker.Annotations.EnableThresholding; import com.google.aggregate.adtech.worker.Annotations.NonBlockingThreadPool; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; +import com.google.aggregate.adtech.worker.aggregation.engine.AggregationEngine; +import com.google.aggregate.adtech.worker.aggregation.engine.AggregationEngineFactory; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingDelta; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingDistribution; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingEpsilon; +import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingL1Sensitivity; import com.google.aggregate.adtech.worker.exceptions.DomainReadException; +import com.google.aggregate.adtech.worker.model.AggregatedFact; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorModule; +import com.google.aggregate.privacy.noise.Annotations.Threshold; +import com.google.aggregate.privacy.noise.NoiseApplier; +import com.google.aggregate.privacy.noise.NoisedAggregationRunner; +import com.google.aggregate.privacy.noise.NoisedAggregationRunnerImpl; +import com.google.aggregate.privacy.noise.model.NoisedAggregatedResultSet; +import com.google.aggregate.privacy.noise.proto.Params.NoiseParameters.Distribution; +import com.google.aggregate.privacy.noise.proto.Params.PrivacyParameters; +import com.google.aggregate.privacy.noise.testing.ConstantNoiseModule.ConstantNoiseApplier; +import com.google.aggregate.privacy.noise.testing.FakeNoiseApplierSupplier; import com.google.common.base.Ticker; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.inject.AbstractModule; import com.google.inject.Provides; +import com.google.inject.Singleton; import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation; import com.google.scp.operator.cpio.blobstorageclient.model.DataLocation.BlobStoreDataLocation; import com.google.scp.operator.cpio.blobstorageclient.testing.FSBlobStorageClientModule; @@ -48,7 +70,8 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.List; -import java.util.concurrent.ExecutionException; +import java.util.Optional; +import java.util.function.Supplier; import java.util.stream.Collectors; import javax.inject.Inject; import org.junit.Before; @@ -65,6 +88,10 @@ public class TextOutputDomainProcessorTest { @Rule public final Acai acai = new Acai(TestEnv.class); // Under test @Inject TextOutputDomainProcessor outputDomainProcessor; + @Inject AggregationEngineFactory aggregationEngineFactory; + @Inject FakeNoiseApplierSupplier fakeNoiseApplierSupplier; + @Inject NoisedAggregationRunnerImpl noisedAggregationRunner; + private AggregationEngine aggregationEngine; private Path outputDomainDirectory; private DataLocation outputDomainLocation; @@ -76,6 +103,8 @@ public void setUp() throws Exception { DataLocation.ofBlobStoreDataLocation( BlobStoreDataLocation.create( /* bucket= */ outputDomainDirectory.toAbsolutePath().toString(), /* key= */ "")); + aggregationEngine = 
aggregationEngineFactory.create(ImmutableSet.of()); + fakeNoiseApplierSupplier.setFakeNoiseApplier(new ConstantNoiseApplier(0)); } @Test @@ -149,12 +178,22 @@ public void deduplicate() throws Exception { .containsExactly(createBucketFromInt(11), createBucketFromInt(22), createBucketFromInt(33)); } + @Test + public void skipZeroByteDomains() throws Exception { + writeOutputDomain(outputDomainDirectory.resolve("domain_1.txt")); + writeOutputDomain( + outputDomainDirectory.resolve("domain_2.txt"), "11", "22", "11", "11", "22", "33"); + + ImmutableSet keys = readOutputDomain(); + + assertThat(keys) + .containsExactly(createBucketFromInt(11), createBucketFromInt(22), createBucketFromInt(33)); + } + @Test public void ioProblem() throws Exception { // No file written, path pointing to a non-existing file, this should be an IO exception. - ExecutionException error = assertThrows(ExecutionException.class, () -> readOutputDomain()); - - assertThat(error).hasCauseThat().isInstanceOf(DomainReadException.class); + assertThrows(DomainReadException.class, this::readOutputDomain); } @Test @@ -162,17 +201,22 @@ public void readDomain_notReadableTextFile() throws Exception { String badString = "abcdabcdabcdabcdabcdabcdabcdabcd"; writeOutputDomain(outputDomainDirectory.resolve("domain_1.txt"), badString); - ExecutionException error = assertThrows(ExecutionException.class, () -> readOutputDomain()); - - assertThat(error).hasCauseThat().isInstanceOf(DomainReadException.class); + assertThrows(DomainReadException.class, this::readOutputDomain); } - private ImmutableSet readOutputDomain() - throws ExecutionException, InterruptedException { - return outputDomainProcessor - .readAndDedupeDomain( - outputDomainLocation, outputDomainProcessor.listShards(outputDomainLocation)) - .get(); + private ImmutableSet readOutputDomain() { + NoisedAggregatedResultSet noisedResultset = + outputDomainProcessor.adjustAggregationWithDomainAndNoiseStreaming( + aggregationEngine, + Optional.of(outputDomainLocation), + outputDomainProcessor.listShards(outputDomainLocation), + noisedAggregationRunner, + Optional.empty(), + false); + + return noisedResultset.noisedResult().noisedAggregatedFacts().stream() + .map(AggregatedFact::getBucket) + .collect(ImmutableSet.toImmutableSet()); } private void writeOutputDomain(Path path, String... 
keys) throws IOException { @@ -184,10 +228,46 @@ private static final class TestEnv extends AbstractModule { @Override protected void configure() { install(new FSBlobStorageClientModule()); + install(new PrivacyBudgetKeyGeneratorModule()); + bind(FileSystem.class).toInstance(FileSystems.getDefault()); bind(OutputDomainProcessor.class).to(TextOutputDomainProcessor.class); bind(Boolean.class).annotatedWith(DomainOptional.class).toInstance(true); bind(Boolean.class).annotatedWith(EnableThresholding.class).toInstance(true); + + bind(FakeNoiseApplierSupplier.class).in(TestScoped.class); + bind(NoisedAggregationRunner.class).to(NoisedAggregationRunnerImpl.class); + bind(boolean.class).annotatedWith(ParallelAggregatedFactNoising.class).toInstance(true); + bind(Distribution.class) + .annotatedWith(NoisingDistribution.class) + .toInstance(Distribution.LAPLACE); + bind(double.class).annotatedWith(NoisingEpsilon.class).toInstance(0.1); + bind(long.class).annotatedWith(NoisingL1Sensitivity.class).toInstance(4L); + bind(double.class).annotatedWith(NoisingDelta.class).toInstance(5.00); + } + + @Provides + @Threshold + Supplier provideThreshold() { + return () -> 0.0; + } + + @Provides + Supplier provideNoiseApplierSupplier( + FakeNoiseApplierSupplier fakeNoiseApplierSupplier) { + return fakeNoiseApplierSupplier; + } + + @Provides + @Singleton + @CustomForkJoinThreadPool + ListeningExecutorService provideCustomForkJoinThreadPool() { + return newDirectExecutorService(); + } + + @Provides + Supplier providePrivacyParamConfig(PrivacyParametersSupplier supplier) { + return () -> supplier.get().toBuilder().setDelta(1e-5).build(); } @Provides diff --git a/javatests/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineTest.java b/javatests/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineTest.java index e7f7f3be..86ea1db0 100644 --- a/javatests/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineTest.java +++ b/javatests/com/google/aggregate/adtech/worker/aggregation/engine/AggregationEngineTest.java @@ -18,7 +18,6 @@ import static com.google.aggregate.adtech.worker.util.NumericConversions.createBucketFromInt; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertThrows; import com.google.acai.Acai; import com.google.aggregate.adtech.worker.model.AggregatedFact; @@ -32,6 +31,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.common.primitives.UnsignedLong; import com.google.inject.AbstractModule; import java.math.BigInteger; import java.time.Instant; @@ -60,7 +60,7 @@ public void setUp() { public void oneReportOneFact() { Report report = FakeReportGenerator.generateWithParam( - /* bucket= */ 1, /* reportVersion */ SharedInfo.LATEST_VERSION); + /* bucket= */ 1, /* reportVersion */ SharedInfo.LATEST_VERSION, "https://foo.com"); engine.accept(report); ImmutableMap aggregation = engine.makeAggregation(); @@ -218,16 +218,20 @@ public void makeAggregation_multipleReportsWithSomeNullReports() { public void privacyBudgetUnits() { Report report = FakeReportGenerator.generateWithParam( - /* bucket= */ 1, /* reportVersion */ SharedInfo.VERSION_0_1); + /* bucket= */ 1, /* reportVersion */ SharedInfo.VERSION_0_1, "https://origin1.foo.com"); Report reportDuplicate = FakeReportGenerator.generateWithParam( - /* bucket= */ 1, /* reportVersion */ SharedInfo.VERSION_0_1); + /* bucket= */ 1, /* reportVersion */ 
SharedInfo.VERSION_0_1, "https://origin1.foo.com"); Report secondReport = FakeReportGenerator.generateWithParam( - /* bucket= */ 4000, /* reportVersion */ SharedInfo.VERSION_0_1); + /* bucket= */ 4000, /* reportVersion */ + SharedInfo.VERSION_0_1, + "https://origin2.foo.com"); Report thirdReport = FakeReportGenerator.generateWithParam( - /* bucket= */ 100, /* reportVersion */ SharedInfo.VERSION_0_1); + /* bucket= */ 100, /* reportVersion */ + SharedInfo.VERSION_0_1, + "https://origin3.foo.com"); engine.accept(report); engine.accept(reportDuplicate); @@ -240,11 +244,17 @@ public void privacyBudgetUnits() { assertThat(privacyBudgetUnits) .containsExactly( budgetUnit( - "feb6671c7739adeb5140f2af92bb345545e8f16e1761292ac871eaae7904393f", hourZero), + "686f11a611747492b911f8de3dc514c502246e78cc9d7a82e99d0b5af1cc2594", + hourZero, + /* reportingOrigin= */ "https://origin1.foo.com"), budgetUnit( - "089ddac6c5bd89cc488d35924ce6416520f63d9a9f3ecfe53e97fd570d2c4f62", hourZero), + "02e1093c37d58ed073a379f191289a42db00eb7901c0fb9598ed2e74e6782570", + hourZero, + /* reportingOrigin= */ "https://origin3.foo.com"), budgetUnit( - "43a9149aa0326808345d5f9b780c48f823d7aba37d72bf4decbba387bf3a283d", hourOne)); + "2b873089e720f2fc134b74cdef5f2edbb0fc4307a4a919036cc0ecd66d53f208", + hourOne, + /* reportingOrigin= */ "https://origin2.foo.com")); } @Test @@ -270,7 +280,9 @@ public void makeAggregation_withFilteringId_forReportsWithoutLabelIds_aggregates FakeReportGenerator.generateWithFactList( ImmutableList.of(factWithoutLabel1, factWithoutLabel2), SharedInfo.VERSION_0_1); - AggregationEngine engine = aggregationEngineFactory.create(/** filteringIds = */ ImmutableSet.of(7)); + AggregationEngine engine = + aggregationEngineFactory.create( + /* filteringIds= */ ImmutableSet.of(UnsignedLong.valueOf(7))); engine.accept(reportWithoutLabels); ImmutableMap aggregation = engine.makeAggregation(); @@ -279,13 +291,19 @@ public void makeAggregation_withFilteringId_forReportsWithoutLabelIds_aggregates @Test public void makeAggregation_withoutFilteringId_forReportsWithLabelIds_aggregatesForLabelId0() { - Fact factWithLabel1 = FakeFactGenerator.generate(/* bucket= */ 1, /* value= */ 2, /* id= */ 0); - Fact factWithLabel2 = FakeFactGenerator.generate(/* bucket= */ 1, /* value= */ 3, /* id= */ 0); + Fact factWithLabel1 = + FakeFactGenerator.generate(/* bucket= */ 1, /* value= */ 2, /* id= */ UnsignedLong.ZERO); + Fact factWithLabel2 = + FakeFactGenerator.generate(/* bucket= */ 1, /* value= */ 3, /* id= */ UnsignedLong.ZERO); Report reportWithLabels1 = FakeReportGenerator.generateWithFactList( ImmutableList.of(factWithLabel1, factWithLabel2), /* version= */ "1.0"); - Fact factWithLabel3 = FakeFactGenerator.generate(/* bucket= */ 1, /* value= */ 2, /* id= */ 3); - Fact factWithLabel4 = FakeFactGenerator.generate(/* bucket= */ 1, /* value= */ 3, /* id= */ 2); + Fact factWithLabel3 = + FakeFactGenerator.generate( + /* bucket= */ 1, /* value= */ 2, /* id= */ UnsignedLong.valueOf(3)); + Fact factWithLabel4 = + FakeFactGenerator.generate( + /* bucket= */ 1, /* value= */ 3, /* id= */ UnsignedLong.valueOf(2)); Report reportWithLabels2 = FakeReportGenerator.generateWithFactList( ImmutableList.of(factWithLabel3, factWithLabel4), /* version= */ "1.1"); @@ -305,10 +323,10 @@ public void makeAggregation_withoutFilteringId_forReportsWithLabelIds_aggregates @Test public void makeAggregation_withFilteringIds_aggregatesForMatchingLabelId_PBKsGeneratedForEveryFilteringId() { - int matchingFilteringId1 = 12345; - int matchingFilteringId2 = 123; - 
int nonmatchingFilteringId1 = 0; - int nonmatchingFilteringId2 = 999; + UnsignedLong matchingFilteringId1 = UnsignedLong.valueOf(12345); + UnsignedLong matchingFilteringId2 = UnsignedLong.valueOf(123); + UnsignedLong nonmatchingFilteringId1 = UnsignedLong.ZERO; + UnsignedLong nonmatchingFilteringId2 = UnsignedLong.valueOf(999); Fact factWithMatchingLabel1 = FakeFactGenerator.generate(/* bucket= */ 1, /* value= */ 2, /* id= */ matchingFilteringId1); Fact factWithMatchingLabel2 = @@ -339,7 +357,8 @@ public void makeAggregation_withoutFilteringId_forReportsWithLabelIds_aggregates AggregationEngine engine = aggregationEngineFactory.create( - /** filteringIds = */ ImmutableSet.of(matchingFilteringId1, matchingFilteringId2)); + /** filteringIds = */ + ImmutableSet.of(matchingFilteringId1, matchingFilteringId2)); engine.accept(reportsWithMatchingLabelIds); engine.accept(reportsWithSomeMatchingLabelIds); engine.accept(reportsWithoutMatchingLabelIds); @@ -357,8 +376,9 @@ public void makeAggregation_withoutFilteringId_forReportsWithLabelIds_aggregates assertThat(privacyBudgetUnits).hasSize(8); } - private static PrivacyBudgetUnit budgetUnit(String key, Instant scheduledTime) { - return PrivacyBudgetUnit.create(key, scheduledTime); + private static PrivacyBudgetUnit budgetUnit( + String key, Instant scheduledTime, String reportingOrigin) { + return PrivacyBudgetUnit.create(key, scheduledTime, reportingOrigin); } static final class TestEnv extends AbstractModule { diff --git a/javatests/com/google/aggregate/adtech/worker/aggregation/engine/BUILD b/javatests/com/google/aggregate/adtech/worker/aggregation/engine/BUILD index 24e3ff75..90ca45d8 100644 --- a/javatests/com/google/aggregate/adtech/worker/aggregation/engine/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/aggregation/engine/BUILD @@ -33,14 +33,3 @@ java_test( "//java/external:javax_inject", ], ) - -java_test( - name = "SingleFactAggregationTest", - srcs = ["SingleFactAggregationTest.java"], - deps = [ - "//java/com/google/aggregate/adtech/worker/aggregation/engine:single_fact_aggregation", - "//java/com/google/aggregate/adtech/worker/model", - "//java/com/google/aggregate/adtech/worker/testing:fake_report_generator", - "//java/external:google_truth", - ], -) diff --git a/javatests/com/google/aggregate/adtech/worker/aggregation/engine/SingleFactAggregationTest.java b/javatests/com/google/aggregate/adtech/worker/aggregation/engine/SingleFactAggregationTest.java deleted file mode 100644 index 83dd76f3..00000000 --- a/javatests/com/google/aggregate/adtech/worker/aggregation/engine/SingleFactAggregationTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.aggregate.adtech.worker.aggregation.engine; - -import static com.google.common.truth.Truth.assertThat; - -import com.google.aggregate.adtech.worker.model.Fact; -import com.google.aggregate.adtech.worker.testing.FakeReportGenerator.FakeFactGenerator; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - -@RunWith(JUnit4.class) -public class SingleFactAggregationTest { - - @Test - public void aggregateSingle() { - SingleFactAggregation aggr = new SingleFactAggregation(); - Fact fact = makeFact(/* value= */ 5); - - aggr.accept(fact); - - assertThat(aggr.getSum()).isEqualTo(5L); - } - - @Test - public void aggregateNone() { - SingleFactAggregation aggr = new SingleFactAggregation(); - - // Nothing sent for aggregation, testing an edge case. - - assertThat(aggr.getSum()).isEqualTo(0L); - } - - @Test - public void aggregateMultiple() { - SingleFactAggregation aggr = new SingleFactAggregation(); - Fact firstFact = makeFact(/* value= */ 5); - Fact secondFact = makeFact(/* value= */ 7); - Fact thirdFact = makeFact(/* value= */ 13); - - aggr.accept(firstFact); - aggr.accept(secondFact); - aggr.accept(thirdFact); - - assertThat(aggr.getSum()).isEqualTo(25L); - } - - private static Fact makeFact(int value) { - // ID is hardcoded to foo. This is a single fact aggregator, it doesn't use ID for anything. - return FakeFactGenerator.generate(/* bucket= */ 1, value); - } -} diff --git a/javatests/com/google/aggregate/adtech/worker/decryption/DeserializingReportDecrypterTest.java b/javatests/com/google/aggregate/adtech/worker/decryption/DeserializingReportDecrypterTest.java index b746519f..6cb9172f 100644 --- a/javatests/com/google/aggregate/adtech/worker/decryption/DeserializingReportDecrypterTest.java +++ b/javatests/com/google/aggregate/adtech/worker/decryption/DeserializingReportDecrypterTest.java @@ -78,7 +78,9 @@ public class DeserializingReportDecrypterTest { @Before public void setUp() throws Exception { - report = FakeReportGenerator.generateWithParam(1, /* reportVersion */ LATEST_VERSION); + report = + FakeReportGenerator.generateWithParam( + 1, /* reportVersion */ LATEST_VERSION, "https/foo.com"); sharedInfo = sharedInfoSerdes.reverse().convert(Optional.of(report.sharedInfo())); encryptReport(); } @@ -109,9 +111,33 @@ public void testExceptionInSharedInfoDeserialization() throws Exception { .contains("Couldn't deserialize shared_info"); } + /** Test error handling for modified sharedInfo after report encryption */ + @Test + public void testExceptionInDeserializationWithModifiedSharedInfo() throws Exception { + encryptReport(); + + // Modify the shared info after the report has been encrypted. 
+ SharedInfo originalSharedInfo = sharedInfoSerdes.convert(encryptedReport.sharedInfo()).get(); + SharedInfo modifiedSharedInfo = + originalSharedInfo.toBuilder().setReportingOrigin("newReportingOrigin.com").build(); + + encryptedReport = + EncryptedReport.builder() + .setPayload(encryptedReport.payload()) + .setKeyId(DECRYPTION_KEY_ID) + .setSharedInfo(sharedInfoSerdes.reverse().convert(Optional.of(modifiedSharedInfo))) + .build(); + + DecryptionException decryptionException = + assertThrows( + DecryptionException.class, + () -> deserializingReportDecrypter.decryptSingleReport(encryptedReport)); + assertThat(decryptionException).hasCauseThat().hasMessageThat().contains("decryption failed"); + } + /** Test error handling for failed payload deserialization */ @Test - public void testExceptionInPayloadDeserialization() throws Exception { + public void testExceptionInPayloadDeserialization() { // No setup DecryptionException decryptionException = diff --git a/javatests/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImplTest.java b/javatests/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImplTest.java index a6345261..d0cba832 100644 --- a/javatests/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImplTest.java +++ b/javatests/com/google/aggregate/adtech/worker/encryption/RecordEncrypterImplTest.java @@ -16,10 +16,8 @@ package com.google.aggregate.adtech.worker.encryption; -import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.LATEST_VERSION; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertTrue; import com.google.acai.Acai; import com.google.aggregate.adtech.worker.decryption.DecryptionCipher.PayloadDecryptionException; @@ -27,18 +25,8 @@ import com.google.aggregate.adtech.worker.encryption.RecordEncrypter.EncryptionException; import com.google.aggregate.adtech.worker.encryption.hybrid.HybridCipherModule; import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKeyService; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.ReEncryptionKeyService; import com.google.aggregate.adtech.worker.encryption.hybrid.key.testing.FakeEncryptionKeyService; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.testing.FakeReEncryptionKeyService; import com.google.aggregate.adtech.worker.model.EncryptedReport; -import com.google.aggregate.adtech.worker.model.Fact; -import com.google.aggregate.adtech.worker.model.Payload; -import com.google.aggregate.adtech.worker.model.Report; -import com.google.aggregate.adtech.worker.model.SharedInfo; -import com.google.aggregate.adtech.worker.model.serdes.PayloadSerdes; -import com.google.aggregate.adtech.worker.model.serdes.SharedInfoSerdes; -import com.google.aggregate.adtech.worker.model.serdes.cbor.CborPayloadSerdes; -import com.google.common.collect.ImmutableList; import com.google.common.io.ByteSource; import com.google.crypto.tink.HybridDecrypt; import com.google.crypto.tink.KeysetHandle; @@ -49,12 +37,7 @@ import com.google.inject.Provides; import com.google.inject.Singleton; import com.google.scp.operator.shared.testing.StringToByteSourceConverter; -import java.io.IOException; -import java.math.BigInteger; import java.security.GeneralSecurityException; -import java.time.Instant; -import java.util.Optional; -import java.util.UUID; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -65,14 +48,8 @@ public class 
RecordEncrypterImplTest { @Rule public final Acai acai = new Acai(TestEnv.class); @Inject RecordEncrypter recordEncrypter; @Inject KeysetHandle keysetHandle; - @Inject PayloadSerdes payloadSerdes; - @Inject SharedInfoSerdes sharedInfoSerdes; private StringToByteSourceConverter converter; - private static final String DESTINATION = "dest.com"; - private static final UUID FIXED_UUID = UUID.randomUUID(); - private static final String REPORTING_ORIGIN = "foo.com"; - @Before public void setUp() throws GeneralSecurityException { converter = new StringToByteSourceConverter(); @@ -92,39 +69,6 @@ public void encryptSingleReport() assertThat(encryptedReport.sharedInfo()).isEqualTo(sharedInfo); } - @Test - public void encryptSerializedReport_succeeds() - throws EncryptionException, - PayloadDecryptionException, - GeneralSecurityException, - IOException { - ImmutableList factList = - ImmutableList.of(Fact.builder().setBucket(BigInteger.valueOf(123)).setValue(5).build()); - Payload payload = Payload.builder().addAllFact(factList).build(); - SharedInfo sharedInfo = - SharedInfo.builder() - .setSourceRegistrationTime(Instant.now()) - .setDestination(DESTINATION) - .setScheduledReportTime(Instant.now()) - .setReportId(FIXED_UUID.toString()) - .setVersion(LATEST_VERSION) - .setApi(ATTRIBUTION_REPORTING_API) - .setReportingOrigin(REPORTING_ORIGIN) - .build(); - Report deserializedReport = - Report.builder().setPayload(payload).setSharedInfo(sharedInfo).build(); - ByteSource serializedPayload = payloadSerdes.reverse().convert(Optional.of(payload)); - String serializedSharedInfo = sharedInfoSerdes.reverse().convert(Optional.of(sharedInfo)); - - EncryptedReport generatedReport = - recordEncrypter.encryptReport(deserializedReport, "fakeuri.com"); - - assertThat(serializedPayload).isNotNull(); - assertTrue(decryptReport(generatedReport).contentEquals(serializedPayload)); - assertThat(generatedReport.keyId()).isEqualTo(ENCRYPTION_KEY_ID); - assertTrue(generatedReport.sharedInfo().contentEquals(serializedSharedInfo)); - } - private ByteSource decryptReport(EncryptedReport encryptedReport) throws PayloadDecryptionException, GeneralSecurityException { return HybridDecryptionCipher.of(keysetHandle.getPrimitive(HybridDecrypt.class)) @@ -138,8 +82,6 @@ protected void configure() { install(new HybridCipherModule()); bind(EncryptionKeyService.class).to(FakeEncryptionKeyService.class); bind(RecordEncrypter.class).to(RecordEncrypterImpl.class); - bind(ReEncryptionKeyService.class).to(FakeReEncryptionKeyService.class); - bind(PayloadSerdes.class).to(CborPayloadSerdes.class); } @Provides diff --git a/javatests/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD b/javatests/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD index b738ac66..179e4848 100644 --- a/javatests/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/BUILD @@ -35,24 +35,3 @@ java_test( "//protocol/proto:encryption_key_config_java_proto", ], ) - -java_test( - name = "CloudReEncryptionKeyServiceTest", - srcs = ["CloudReEncryptionKeyServiceTest.java"], - deps = [ - "//java/com/google/aggregate/adtech/worker/encryption/hybrid/key", - "//java/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud", - "//java/com/google/aggregate/adtech/worker/encryption/publickeyuri:encryption_key_config", - "//java/external:acai", - "//java/external:apache_httpclient", - "//java/external:apache_httpcore", - 
"//java/external:api_shared_util", - "//java/external:aws_dynamodb", - "//java/external:google_truth", - "//java/external:guava", - "//java/external:guice", - "//java/external:mockito", - "//java/external:tink", - "//protocol/proto:encryption_key_config_java_proto", - ], -) diff --git a/javatests/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/CloudReEncryptionKeyServiceTest.java b/javatests/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/CloudReEncryptionKeyServiceTest.java deleted file mode 100644 index 9721f327..00000000 --- a/javatests/com/google/aggregate/adtech/worker/encryption/hybrid/key/cloud/CloudReEncryptionKeyServiceTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.encryption.hybrid.key.cloud; - -import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.when; - -import com.google.aggregate.adtech.worker.encryption.hybrid.key.EncryptionKey; -import com.google.aggregate.adtech.worker.encryption.hybrid.key.ReEncryptionKeyService.ReencryptionKeyFetchException; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.scp.shared.api.util.HttpClientResponse; -import com.google.scp.shared.api.util.HttpClientWrapper; -import org.apache.http.client.methods.HttpRequestBase; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnit; -import org.mockito.junit.MockitoRule; - -@RunWith(JUnit4.class) -public class CloudReEncryptionKeyServiceTest { - - private static final String KEY_ID_1 = "00000000-0000-0000-0000-000000000000"; - private static final String KEY_ID_2 = "00000000-0000-0000-0000-111111111111"; - private static final String KEY_ID_3 = "00000000-0000-0000-0000-222222222222"; - private static final String KEY_ID_4 = "00000000-0000-0000-0000-333333333333"; - private static final String KEY_ID_5 = "00000000-0000-0000-0000-444444444444"; - private static final ImmutableList keySet = - ImmutableList.of(KEY_ID_1, KEY_ID_2, KEY_ID_3, KEY_ID_4, KEY_ID_5); - - @Rule public final MockitoRule mockito = MockitoJUnit.rule(); - - @Mock private HttpClientWrapper httpClient; - CloudReEncryptionKeyService cloudReEncryptionKeyService; - String keyVendingResponse; - String publicKey; - String keyVendingServiceUri = - "https://publickeyservice.aggregationhelper.com/.well-known/aggregation-service/v1/public-keys"; - - @Before - public void setup() { - cloudReEncryptionKeyService = new CloudReEncryptionKeyService(httpClient); - publicKey = - "EkQKBAgCEAMSOhI4CjB0eXBlLmdvb2dsZWFwaXMuY29tL2dvb2dsZS5jcnlwdG8udGluay5BZXNH" - + "Y21LZXkSAhAQGAEYARohAJryfZtZSsWNdh86h3sOuxRurI4q/Qg2ECaABVGfgOu6IiEAjAYDniN7v5mb" - + 
"bMhPbXVSkPhEZFx84sB7MKB/AiN6KBI="; - keyVendingResponse = - String.format( - "{\"keys\":[{\"id\":\"%s\",\"key\":\"%s\"}," - + "{\"id\":\"%s\",\"key\":\"%s\"}," - + "{\"id\":\"%s\",\"key\":\"%s\"}," - + "{\"id\":\"%s\",\"key\":\"%s\"}," - + "{\"id\":\"%s\",\"key\":\"%s\"}]}", - KEY_ID_1, publicKey, KEY_ID_2, publicKey, KEY_ID_3, publicKey, KEY_ID_4, publicKey, - KEY_ID_5, publicKey); - } - - @Test - public void getCloudProviderKey_succeeds() throws Exception { - HttpClientResponse response = buildFakeResponse(200, keyVendingResponse); - when(httpClient.execute(any(HttpRequestBase.class))).thenReturn(response); - - EncryptionKey key = cloudReEncryptionKeyService.getEncryptionPublicKey(keyVendingServiceUri); - - assertThat(keySet).contains(key.id()); - } - - @Test - public void getCloudProviderKey_fails() throws Exception { - HttpClientResponse response = buildFakeResponse(500, keyVendingResponse); - when(httpClient.execute(any(HttpRequestBase.class))).thenReturn(response); - - assertThrows( - ReencryptionKeyFetchException.class, - () -> cloudReEncryptionKeyService.getEncryptionPublicKey(keyVendingServiceUri)); - } - - private HttpClientResponse buildFakeResponse(int statusCode, String body) { - HttpClientResponse response = HttpClientResponse.create(statusCode, body, ImmutableMap.of()); - return response; - } -} diff --git a/javatests/com/google/aggregate/adtech/worker/encryption/publickeyuri/BUILD b/javatests/com/google/aggregate/adtech/worker/encryption/publickeyuri/BUILD deleted file mode 100644 index 69d96c14..00000000 --- a/javatests/com/google/aggregate/adtech/worker/encryption/publickeyuri/BUILD +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@rules_java//java:defs.bzl", "java_test") - -package(default_visibility = ["//visibility:public"]) - -java_test( - name = "EncryptionKeyConfigFactoryTest", - srcs = ["EncryptionKeyConfigFactoryTest.java"], - deps = [ - "//java/com/google/aggregate/adtech/worker/encryption/publickeyuri:encryption_key_config", - "//java/external:google_truth", - ], -) diff --git a/javatests/com/google/aggregate/adtech/worker/encryption/publickeyuri/EncryptionKeyConfigFactoryTest.java b/javatests/com/google/aggregate/adtech/worker/encryption/publickeyuri/EncryptionKeyConfigFactoryTest.java deleted file mode 100644 index ebc68a6e..00000000 --- a/javatests/com/google/aggregate/adtech/worker/encryption/publickeyuri/EncryptionKeyConfigFactoryTest.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.encryption.publickeyuri; - -import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertThrows; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - -@RunWith(JUnit4.class) -public class EncryptionKeyConfigFactoryTest { - @Test - public void getValidEncryptionKeyConfig_succeeds() { - CloudEncryptionKeyConfig gcpCloudEncryptionKeyConfig = - CloudEncryptionKeyConfig.builder() - .setKeyVendingServiceUri( - "https://publickeyservice-a.postsb-a.test.aggregationhelper.com/.well-known/aggregation-service/v1/public-keys") - .build(); - CloudEncryptionKeyConfig cloudEncryptionKeyConfig = - EncryptionKeyConfigFactory.getCloudEncryptionKeyConfig("GCP"); - assertThat(cloudEncryptionKeyConfig.keyVendingServiceUri()) - .isEqualTo(gcpCloudEncryptionKeyConfig.keyVendingServiceUri()); - } - - @Test - public void getInvalidEncryptionKeyConfig_fails() { - assertThrows( - IllegalArgumentException.class, - () -> EncryptionKeyConfigFactory.getCloudEncryptionKeyConfig("invalid-cloud")); - } -} diff --git a/javatests/com/google/aggregate/adtech/worker/model/BUILD b/javatests/com/google/aggregate/adtech/worker/model/BUILD index ff302110..b6512857 100644 --- a/javatests/com/google/aggregate/adtech/worker/model/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/model/BUILD @@ -32,5 +32,16 @@ java_test( srcs = ["SharedInfoTest.java"], deps = [ "//java/com/google/aggregate/adtech/worker/model", + "//java/external:google_truth", + "//java/external:google_truth8", + ], +) + +java_test( + name = "VersionTest", + srcs = ["VersionTest.java"], + deps = [ + "//java/com/google/aggregate/adtech/worker/model", + "//java/external:google_truth", ], ) diff --git a/javatests/com/google/aggregate/adtech/worker/model/DecryptionValidationResultTest.java b/javatests/com/google/aggregate/adtech/worker/model/DecryptionValidationResultTest.java index 6285ea57..956f7a31 100644 --- a/javatests/com/google/aggregate/adtech/worker/model/DecryptionValidationResultTest.java +++ b/javatests/com/google/aggregate/adtech/worker/model/DecryptionValidationResultTest.java @@ -33,7 +33,8 @@ public void testValidationCanSetReport() { DecryptionValidationResult.Builder resultBuilder = DecryptionValidationResult.builder() .setReport( - FakeReportGenerator.generateWithParam(1, /* reportVersion */ LATEST_VERSION)); + FakeReportGenerator.generateWithParam( + 1, /* reportVersion */ LATEST_VERSION, "https://foo.com")); resultBuilder.build(); @@ -46,10 +47,7 @@ public void testValidationCanSetErrors() { DecryptionValidationResult.Builder resultBuilder = DecryptionValidationResult.builder() .addErrorMessage( - ErrorMessage.builder() - .setCategory(ErrorCounter.DECRYPTION_ERROR) - .setDetailedErrorMessage("") - .build()); + ErrorMessage.builder().setCategory(ErrorCounter.DECRYPTION_ERROR).build()); resultBuilder.build(); @@ -63,12 +61,11 @@ public void testValidationCanSetErrors() { public void testValidationErrorThrownIfBothSet() { DecryptionValidationResult.Builder resultBuilder = DecryptionValidationResult.builder() - .setReport(FakeReportGenerator.generateWithParam(1, /* reportVersion */ LATEST_VERSION)) + .setReport( + FakeReportGenerator.generateWithParam( + 1, /* reportVersion */ LATEST_VERSION, "https://foo.com")) .addErrorMessage( - ErrorMessage.builder() - .setCategory(ErrorCounter.DECRYPTION_ERROR) - 
.setDetailedErrorMessage("") - .build()); + ErrorMessage.builder().setCategory(ErrorCounter.DECRYPTION_ERROR).build()); // An exception should be thrown as the DecryptionValidationResult is not valid, it contains // both a record and error messages. diff --git a/javatests/com/google/aggregate/adtech/worker/model/SharedInfoTest.java b/javatests/com/google/aggregate/adtech/worker/model/SharedInfoTest.java index 4d0580ab..1e88c15e 100644 --- a/javatests/com/google/aggregate/adtech/worker/model/SharedInfoTest.java +++ b/javatests/com/google/aggregate/adtech/worker/model/SharedInfoTest.java @@ -17,9 +17,13 @@ package com.google.aggregate.adtech.worker.model; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; +import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_1_0; +import static com.google.common.truth.Truth.assertThat; +import static com.google.common.truth.Truth8.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -34,8 +38,6 @@ @RunWith(JUnit4.class) public class SharedInfoTest { - private static final String PRIVACY_BUDGET_KEY_1 = "test_privacy_budget_key"; - // FIXED_TIME = Jan 01 2021 00:00:00 GMT+0000 private static final Instant FIXED_TIME = Instant.ofEpochSecond(1609459200); @@ -154,6 +156,37 @@ public void sharedInfo_withAttributionReportingAPIType() { assertEquals(sharedInfoAttributionReporting.api().get(), ATTRIBUTION_REPORTING_API); } + /** + * Verifies that both V0.1 and V1.0 of the Attribution Reporting Debug API work with SharedInfo. + */ + @Test + public void sharedInfo_withAttributionReportingDebugAPIType() { + SharedInfo attributionReportingDebug1 = + SharedInfo.builder() + .setVersion(VERSION_0_1) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setScheduledReportTime(FIXED_TIME) + .setReportingOrigin(REPORTING_ORIGIN) + .setDestination(DESTINATION) + .setSourceRegistrationTime(FIXED_TIME) + .setReportId(RANDOM_UUID) + .build(); + + SharedInfo attributionReportingDebug2 = + SharedInfo.builder() + .setVersion(VERSION_1_0) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setScheduledReportTime(FIXED_TIME) + .setReportingOrigin(REPORTING_ORIGIN) + .setDestination(DESTINATION) + .setSourceRegistrationTime(FIXED_TIME) + .setReportId(RANDOM_UUID) + .build(); + + assertThat(attributionReportingDebug1.api()).hasValue(ATTRIBUTION_REPORTING_DEBUG_API); + assertThat(attributionReportingDebug2.api()).hasValue(ATTRIBUTION_REPORTING_DEBUG_API); + } + @Test public void sharedInfo_withProtectedAudienceAPIType() { SharedInfo.Builder sharedInfoSharedStorageBuilder = diff --git a/javatests/com/google/aggregate/adtech/worker/model/VersionTest.java b/javatests/com/google/aggregate/adtech/worker/model/VersionTest.java new file mode 100644 index 00000000..3ae33344 --- /dev/null +++ b/javatests/com/google/aggregate/adtech/worker/model/VersionTest.java @@ -0,0 +1,67 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.aggregate.adtech.worker.model; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertThrows; + +import java.util.function.Predicate; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +@RunWith(JUnit4.class) +public class VersionTest { + + @Test + public void getBetweenVersionPredicate() { + Version lowerVersion = Version.create(/* major= */ 0, /* minor= */ 1); + Version higherVersion = Version.create(/* major= */ 25, /* minor= */ 1); + Predicate betweenPredicate = + Version.getBetweenVersionPredicate(lowerVersion, higherVersion); + + assertThat(betweenPredicate.test(lowerVersion)).isTrue(); + assertThat(betweenPredicate.test(Version.create(/* major= */ 25, /* minor= */ 0))).isTrue(); + assertThat(betweenPredicate.test(Version.create(/* major= */ 6, /* minor= */ 99999))).isTrue(); + assertThat(betweenPredicate.test(higherVersion)).isFalse(); + } + + @Test + public void getBetweenVersionPredicate_withIllegalRange_throwsIllegalArgument() { + Version lowerVersion = Version.create(/* major= */ 0, /* minor= */ 1); + Version higherVersion = Version.create(/* major= */ 25, /* minor= */ 1); + + assertThrows( + IllegalArgumentException.class, + () -> + Version.getBetweenVersionPredicate( + /* higherExclusiveVersion= */ higherVersion, + /* lowerInclusiveVersion= */ lowerVersion)); + } + + @Test + public void getGreaterThanOrEqualToVersionPredicate() { + Version compareToVersion = Version.create(/* major= */ 75, /* minor= */ 30); + Predicate gePredicate = + Version.getGreaterThanOrEqualToVersionPredicate(compareToVersion); + + assertThat(gePredicate.test(compareToVersion)).isTrue(); + assertThat(gePredicate.test(Version.create(/* major= */ 75, /* minor= */ 29))).isFalse(); + assertThat(gePredicate.test(Version.create(/* major= */ 6, /* minor= */ 99999))).isFalse(); + assertThat(gePredicate.test(Version.create(/* major= */ 200, /* minor= */ 0))).isTrue(); + } +} diff --git a/javatests/com/google/aggregate/adtech/worker/model/serdes/SharedInfoSerdesTest.java b/javatests/com/google/aggregate/adtech/worker/model/serdes/SharedInfoSerdesTest.java index 75a2e224..c0d59c74 100644 --- a/javatests/com/google/aggregate/adtech/worker/model/serdes/SharedInfoSerdesTest.java +++ b/javatests/com/google/aggregate/adtech/worker/model/serdes/SharedInfoSerdesTest.java @@ -18,6 +18,7 @@ import static com.google.aggregate.adtech.worker.model.SharedInfo.LATEST_VERSION; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth8.assertThat; import static org.junit.Assert.assertFalse; @@ -273,14 +274,35 @@ public void deserialize_withGoldenReportVersion1() { String sharedInfoJsonString = 
"{\"api\":\"shared-storage\",\"debug_mode\":\"enabled\",\"report_id\":\"21abd97f-73e8-4b88-9389-a9fee6abda5e\",\"reporting_origin\":\"https://report.test\",\"scheduled_report_time\":\"1234486400\",\"version\":\"1.0\"}"; SharedInfo expectedSharedInfo = - SharedInfo.builder() - .setVersion("1.0") - .setApi(SHARED_STORAGE_API) - .setReportId("21abd97f-73e8-4b88-9389-a9fee6abda5e") - .setReportingOrigin(REPORTING_ORIGIN_CHROME_GOLDEN_REPORT) - .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) - .setReportDebugMode(true) - .build(); + SharedInfo.builder() + .setVersion("1.0") + .setApi(SHARED_STORAGE_API) + .setReportId("21abd97f-73e8-4b88-9389-a9fee6abda5e") + .setReportingOrigin(REPORTING_ORIGIN_CHROME_GOLDEN_REPORT) + .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) + .setReportDebugMode(true) + .build(); + + Optional deserialized = sharedInfoSerdes.convert(sharedInfoJsonString); + + assertThat(deserialized).hasValue(expectedSharedInfo); + } + + /** Test with Chrome generated attribution-reporting-debug reports used from here - b/324143474 */ + @Test + public void deserialize_withGoldenReport_debugApi() { + String sharedInfoJsonString = + "{\"api\":\"attribution-reporting-debug\",\"attribution_destination\":\"https://conversion.test\",\"debug_mode\":\"enabled\",\"report_id\":\"21abd97f-73e8-4b88-9389-a9fee6abda5e\",\"reporting_origin\":\"https://report.test\",\"scheduled_report_time\":\"1234486400\",\"version\":\"0.1\"}"; + SharedInfo expectedSharedInfo = + SharedInfo.builder() + .setVersion("0.1") + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setReportId("21abd97f-73e8-4b88-9389-a9fee6abda5e") + .setReportingOrigin(REPORTING_ORIGIN_CHROME_GOLDEN_REPORT) + .setDestination(DESTINATION_CHROME_GOLDEN_REPORT) + .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) + .setReportDebugMode(true) + .build(); Optional deserialized = sharedInfoSerdes.convert(sharedInfoJsonString); diff --git a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/BUILD b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/BUILD index 19a46e7b..84066e61 100644 --- a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/BUILD @@ -20,6 +20,8 @@ java_test( name = "CborPayloadSerdesTest", srcs = ["CborPayloadSerdesTest.java"], data = [ + ":resources/debug_api_report_1.cbor", # Generated by Chrome + ":resources/debug_api_report_2.cbor", # Generated by Chrome ":resources/nullReport.cbor", # Generated by Chrome ":resources/report1.cbor", # Generated by Chrome ":resources/report2.cbor", # Generated by Chrome @@ -33,6 +35,8 @@ java_test( # Pass the path to the input file via environment variable instead of # hard-coding a path in the test env = { + "CBOR_DEBUG_REPORT_1_LOCATION": "$(location :resources/debug_api_report_1.cbor)", + "CBOR_DEBUG_REPORT_2_LOCATION": "$(location :resources/debug_api_report_2.cbor)", "CBOR_NULL_REPORT_LOCATION": "$(location :resources/nullReport.cbor)", "CBOR_REPORT_1_LOCATION": "$(location :resources/report1.cbor)", "CBOR_REPORT_2_LOCATION": "$(location :resources/report2.cbor)", diff --git a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdesTest.java b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdesTest.java index 069d05a9..768f0bf5 100644 --- a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdesTest.java +++ 
b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/CborPayloadSerdesTest.java @@ -23,6 +23,7 @@ import com.google.aggregate.adtech.worker.model.Fact; import com.google.aggregate.adtech.worker.model.Payload; import com.google.common.io.ByteSource; +import com.google.common.primitives.UnsignedLong; import com.google.inject.AbstractModule; import com.google.inject.Inject; import java.io.IOException; @@ -55,6 +56,30 @@ public void setUp() { .build(); } + @Test + public void testDeserializeFromCborBytes_debugReport1() throws Exception { + Payload expectedPayload = + Payload.builder() + .addFact(Fact.builder().setBucket(BigInteger.valueOf(0x1)).setValue(2).build()) + .addFact(Fact.builder().setBucket(BigInteger.valueOf(0x3)).setValue(4).build()) + .build(); + + readCborBytesFromFileAndAssert( + Path.of(System.getenv("CBOR_DEBUG_REPORT_1_LOCATION")), expectedPayload); + } + + @Test + public void testDeserializeFromCborBytes_debugReport2() throws Exception { + Payload expectedPayload = + Payload.builder() + .addFact(Fact.builder().setBucket(BigInteger.valueOf(0x1)).setValue(2).build()) + .addFact(Fact.builder().setBucket(BigInteger.valueOf(0x0)).setValue(0).build()) + .build(); + + readCborBytesFromFileAndAssert( + Path.of(System.getenv("CBOR_DEBUG_REPORT_2_LOCATION")), expectedPayload); + } + @Test public void testDeserializeFromCborBytes_report1() throws Exception { Payload expectedPayload = @@ -233,13 +258,27 @@ public void withIdsInFact() { Fact.builder() .setBucket(BigInteger.valueOf(12345)) .setValue(12345) - .setId(Integer.MIN_VALUE) + .setId(UnsignedLong.valueOf((1L << 64) - 1)) .build()) .addFact( Fact.builder() .setBucket(BigInteger.valueOf(987654321)) .setValue(987654321) - .setId(Integer.MAX_VALUE - 1) + .setId(UnsignedLong.valueOf(Integer.MAX_VALUE + 1L)) + .build()) + .addFact( + Fact.builder() + .setBucket(BigInteger.valueOf(8563215486562L)) + .setValue(5555556) + .setId( + UnsignedLong.valueOf( + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE))) + .build()) + .addFact( + Fact.builder() + .setBucket(BigInteger.valueOf(4444)) + .setValue(4444) + .setId(UnsignedLong.ZERO) .build()) .build(); @@ -252,30 +291,55 @@ public void withIdsInFact() { @Test public void deserializeFromCborBytes_reportWithId() throws Exception { Payload.Builder expectedPayload = - Payload.builder() - .addFact(Fact.builder().setBucket(new BigInteger("1")).setValue(2).setId(0).build()) - .addFact(Fact.builder().setBucket(new BigInteger("3")).setValue(4).setId(1).build()); + Payload.builder() + .addFact( + Fact.builder() + .setBucket(new BigInteger("1")) + .setValue(2) + .setId(UnsignedLong.ZERO) + .build()) + .addFact( + Fact.builder() + .setBucket(new BigInteger("3")) + .setValue(4) + .setId(UnsignedLong.valueOf(1)) + .build()); // null padding to 20 contributions. 
- for(int ind = 0; ind < 18; ind ++) { - expectedPayload.addFact(Fact.builder().setBucket(new BigInteger("0")).setValue(0).setId(0).build()); + for (int ind = 0; ind < 18; ind++) { + expectedPayload.addFact( + Fact.builder() + .setBucket(new BigInteger("0")) + .setValue(0) + .setId(UnsignedLong.ZERO) + .build()); } readCborBytesFromFileAndAssert( - Path.of(System.getenv("CBOR_REPORT_WITH_ID_1_LOCATION")), expectedPayload.build()); + Path.of(System.getenv("CBOR_REPORT_WITH_ID_1_LOCATION")), expectedPayload.build()); } @Test public void deserializeFromCborBytes_reportWith32BitId() throws Exception { Payload.Builder expectedPayload = - Payload.builder() - .addFact(Fact.builder().setBucket(new BigInteger("1")).setValue(2).setId(1).build()); + Payload.builder() + .addFact( + Fact.builder() + .setBucket(new BigInteger("1")) + .setValue(2) + .setId(UnsignedLong.valueOf(1)) + .build()); // null padding to 20 contributions. - for(int ind = 0; ind < 19; ind ++) { - expectedPayload.addFact(Fact.builder().setBucket(new BigInteger("0")).setValue(0).setId(0).build()); + for (int ind = 0; ind < 19; ind++) { + expectedPayload.addFact( + Fact.builder() + .setBucket(new BigInteger("0")) + .setValue(0) + .setId(UnsignedLong.ZERO) + .build()); } readCborBytesFromFileAndAssert( - Path.of(System.getenv("CBOR_REPORT_WITH_ID_2_LOCATION")), expectedPayload.build()); + Path.of(System.getenv("CBOR_REPORT_WITH_ID_2_LOCATION")), expectedPayload.build()); } /** No overrides or bindings needed */ diff --git a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/README.md b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/README.md index f075b20d..193f4f5f 100644 --- a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/README.md +++ b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/README.md @@ -12,5 +12,11 @@ base64 decoder to get cbor, for example: base64 -d report_1_cleartext_payloads.json > report1.cbor ``` +Alternative, to ignore garbage while decoding use the following command: + +```sh + base64 -di report_1_cleartext_payloads.json > report1.cbor +``` + CborPayloadSerdesTest reads the cbor files from reportx.cbor files, deserializes the cbor payload and compares it with manually constructed test payload. 
diff --git a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/resources/debug_api_report_1.cbor b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/resources/debug_api_report_1.cbor new file mode 100644 index 00000000..271d8e81 Binary files /dev/null and b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/resources/debug_api_report_1.cbor differ diff --git a/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/resources/debug_api_report_2.cbor b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/resources/debug_api_report_2.cbor new file mode 100644 index 00000000..fda1649e Binary files /dev/null and b/javatests/com/google/aggregate/adtech/worker/model/serdes/cbor/resources/debug_api_report_2.cbor differ diff --git a/javatests/com/google/aggregate/adtech/worker/testing/AvroReportsFileReaderTest.java b/javatests/com/google/aggregate/adtech/worker/testing/AvroReportsFileReaderTest.java deleted file mode 100644 index c9ccd1b6..00000000 --- a/javatests/com/google/aggregate/adtech/worker/testing/AvroReportsFileReaderTest.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.aggregate.adtech.worker.testing; - -import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; - -import com.google.acai.Acai; -import com.google.aggregate.adtech.worker.model.EncryptedReport; -import com.google.aggregate.adtech.worker.writer.avro.LocalAvroResultFileWriter; -import com.google.common.collect.ImmutableList; -import com.google.common.io.ByteSource; -import com.google.common.jimfs.Configuration; -import com.google.common.jimfs.Jimfs; -import com.google.inject.AbstractModule; -import java.io.IOException; -import java.nio.file.FileSystem; -import java.nio.file.Path; -import javax.inject.Inject; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - -@RunWith(JUnit4.class) -public class AvroReportsFileReaderTest { - - @Rule public final Acai acai = new Acai(TestEnv.class); - - @Inject LocalAvroResultFileWriter localAvroResultFileWriter; - - // Under test - @Inject AvroReportsFileReader avroReportsFileReader; - - private FileSystem filesystem; - private Path avroFile; - private ImmutableList reports; - - private final ByteSource encryptedReport1Payload = ByteSource.wrap(new byte[] {0x00, 0x01}); - private final ByteSource encryptedReport2Payload = ByteSource.wrap(new byte[] {0x01, 0x02}); - private final EncryptedReport encryptedReport1 = - EncryptedReport.builder() - .setPayload(encryptedReport1Payload) - .setKeyId("key1") - .setSharedInfo("foo") - .build(); - - private final EncryptedReport encryptedReport2 = - EncryptedReport.builder() - .setPayload(encryptedReport2Payload) - .setKeyId("key1") - .setSharedInfo("foo") - .build(); - - @Before - public void setUp() { - filesystem = - 
Jimfs.newFileSystem(Configuration.unix().toBuilder().setWorkingDirectory("/").build()); - avroFile = filesystem.getPath("reports.avro"); - reports = ImmutableList.of(encryptedReport1, encryptedReport2); - } - - /** Writes reports and reads to confirm data is read correctly. */ - @Test - public void testLocalReportFile_writesSuccessfully() throws Exception { - localAvroResultFileWriter.writeLocalReportFile(reports.stream(), avroFile); - - ImmutableList writtenReports = - avroReportsFileReader.readAvroReportsFile(avroFile); - - assertThat(writtenReports.get(0).sharedInfo()).isEqualTo(encryptedReport1.sharedInfo()); - assertTrue(writtenReports.get(0).payload().contentEquals(encryptedReport1.payload())); - assertThat(writtenReports.get(0).keyId()).isEqualTo(encryptedReport1.keyId()); - - assertThat(writtenReports.get(1).sharedInfo()).isEqualTo(encryptedReport2.sharedInfo()); - assertTrue(writtenReports.get(1).payload().contentEquals(encryptedReport2.payload())); - assertThat(writtenReports.get(1).keyId()).isEqualTo(encryptedReport2.keyId()); - } - - @Test - public void readMissingFile_throwsException() throws Exception { - Path missingAvroFile = filesystem.getPath("filedoesnotexist.avro"); - - localAvroResultFileWriter.writeLocalReportFile(reports.stream(), missingAvroFile); - - assertThrows(IOException.class, () -> avroReportsFileReader.readAvroReportsFile(avroFile)); - } - - public static final class TestEnv extends AbstractModule {} -} diff --git a/javatests/com/google/aggregate/adtech/worker/testing/BUILD b/javatests/com/google/aggregate/adtech/worker/testing/BUILD index df41b667..8670cfaf 100644 --- a/javatests/com/google/aggregate/adtech/worker/testing/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/testing/BUILD @@ -149,18 +149,3 @@ java_test( "//java/external:jimfs", ], ) - -java_test( - name = "AvroReportsFileReaderTest", - srcs = ["AvroReportsFileReaderTest.java"], - deps = [ - "//java/com/google/aggregate/adtech/worker/model", - "//java/com/google/aggregate/adtech/worker/testing:avro_reports_file_reader", - "//java/com/google/aggregate/adtech/worker/writer/avro", - "//java/external:acai", - "//java/external:google_truth", - "//java/external:guava", - "//java/external:guice", - "//java/external:jimfs", - ], -) diff --git a/javatests/com/google/aggregate/adtech/worker/testing/FakeReportGeneratorTest.java b/javatests/com/google/aggregate/adtech/worker/testing/FakeReportGeneratorTest.java index e0225a64..b02eb43b 100644 --- a/javatests/com/google/aggregate/adtech/worker/testing/FakeReportGeneratorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/testing/FakeReportGeneratorTest.java @@ -86,7 +86,7 @@ public void testGenerate_FakeReportList_version_0_1() { .setVersion("0.1") .setApi("attribution-reporting") .setDestination("dummy") - .setReportingOrigin("dummy") + .setReportingOrigin("https://foo.com") .setScheduledReportTime(Instant.EPOCH.plus(1, SECONDS)) .setSourceRegistrationTime(Instant.EPOCH.plus(1, SECONDS)) .setReportId(generatedReport.sharedInfo().reportId().get()) @@ -103,7 +103,8 @@ public void testGenerate_FakeReportList_version_0_1() { public void testGenerate_version_0_1() { int id = 2; - Report generatedReport = FakeReportGenerator.generateWithParam(id, VERSION_0_1); + Report generatedReport = + FakeReportGenerator.generateWithParam(id, VERSION_0_1, "https://foo.com"); assertThat(generatedReport) .isEqualTo( @@ -113,7 +114,7 @@ public void testGenerate_version_0_1() { .setVersion("0.1") .setApi("attribution-reporting") .setDestination(String.valueOf(id)) 
- .setReportingOrigin(String.valueOf(id)) + .setReportingOrigin("https://foo.com") .setScheduledReportTime(Instant.EPOCH.plus(id, SECONDS)) .setSourceRegistrationTime(Instant.EPOCH.plus(id, SECONDS)) .setReportId(generatedReport.sharedInfo().reportId().get()) @@ -138,7 +139,7 @@ public void generateNullReport() { SharedInfo.builder() .setVersion(LATEST_VERSION) .setDestination("dummy") - .setReportingOrigin("dummy") + .setReportingOrigin("https://foo.com") .setScheduledReportTime(Instant.EPOCH.plus(1, SECONDS)) .setSourceRegistrationTime(Instant.EPOCH.plus(1, SECONDS)) .setReportId(generatedReport.sharedInfo().reportId().get()) diff --git a/javatests/com/google/aggregate/adtech/worker/testing/InMemoryResultLoggerTest.java b/javatests/com/google/aggregate/adtech/worker/testing/InMemoryResultLoggerTest.java index d73590f0..81e1e7dd 100644 --- a/javatests/com/google/aggregate/adtech/worker/testing/InMemoryResultLoggerTest.java +++ b/javatests/com/google/aggregate/adtech/worker/testing/InMemoryResultLoggerTest.java @@ -21,9 +21,7 @@ import com.google.aggregate.adtech.worker.exceptions.ResultLogException; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.common.collect.ImmutableList; -import com.google.common.io.ByteSource; import com.google.scp.operator.cpio.jobclient.model.Job; import com.google.scp.operator.cpio.jobclient.testing.FakeJobGenerator; import java.math.BigInteger; @@ -99,46 +97,10 @@ public void getDebugAggregationWithoutLogging() { .contains("MaterializedAggregations is null. Maybe results did not get logged."); } - @Test - public void logInMemoryReports_logSucceeds() throws ResultLogException { - EncryptedReport encryptedReport1 = - EncryptedReport.builder() - .setPayload(ByteSource.wrap(new byte[] {0x00, 0x01})) - .setKeyId("key1") - .setSharedInfo("foo") - .build(); - EncryptedReport encryptedReport2 = - EncryptedReport.builder() - .setPayload(ByteSource.wrap(new byte[] {0x01, 0x02})) - .setKeyId("key2") - .setSharedInfo("foo") - .build(); - ImmutableList encryptedReports = - ImmutableList.of(encryptedReport1, encryptedReport2); - - inMemoryResultLogger.logReports(encryptedReports, FakeJobGenerator.generate("foo"), "1"); - - assertThat(inMemoryResultLogger.getMaterializedEncryptedReports()) - .containsExactly(encryptedReport1, encryptedReport2); - } - - @Test - public void logNullReports_throwsException() { - ResultLogException exception = - assertThrows( - ResultLogException.class, () -> inMemoryResultLogger.getMaterializedEncryptedReports()); - - assertThat(exception).hasCauseThat().isInstanceOf(IllegalStateException.class); - assertThat(exception) - .hasMessageThat() - .contains("MaterializedEncryptionReports is null. 
Maybe results did not get logged."); - } - @Test public void throwsWhenSetTo() { inMemoryResultLogger.setShouldThrow(true); ImmutableList aggregatedFacts = ImmutableList.of(); - ImmutableList encryptedReports = ImmutableList.of(); Job Job = FakeJobGenerator.generate("foo"); assertThrows( @@ -147,8 +109,5 @@ public void throwsWhenSetTo() { assertThrows( ResultLogException.class, () -> inMemoryResultLogger.logResults(aggregatedFacts, Job, /* isDebugRun= */ true)); - assertThrows( - ResultLogException.class, - () -> inMemoryResultLogger.logReports(encryptedReports, Job, "1")); } } diff --git a/javatests/com/google/aggregate/adtech/worker/util/BUILD b/javatests/com/google/aggregate/adtech/worker/util/BUILD index c8be7343..3a5a0494 100644 --- a/javatests/com/google/aggregate/adtech/worker/util/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/util/BUILD @@ -71,3 +71,13 @@ java_test( "//java/external:google_truth", ], ) + +java_test( + name = "ReportingOriginUtilsTest", + srcs = ["ReportingOriginUtilsTest.java"], + deps = [ + "//java/com/google/aggregate/adtech/worker/util", + "//java/external:google_truth", + "//java/external:guava", + ], +) diff --git a/javatests/com/google/aggregate/adtech/worker/util/NumericConversionsTest.java b/javatests/com/google/aggregate/adtech/worker/util/NumericConversionsTest.java index be7b2598..2e5d9b38 100644 --- a/javatests/com/google/aggregate/adtech/worker/util/NumericConversionsTest.java +++ b/javatests/com/google/aggregate/adtech/worker/util/NumericConversionsTest.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertThrows; import com.google.common.primitives.Bytes; +import com.google.common.primitives.UnsignedLong; import java.math.BigInteger; import org.junit.Test; import org.junit.runner.RunWith; @@ -303,25 +304,60 @@ public void getPercentageValue_invalidPercentageRange_throwsIllegalArgument() { } @Test - public void getIntegersFromString_withValidList() { - assertThat(NumericConversions.getIntegersFromString(",2, 3, , 99999, ", ",")) - .containsExactly(3, 99999, 2); - assertThat(NumericConversions.getIntegersFromString(" 5 ", "\\s*,\\s*")).containsExactly(5); + public void getLongsFromString_withValidList() { + assertThat(NumericConversions.getUnsignedLongsFromString(",2, 3, , 99999, 4294967295, 9223372036854775808", ",")) + .containsExactly( + UnsignedLong.valueOf(3), + UnsignedLong.valueOf(99999), + UnsignedLong.valueOf(2), + UnsignedLong.valueOf(4294967295L), + UnsignedLong.valueOf(new BigInteger("9223372036854775808"))); + assertThat(NumericConversions.getUnsignedLongsFromString(" 5 ", ",")) + .containsExactly(UnsignedLong.valueOf(5)); } @Test - public void getIntegersFromString_emptyString_returnsEmptySet() { - assertThat(NumericConversions.getIntegersFromString(" ", ",")).isEmpty(); - assertThat(NumericConversions.getIntegersFromString(" ,, , ,", ",")).isEmpty(); + public void getLongsFromString_emptyString_returnsEmptySet() { + assertThat(NumericConversions.getUnsignedLongsFromString(" ", ",")).isEmpty(); + assertThat(NumericConversions.getUnsignedLongsFromString(" ,, , ,", ",")).isEmpty(); } @Test - public void getIntegersFromString_withNonIntegers_throwsIllegalArgument() { + public void getLongsFromString_withNonIntegers_throwsIllegalArgument() { assertThrows( - IllegalArgumentException.class, () -> NumericConversions.getIntegersFromString("5.5", ",")); + IllegalArgumentException.class, + () -> NumericConversions.getUnsignedLongsFromString("5.5", ",")); assertThrows( IllegalArgumentException.class, - () -> 
NumericConversions.getIntegersFromString("5,6,null", ",")); + () -> NumericConversions.getUnsignedLongsFromString("5,6,null", ",")); + } + + @Test + public void getUnsignedLongFromBytes_withVariousByteSize() { + byte[] value1 = NumericConversions.toUnsignedByteArray(BigInteger.valueOf(4)); + byte[] value2 = NumericConversions.toUnsignedByteArray(BigInteger.valueOf(127)); + byte[] value3 = NumericConversions.toUnsignedByteArray(BigInteger.valueOf(Integer.MAX_VALUE)); + byte[] value4 = NumericConversions.toUnsignedByteArray(BigInteger.valueOf(Long.MAX_VALUE)); + byte[] value5 = NumericConversions.toUnsignedByteArray(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE)); + + assertThat(value1).hasLength(1); + assertThat(value2).hasLength(1); + assertThat(value3).hasLength(4); + assertThat(value4).hasLength(8); + assertThat(value5).hasLength(8); + + assertThat(NumericConversions.getUnsignedLongFromBytes(value1)).isEqualTo(UnsignedLong.valueOf(4)); + assertThat(NumericConversions.getUnsignedLongFromBytes(value2)).isEqualTo(UnsignedLong.valueOf(127)); + assertThat(NumericConversions.getUnsignedLongFromBytes(value3)).isEqualTo(UnsignedLong.valueOf(Integer.MAX_VALUE)); + assertThat(NumericConversions.getUnsignedLongFromBytes(value4)).isEqualTo(UnsignedLong.valueOf(Long.MAX_VALUE)); + assertThat(NumericConversions.getUnsignedLongFromBytes(value5)).isEqualTo(UnsignedLong.valueOf(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE))); + } + + @Test + public void getUnsignedLongFromBytes_withInvalidValue_throwsIllegalArgument() { + byte[] valueLargerThanMaxUnsignedLong = BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(5)).toByteArray(); + + assertThrows(IllegalArgumentException.class, () -> NumericConversions.getUnsignedLongFromBytes(valueLargerThanMaxUnsignedLong)); } @Test diff --git a/javatests/com/google/aggregate/adtech/worker/util/ReportingOriginUtilsTest.java b/javatests/com/google/aggregate/adtech/worker/util/ReportingOriginUtilsTest.java new file mode 100644 index 00000000..46731445 --- /dev/null +++ b/javatests/com/google/aggregate/adtech/worker/util/ReportingOriginUtilsTest.java @@ -0,0 +1,114 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.adtech.worker.util; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertThrows; + +import com.google.aggregate.adtech.worker.util.ReportingOriginUtils.InvalidReportingOriginException; +import org.junit.Test; + +public class ReportingOriginUtilsTest { + + @Test + public void convertToSite_singlePartTld() throws InvalidReportingOriginException { + assertThat(ReportingOriginUtils.convertReportingOriginToSite("https://dummyOrigin.foo.com")) + .isEqualTo("https://foo.com"); + } + + @Test + public void convertToSite_multipartTld() throws InvalidReportingOriginException { + assertThat(ReportingOriginUtils.convertReportingOriginToSite("https://dummyOrigin.foo.co.uk")) + .isEqualTo("https://foo.co.uk"); + } + + @Test + public void convertToSite_whenSiteProvided() throws InvalidReportingOriginException { + assertThat(ReportingOriginUtils.convertReportingOriginToSite("https://foo.co.uk")) + .isEqualTo("https://foo.co.uk"); + } + + @Test + public void convertToSite_whenHttpUrlProvided() throws InvalidReportingOriginException { + assertThat(ReportingOriginUtils.convertReportingOriginToSite("http://about.foo.blogspot.com")) + .isEqualTo("https://foo.blogspot.com"); + } + + @Test + public void convertToSite_whenUrlWithTrailingSlashProvided() + throws InvalidReportingOriginException { + assertThat(ReportingOriginUtils.convertReportingOriginToSite("http://about.foo.blogspot.com/")) + .isEqualTo("https://foo.blogspot.com"); + } + + @Test + public void convertToSite_whenUrlWithPortProvided() + throws InvalidReportingOriginException { + assertThat(ReportingOriginUtils.convertReportingOriginToSite("http://about.foo.blogspot.com:8443/bar")) + .isEqualTo("https://foo.blogspot.com"); + } + + @Test + public void convertToSite_onlyPublicSuffixProvided_throwsException() { + InvalidReportingOriginException ex = + assertThrows( + InvalidReportingOriginException.class, + () -> ReportingOriginUtils.convertReportingOriginToSite("https://blogspot.com")); + assertThat(ex.getMessage()).contains("not under a known public suffix"); + } + + @Test + public void convertToSite_underUnknownPublicSuffix_throwsException() { + + InvalidReportingOriginException ex = + assertThrows( + InvalidReportingOriginException.class, + () -> + ReportingOriginUtils.convertReportingOriginToSite( + "https://dummyOrigin.coordinator.test")); + assertThat(ex.getMessage()).contains("not under a known public suffix"); + } + + @Test + public void convertToSite_invalidDomainNoProtocol_throwsException() { + + InvalidReportingOriginException ex = + assertThrows( + InvalidReportingOriginException.class, + () -> + ReportingOriginUtils.convertReportingOriginToSite("dummyOrigin.coordinator.test")); + assertThat(ex.getMessage()).contains("no protocol"); + } + + @Test + public void convertToSite_malformedUrl_throwsException() { + InvalidReportingOriginException ex = + assertThrows( + InvalidReportingOriginException.class, + () -> ReportingOriginUtils.convertReportingOriginToSite("some invalid URL")); + assertThat(ex.getMessage()).contains("no protocol"); + } + + @Test + public void convertToSite_emptyInput_throwsException() { + InvalidReportingOriginException ex = + assertThrows( + InvalidReportingOriginException.class, + () -> ReportingOriginUtils.convertReportingOriginToSite("")); + assertThat(ex.getMessage()).contains("no protocol"); + } +} diff --git a/javatests/com/google/aggregate/adtech/worker/validation/BUILD b/javatests/com/google/aggregate/adtech/worker/validation/BUILD 
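The new ReportingOriginUtilsTest above pins down the expected origin-to-site conversion: scheme-less or malformed input is rejected with a "no protocol" message, origins not under a known public suffix are rejected, subdomains, ports and paths are dropped, and the resulting site always uses the https scheme. The patch only adds the test, so the following is a speculative sketch of that behaviour under the assumption that the conversion is built on java.net.URL and Guava's public-suffix-aware InternetDomainName; it is not the actual ReportingOriginUtils implementation, and the nested exception class here is a local stand-in.

```java
import com.google.common.net.InternetDomainName;
import java.net.MalformedURLException;
import java.net.URL;

final class ReportingOriginToSiteSketch {

  /** Local stand-in for ReportingOriginUtils.InvalidReportingOriginException. */
  static final class InvalidReportingOriginException extends Exception {
    InvalidReportingOriginException(String message) {
      super(message);
    }

    InvalidReportingOriginException(String message, Throwable cause) {
      super(message, cause);
    }
  }

  /** Converts e.g. "http://about.foo.blogspot.com:8443/bar" to "https://foo.blogspot.com". */
  static String convertReportingOriginToSite(String reportingOrigin)
      throws InvalidReportingOriginException {
    URL url;
    try {
      // java.net.URL reports scheme-less or empty input as "no protocol: ...",
      // which is the message fragment the tests assert on.
      url = new URL(reportingOrigin);
    } catch (MalformedURLException e) {
      throw new InvalidReportingOriginException(e.getMessage(), e);
    }
    InternetDomainName domain = InternetDomainName.from(url.getHost());
    if (!domain.isUnderPublicSuffix()) {
      throw new InvalidReportingOriginException(
          "Reporting origin " + reportingOrigin + " is not under a known public suffix");
    }
    // topPrivateDomain() collapses any subdomains to eTLD+1; the port and path are dropped along
    // with the rest of the URL, and the site is always expressed with the https scheme.
    return "https://" + domain.topPrivateDomain().toString();
  }
}
```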
index 06eb2ebb..75cc2efe 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/validation/BUILD @@ -99,12 +99,16 @@ java_test( name = "SupportedReportApiTypeValidatorTest", srcs = ["SupportedReportApiTypeValidatorTest.java"], deps = [ + "//java/com/google/aggregate/adtech/worker:annotations", "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/adtech/worker/validation", + "//java/external:acai", "//java/external:clients_jobclient_aws", "//java/external:clients_jobclient_model", "//java/external:google_truth", "//java/external:google_truth8", + "//java/external:guava", + "//java/external:guice", ], ) @@ -147,6 +151,18 @@ java_test( ], ) +java_test( + name = "AttributionReportingDebugPrivacyBudgetKeyFieldsValidatorTest", + srcs = ["v01/AttributionReportingDebugPrivacyBudgetKeyFieldsValidatorTest.java"], + deps = [ + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/adtech/worker/validation", + "//java/com/google/aggregate/adtech/worker/validation/v01", + "//java/external:google_truth", + "//java/external:google_truth8", + ], +) + java_test( name = "ProtectedAudiencePrivacyBudgetKeyFieldsValidatorTest", srcs = ["v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidatorTest.java"], diff --git a/javatests/com/google/aggregate/adtech/worker/validation/JobValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/JobValidatorTest.java index a6ac9cd0..5caf5e9b 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/JobValidatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/validation/JobValidatorTest.java @@ -56,7 +56,9 @@ public void validate_noAttributionReportToKeyInParams_fails() { assertThat(exception) .hasMessageThat() - .containsMatch("Job parameters does not have an attribution_report_to field for the Job"); + .containsMatch( + "Exactly one of 'attribution_report_to' and 'reporting_site' fields should be specified" + + " for the Job"); } @Test @@ -71,7 +73,8 @@ public void validate_noAttributionReportTo_fails() { assertThat(exception) .hasMessageThat() - .containsMatch("Job parameters does not have an attribution_report_to field for the Job"); + .containsMatch( + "The 'attribution_report_to' field in the Job parameters is empty for the Job"); } @Test @@ -210,6 +213,52 @@ public void validate_invalidReportErrorThresholdPercentage_fails() { + " 'report_error_threshold_percentage' parameter"); } + @Test + public void validate_validInputReportCount_succeeds() { + Job jobWithoutCount = buildJob(ImmutableMap.of("attribution_report_to", "foo.com")).build(); + Job jobWithEmptyString = + buildJob(ImmutableMap.of("attribution_report_to", "foo.com", "input_report_count", " ")) + .build(); + Job jobWithTrailingSpace = + buildJob( + ImmutableMap.of( + "attribution_report_to", "foo.com", "input_report_count", "100 ")) + .build(); + Job jobWithZeroReportCount = + buildJob(ImmutableMap.of("attribution_report_to", "foo.com", "input_report_count", "0")) + .build(); + + JobValidator.validate(Optional.of(jobWithoutCount), /* domainOptional= */ true); + JobValidator.validate(Optional.of(jobWithEmptyString), /* domainOptional= */ true); + JobValidator.validate(Optional.of(jobWithTrailingSpace), /* domainOptional= */ true); + JobValidator.validate(Optional.of(jobWithZeroReportCount), /* domainOptional= */ true); + } + + @Test + public void validate_invalidInputReportCount_fails() { + Job job1 = + 
buildJob(ImmutableMap.of("attribution_report_to", "foo.com", "input_report_count", "-1")) + .build(); + Job job2 = + buildJob( + ImmutableMap.of( + "attribution_report_to", "foo.com", "input_report_count", "not a number")) + .build(); + Job job3 = + buildJob(ImmutableMap.of("attribution_report_to", "foo.com", "input_report_count", "100.1")) + .build(); + + assertThrows( + IllegalArgumentException.class, + () -> JobValidator.validate(Optional.of(job1), /* domainOptional= */ true)); + assertThrows( + IllegalArgumentException.class, + () -> JobValidator.validate(Optional.of(job2), /* domainOptional= */ true)); + assertThrows( + IllegalArgumentException.class, + () -> JobValidator.validate(Optional.of(job3), /* domainOptional= */ true)); + } + @Test public void validate_reportErrorThresholdPercentageNotANumber_fails() { Job job1 = @@ -298,6 +347,39 @@ public void validate_invalidFilteringIds_throws() { () -> JobValidator.validate(Optional.of(jobWithNonNumberIds), /* domainOptional= */ true)); } + @Test + public void validate_noReportingSite_fails() { + ImmutableMap jobParams = ImmutableMap.of("reporting_site", ""); + Job job = buildJob(jobParams).build(); + + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> JobValidator.validate(Optional.of(job), /* domainOptional= */ false)); + + assertThat(exception) + .hasMessageThat() + .containsMatch("The 'reporting_site' field in the Job parameters is empty for the Job"); + } + + @Test + public void validate_attributionReportToAndReportingSiteBothPresent_fails() { + ImmutableMap jobParams = + ImmutableMap.of("attribution_report_to", "someOrigin", "reporting_site", "someSite"); + Job job = buildJob(jobParams).build(); + + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> JobValidator.validate(Optional.of(job), /* domainOptional= */ false)); + + assertThat(exception) + .hasMessageThat() + .containsMatch( + "Exactly one of 'attribution_report_to' and 'reporting_site' fields should be specified" + + " for the Job"); + } + private Job.Builder buildJob(ImmutableMap jobParams) { return jobBuilder.setRequestInfo(requestInfoBuilder.putAllJobParameters(jobParams).build()); } diff --git a/javatests/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactoryTest.java b/javatests/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactoryTest.java index 2fee019a..16272f42 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactoryTest.java +++ b/javatests/com/google/aggregate/adtech/worker/validation/PrivacyBudgetKeyValidatorFactoryTest.java @@ -17,11 +17,13 @@ package com.google.aggregate.adtech.worker.validation; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; import static com.google.common.truth.Truth.assertThat; +import com.google.aggregate.adtech.worker.validation.v01.AttributionReportingDebugPrivacyBudgetKeyFieldsValidator; import com.google.aggregate.adtech.worker.validation.v01.AttributionReportingPrivacyBudgetKeyFieldsValidator; import 
com.google.aggregate.adtech.worker.validation.v01.ProtectedAudiencePrivacyBudgetKeyFieldsValidator; import com.google.aggregate.adtech.worker.validation.v01.SharedStoragePrivacyBudgetKeyFieldsValidator; @@ -66,6 +68,38 @@ public void attributionReporting_higherMajorVersion_privacyBudgetValidator_retur .isEqualTo(Optional.empty()); } + @Test + public void attributionReportingDebugV01_privacyBudgetValidator_returnsValidator() { + Optional attributionReportingDebugValidator = + PrivacyBudgetKeyValidatorFactory.getPrivacyBudgetKeyValidator( + ATTRIBUTION_REPORTING_DEBUG_API, VERSION_0_1); + + assertThat(attributionReportingDebugValidator).isPresent(); + assertThat(attributionReportingDebugValidator.get()) + .isInstanceOf(AttributionReportingDebugPrivacyBudgetKeyFieldsValidator.class); + } + + @Test + public void + attributionReportingDebug_higherMinorVersion_privacyBudgetValidator_returnsValidator() { + Optional attributionReportingDebugValidator = + PrivacyBudgetKeyValidatorFactory.getPrivacyBudgetKeyValidator( + ATTRIBUTION_REPORTING_DEBUG_API, HIGHER_MINOR_VERSION); + + assertThat(attributionReportingDebugValidator).isPresent(); + assertThat(attributionReportingDebugValidator.get()) + .isInstanceOf(AttributionReportingDebugPrivacyBudgetKeyFieldsValidator.class); + } + + @Test + public void + attributionReportingDebug_higherMajorVersion_privacyBudgetValidator_returnsNoValidator() { + assertThat( + PrivacyBudgetKeyValidatorFactory.getPrivacyBudgetKeyValidator( + ATTRIBUTION_REPORTING_API, HIGHER_MAJOR_VERSION)) + .isEqualTo(Optional.empty()); + } + @Test public void protectedAudienceV01_privacyBudgetValidator_returnsValidator() { PrivacyBudgetKeyValidator protectedAudiencePrivacyBudgetKeyValidator = diff --git a/javatests/com/google/aggregate/adtech/worker/validation/ReportVersionValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/ReportVersionValidatorTest.java index 58f3e0f8..fc33262b 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/ReportVersionValidatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/validation/ReportVersionValidatorTest.java @@ -153,8 +153,6 @@ public void attributionReporting_v00Reports_validationFails() { assertThat(validationErrorVersion00.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersion00.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -177,8 +175,6 @@ public void attributionReporting_v0Reports_validationFails() { assertThat(validationErrorVersion0.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersion0.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -201,8 +197,6 @@ public void attributionReporting_v1Reports_validationFails() { assertThat(validationErrorVersion1.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersion1.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -226,8 +220,6 @@ public void attributionReporting_emptyVersionReports_validationFails() { assertThat(validationErrorVersionEmpty.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersionEmpty.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -250,8 +242,6 @@ 
public void attributionReporting_v123Reports_validationFails() { assertThat(validationErrorVersion123.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersion123.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -275,8 +265,6 @@ public void attributionReporting_invalidVersionReports_validationFails() { assertThat(validationErrorVersionInvalid.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersionInvalid.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -300,8 +288,6 @@ public void attributionReporting_invalidVersion2Reports_validationFails() { assertThat(validationErrorVersionInvalid.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersionInvalid.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -325,8 +311,6 @@ public void attributionReporting_invalidVersion3Reports_validationFails() { assertThat(validationErrorVersionInvalid.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersionInvalid.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -350,8 +334,6 @@ public void attributionReporting_invalidVersion4Reports_validationFails() { assertThat(validationErrorVersionInvalid.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersionInvalid.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -375,8 +357,6 @@ public void attributionReporting_negativeVersionReports_validationFails() { assertThat(validationErrorVersionNegative.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersionNegative.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } @Test @@ -400,7 +380,5 @@ public void attributionReporting_invalidPositiveVersionReports_validationFails() assertThat(validationErrorVersionInvalidPositive.get().category()) .isEqualTo(ErrorCounter.UNSUPPORTED_SHAREDINFO_VERSION); - assertThat(validationErrorVersionInvalidPositive.get().detailedErrorMessage()) - .contains("Report has an unsupported version value in its shared_info."); } } diff --git a/javatests/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidatorTest.java index fad99524..4e991a43 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/validation/ReportingOriginMatchesRequestValidatorTest.java @@ -20,6 +20,8 @@ import static com.google.aggregate.adtech.worker.model.SharedInfo.LATEST_VERSION; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth8.assertThat; +import static com.google.aggregate.adtech.worker.model.ErrorCounter.ATTRIBUTION_REPORT_TO_MALFORMED; +import static com.google.aggregate.adtech.worker.model.ErrorCounter.REPORTING_SITE_MISMATCH; import com.google.aggregate.adtech.worker.model.ErrorMessage; import 
com.google.aggregate.adtech.worker.model.Payload; @@ -59,6 +61,16 @@ public void setUp() { ctx = FakeJobGenerator.generateBuilder("").build(); } + private Job createTestJob(ImmutableMap jobParameters) { + return ctx.toBuilder() + .setRequestInfo( + ctx.requestInfo().toBuilder() + .clearJobParameters() + .putAllJobParameters(jobParameters) + .build()) + .build(); + } + /** * Test that the validation passed when the report and the aggregation request ({@code Job}) have * matching attributionReportTo values. @@ -69,14 +81,7 @@ public void testMatchingPasses() { reportBuilder .setSharedInfo(sharedInfoBuilder.setReportingOrigin("foo.com").build()) .build(); - Job testCtx = - ctx.toBuilder() - .setRequestInfo( - ctx.requestInfo().toBuilder() - .clearJobParameters() - .putAllJobParameters(ImmutableMap.of("attribution_report_to", "foo.com")) - .build()) - .build(); + Job testCtx = createTestJob(ImmutableMap.of("attribution_report_to", "foo.com")); Optional validationError = validator.validate(report, testCtx); @@ -93,18 +98,67 @@ public void testMismatchingFails() { reportBuilder .setSharedInfo(sharedInfoBuilder.setReportingOrigin("foo.com").build()) .build(); - Job testCtx = - ctx.toBuilder() - .setRequestInfo( - ctx.requestInfo().toBuilder() - .clearJobParameters() - .putAllJobParameters(ImmutableMap.of("attribution_report_to", "bar.com")) - .build()) - .build(); + Job testCtx = createTestJob(ImmutableMap.of("attribution_report_to", "bar.com")); Optional validationError = validator.validate(report, testCtx); assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(ATTRIBUTION_REPORT_TO_MISMATCH); } + + /** + * Test that the validation passed when the report's reporting origin belongs to the site provided + * in the aggregation request ({@code Job}). + */ + @Test + public void siteProvided_reportOriginBelongsToSite_success() { + Report report1 = + reportBuilder + .setSharedInfo(sharedInfoBuilder.setReportingOrigin("https://origin1.foo.com").build()) + .build(); + Report report2 = + reportBuilder + .setSharedInfo(sharedInfoBuilder.setReportingOrigin("https://origin2.foo.com").build()) + .build(); + Job testCtx = createTestJob(ImmutableMap.of("reporting_site", "https://foo.com")); + + Optional validationError1 = validator.validate(report1, testCtx); + Optional validationError2 = validator.validate(report2, testCtx); + + assertThat(validationError1).isEmpty(); + assertThat(validationError2).isEmpty(); + } + + /** + * Test that the validation fails when the report's reporting origin belongs to a different site + * than the one provided in the aggregation request ({@code Job}). + */ + @Test + public void siteProvided_reportOriginDoesNotBelongsToSite_failure() { + Report report = + reportBuilder + .setSharedInfo(sharedInfoBuilder.setReportingOrigin("https://origin.foo.com").build()) + .build(); + Job testCtx = createTestJob(ImmutableMap.of("reporting_site", "https://foo1.com")); + + Optional validationError = validator.validate(report, testCtx); + + assertThat(validationError).isPresent(); + assertThat(validationError.get().category()).isEqualTo(REPORTING_SITE_MISMATCH); + } + + /** Tests validation failure when the report's reporting origin is malformed. 
*/ + @Test + public void siteProvided_reportOriginInvalid_failure() { + Report report = + reportBuilder + .setSharedInfo(sharedInfoBuilder.setReportingOrigin("origin.foo.com").build()) + .build(); + Job testCtx = createTestJob(ImmutableMap.of("reporting_site", "https://foo1.com")); + + Optional validationError = validator.validate(report, testCtx); + + assertThat(validationError).isPresent(); + assertThat(validationError.get().category()).isEqualTo(ATTRIBUTION_REPORT_TO_MALFORMED); + } } diff --git a/javatests/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidatorTest.java index 46142326..de686fb8 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/validation/SharedInfoReportIdValidatorTest.java @@ -19,9 +19,7 @@ import static com.google.aggregate.adtech.worker.model.ErrorCounter.INVALID_REPORT_ID; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.LATEST_VERSION; -import static com.google.aggregate.adtech.worker.validation.SharedInfoReportIdValidator.INVALID_REPORT_ID_ERROR_STRING; import static com.google.common.truth.Truth.assertThat; -import static com.google.common.truth.Truth8.assertThat; import com.google.aggregate.adtech.worker.model.ErrorMessage; import com.google.aggregate.adtech.worker.model.Payload; @@ -74,15 +72,14 @@ public void sharedInfo_missingReportId_validationFails() { .setReportingOrigin(REPORTING_ORIGIN) .setScheduledReportTime(FIXED_TIME) .setSourceRegistrationTime(FIXED_TIME) - .setDestination(DESTINATION).build(); + .setDestination(DESTINATION) + .build(); Report report = reportBuilder.setSharedInfo(sharedInfoBuilder).build(); Optional validationError = validator.validate(report, ctx); assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(INVALID_REPORT_ID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(INVALID_REPORT_ID_ERROR_STRING); } @Test @@ -95,15 +92,14 @@ public void sharedInfo_emptyReportId_validationFails() { .setReportingOrigin(REPORTING_ORIGIN) .setScheduledReportTime(FIXED_TIME) .setSourceRegistrationTime(FIXED_TIME) - .setDestination(DESTINATION).build(); + .setDestination(DESTINATION) + .build(); Report report = reportBuilder.setSharedInfo(sharedInfoBuilder).build(); Optional validationError = validator.validate(report, ctx); assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(INVALID_REPORT_ID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(INVALID_REPORT_ID_ERROR_STRING); } @Test @@ -116,7 +112,8 @@ public void sharedInfo_validReportId_validationSucceeds() { .setReportingOrigin(REPORTING_ORIGIN) .setScheduledReportTime(FIXED_TIME) .setSourceRegistrationTime(FIXED_TIME) - .setDestination(DESTINATION).build(); + .setDestination(DESTINATION) + .build(); Report report = reportBuilder.setSharedInfo(sharedInfoBuilder).build(); Optional validationError = validator.validate(report, ctx); diff --git a/javatests/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidatorTest.java index 15798176..50b0587d 100644 --- 
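Tying the validation changes above together: the new ReportingOriginMatchesRequestValidatorTest cases suggest the matching rule now branches on whether the job carries 'reporting_site' or the legacy 'attribution_report_to'. Below is a speculative sketch of that branching, with a small local enum standing in for the worker's ErrorCounter values and a converter interface standing in for the origin-to-site conversion sketched earlier; it is an assumption, not the repository's validator.

```java
import java.util.Optional;

final class OriginMatchSketch {

  /** Stand-in for the ErrorCounter categories asserted in the tests above. */
  enum Category {
    ATTRIBUTION_REPORT_TO_MISMATCH,
    REPORTING_SITE_MISMATCH,
    ATTRIBUTION_REPORT_TO_MALFORMED
  }

  /** Converter contract matching the earlier origin-to-site sketch. */
  interface OriginToSiteConverter {
    String convert(String reportingOrigin) throws Exception;
  }

  /** Returns an error category, or empty when the report's origin is acceptable for the job. */
  static Optional<Category> validate(
      String reportOrigin,
      Optional<String> jobReportingSite,
      Optional<String> jobAttributionReportTo,
      OriginToSiteConverter converter) {
    if (jobReportingSite.isPresent()) {
      try {
        // e.g. "https://origin1.foo.com" and "https://origin2.foo.com" both map to
        // "https://foo.com" and so match a job whose reporting_site is "https://foo.com".
        String reportSite = converter.convert(reportOrigin);
        return reportSite.equals(jobReportingSite.get())
            ? Optional.empty()
            : Optional.of(Category.REPORTING_SITE_MISMATCH);
      } catch (Exception e) {
        // e.g. a scheme-less origin such as "origin.foo.com" cannot be converted to a site.
        return Optional.of(Category.ATTRIBUTION_REPORT_TO_MALFORMED);
      }
    }
    // Legacy path: exact match against the job's 'attribution_report_to' parameter.
    return reportOrigin.equals(jobAttributionReportTo.orElse(""))
        ? Optional.empty()
        : Optional.of(Category.ATTRIBUTION_REPORT_TO_MISMATCH);
  }
}
```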
a/javatests/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/validation/SupportedReportApiTypeValidatorTest.java @@ -18,31 +18,41 @@ import static com.google.aggregate.adtech.worker.model.ErrorCounter.UNSUPPORTED_REPORT_API_TYPE; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth8.assertThat; +import com.google.acai.Acai; +import com.google.aggregate.adtech.worker.Annotations.SupportedApis; import com.google.aggregate.adtech.worker.model.ErrorMessage; import com.google.aggregate.adtech.worker.model.Payload; import com.google.aggregate.adtech.worker.model.Report; import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.common.collect.ImmutableSet; +import com.google.inject.AbstractModule; +import com.google.inject.Inject; +import com.google.inject.TypeLiteral; import com.google.scp.operator.cpio.jobclient.model.Job; import com.google.scp.operator.cpio.jobclient.testing.FakeJobGenerator; import java.time.Instant; import java.util.Optional; import java.util.UUID; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @RunWith(JUnit4.class) public class SupportedReportApiTypeValidatorTest { + @Rule + public final Acai acai = new Acai(TestEnv.class); // Under test - private SupportedReportApiTypeValidator validator; + @Inject private SupportedReportApiTypeValidator validator; private Report.Builder reportBuilder; @@ -59,13 +69,12 @@ public class SupportedReportApiTypeValidatorTest { @Before public void setUp() { - validator = new SupportedReportApiTypeValidator(); reportBuilder = Report.builder().setPayload(Payload.builder().build()); ctx = FakeJobGenerator.generate(""); } @Test - public void attributionReportingReports_validationSucceed() { + public void attributionReportingReports_validationSucceeds() { SharedInfo.Builder sharedInfoVersion01Builder = SharedInfo.builder() .setApi(ATTRIBUTION_REPORTING_API) @@ -83,6 +92,25 @@ public void attributionReportingReports_validationSucceed() { assertThat(validationErrorVersion01).isEmpty(); } + @Test + public void attributionReportingDebugReports_validationSucceeds() { + SharedInfo sharedInfoVersion01 = + SharedInfo.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setVersion(VERSION_0_1) + .setReportId(RANDOM_UUID) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .setDestination(DESTINATION) + .build(); + Report reportVersion01 = reportBuilder.setSharedInfo(sharedInfoVersion01).build(); + + Optional validationErrorVersion01 = validator.validate(reportVersion01, ctx); + + assertThat(validationErrorVersion01).isEmpty(); + } + @Test public void protectedAudienceReports_validationSucceeds() { SharedInfo.Builder sharedInfoBuilder = @@ -131,4 +159,19 @@ public void invalidApiTypeReports_validationFails() { assertThat(validationError).isPresent(); 
assertThat(validationError.get().category()).isEqualTo(UNSUPPORTED_REPORT_API_TYPE); } + + public static final class TestEnv extends AbstractModule { + + @Override + protected void configure() { + bind(new TypeLiteral>() {}) + .annotatedWith(SupportedApis.class) + .toInstance( + ImmutableSet.of( + ATTRIBUTION_REPORTING_API, + ATTRIBUTION_REPORTING_DEBUG_API, + PROTECTED_AUDIENCE_API, + SHARED_STORAGE_API)); + } + } } diff --git a/javatests/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingDebugPrivacyBudgetKeyFieldsValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingDebugPrivacyBudgetKeyFieldsValidatorTest.java new file mode 100644 index 00000000..e0123b52 --- /dev/null +++ b/javatests/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingDebugPrivacyBudgetKeyFieldsValidatorTest.java @@ -0,0 +1,164 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.aggregate.adtech.worker.validation.v01; + +import static com.google.aggregate.adtech.worker.model.ErrorCounter.REQUIRED_SHAREDINFO_FIELD_INVALID; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; +import static com.google.common.truth.Truth.assertThat; + +import com.google.aggregate.adtech.worker.model.ErrorMessage; +import com.google.aggregate.adtech.worker.model.SharedInfo; +import java.time.Instant; +import java.util.Optional; +import java.util.UUID; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +@RunWith(JUnit4.class) +public class AttributionReportingDebugPrivacyBudgetKeyFieldsValidatorTest { + + // Under test + private final AttributionReportingDebugPrivacyBudgetKeyFieldsValidator + attributionReportingDebugValidator = + new AttributionReportingDebugPrivacyBudgetKeyFieldsValidator(); + + // FIXED_TIME = Jan 01 2021 00:00:00 GMT+0000 + private static final Instant FIXED_TIME = Instant.ofEpochSecond(1609459200); + + private static final String REPORTING_ORIGIN = "https://www.origin.com"; + + private static final String DESTINATION = "dest.com"; + + private static final String RANDOM_UUID = UUID.randomUUID().toString(); + + private static final String EMPTY_STRING = ""; + + @Test + public void attributionReportingDebugReport_emptyReportingOrigin_validationFails() { + SharedInfo sharedInfo = + SharedInfo.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setVersion(VERSION_0_1) + .setReportId(RANDOM_UUID) + .setDestination(DESTINATION) + .setReportingOrigin(EMPTY_STRING) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .build(); + + Optional validationError = + attributionReportingDebugValidator.validatePrivacyBudgetKey(sharedInfo); + + assertThat(validationError).isPresent(); + assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); + } + + @Test 
+ public void attributionReportingDebugReport_emptyDestination_validationFails() { + SharedInfo sharedInfoNoDestination = + SharedInfo.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setVersion(VERSION_0_1) + .setReportId(RANDOM_UUID) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .build(); + + SharedInfo sharedInfoEmptyDestination = + SharedInfo.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setVersion(VERSION_0_1) + .setReportId(RANDOM_UUID) + .setDestination(EMPTY_STRING) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .build(); + + Optional validationErrorNoDestination = + attributionReportingDebugValidator.validatePrivacyBudgetKey(sharedInfoNoDestination); + Optional validationErrorEmptyDestination = + attributionReportingDebugValidator.validatePrivacyBudgetKey(sharedInfoEmptyDestination); + + assertThat(validationErrorNoDestination).isPresent(); + assertThat(validationErrorNoDestination.get().category()) + .isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); + assertThat(validationErrorEmptyDestination).isPresent(); + assertThat(validationErrorEmptyDestination.get().category()) + .isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); + } + + @Test + public void attributionReportingDebugReport_emptyVersion_validationFails() { + SharedInfo sharedInfo = + SharedInfo.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setVersion(EMPTY_STRING) + .setReportId(RANDOM_UUID) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .build(); + + Optional validationError = + attributionReportingDebugValidator.validatePrivacyBudgetKey(sharedInfo); + + assertThat(validationError).isPresent(); + assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); + } + + @Test + public void attributionReportingDebugReport_emptySourceRegistrationTime_validationSucceeds() { + SharedInfo sharedInfo = + SharedInfo.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setVersion(VERSION_0_1) + .setReportId(RANDOM_UUID) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(FIXED_TIME) + .build(); + + Optional validationError = + attributionReportingDebugValidator.validatePrivacyBudgetKey(sharedInfo); + + assertThat(validationError).isEmpty(); + } + + @Test + public void attributionReportingDebugReport_validationSucceeds() { + SharedInfo sharedInfo = + SharedInfo.builder() + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setVersion(VERSION_0_1) + .setReportId(RANDOM_UUID) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .build(); + + Optional validationError = + attributionReportingDebugValidator.validatePrivacyBudgetKey(sharedInfo); + + assertThat(validationError).isEmpty(); + } +} diff --git a/javatests/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidatorTest.java index 89ccb9de..56ac26b7 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidatorTest.java +++ 
b/javatests/com/google/aggregate/adtech/worker/validation/v01/AttributionReportingPrivacyBudgetKeyFieldsValidatorTest.java @@ -19,9 +19,7 @@ import static com.google.aggregate.adtech.worker.model.ErrorCounter.REQUIRED_SHAREDINFO_FIELD_INVALID; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; -import static com.google.aggregate.adtech.worker.validation.PrivacyBudgetKeyValidator.NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING; import static com.google.common.truth.Truth.assertThat; -import static com.google.common.truth.Truth8.assertThat; import com.google.aggregate.adtech.worker.model.ErrorMessage; import com.google.aggregate.adtech.worker.model.SharedInfo; @@ -69,8 +67,6 @@ public void attributionReportingReport_emptyReportingOrigin_validationFails() { assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); } @Test @@ -103,12 +99,8 @@ public void attributionReportingReport_emptyDestination_validationFails() { assertThat(validationError1).isPresent(); assertThat(validationError1.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError1.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); assertThat(validationError2).isPresent(); assertThat(validationError2.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError2.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); } @Test @@ -129,16 +121,14 @@ public void attributionReportingReport_emptyVersion_validationFails() { assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); } @Test - public void attributionReportingReport_emptySourceRegistrationTime_validationFails() { + public void attributionReportingReport_emptySourceRegistrationTime_validationSucceeds() { SharedInfo sharedInfo = SharedInfo.builder() .setApi(ATTRIBUTION_REPORTING_API) - .setVersion(EMPTY_STRING) + .setVersion(VERSION_0_1) .setReportId(RANDOM_UUID) .setDestination(DESTINATION) .setReportingOrigin(REPORTING_ORIGIN) @@ -148,10 +138,7 @@ public void attributionReportingReport_emptySourceRegistrationTime_validationFai Optional validationError = attributionReportingPrivacyBudgetKeyFieldsValidator.validatePrivacyBudgetKey(sharedInfo); - assertThat(validationError).isPresent(); - assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); + assertThat(validationError).isEmpty(); } @Test diff --git a/javatests/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidatorTest.java index d3b05aa0..66d32032 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidatorTest.java +++ 
b/javatests/com/google/aggregate/adtech/worker/validation/v01/ProtectedAudiencePrivacyBudgetKeyFieldsValidatorTest.java @@ -19,9 +19,7 @@ import static com.google.aggregate.adtech.worker.model.ErrorCounter.REQUIRED_SHAREDINFO_FIELD_INVALID; import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; -import static com.google.aggregate.adtech.worker.validation.PrivacyBudgetKeyValidator.NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING; import static com.google.common.truth.Truth.assertThat; -import static com.google.common.truth.Truth8.assertThat; import com.google.aggregate.adtech.worker.model.ErrorMessage; import com.google.aggregate.adtech.worker.model.SharedInfo; @@ -65,8 +63,6 @@ public void protectedAudienceReport_emptyReportingOrigin_validationFails() { assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); } @Test @@ -85,8 +81,6 @@ public void protectedAudienceReport_emptyVersion_validationFails() { assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); } @Test diff --git a/javatests/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidatorTest.java b/javatests/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidatorTest.java index fad8dd72..569c6d5a 100644 --- a/javatests/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidatorTest.java +++ b/javatests/com/google/aggregate/adtech/worker/validation/v01/SharedStoragePrivacyBudgetKeyFieldsValidatorTest.java @@ -19,9 +19,7 @@ import static com.google.aggregate.adtech.worker.model.ErrorCounter.REQUIRED_SHAREDINFO_FIELD_INVALID; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; -import static com.google.aggregate.adtech.worker.validation.PrivacyBudgetKeyValidator.NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING; import static com.google.common.truth.Truth.assertThat; -import static com.google.common.truth.Truth8.assertThat; import com.google.aggregate.adtech.worker.model.ErrorMessage; import com.google.aggregate.adtech.worker.model.SharedInfo; @@ -65,8 +63,6 @@ public void sharedStorageReport_emptyReportingOrigin_validationFails() { assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); } @Test @@ -85,8 +81,6 @@ public void sharedStorageReport_emptyVersion_validationFails() { assertThat(validationError).isPresent(); assertThat(validationError.get().category()).isEqualTo(REQUIRED_SHAREDINFO_FIELD_INVALID); - assertThat(validationError.get().detailedErrorMessage()) - .isEqualTo(NULL_OR_INVALID_SHAREDINFO_FIELD_ERROR_STRING); } @Test diff --git a/javatests/com/google/aggregate/adtech/worker/writer/avro/BUILD b/javatests/com/google/aggregate/adtech/worker/writer/avro/BUILD index b1ff2595..7d4e9038 100644 --- 
a/javatests/com/google/aggregate/adtech/worker/writer/avro/BUILD +++ b/javatests/com/google/aggregate/adtech/worker/writer/avro/BUILD @@ -21,7 +21,6 @@ java_test( srcs = ["LocalAvroResultFileWriterTest.java"], deps = [ "//java/com/google/aggregate/adtech/worker/model", - "//java/com/google/aggregate/adtech/worker/testing:avro_reports_file_reader", "//java/com/google/aggregate/adtech/worker/testing:avro_results_file_reader", "//java/com/google/aggregate/adtech/worker/writer", "//java/com/google/aggregate/adtech/worker/writer/avro", diff --git a/javatests/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriterTest.java b/javatests/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriterTest.java index c6db000f..1f63942e 100644 --- a/javatests/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriterTest.java +++ b/javatests/com/google/aggregate/adtech/worker/writer/avro/LocalAvroResultFileWriterTest.java @@ -18,16 +18,12 @@ import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; import com.google.acai.Acai; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; -import com.google.aggregate.adtech.worker.testing.AvroReportsFileReader; import com.google.aggregate.adtech.worker.testing.AvroResultsFileReader; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter.FileWriteException; import com.google.common.collect.ImmutableList; -import com.google.common.io.ByteSource; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; import com.google.inject.AbstractModule; @@ -48,29 +44,10 @@ public class LocalAvroResultFileWriterTest { // Under test @Inject LocalAvroResultFileWriter localAvroResultFileWriter; - @Inject AvroResultsFileReader avroResultsFileReader; - @Inject AvroReportsFileReader avroReportsFileReader; - private FileSystem filesystem; private Path avroFile; ImmutableList results; - private ImmutableList reports; - private final ByteSource encryptedReport1Payload = ByteSource.wrap(new byte[] {0x00, 0x01}); - private final ByteSource encryptedReport2Payload = ByteSource.wrap(new byte[] {0x01, 0x02}); - private final EncryptedReport encryptedReport1 = - EncryptedReport.builder() - .setPayload(encryptedReport1Payload) - .setKeyId("key1") - .setSharedInfo("foo") - .build(); - - private final EncryptedReport encryptedReport2 = - EncryptedReport.builder() - .setPayload(encryptedReport2Payload) - .setKeyId("key2") - .setSharedInfo("bar") - .build(); @Before public void setUp() throws Exception { @@ -83,7 +60,6 @@ public void setUp() throws Exception { AggregatedFact.create(BigInteger.valueOf(123), 50L), AggregatedFact.create(BigInteger.valueOf(456), 30L), AggregatedFact.create(BigInteger.valueOf(789), 40L)); - reports = ImmutableList.of(encryptedReport1, encryptedReport2); } /** @@ -109,32 +85,6 @@ public void testExceptionOnFailedWrite() throws Exception { () -> localAvroResultFileWriter.writeLocalFile(results.stream(), nonExistentDirectory)); } - @Test - public void localReportWrite_succeeds() throws Exception { - localAvroResultFileWriter.writeLocalReportFile(reports.stream(), avroFile); - - ImmutableList writtenReports = - avroReportsFileReader.readAvroReportsFile(avroFile); - assertThat(writtenReports.get(0).sharedInfo()).isEqualTo(encryptedReport1.sharedInfo()); - 
assertTrue(writtenReports.get(0).payload().contentEquals(encryptedReport1.payload())); - assertThat(writtenReports.get(0).keyId()).isEqualTo(encryptedReport1.keyId()); - - assertThat(writtenReports.get(1).sharedInfo()).isEqualTo(encryptedReport2.sharedInfo()); - assertTrue(writtenReports.get(1).payload().contentEquals(encryptedReport2.payload())); - assertThat(writtenReports.get(1).keyId()).isEqualTo(encryptedReport2.keyId()); - } - - @Test - public void localReportWrite_invalidWritePath_fails() throws Exception { - Path nonExistentDirectory = - avroFile.getFileSystem().getPath("/doesnotexist", avroFile.toString()); - - assertThrows( - FileWriteException.class, - () -> - localAvroResultFileWriter.writeLocalReportFile(reports.stream(), nonExistentDirectory)); - } - @Test public void testFileExtension() { assertThat(localAvroResultFileWriter.getFileExtension()).isEqualTo(".avro"); diff --git a/javatests/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriterTest.java b/javatests/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriterTest.java index 04c3757c..d7b3a70b 100644 --- a/javatests/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriterTest.java +++ b/javatests/com/google/aggregate/adtech/worker/writer/json/LocalJsonResultFileWriterTest.java @@ -18,21 +18,17 @@ import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; -import static org.junit.Assert.fail; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.acai.Acai; import com.google.aggregate.adtech.worker.model.AggregatedFact; -import com.google.aggregate.adtech.worker.model.EncryptedReport; import com.google.aggregate.adtech.worker.util.NumericConversions; import com.google.aggregate.adtech.worker.writer.LocalResultFileWriter.FileWriteException; import com.google.common.collect.ImmutableList; -import com.google.common.io.ByteSource; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; import com.google.inject.AbstractModule; -import java.io.IOException; import java.math.BigInteger; import java.nio.file.FileSystem; import java.nio.file.Files; @@ -58,24 +54,6 @@ public class LocalJsonResultFileWriterTest { private FileSystem filesystem; private Path jsonFile; - private ImmutableList reports; - - // Not testing for payload, since encrypted payload in json is not useful. 
- private final ByteSource encryptedReportPayload = ByteSource.wrap(new byte[] {0x00, 0x01}); - private final EncryptedReport encryptedReport1 = - EncryptedReport.builder() - .setPayload(encryptedReportPayload) - .setKeyId("key1") - .setSharedInfo("foo") - .build(); - - private final EncryptedReport encryptedReport2 = - EncryptedReport.builder() - .setPayload(encryptedReportPayload) - .setKeyId("key2") - .setSharedInfo("bar") - .build(); - @Before public void setUp() throws Exception { filesystem = @@ -88,7 +66,6 @@ public void setUp() throws Exception { AggregatedFact.create(NumericConversions.createBucketFromInt(123), 50L), AggregatedFact.create(NumericConversions.createBucketFromInt(456), 30L), AggregatedFact.create(NumericConversions.createBucketFromInt(789), 40L)); - reports = ImmutableList.of(encryptedReport1, encryptedReport2); } /** @@ -105,14 +82,10 @@ public void testWriteFile() throws Exception { .iterator() .forEachRemaining( entry -> { - try { - writtenResults.add( - AggregatedFact.create( - NumericConversions.uInt128FromBytes(entry.get("bucket").binaryValue()), - entry.get("metric").asLong())); - } catch (IOException e) { - fail(e.getMessage()); - } + writtenResults.add( + AggregatedFact.create( + NumericConversions.createBucketFromString(entry.get("bucket").asText()), + entry.get("metric").asLong())); }); assertThat(writtenResults).containsExactly(results.toArray()); } @@ -127,41 +100,6 @@ public void testExceptionOnFailedWrite() throws Exception { () -> localJsonResultFileWriter.writeLocalFile(results.stream(), nonExistentDirectory)); } - @Test - public void writeLocalJsonReport_succeeds() throws Exception { - localJsonResultFileWriter.writeLocalReportFile(reports.stream(), jsonFile); - ObjectMapper mapper = new ObjectMapper(); - JsonNode jsonNode = mapper.readTree(Files.newInputStream(jsonFile)); - List writtenReports = new ArrayList<>(); - jsonNode - .iterator() - .forEachRemaining( - entry -> { - writtenReports.add( - EncryptedReport.builder() - .setSharedInfo((entry.get("shared_info").asText())) - .setKeyId(entry.get("key_id").asText()) - .setPayload(encryptedReportPayload) - .build()); - }); - assertThat(writtenReports.get(0).sharedInfo()).isEqualTo(encryptedReport1.sharedInfo()); - assertThat(writtenReports.get(0).keyId()).isEqualTo(encryptedReport1.keyId()); - - assertThat(writtenReports.get(1).sharedInfo()).isEqualTo(encryptedReport2.sharedInfo()); - assertThat(writtenReports.get(1).keyId()).isEqualTo(encryptedReport2.keyId()); - } - - @Test - public void writeLocalJsonReport_invalidPath_fails() throws Exception { - Path nonExistentDirectory = - jsonFile.getFileSystem().getPath("/doesnotexist", jsonFile.toString()); - - assertThrows( - FileWriteException.class, - () -> - localJsonResultFileWriter.writeLocalReportFile(reports.stream(), nonExistentDirectory)); - } - @Test public void testFileExtension() { assertThat(localJsonResultFileWriter.getFileExtension()).isEqualTo(".json"); diff --git a/javatests/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridgeTest.java b/javatests/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridgeTest.java index 4bdd35a7..939af21b 100644 --- a/javatests/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridgeTest.java +++ b/javatests/com/google/aggregate/privacy/budgeting/bridge/FakePrivacyBudgetingServiceBridgeTest.java @@ -32,12 +32,12 @@ public class FakePrivacyBudgetingServiceBridgeTest { private FakePrivacyBudgetingServiceBridge privacyBudgetingService; - 
private String attributionReportTo = "foo.com"; + private String attributionReportTo = "https://foo.com"; private final PrivacyBudgetUnit firstId = - PrivacyBudgetUnit.create("foo", Instant.ofEpochMilli(1000)); + PrivacyBudgetUnit.create("foo", Instant.ofEpochMilli(1000), attributionReportTo); private final PrivacyBudgetUnit secondId = - PrivacyBudgetUnit.create("foo", Instant.ofEpochMilli(2000)); + PrivacyBudgetUnit.create("foo", Instant.ofEpochMilli(2000), attributionReportTo); @Before public void setUp() { diff --git a/javatests/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridgeTest.java b/javatests/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridgeTest.java index 97813215..4dadef48 100644 --- a/javatests/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridgeTest.java +++ b/javatests/com/google/aggregate/privacy/budgeting/bridge/HttpPrivacyBudgetingServiceBridgeTest.java @@ -21,12 +21,12 @@ import com.google.acai.Acai; import com.google.acai.TestScoped; -import com.google.aggregate.privacy.budgeting.bridge.HttpPrivacyBudgetingServiceBridge; import com.google.aggregate.privacy.budgeting.bridge.PrivacyBudgetingServiceBridge.PrivacyBudgetUnit; import com.google.common.collect.ImmutableList; import com.google.inject.AbstractModule; import com.google.scp.coordinator.privacy.budgeting.model.ConsumePrivacyBudgetRequest; import com.google.scp.coordinator.privacy.budgeting.model.ConsumePrivacyBudgetResponse; +import com.google.scp.coordinator.privacy.budgeting.model.ReportingOriginToPrivacyBudgetUnits; import com.google.scp.operator.cpio.distributedprivacybudgetclient.DistributedPrivacyBudgetClient; import java.time.Instant; import javax.inject.Inject; @@ -39,35 +39,85 @@ public class HttpPrivacyBudgetingServiceBridgeTest { @Rule public final Acai acai = new Acai(TestEnv.class); + private static final String CLAIMED_IDENTITY = "https://foo.com"; + + private static final String REPORTING_ORIGIN_1 = "origin1.foo.com"; + private static final String REPORTING_ORIGIN_2 = "origin2.foo.com"; + + private static final PrivacyBudgetUnit WORKER_FIRST_UNIT = + PrivacyBudgetUnit.create("foo1", Instant.ofEpochMilli(1000), REPORTING_ORIGIN_1); + private static final PrivacyBudgetUnit WORKER_SECOND_UNIT = + PrivacyBudgetUnit.create("foo2", Instant.ofEpochMilli(2000), REPORTING_ORIGIN_1); + + private static final PrivacyBudgetUnit WORKER_THIRD_UNIT = + PrivacyBudgetUnit.create("foo3", Instant.ofEpochMilli(3000), REPORTING_ORIGIN_2); + + private static final PrivacyBudgetUnit WORKER_FOURTH_UNIT = + PrivacyBudgetUnit.create("foo4", Instant.ofEpochMilli(4000), REPORTING_ORIGIN_1); + + private static final com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit + API_FIRST_UNIT = + com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit.builder() + .privacyBudgetKey("foo1") + .reportingWindow(Instant.ofEpochMilli(1000)) + .build(); + + private static final com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit + API_SECOND_UNIT = + com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit.builder() + .privacyBudgetKey("foo2") + .reportingWindow(Instant.ofEpochMilli(2000)) + .build(); + + private static final com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit + API_THIRD_UNIT = + com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit.builder() + .privacyBudgetKey("foo3") + .reportingWindow(Instant.ofEpochMilli(3000)) + .build(); + + private static final 
com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit + API_FOURTH_UNIT = + com.google.scp.coordinator.privacy.budgeting.model.PrivacyBudgetUnit.builder() + .privacyBudgetKey("foo4") + .reportingWindow(Instant.ofEpochMilli(4000)) + .build(); + + private static final ReportingOriginToPrivacyBudgetUnits ORIGIN_1_UNITS = + ReportingOriginToPrivacyBudgetUnits.builder() + .setReportingOrigin(REPORTING_ORIGIN_1) + .setPrivacyBudgetUnits(ImmutableList.of(API_FIRST_UNIT, API_SECOND_UNIT, API_FOURTH_UNIT)) + .build(); - private final PrivacyBudgetUnit firstId = - PrivacyBudgetUnit.create("foo", Instant.ofEpochMilli(1000)); - private final PrivacyBudgetUnit secondId = - PrivacyBudgetUnit.create("foo", Instant.ofEpochMilli(2000)); - private final String attributionReportTo = "foo.com"; + private static final ReportingOriginToPrivacyBudgetUnits ORIGIN_2_UNITS = + ReportingOriginToPrivacyBudgetUnits.builder() + .setReportingOrigin(REPORTING_ORIGIN_2) + .setPrivacyBudgetUnits(ImmutableList.of(API_THIRD_UNIT)) + .build(); @Inject FakeHttpPrivacyBudgetingServiceClient fakeHttpPrivacyBudgetingServiceClient; // Under test - @Inject - HttpPrivacyBudgetingServiceBridge privacyBudgetingService; + @Inject HttpPrivacyBudgetingServiceBridge privacyBudgetingService; @Test public void noBudget() throws Exception { fakeHttpPrivacyBudgetingServiceClient.setExhaustedUnits( - ImmutableList.of(workerToScpUnit(firstId), workerToScpUnit(secondId))); + ImmutableList.of(workerToScpUnit(WORKER_FIRST_UNIT), workerToScpUnit(WORKER_SECOND_UNIT))); ImmutableList missingBudget = privacyBudgetingService.consumePrivacyBudget( - ImmutableList.of(firstId, secondId), attributionReportTo); + ImmutableList.of( + WORKER_THIRD_UNIT, WORKER_FIRST_UNIT, WORKER_SECOND_UNIT, WORKER_FOURTH_UNIT), + CLAIMED_IDENTITY); - assertThat(missingBudget).containsExactly(firstId, secondId); + assertThat(missingBudget).containsExactly(WORKER_FIRST_UNIT, WORKER_SECOND_UNIT); assertThat(fakeHttpPrivacyBudgetingServiceClient.lastRequestSent) .isEqualTo( ConsumePrivacyBudgetRequest.builder() - .privacyBudgetUnits( - ImmutableList.of(workerToScpUnit(firstId), workerToScpUnit(secondId))) - .attributionReportTo(attributionReportTo) + .reportingOriginToPrivacyBudgetUnitsList( + ImmutableList.of(ORIGIN_2_UNITS, ORIGIN_1_UNITS)) + .claimedIdentity(CLAIMED_IDENTITY) .privacyBudgetLimit(DEFAULT_PRIVACY_BUDGET_LIMIT) .build()); } @@ -75,19 +125,19 @@ public void noBudget() throws Exception { @Test public void oneBudgetMissing() throws Exception { fakeHttpPrivacyBudgetingServiceClient.setExhaustedUnits( - ImmutableList.of(workerToScpUnit(firstId))); + ImmutableList.of(workerToScpUnit(WORKER_FIRST_UNIT))); ImmutableList missingBudget = privacyBudgetingService.consumePrivacyBudget( - ImmutableList.of(firstId, secondId), attributionReportTo); + ImmutableList.of(WORKER_FIRST_UNIT, WORKER_SECOND_UNIT, WORKER_FOURTH_UNIT), + CLAIMED_IDENTITY); - assertThat(missingBudget).containsExactly(firstId); + assertThat(missingBudget).containsExactly(WORKER_FIRST_UNIT); assertThat(fakeHttpPrivacyBudgetingServiceClient.lastRequestSent) .isEqualTo( ConsumePrivacyBudgetRequest.builder() - .privacyBudgetUnits( - ImmutableList.of(workerToScpUnit(firstId), workerToScpUnit(secondId))) - .attributionReportTo(attributionReportTo) + .reportingOriginToPrivacyBudgetUnitsList(ImmutableList.of(ORIGIN_1_UNITS)) + .claimedIdentity(CLAIMED_IDENTITY) .privacyBudgetLimit(DEFAULT_PRIVACY_BUDGET_LIMIT) .build()); } @@ -98,15 +148,17 @@ public void success() throws Exception { ImmutableList 
missingBudget = privacyBudgetingService.consumePrivacyBudget( - ImmutableList.of(firstId, secondId), attributionReportTo); + ImmutableList.of( + WORKER_FIRST_UNIT, WORKER_SECOND_UNIT, WORKER_THIRD_UNIT, WORKER_FOURTH_UNIT), + CLAIMED_IDENTITY); assertThat(missingBudget).isEmpty(); assertThat(fakeHttpPrivacyBudgetingServiceClient.lastRequestSent) .isEqualTo( ConsumePrivacyBudgetRequest.builder() - .privacyBudgetUnits( - ImmutableList.of(workerToScpUnit(firstId), workerToScpUnit(secondId))) - .attributionReportTo(attributionReportTo) + .reportingOriginToPrivacyBudgetUnitsList( + ImmutableList.of(ORIGIN_1_UNITS, ORIGIN_2_UNITS)) + .claimedIdentity(CLAIMED_IDENTITY) .privacyBudgetLimit(DEFAULT_PRIVACY_BUDGET_LIMIT) .build()); } @@ -133,17 +185,18 @@ public void setExhaustedUnits( } @Override - public ConsumePrivacyBudgetResponse consumePrivacyBudget(ConsumePrivacyBudgetRequest request) - throws DistributedPrivacyBudgetServiceException { + public ConsumePrivacyBudgetResponse consumePrivacyBudget(ConsumePrivacyBudgetRequest request) { lastRequestSent = request; + String reportingOrigin1 = "origin1.foo.com"; return ConsumePrivacyBudgetResponse.builder() - .exhaustedPrivacyBudgetUnits(exhaustedUnits) + .exhaustedPrivacyBudgetUnitsByOrigin( + ImmutableList.of( + ReportingOriginToPrivacyBudgetUnits.builder() + .setReportingOrigin(reportingOrigin1) + .setPrivacyBudgetUnits(exhaustedUnits) + .build())) .build(); } - - public ConsumePrivacyBudgetRequest getLastRequestSent() { - return lastRequestSent; - } } private static final class TestEnv extends AbstractModule { diff --git a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/AttributionReportingDebugPrivacyBudgetKeyGeneratorTest.java b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/AttributionReportingDebugPrivacyBudgetKeyGeneratorTest.java new file mode 100644 index 00000000..b53fed68 --- /dev/null +++ b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/AttributionReportingDebugPrivacyBudgetKeyGeneratorTest.java @@ -0,0 +1,247 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.aggregate.privacy.budgeting.budgetkeygenerator; + +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; +import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_1_0; +import static com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug.PrivacyBudgetKeyGeneratorModule.AttributionReportingDebugPrivacyBudgetKeyGenerators; +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.acai.Acai; +import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug.V1PrivacyBudgetKeyGenerator; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreportingdebug.V2PrivacyBudgetKeyGenerator; +import com.google.common.primitives.UnsignedLong; +import com.google.inject.AbstractModule; +import java.time.Instant; +import javax.inject.Inject; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +@RunWith(JUnit4.class) +public class AttributionReportingDebugPrivacyBudgetKeyGeneratorTest { + + @Rule public final Acai acai = new Acai(TestEnv.class); + + // FIXED_TIME = Jan 01 2021 00:00:00 GMT+0000 + private static final Instant FIXED_TIME = Instant.ofEpochSecond(1609459200); + private static final String DESTINATION = "https://www.destination.com"; + private static final String REPORTING_ORIGIN = "https://www.origin.com"; + private static final UnsignedLong FILTERING_ID = UnsignedLong.valueOf(123); + + /** + * String representation of sha256 digest generated using UTF-8 representation of key. Key + * constructed from shared info fields - api + version + reporting_origin + destination + + * source_registration_time. Actual values used for generating key for V1 : + * + *
UTF-8 Input = ATTRIBUTION_REPORTING_DEBUG_API + {Version} + REPORTING_ORIGIN + DESTINATION + + * FIXED_TIME. + */ + private static final String PRIVACY_BUDGET_KEY_DEBUG_V1 = + "56f9dee8fe909ddc293084429e614cc272b8a79090577a7d566aea140ff087a4"; + + /** + * Actual values used for generating key for V2 : + * + *
UTF-8 Input = ATTRIBUTION_REPORTING_DEBUG_API + {Version} + REPORTING_ORIGIN + DESTINATION + + * FIXED_TIME + FILTERING_ID. + */ + private static final String PRIVACY_BUDGET_KEY_DEBUG_V2 = + "768bf2420536f93b4cf25ea907711eff43e12e600aa9c7099a7d3aa095b4152f"; + + @Inject @AttributionReportingDebugPrivacyBudgetKeyGenerators + VersionedPrivacyBudgetKeyGeneratorProvider versionedPrivacyBudgetKeyGeneratorProvider; + + private static final PrivacyBudgetKeyGenerator V1_PRIVACY_BUDGET_KEY_GENERATOR = + new V1PrivacyBudgetKeyGenerator(); + + private static final PrivacyBudgetKeyGenerator V2_PRIVACY_BUDGET_KEY_GENERATOR = + new V2PrivacyBudgetKeyGenerator(); + + /** + * Test to verify Privacy Budget Key is generated correctly from Shared Info with API VERSION_0_1. + */ + @Test + public void generatePrivacyBudgetKey_forV1_succeeds() { + SharedInfo sharedInfo = + SharedInfo.builder() + .setVersion(VERSION_0_1) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setDestination(DESTINATION) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .setReportingOrigin(REPORTING_ORIGIN) + .build(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder().setSharedInfo(sharedInfo).build(); + + assertEquals( + V1_PRIVACY_BUDGET_KEY_GENERATOR.generatePrivacyBudgetKey(privacyBudgetKeyInput), + PRIVACY_BUDGET_KEY_DEBUG_V1); + } + + /** + * Test to verify Privacy Budget Key is generated correctly from Shared Info with API VERSION_1_0. + */ + @Test + public void generatePrivacyBudgetKey_forV2_succeeds() { + SharedInfo sharedInfo = + SharedInfo.builder() + .setVersion(VERSION_1_0) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setDestination(DESTINATION) + .setScheduledReportTime(FIXED_TIME) + .setSourceRegistrationTime(FIXED_TIME) + .setReportingOrigin(REPORTING_ORIGIN) + .build(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .setFilteringId(FILTERING_ID) + .build(); + + assertEquals( + V2_PRIVACY_BUDGET_KEY_GENERATOR.generatePrivacyBudgetKey(privacyBudgetKeyInput), + PRIVACY_BUDGET_KEY_DEBUG_V2); + } + + @Test + public void zeroSourceRegistrationTime_generatesValidKey() { + SharedInfo.Builder sharedInfoBuilder1 = + SharedInfo.builder() + .setVersion(VERSION_0_1) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) + .setSourceRegistrationTime(Instant.ofEpochSecond(0)); + SharedInfo sharedInfo = sharedInfoBuilder1.build(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder().setSharedInfo(sharedInfo).build(); + + String privacyBudgetKey1 = + V1_PRIVACY_BUDGET_KEY_GENERATOR.generatePrivacyBudgetKey(privacyBudgetKeyInput); + + assertThat(privacyBudgetKey1).isNotEmpty(); + } + + @Test + public void negativeSourceRegistrationTime_generatesValidKey() { + SharedInfo.Builder sharedInfoBuilder1 = + SharedInfo.builder() + .setVersion(VERSION_0_1) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) + .setSourceRegistrationTime(Instant.ofEpochSecond(-900)); + SharedInfo sharedInfo = sharedInfoBuilder1.build(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder().setSharedInfo(sharedInfo).build(); + + String privacyBudgetKey1 = + V1_PRIVACY_BUDGET_KEY_GENERATOR.generatePrivacyBudgetKey(privacyBudgetKeyInput); + + 
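// Illustrative sketch only (not the production generator): the expected-key constants above
// are SHA-256 digests over a UTF-8 string built from shared-info fields (api, version,
// reporting origin, destination, source registration time, plus the filtering ID for V2).
// The "-" delimiter and field order below mirror the String.join/Hashing.sha256 pattern used
// by the sibling generator tests in this change; they are assumptions, not the debug
// generator's verified format.
import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.common.hash.Hashing;

final class PrivacyBudgetKeyHashSketch {
  // Joins the given fields with "-" and returns the lowercase hex SHA-256 of the UTF-8 bytes.
  static String sha256Hex(String... fields) {
    return Hashing.sha256().hashString(String.join("-", fields), UTF_8).toString();
  }
}
// For example, sha256Hex(ATTRIBUTION_REPORTING_DEBUG_API, VERSION_0_1, REPORTING_ORIGIN,
// DESTINATION, FIXED_TIME.toString()) would reproduce PRIVACY_BUDGET_KEY_DEBUG_V1 only if the
// real V1 generator uses exactly this ordering and delimiter.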
assertThat(privacyBudgetKey1).isNotEmpty(); + } + + @Test + public void generatorV1_withoutSourceRegistrationTime_generatesValidKey() { + SharedInfo.Builder sharedInfoBuilder = + SharedInfo.builder() + .setVersion(VERSION_0_1) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(Instant.ofEpochSecond(1234486400)); + SharedInfo sharedInfo = sharedInfoBuilder.build(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder().setSharedInfo(sharedInfo).build(); + + String privacyBudgetKey = + V1_PRIVACY_BUDGET_KEY_GENERATOR.generatePrivacyBudgetKey(privacyBudgetKeyInput); + + assertThat(privacyBudgetKey).isNotEmpty(); + } + + @Test + public void generatorV2_withoutSourceRegistrationTime_generatesValidKey() { + SharedInfo sharedInfo = + SharedInfo.builder() + .setVersion(VERSION_1_0) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) + .build(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .setFilteringId(UnsignedLong.valueOf(456)) + .build(); + + String privacyBudgetKey = + V2_PRIVACY_BUDGET_KEY_GENERATOR.generatePrivacyBudgetKey(privacyBudgetKeyInput); + + assertThat(privacyBudgetKey).isNotEmpty(); + } + + @Test + public void versionedPBKProvider_providesExactlyOneGeneratorPerVersion() { + assertExactlyOnePBKGeneratorForVersion(/* version= */ 0.1, UnsignedLong.ZERO); + assertExactlyOnePBKGeneratorForVersion(/* version= */ 0.98765, UnsignedLong.ZERO); + + assertExactlyOnePBKGeneratorForVersion(/* version= */ 1.0, UnsignedLong.ZERO); + assertExactlyOnePBKGeneratorForVersion(/* version= */ 0.98765, UnsignedLong.ONE); + assertExactlyOnePBKGeneratorForVersion(/* version= */ 1.0, UnsignedLong.valueOf(5)); + assertExactlyOnePBKGeneratorForVersion(/* version= */ 5.987, UnsignedLong.valueOf(5)); + assertExactlyOnePBKGeneratorForVersion(/* version= */ 19.678, UnsignedLong.valueOf(5)); + } + + private void assertExactlyOnePBKGeneratorForVersion(double version, UnsignedLong filteringId) { + SharedInfo sharedInfo = + SharedInfo.builder() + .setVersion(String.valueOf(version)) + .setApi(ATTRIBUTION_REPORTING_DEBUG_API) + .setDestination(DESTINATION) + .setReportingOrigin(REPORTING_ORIGIN) + .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) + .build(); + PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .setFilteringId(filteringId) + .build(); + + assertTrue( + versionedPrivacyBudgetKeyGeneratorProvider.doesExactlyOneCorrespondingPBKGeneratorExist( + privacyBudgetKeyInput)); + } + + static final class TestEnv extends AbstractModule { + + @Override + protected void configure() { + install(new PrivacyBudgetKeyGeneratorModule()); + } + } +} diff --git a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/AttributionReportingPrivacyBudgetKeyGeneratorTest.java b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/AttributionReportingPrivacyBudgetKeyGeneratorTest.java index 99445244..87aa10cd 100644 --- a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/AttributionReportingPrivacyBudgetKeyGeneratorTest.java +++ b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/AttributionReportingPrivacyBudgetKeyGeneratorTest.java @@ -28,11 +28,10 @@ import 
com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreporting.V2PrivacyBudgetKeyGenerator; import com.google.common.collect.ImmutableList; import com.google.common.hash.Hashing; +import com.google.common.primitives.UnsignedLong; import com.google.inject.AbstractModule; import java.nio.charset.StandardCharsets; import java.time.Instant; -import java.util.Random; -import java.util.stream.DoubleStream; import javax.inject.Inject; import org.junit.Rule; import org.junit.Test; @@ -93,7 +92,7 @@ public void generatePrivacyBudgetKey_forV1() { PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder().setSharedInfo(si).build(); PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(si).get(); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput).get(); String privacyBudgetKey = privacyBudgetKeyGenerator.generatePrivacyBudgetKey(privacyBudgetKeyInput); @@ -103,7 +102,7 @@ public void generatePrivacyBudgetKey_forV1() { @Test public void generatePrivacyBudgetKey_forV2() { - int filteringId = 78; + UnsignedLong filteringId = UnsignedLong.valueOf(78); SharedInfo si = SharedInfo.builder() .setVersion("1.0") @@ -136,7 +135,7 @@ public void generatePrivacyBudgetKey_forV2() { .toString(); PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(si).get(); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput).get(); String privacyBudgetKey = privacyBudgetKeyGenerator.generatePrivacyBudgetKey(privacyBudgetKeyInput); @@ -262,7 +261,7 @@ public void validate_withoutSourceRegistrationTime_forV1() { @Test public void validate_withoutSourceRegistrationTime_forV2() { - int filteringId = 456; + UnsignedLong filteringId = UnsignedLong.valueOf(456); SharedInfo sharedInfo = SharedInfo.builder() .setVersion("1.0") @@ -285,38 +284,50 @@ public void validate_withoutSourceRegistrationTime_forV2() { @Test public void versionedPBKProvider_noOverlapsInVersions() { - Random random = new Random(); - - DoubleStream reportVersionsCorrespondingToPBKV1 = - random.doubles().map(version -> Math.floor(version * 1000) / 1000).limit(50); - reportVersionsCorrespondingToPBKV1.forEach( - version -> assertPBKGeneratorForVersion(version, V1PrivacyBudgetKeyGenerator.class)); - - DoubleStream reportVersionsCorrespondingToPBKV2 = - random - .doubles( - /* size= */ 50, - /* startingVersionInclusive= */ 1.0, - /* endingVersionExclusive= */ 100) - .map(version -> Math.floor(version * 1000) / 1000); - reportVersionsCorrespondingToPBKV2.forEach( - version -> assertPBKGeneratorForVersion(version, V2PrivacyBudgetKeyGenerator.class)); + assertPBKGeneratorForVersion( + /* version= */ "0.1", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.679999", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.999999", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + + assertPBKGeneratorForVersion( + /* version= */ "0.1", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.679999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.999999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "1.0", UnsignedLong.ZERO, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + 
/* version= */ "1.9999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "167.9999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); } - private void assertPBKGeneratorForVersion(double version, Class privacyBudgetKeyGeneratorClass) { + private void assertPBKGeneratorForVersion( + String version, UnsignedLong filteringId, Class privacyBudgetKeyGeneratorClass) { SharedInfo sharedInfo = SharedInfo.builder() - .setVersion(String.valueOf(version)) + .setVersion(version) .setApi(ATTRIBUTION_REPORTING_API) .setDestination(DESTINATION_CHROME_GOLDEN_REPORT) .setReportingOrigin(REPORTING_ORIGIN_CHROME_GOLDEN_REPORT) .setScheduledReportTime(Instant.ofEpochSecond(1234486400)) .build(); + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder() + .setFilteringId(filteringId) + .setSharedInfo(sharedInfo) + .build(); assertTrue( versionedPrivacyBudgetKeyGeneratorProvider.doesExactlyOneCorrespondingPBKGeneratorExist( - sharedInfo.version())); - assertThat(privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo).get()) + privacyBudgetKeyInput)); + assertThat( + privacyBudgetKeyGeneratorFactory + .getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput) + .get()) .isInstanceOf(privacyBudgetKeyGeneratorClass); } diff --git a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD index f7f60c5e..8c6e9d69 100644 --- a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD +++ b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/BUILD @@ -32,6 +32,23 @@ java_test( ], ) +java_test( + name = "AttributionReportingDebugPrivacyBudgetKeyGeneratorTest", + srcs = ["AttributionReportingDebugPrivacyBudgetKeyGeneratorTest.java"], + deps = [ + "//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator_module", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug:pbk_generator", + "//java/external:acai", + "//java/external:google_truth", + "//java/external:google_truth8", + "//java/external:guava", + "//java/external:guice", + "//java/external:javax_inject", + ], +) + java_test( name = "ProtectedAudiencePrivacyBudgetKeyGeneratorTest", srcs = ["ProtectedAudiencePrivacyBudgetKeyGeneratorTest.java"], @@ -74,12 +91,25 @@ java_test( "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator_module", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreporting:pbk_generator", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/attributionreportingdebug:pbk_generator", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/protectedaudience:pbk_generator", "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator/sharedstorage:pbk_generator", "//java/external:acai", "//java/external:google_truth", "//java/external:google_truth8", + "//java/external:guava", "//java/external:guice", "//java/external:javax_inject", ], ) + +java_test( + name = "PrivacyBudgetKeyGeneratorUtilTest", + srcs = ["PrivacyBudgetKeyGeneratorUtilTest.java"], + deps = [ + 
"//java/com/google/aggregate/adtech/worker/model", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator_module", + "//java/external:google_truth", + "//java/external:guava", + ], +) diff --git a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactoryTest.java b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactoryTest.java index 732c5c68..76517834 100644 --- a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactoryTest.java +++ b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorFactoryTest.java @@ -16,15 +16,16 @@ package com.google.aggregate.privacy.budgeting.budgetkeygenerator; import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_API; +import static com.google.aggregate.adtech.worker.model.SharedInfo.ATTRIBUTION_REPORTING_DEBUG_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; import static com.google.aggregate.adtech.worker.model.SharedInfo.SHARED_STORAGE_API; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth8.assertThat; import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; import com.google.acai.Acai; import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.common.primitives.UnsignedLong; import com.google.inject.AbstractModule; import com.google.inject.Inject; import java.time.Instant; @@ -50,67 +51,24 @@ public class PrivacyBudgetKeyGeneratorFactoryTest { @Inject private PrivacyBudgetKeyGeneratorFactory privacyBudgetKeyGeneratorFactory; - @Test - public void testGetAttributionReportingPbkGenerator_withStaticFactory() { - Optional privacyBudgetKeyGenerator = - PrivacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator( - Optional.of(ATTRIBUTION_REPORTING_API)); - - assertThat(privacyBudgetKeyGenerator).isPresent(); - assertTrue( - privacyBudgetKeyGenerator.get() - instanceof - com.google.aggregate.privacy.budgeting.budgetkeygenerator.attributionreporting - .V1PrivacyBudgetKeyGenerator); - } - - @Test - public void testGetProtectedAudiencePbkGenerator_withStaticFactory() { - Optional privacyBudgetKeyGenerator = - PrivacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator( - Optional.of(PROTECTED_AUDIENCE_API)); - - assertThat(privacyBudgetKeyGenerator).isPresent(); - assertTrue( - privacyBudgetKeyGenerator.get() - instanceof - com.google.aggregate.privacy.budgeting.budgetkeygenerator.protectedaudience - .V1PrivacyBudgetKeyGenerator); - } - - @Test - public void testGetSharedStoragePbkGenerator_withStaticFactory() { - Optional privacyBudgetKeyGenerator = - PrivacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator( - Optional.of(SHARED_STORAGE_API)); - - assertThat(privacyBudgetKeyGenerator).isPresent(); - assertTrue( - privacyBudgetKeyGenerator.get() - instanceof - com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage - .V1PrivacyBudgetKeyGenerator); - } - - @Test - public void testGetInvalidApiPbkGenerator_withStaticFactory() { - assertThat( - PrivacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator( - Optional.of("invalid-api"))) - .isEqualTo(Optional.empty()); - } - @Test public void getSharedStoragePbkGenerator() { SharedInfo sharedInfoV1 = buildSharedInfo(/* api= */ SHARED_STORAGE_API, /* version= */ "0.9"); validatePrivacyGeneratorClass( 
sharedInfoV1, + /* filteringId */UnsignedLong.ZERO, com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage .V1PrivacyBudgetKeyGenerator.class); + validatePrivacyGeneratorClass( + sharedInfoV1, + /* filteringId */UnsignedLong.ONE, + com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage + .V2PrivacyBudgetKeyGenerator.class); SharedInfo sharedInfoV2 = buildSharedInfo(/* api= */ SHARED_STORAGE_API, /* version= */ "1.0"); validatePrivacyGeneratorClass( sharedInfoV2, + /* filteringId */ UnsignedLong.ZERO, com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage .V2PrivacyBudgetKeyGenerator.class); } @@ -121,13 +79,20 @@ public void getProtectedAudienceGenerator() { buildSharedInfo(/* api= */ PROTECTED_AUDIENCE_API, /* version= */ "0.1"); validatePrivacyGeneratorClass( sharedInfoV1, + /* filteringId */ UnsignedLong.ZERO, /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator .protectedaudience.V1PrivacyBudgetKeyGenerator.class); + validatePrivacyGeneratorClass( + sharedInfoV1, + /* filteringId = */ UnsignedLong.ONE, + /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator + .protectedaudience.V2PrivacyBudgetKeyGenerator.class); SharedInfo sharedInfoV2 = buildSharedInfo(/* api= */ PROTECTED_AUDIENCE_API, /* version= */ "1.0"); validatePrivacyGeneratorClass( - sharedInfoV2, + sharedInfoV2, /* filteringId */ + UnsignedLong.ZERO, /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator .protectedaudience.V2PrivacyBudgetKeyGenerator.class); } @@ -138,24 +103,59 @@ public void getAttributionReportingGenerator() { buildSharedInfo(/* api= */ ATTRIBUTION_REPORTING_API, /* version= */ "0.1"); validatePrivacyGeneratorClass( sharedInfoV1, + /* filteringId= */ UnsignedLong.ZERO, /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator .attributionreporting.V1PrivacyBudgetKeyGenerator.class); + validatePrivacyGeneratorClass( + sharedInfoV1, + /* filteringId= */ UnsignedLong.ONE, + /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator + .attributionreporting.V2PrivacyBudgetKeyGenerator.class); SharedInfo sharedInfoV2 = buildSharedInfo(/* api= */ ATTRIBUTION_REPORTING_API, /* version= */ "1.0"); validatePrivacyGeneratorClass( sharedInfoV2, + /* filteringId= */ UnsignedLong.ONE, /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator .attributionreporting.V2PrivacyBudgetKeyGenerator.class); } + @Test + public void attributionReportingDebugGenerator_generatesValidGenerator() { + SharedInfo sharedInfoV1 = + buildSharedInfo(/* api= */ ATTRIBUTION_REPORTING_DEBUG_API, /* version= */ "0.1"); + validatePrivacyGeneratorClass( + sharedInfoV1, + /* filteringId= */ UnsignedLong.ZERO, + /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator + .attributionreportingdebug.V1PrivacyBudgetKeyGenerator.class); + validatePrivacyGeneratorClass( + sharedInfoV1, + /* filteringId= */ UnsignedLong.ONE, + /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator + .attributionreportingdebug.V2PrivacyBudgetKeyGenerator.class); + + SharedInfo sharedInfoV2 = + buildSharedInfo(/* api= */ ATTRIBUTION_REPORTING_DEBUG_API, /* version= */ "1.0"); + validatePrivacyGeneratorClass( + sharedInfoV2, + /* filteringId= */ UnsignedLong.ONE, + /* expectedGeneratorClass= */ com.google.aggregate.privacy.budgeting.budgetkeygenerator + 
.attributionreportingdebug.V2PrivacyBudgetKeyGenerator.class); + } + @Test public void getPBKGenerator_forInvalidAPI_throwsIllegalArgument() { SharedInfo sharedInfo = buildSharedInfo(/* api= */ "invalid-api", /* version= */ "0.1"); assertThrows( IllegalArgumentException.class, - () -> privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo)); + () -> + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator( + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .build())); } @Test @@ -163,28 +163,35 @@ public void getPBKGenerator_forInvalidVersion_throwsIllegalArgument() { SharedInfo sharedInfo = buildSharedInfo(/* api= */ SHARED_STORAGE_API, /* version= */ "-2.0"); assertThrows( - AssertionError.class, - () -> privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo)); + IllegalArgumentException.class, + () -> + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator( + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .build())); } private static SharedInfo buildSharedInfo(String api, String version) { - SharedInfo sharedInfo = - SharedInfo.builder() - .setScheduledReportTime(FIXED_TIME) - .setReportingOrigin(REPORTING_ORIGIN) - .setDestination(DESTINATION) - .setSourceRegistrationTime(FIXED_TIME) - .setReportId(RANDOM_UUID) - .setReportDebugMode(true) - .setApi(api) - .setVersion(version) - .build(); - return sharedInfo; + return SharedInfo.builder() + .setScheduledReportTime(FIXED_TIME) + .setReportingOrigin(REPORTING_ORIGIN) + .setDestination(DESTINATION) + .setSourceRegistrationTime(FIXED_TIME) + .setReportId(RANDOM_UUID) + .setApi(api) + .setVersion(version) + .build(); } - private void validatePrivacyGeneratorClass(SharedInfo sharedInfo, Class expectedGeneratorClass) { + private void validatePrivacyGeneratorClass( + SharedInfo sharedInfo, UnsignedLong filteringId, Class expectedGeneratorClass) { + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder() + .setSharedInfo(sharedInfo) + .setFilteringId(filteringId) + .build(); Optional privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput); assertThat(privacyBudgetKeyGenerator).isPresent(); assertThat(privacyBudgetKeyGenerator.get()).isInstanceOf(expectedGeneratorClass); diff --git a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorUtilTest.java b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorUtilTest.java new file mode 100644 index 00000000..d4ba773f --- /dev/null +++ b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/PrivacyBudgetKeyGeneratorUtilTest.java @@ -0,0 +1,86 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.aggregate.privacy.budgeting.budgetkeygenerator; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.aggregate.adtech.worker.model.SharedInfo; +import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput; +import com.google.common.primitives.UnsignedLong; +import java.time.Instant; +import java.util.Optional; +import java.util.function.Predicate; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +@RunWith(JUnit4.class) +public class PrivacyBudgetKeyGeneratorUtilTest { + + @Test + public void getPrivacyBudgetKeyGeneratorV1Predicate() { + Predicate<PrivacyBudgetKeyInput> pbkPredicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV1Predicate(); + + // The V1 predicate should be true only for 0.0 <= version < 1.0 with a filteringId of 0 or absent. + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.1", Optional.of("54")))).isFalse(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("1.0", Optional.of("0")))).isFalse(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("1.0", Optional.of("65")))).isFalse(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("78900.0", Optional.of("0")))).isFalse(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("785.36", Optional.empty()))).isFalse(); + + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.1", Optional.of("0")))).isTrue(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.9999", Optional.of("0")))).isTrue(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.9999", Optional.empty()))).isTrue(); + } + + @Test + public void getPrivacyBudgetKeyGeneratorV2Predicate() { + Predicate<PrivacyBudgetKeyInput> pbkPredicate = + PrivacyBudgetKeyGeneratorUtil.getPrivacyBudgetKeyGeneratorV2Predicate(); + + // The V2 predicate should be true for version >= 1.0, or for any version with a non-zero filteringId. + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.1", Optional.of("54")))).isTrue(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("1.0", Optional.of("0")))).isTrue(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("78900.0", Optional.of("0")))).isTrue(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("785.36", Optional.empty()))).isTrue(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("78900.0", Optional.of("9632587410")))) + .isTrue(); + + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.1", Optional.of("0")))).isFalse(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.9999", Optional.of("0")))).isFalse(); + assertThat(pbkPredicate.test(getPrivacyBudgetKeyInput("0.888", Optional.empty()))).isFalse(); + } + + private static PrivacyBudgetKeyInput getPrivacyBudgetKeyInput( + String version, Optional<String> filteringIdOptional) { + SharedInfo sharedInfo = + SharedInfo.builder() + .setVersion(version) + .setReportId("report_id") + .setScheduledReportTime(Instant.now()) + .setDestination("destination.com") + .setReportingOrigin("bar.com") + .setReportDebugMode(true) + .build(); + PrivacyBudgetKeyInput.Builder privacyBudgetKeyInput = + PrivacyBudgetKeyInput.builder().setSharedInfo(sharedInfo); + filteringIdOptional.ifPresent( + filteringId -> privacyBudgetKeyInput.setFilteringId(UnsignedLong.valueOf(filteringId))); + return privacyBudgetKeyInput.build(); + } +} diff --git a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/ProtectedAudiencePrivacyBudgetKeyGeneratorTest.java
b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/ProtectedAudiencePrivacyBudgetKeyGeneratorTest.java index 9214717e..39ce6952 100644 --- a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/ProtectedAudiencePrivacyBudgetKeyGeneratorTest.java +++ b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/ProtectedAudiencePrivacyBudgetKeyGeneratorTest.java @@ -15,9 +15,7 @@ */ package com.google.aggregate.privacy.budgeting.budgetkeygenerator; -import static com.google.aggregate.adtech.worker.model.SharedInfo.LATEST_VERSION; -import static com.google.aggregate.adtech.worker.model.SharedInfo.PROTECTED_AUDIENCE_API; -import static com.google.aggregate.adtech.worker.model.SharedInfo.VERSION_0_1; +import static com.google.aggregate.adtech.worker.model.SharedInfo.*; import static com.google.aggregate.privacy.budgeting.budgetkeygenerator.protectedaudience.PrivacyBudgetKeyGeneratorModule.ProtectedAudiencePrivacyBudgetKeyGenerators; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; @@ -29,11 +27,10 @@ import com.google.aggregate.privacy.budgeting.budgetkeygenerator.protectedaudience.V2PrivacyBudgetKeyGenerator; import com.google.common.collect.ImmutableList; import com.google.common.hash.Hashing; +import com.google.common.primitives.UnsignedLong; import com.google.inject.AbstractModule; import java.nio.charset.StandardCharsets; import java.time.Instant; -import java.util.Random; -import java.util.stream.DoubleStream; import javax.inject.Inject; import org.junit.Rule; import org.junit.Test; @@ -74,7 +71,7 @@ public void generatePrivacyBudgetKey_V1() { PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder().setSharedInfo(si).build(); PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(si).get(); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput).get(); String privacyBudgetKey = privacyBudgetKeyGenerator.generatePrivacyBudgetKey(privacyBudgetKeyInput); @@ -84,7 +81,7 @@ public void generatePrivacyBudgetKey_V1() { @Test public void generatePrivacyBudgetKey_V2() { - int filteringId = 67890; + UnsignedLong filteringId = UnsignedLong.valueOf(67890); SharedInfo sharedInfo = SharedInfo.builder() .setApi(PROTECTED_AUDIENCE_API) @@ -113,7 +110,7 @@ public void generatePrivacyBudgetKey_V2() { .toString(); PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo).get(); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput).get(); String privacyBudgetKey = privacyBudgetKeyGenerator.generatePrivacyBudgetKey(privacyBudgetKeyInput); @@ -121,67 +118,51 @@ public void generatePrivacyBudgetKey_V2() { assertEquals(privacyBudgetKey, expectedPBK); } - /** - * Test to verify Privacy Budget Key generated for two protected audience SharedInfo with same - * fields is same. This ensures the budget key generator hash is stable. 
- */ - @Test - public void validate_PrivacyBudgetKey_ProtectedAudienceAPI_forSameSharedInfos() { - SharedInfo.Builder sharedInfoBuilder1 = - SharedInfo.builder() - .setVersion(LATEST_VERSION) - .setApi(PROTECTED_AUDIENCE_API) - .setReportingOrigin(REPORTING_ORIGIN) - .setScheduledReportTime(FIXED_TIME); - SharedInfo si1 = sharedInfoBuilder1.build(); - SharedInfo.Builder sharedInfoBuilder2 = - SharedInfo.builder() - .setVersion(LATEST_VERSION) - .setApi(PROTECTED_AUDIENCE_API) - .setReportingOrigin(REPORTING_ORIGIN) - .setScheduledReportTime(FIXED_TIME); - SharedInfo si2 = sharedInfoBuilder2.build(); - - PrivacyBudgetKeyGenerator v1PrivacyBudgetKeyGenerator = new V1PrivacyBudgetKeyGenerator(); - String privacyBudgetKey1 = v1PrivacyBudgetKeyGenerator.generatePrivacyBudgetKey(si1).get(); - String privacyBudgetKey2 = v1PrivacyBudgetKeyGenerator.generatePrivacyBudgetKey(si2).get(); - - assertEquals(privacyBudgetKey1, privacyBudgetKey2); - } - @Test public void versionedPBKProvider_noOverlapsInVersions() { - Random random = new Random(); - - DoubleStream reportVersionsCorrespondingToPBKV1 = - random.doubles().map(version -> Math.floor(version * 1000) / 1000).limit(50); - reportVersionsCorrespondingToPBKV1.forEach( - version -> assertPBKGeneratorForVersion(version, V1PrivacyBudgetKeyGenerator.class)); - - DoubleStream reportVersionsCorrespondingToPBKV2 = - random - .doubles( - /* size= */ 50, - /* startingVersionInclusive= */ 1.0, - /* endingVersionExclusive= */ 100) - .map(version -> Math.floor(version * 1000) / 1000); - reportVersionsCorrespondingToPBKV2.forEach( - version -> assertPBKGeneratorForVersion(version, V2PrivacyBudgetKeyGenerator.class)); + assertPBKGeneratorForVersion( + /* version= */ "0.1", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.679999", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.999999", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + + assertPBKGeneratorForVersion( + /* version= */ "0.1", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.679999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.999999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "1.0", UnsignedLong.ZERO, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "1.9999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "167.9999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); } - private void assertPBKGeneratorForVersion(double version, Class privacyBudgetKeyGeneratorClass) { + private void assertPBKGeneratorForVersion( + String version, UnsignedLong filteringId, Class privacyBudgetKeyGeneratorClass) { SharedInfo sharedInfo = SharedInfo.builder() - .setVersion(String.valueOf(version)) + .setVersion(version) .setApi(PROTECTED_AUDIENCE_API) .setReportingOrigin(REPORTING_ORIGIN) .setScheduledReportTime(FIXED_TIME) .build(); + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder() + .setFilteringId(filteringId) + .setSharedInfo(sharedInfo) + .build(); assertTrue( versionedPrivacyBudgetKeyGeneratorProvider.doesExactlyOneCorrespondingPBKGeneratorExist( - sharedInfo.version())); - 
assertThat(privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo).get()) + privacyBudgetKeyInput)); + assertThat( + privacyBudgetKeyGeneratorFactory + .getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput) + .get()) .isInstanceOf(privacyBudgetKeyGeneratorClass); } diff --git a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/SharedStoragePrivacyBudgetKeyGeneratorTest.java b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/SharedStoragePrivacyBudgetKeyGeneratorTest.java index 8f02e7cb..54c6b2d1 100644 --- a/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/SharedStoragePrivacyBudgetKeyGeneratorTest.java +++ b/javatests/com/google/aggregate/privacy/budgeting/budgetkeygenerator/SharedStoragePrivacyBudgetKeyGeneratorTest.java @@ -27,11 +27,10 @@ import com.google.aggregate.privacy.budgeting.budgetkeygenerator.sharedstorage.V2PrivacyBudgetKeyGenerator; import com.google.common.collect.ImmutableList; import com.google.common.hash.Hashing; +import com.google.common.primitives.UnsignedLong; import com.google.inject.AbstractModule; import java.nio.charset.StandardCharsets; import java.time.Instant; -import java.util.Random; -import java.util.stream.DoubleStream; import javax.inject.Inject; import org.junit.Rule; import org.junit.Test; @@ -76,7 +75,7 @@ public void generatePrivacyBudgetKey_forV1() { PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder().setSharedInfo(si).build(); PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(si).get(); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput).get(); String privacyBudgetKey = privacyBudgetKeyGenerator.generatePrivacyBudgetKey(privacyBudgetKeyInput); @@ -87,7 +86,7 @@ public void generatePrivacyBudgetKey_forV1() { @Test public void generatePrivacyBudgetKey_forV2() { SharedInfo sharedInfo = buildSharedInfo(SHARED_STORAGE_VERSION_1_0); - int filteringId = 78; + UnsignedLong filteringId = UnsignedLong.valueOf(78); String privacyBudgetKeyHashInput = String.join( "-", @@ -109,7 +108,7 @@ public void generatePrivacyBudgetKey_forV2() { .build(); PrivacyBudgetKeyGenerator privacyBudgetKeyGenerator = - privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfo).get(); + privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput).get(); String privacyBudgetKey = privacyBudgetKeyGenerator.generatePrivacyBudgetKey(privacyBudgetKeyInput); @@ -117,75 +116,45 @@ public void generatePrivacyBudgetKey_forV2() { assertEquals(privacyBudgetKey, expectedPBK); } - /** - * Test to verify Privacy Budget Key generated for two Shared Storage SharedInfo with same fields - * is same. This ensures the budget key generator hash is stable. 
- */ - @Test - public void validate_PrivacyBudgetKey_ProtectedAudienceAPI_forSameSharedInfos() { - SharedInfo.Builder sharedInfoBuilder1 = - SharedInfo.builder() - .setVersion(SHARED_STORAGE_VERSION_0_1) - .setApi(SHARED_STORAGE_API) - .setReportingOrigin(REPORTING_ORIGIN) - .setScheduledReportTime(FIXED_TIME); - SharedInfo si1 = sharedInfoBuilder1.build(); - SharedInfo.Builder sharedInfoBuilder2 = - SharedInfo.builder() - .setVersion(SHARED_STORAGE_VERSION_0_1) - .setApi(SHARED_STORAGE_API) - .setReportingOrigin(REPORTING_ORIGIN) - .setScheduledReportTime(FIXED_TIME); - SharedInfo si2 = sharedInfoBuilder2.build(); - - String privacyBudgetKey1 = - sharedStoragePrivacyBudgetKeyGenerator.generatePrivacyBudgetKey(si1).get(); - String privacyBudgetKey2 = - sharedStoragePrivacyBudgetKeyGenerator.generatePrivacyBudgetKey(si2).get(); - - assertEquals(privacyBudgetKey1, privacyBudgetKey2); - } - - @Test - public void generatePrivacyBudgetKey_forV1_withStaticFactory() { - SharedInfo si = buildSharedInfo(SHARED_STORAGE_VERSION_0_1); - - String privacyBudgetKey = - PrivacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(si.api()) - .get() - .generatePrivacyBudgetKey(si) - .get(); - - assertEquals(privacyBudgetKey, PRIVACY_BUDGET_KEY_1); - } - @Test public void versionedPBKProvider_noOverlapsInVersions() { - Random random = new Random(); - - DoubleStream reportVersionsCorrespondingToPBKV1 = - random.doubles().map(version -> Math.floor(version * 1000) / 1000).limit(50); - reportVersionsCorrespondingToPBKV1.forEach( - version -> assertPBKGeneratorForVersion(version, V1PrivacyBudgetKeyGenerator.class)); - - DoubleStream reportVersionsCorrespondingToPBKV2 = - random - .doubles( - /* size= */ 50, - /* startingVersionInclusive= */ 1.0, - /* endingVersionExclusive= */ 100) - .map(version -> Math.floor(version * 1000) / 1000); - reportVersionsCorrespondingToPBKV2.forEach( - version -> assertPBKGeneratorForVersion(version, V2PrivacyBudgetKeyGenerator.class)); + assertPBKGeneratorForVersion( + /* version= */ "0.1", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.679999", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.999999", UnsignedLong.ZERO, V1PrivacyBudgetKeyGenerator.class); + + assertPBKGeneratorForVersion( + /* version= */ "0.1", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.679999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "0.999999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "1.0", UnsignedLong.ZERO, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "1.9999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); + assertPBKGeneratorForVersion( + /* version= */ "167.9999", UnsignedLong.ONE, V2PrivacyBudgetKeyGenerator.class); } - private void assertPBKGeneratorForVersion(double version, Class privacyBudgetKeyGeneratorClass) { - SharedInfo sharedInfoForV2 = buildSharedInfo(String.valueOf(version)); + private void assertPBKGeneratorForVersion( + String version, UnsignedLong filteringId, Class privacyBudgetKeyGeneratorClass) { + SharedInfo sharedInfo = buildSharedInfo(version); + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput privacyBudgetKeyInput = + PrivacyBudgetKeyGenerator.PrivacyBudgetKeyInput.builder() + .setFilteringId(filteringId) + .setSharedInfo(sharedInfo) + 
.build(); assertTrue( versionedPrivacyBudgetKeyGeneratorProvider.doesExactlyOneCorrespondingPBKGeneratorExist( - sharedInfoForV2.version())); - assertThat(privacyBudgetKeyGeneratorFactory.getPrivacyBudgetKeyGenerator(sharedInfoForV2).get()) + privacyBudgetKeyInput)); + assertThat( + privacyBudgetKeyGeneratorFactory + .getPrivacyBudgetKeyGenerator(privacyBudgetKeyInput) + .get()) .isInstanceOf(privacyBudgetKeyGeneratorClass); } diff --git a/javatests/com/google/aggregate/privacy/noise/BUILD b/javatests/com/google/aggregate/privacy/noise/BUILD index 00a9369e..ef0a8dd3 100644 --- a/javatests/com/google/aggregate/privacy/noise/BUILD +++ b/javatests/com/google/aggregate/privacy/noise/BUILD @@ -20,6 +20,7 @@ java_test( name = "NoisedAggregationRunnerImplTest", srcs = ["NoisedAggregationRunnerImplTest.java"], deps = [ + "//java/com/google/aggregate/adtech/worker:annotations", "//java/com/google/aggregate/adtech/worker/configs", "//java/com/google/aggregate/adtech/worker/model", "//java/com/google/aggregate/privacy/noise", diff --git a/javatests/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImplTest.java b/javatests/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImplTest.java index 11bc4a1a..1f3a957a 100644 --- a/javatests/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImplTest.java +++ b/javatests/com/google/aggregate/privacy/noise/NoisedAggregationRunnerImplTest.java @@ -17,9 +17,12 @@ package com.google.aggregate.privacy.noise; import static com.google.common.truth.Truth.assertThat; +import static com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService; import com.google.acai.Acai; import com.google.acai.TestScoped; +import com.google.aggregate.adtech.worker.Annotations.CustomForkJoinThreadPool; +import com.google.aggregate.adtech.worker.Annotations.ParallelAggregatedFactNoising; import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier; import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingDelta; import com.google.aggregate.adtech.worker.configs.PrivacyParametersSupplier.NoisingDistribution; @@ -33,9 +36,12 @@ import com.google.aggregate.privacy.noise.testing.FakeNoiseApplierSupplier; import com.google.aggregate.privacy.noise.testing.FakeNoiseApplierSupplier.FakeNoiseApplier; import com.google.common.collect.ImmutableList; +import com.google.common.util.concurrent.ListeningExecutorService; import com.google.inject.AbstractModule; import com.google.inject.Inject; +import com.google.inject.Provider; import com.google.inject.Provides; +import com.google.inject.Singleton; import java.math.BigInteger; import java.util.Optional; import java.util.function.Supplier; @@ -49,12 +55,6 @@ public class NoisedAggregationRunnerImplTest { private static final ImmutableList VALUE_NOISE_LIST = ImmutableList.of(10L, -10L); - - private static final AggregatedFact FACT1 = - AggregatedFact.create(BigInteger.valueOf(1), /* metric= */ 5, 5L); - private static final AggregatedFact FACT2 = - AggregatedFact.create(BigInteger.valueOf(2), /* metric= */ 500, 500L); - private static final AggregatedFact NOISED_FACT1 = AggregatedFact.create(BigInteger.valueOf(1), /* metric= */ 15, 5L); private static final AggregatedFact NOISED_FACT2 = @@ -67,37 +67,57 @@ public class NoisedAggregationRunnerImplTest { @Inject private FakeThresholdSupplier thresholdSupplier; // Under test. 
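// Note: the runner below is injected as a Provider so that construction is deferred until
// get() is called inside each test; this lets a test flip the parallel-noising flag (via the
// flags helper bound in TestEnv further down) before the instance that reads it is created.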
- @Inject private NoisedAggregationRunner noisedAggregationRunner; + @Inject private Provider noisedAggregationRunner; + @Inject private NoisedAggregationRunnerFlagsHelper aggregationRunnerFlagsHelper; @Before public void setUp() { fakeNoiseApplierSupplier.setFakeNoiseApplier( FakeNoiseApplier.builder().setNextValueNoiseToAdd(VALUE_NOISE_LIST.iterator()).build()); + aggregationRunnerFlagsHelper.setParallelAggregatedFactNoisingEnabled(false); } @Test public void noise() { thresholdSupplier.setThreshold(0); - ImmutableList input = ImmutableList.of(FACT1, FACT2); - NoisedAggregationResult result = noiseAndThreshold( - input, /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty()); + getTestFacts(), /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty()); assertThat(result.privacyParameters()).isEqualTo(customDeltaPrivacyParamsSupplier.get()); assertThat(result.noisedAggregatedFacts()).containsExactly(NOISED_FACT1, NOISED_FACT2); } + @Test + public void noise_parallelNoisingEnabled() { + thresholdSupplier.setThreshold(0); + aggregationRunnerFlagsHelper.setParallelAggregatedFactNoisingEnabled(true); + fakeNoiseApplierSupplier.setFakeNoiseApplier( + FakeNoiseApplier.builder() + .setNextValueNoiseToAdd(ImmutableList.of(10L, 10L).iterator()) + .build()); + + AggregatedFact noisedFact1 = + AggregatedFact.create(BigInteger.valueOf(1), /* metric= */ 15, /* unnoisedMetric= */ 5L); + AggregatedFact noisedFact2 = + AggregatedFact.create(BigInteger.valueOf(2), /* metric= */ 510, /* unnoisedMetric= */ 500L); + + NoisedAggregationResult result = + noiseAndThreshold( + getTestFacts(), /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty()); + + assertThat(result.privacyParameters()).isEqualTo(customDeltaPrivacyParamsSupplier.get()); + assertThat(result.noisedAggregatedFacts()).containsExactly(noisedFact1, noisedFact2); + } + @Test public void noise_noThresholding() { thresholdSupplier.setThreshold(10000); - ImmutableList input = ImmutableList.of(FACT1, FACT2); - NoisedAggregationResult result = noiseAndThreshold( - input, /* doThreshold= */ false, /* debugPrivacyEpsilon= */ Optional.empty()); + getTestFacts(), /* doThreshold= */ false, /* debugPrivacyEpsilon= */ Optional.empty()); assertThat(result.privacyParameters()).isEqualTo(customDeltaPrivacyParamsSupplier.get()); assertThat(result.noisedAggregatedFacts()).containsExactly(NOISED_FACT1, NOISED_FACT2); @@ -106,11 +126,10 @@ public void noise_noThresholding() { @Test public void threshold() { thresholdSupplier.setThreshold(30); - ImmutableList input = ImmutableList.of(FACT1, FACT2); NoisedAggregationResult result = noiseAndThreshold( - input, /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty()); + getTestFacts(), /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty()); assertThat(result.privacyParameters()).isEqualTo(customDeltaPrivacyParamsSupplier.get()); assertThat(result.noisedAggregatedFacts()).containsExactly(NOISED_FACT2); @@ -119,11 +138,10 @@ public void threshold() { @Test public void countEqualToIntThreshold() { thresholdSupplier.setThreshold(10); - ImmutableList input = ImmutableList.of(FACT1, FACT2); NoisedAggregationResult result = noiseAndThreshold( - input, /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty()); + getTestFacts(), /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty()); assertThat(result.privacyParameters()).isEqualTo(customDeltaPrivacyParamsSupplier.get()); 
    assertThat(result.noisedAggregatedFacts()).containsExactly(NOISED_FACT1, NOISED_FACT2);
@@ -132,11 +150,10 @@ public void countEqualToIntThreshold() {
  @Test
  public void countEqualToDoubleThreshold() {
    thresholdSupplier.setThreshold(10.0001);
-    ImmutableList<AggregatedFact> input = ImmutableList.of(FACT1, FACT2);
    NoisedAggregationResult result =
        noiseAndThreshold(
-            input, /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty());
+            getTestFacts(), /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty());

    assertThat(result.privacyParameters()).isEqualTo(customDeltaPrivacyParamsSupplier.get());
    assertThat(result.noisedAggregatedFacts()).containsExactly(NOISED_FACT1, NOISED_FACT2);
@@ -145,11 +162,10 @@ public void countEqualToDoubleThreshold() {
  @Test
  public void countLessThanDoubleThreshold() {
    thresholdSupplier.setThreshold(25.0002);
-    ImmutableList<AggregatedFact> input = ImmutableList.of(FACT1, FACT2);
    NoisedAggregationResult result =
        noiseAndThreshold(
-            input, /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty());
+            getTestFacts(), /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.empty());

    assertThat(result.privacyParameters()).isEqualTo(customDeltaPrivacyParamsSupplier.get());
    assertThat(result.noisedAggregatedFacts()).containsExactly(NOISED_FACT2);
@@ -158,11 +174,10 @@ public void countLessThanDoubleThreshold() {
  @Test
  public void testDebugEpsilonRequestScoped() {
    thresholdSupplier.setThreshold(25.0002);
-    ImmutableList<AggregatedFact> input = ImmutableList.of(FACT1, FACT2);
    NoisedAggregationResult result =
        noiseAndThreshold(
-            input, /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.of(0.2));
+            getTestFacts(), /* doThreshold= */ true, /* debugPrivacyEpsilon= */ Optional.of(0.2));

    // debugEpsilon is not used with constant noise, but we can check that the epsilon is updated in
    // the privacy params.
@@ -179,14 +194,24 @@ private NoisedAggregationResult noiseAndThreshold(
      boolean doThreshold, Optional<Double> debugPrivacyEpsilon) {
    NoisedAggregationResult noisedAggregationResult =
-        noisedAggregationRunner.noise(input, debugPrivacyEpsilon);
+        noisedAggregationRunner.get().noise(input, debugPrivacyEpsilon);

    return doThreshold
-        ? noisedAggregationRunner.threshold(
-            noisedAggregationResult.noisedAggregatedFacts(), debugPrivacyEpsilon)
+        ? noisedAggregationRunner
+            .get()
+            .threshold(noisedAggregationResult.noisedAggregatedFacts(), debugPrivacyEpsilon)
        : noisedAggregationResult;
  }

+  private static ImmutableList<AggregatedFact> getTestFacts() {
+    AggregatedFact fact1 =
+        AggregatedFact.create(BigInteger.valueOf(1), /* metric= */ 5, /* unnoisedMetric= */ 5L);
+    AggregatedFact fact2 =
+        AggregatedFact.create(BigInteger.valueOf(2), /* metric= */ 500, /* unnoisedMetric= */ 500L);
+
+    return ImmutableList.of(fact1, fact2);
+  }
+
  /**
   * Test implementation of {@code PrivacyParameters} supplier to allow delta to be modified for
   * testing threshold.
@@ -230,7 +255,21 @@ public Double get() {
    }
  }

+  private static final class NoisedAggregationRunnerFlagsHelper {
+    private boolean parallelAggregatedFactNoisingEnabled = false;
+
+    public void setParallelAggregatedFactNoisingEnabled(boolean parallelEnabled) {
+      this.parallelAggregatedFactNoisingEnabled = parallelEnabled;
+    }
+
+    public boolean isParallelAggregatedFactNoisingEnabled() {
+      return parallelAggregatedFactNoisingEnabled;
+    }
+  }
+
  private static final class TestEnv extends AbstractModule {
+    private final NoisedAggregationRunnerFlagsHelper flagsHelper =
+        new NoisedAggregationRunnerFlagsHelper();

    @Override
    protected void configure() {
@@ -246,6 +285,13 @@ protected void configure() {
      bind(double.class).annotatedWith(NoisingEpsilon.class).toInstance(0.1);
      bind(long.class).annotatedWith(NoisingL1Sensitivity.class).toInstance(4L);
      bind(double.class).annotatedWith(NoisingDelta.class).toInstance(5.00);
+      bind(NoisedAggregationRunnerFlagsHelper.class).toInstance(flagsHelper);
+    }
+
+    @Provides
+    @ParallelAggregatedFactNoising
+    boolean provideParallelAggregatedFactNoising() {
+      return flagsHelper.isParallelAggregatedFactNoisingEnabled();
    }

    @Provides
@@ -265,5 +311,12 @@ Supplier<NoiseApplier> provideNoiseApplierSupplier(
    Supplier<Double> provideThreshold(FakeThresholdSupplier thresholdSupplier) {
      return thresholdSupplier;
    }
+
+    @Provides
+    @Singleton
+    @CustomForkJoinThreadPool
+    ListeningExecutorService provideCustomForkJoinThreadPool() {
+      return newDirectExecutorService();
+    }
  }
}
diff --git a/javatests/com/google/aggregate/tools/privacybudgetutil/common/BUILD b/javatests/com/google/aggregate/tools/privacybudgetutil/common/BUILD
new file mode 100644
index 00000000..99e929fe
--- /dev/null
+++ b/javatests/com/google/aggregate/tools/privacybudgetutil/common/BUILD
@@ -0,0 +1,43 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@rules_java//java:defs.bzl", "java_test") + +package(default_visibility = ["//visibility:public"]) + +java_test( + name = "ExtractionUtilsTest", + srcs = ["ExtractionUtilsTest.java"], + data = [":data_files"], + deps = [ + "//java/com/google/aggregate/privacy/budgeting/bridge:privacy_budgeting_service_bridge", + "//java/com/google/aggregate/privacy/budgeting/budgetkeygenerator:privacy_budget_key_generator", + "//java/com/google/aggregate/tools/privacybudgetutil/common:extraction_utils", + "//java/external:acai", + "//java/external:google_truth", + "//java/external:guava", + "//java/external:guice", + "//java/external:jackson_annotations", + "//java/external:jackson_core", + "//java/external:jackson_databind", + "//java/external:jackson_datatype_jsr310", + ], +) + +filegroup( + name = "data_files", + srcs = [ + "test_data/input_version_01.avro", + "test_data/input_version_1.avro", + ], +) diff --git a/javatests/com/google/aggregate/tools/privacybudgetutil/common/ExtractionUtilsTest.java b/javatests/com/google/aggregate/tools/privacybudgetutil/common/ExtractionUtilsTest.java new file mode 100644 index 00000000..76dc2a61 --- /dev/null +++ b/javatests/com/google/aggregate/tools/privacybudgetutil/common/ExtractionUtilsTest.java @@ -0,0 +1,153 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.aggregate.tools.privacybudgetutil.common;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.fail;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import com.google.acai.Acai;
+import com.google.aggregate.privacy.budgeting.bridge.PrivacyBudgetingServiceBridge.PrivacyBudgetUnit;
+import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorFactory;
+import com.google.aggregate.privacy.budgeting.budgetkeygenerator.PrivacyBudgetKeyGeneratorModule;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.primitives.UnsignedLong;
+import com.google.inject.AbstractModule;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.Instant;
+import java.util.Set;
+import javax.inject.Inject;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class ExtractionUtilsTest {
+
+  @Rule public final Acai acai = new Acai(TestEnv.class);
+
+  @Inject private PrivacyBudgetKeyGeneratorFactory privacyBudgetKeyGeneratorFactory;
+
+  private static final String KEYFILE_NAME = "key";
+
+  @Test
+  public void testPrivacyBudgetKeyFromAvro() throws Exception {
+    ExtractionUtils.KeyFile goldenKeyFile =
+        ExtractionUtils.KeyFile.create(KEYFILE_NAME, getKeySetAsString(buildUnitSet()));
+    Path avro =
+        Paths.get(
+            "javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_01.avro");
+
+    try {
+      InputStream stream = Files.newInputStream(avro);
+      ExtractionUtils.KeyFile file =
+          ExtractionUtils.processAvro(
+              stream,
+              privacyBudgetKeyGeneratorFactory,
+              KEYFILE_NAME,
+              ImmutableList.of(UnsignedLong.ZERO));
+      assertThat(file).isEqualTo(goldenKeyFile);
+    } catch (IOException e) {
+      fail(e.getMessage());
+    }
+  }
+
+  @Test
+  public void testPrivacyBudgetKeyFromAvroWithFilteringIds() throws Exception {
+    ExtractionUtils.KeyFile goldenKeyFile =
+        ExtractionUtils.KeyFile.create(KEYFILE_NAME, getKeySetAsString(buildFilteringIdsUnitSet()));
+    Path avro =
+        Paths.get(
+            "javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_1.avro");
+
+    InputStream stream = Files.newInputStream(avro);
+    ExtractionUtils.KeyFile file =
+        ExtractionUtils.processAvro(
+            stream,
+            privacyBudgetKeyGeneratorFactory,
+            KEYFILE_NAME,
+            ImmutableList.of(
+                UnsignedLong.valueOf(1), UnsignedLong.valueOf(2), UnsignedLong.valueOf(3)));
+    assertThat(file).isEqualTo(goldenKeyFile);
+  }
+
+  private static Set<PrivacyBudgetUnit> buildUnitSet() {
+    return ImmutableSet.of(
+        PrivacyBudgetUnit.create(
+            "4c136a585949aefbd6180b817933393353a27bdd3936d246a10c694066d5a79e",
+            Instant.ofEpochSecond(1710288000),
+            "https://privacy-sandbox-demos-dsp.dev"),
+        PrivacyBudgetUnit.create(
+            "4c136a585949aefbd6180b817933393353a27bdd3936d246a10c694066d5a79e",
+            Instant.ofEpochSecond(1710284400),
+            "https://privacy-sandbox-demos-dsp.dev"));
+  }
+
+  private static Set<PrivacyBudgetUnit> buildFilteringIdsUnitSet() {
+    return ImmutableSet.of(
+        PrivacyBudgetUnit.create(
+            "3bd6a0d3635c6d16f5c8c70bd63005f6f8e2ad31b72c95907afea9b35919cdc1",
+            Instant.ofEpochSecond(1710284400),
+            "https://privacy-sandbox-demos-dsp.dev"),
+        PrivacyBudgetUnit.create(
+            "b3eb3651049728d8edc02542fc685586a9ffea9c559852e0746c6cef6ddcc137",
+            Instant.ofEpochSecond(1710288000),
+            "https://privacy-sandbox-demos-dsp.dev"),
+        PrivacyBudgetUnit.create(
+            "3bd6a0d3635c6d16f5c8c70bd63005f6f8e2ad31b72c95907afea9b35919cdc1",
+            Instant.ofEpochSecond(1710288000),
+            "https://privacy-sandbox-demos-dsp.dev"),
+        PrivacyBudgetUnit.create(
+            "db4bb8c6d309ce494bb8f9fc0f17c2b2dc7b34ed0338e7b92319da82456bf03e",
+            Instant.ofEpochSecond(1710284400),
+            "https://privacy-sandbox-demos-dsp.dev"),
+        PrivacyBudgetUnit.create(
+            "b3eb3651049728d8edc02542fc685586a9ffea9c559852e0746c6cef6ddcc137",
+            Instant.ofEpochSecond(1710284400),
+            "https://privacy-sandbox-demos-dsp.dev"),
+        PrivacyBudgetUnit.create(
+            "db4bb8c6d309ce494bb8f9fc0f17c2b2dc7b34ed0338e7b92319da82456bf03e",
+            Instant.ofEpochSecond(1710288000),
+            "https://privacy-sandbox-demos-dsp.dev"));
+  }
+
+  private static String getKeySetAsString(Set<PrivacyBudgetUnit> unitSet)
+      throws JsonProcessingException {
+    ObjectMapper om = new ObjectMapper();
+    om.registerModule(new JavaTimeModule());
+    om.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
+    return om.writeValueAsString(unitSet);
+  }
+
+  static final class TestEnv extends AbstractModule {
+
+    @Override
+    protected void configure() {
+      install(new PrivacyBudgetKeyGeneratorModule());
+    }
+  }
+}
diff --git a/javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_01.avro b/javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_01.avro
new file mode 100644
index 00000000..2fdd8dae
Binary files /dev/null and b/javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_01.avro differ
diff --git a/javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_1.avro b/javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_1.avro
new file mode 100644
index 00000000..0454a4e1
Binary files /dev/null and b/javatests/com/google/aggregate/tools/privacybudgetutil/common/test_data/input_version_1.avro differ
diff --git a/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_1.json b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_1.json
new file mode 100644
index 00000000..b5509828
--- /dev/null
+++ b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_1.json
@@ -0,0 +1,13 @@
+{
+  "aggregation_coordinator_origin": "https://publickeyservice.msmt.aws.privacysandboxservices.com",
+  "aggregation_service_payloads": [
+    {
+      "debug_cleartext_payload": "omRkYXRhgqJldmFsdWVEAAAAAmZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAaJldmFsdWVEAAAABGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAA2lvcGVyYXRpb25paGlzdG9ncmFt",
+      "key_id": "example_id",
+      "payload": "X5tO0bPyX0D/E/NrwN2EQCJCg1qFLUlSoNm7fZZrlUTiZp9Rou1xSAjL5AmpKhmEjqakj8aiIDXCfEyeHcfxd7LoXfA04Bi1QrZTTT0e86qlfbE//+1cG5o+2vWaEoJMMjvUMfeeqtG0p7SkyTLbz8nmEC0dR7JaPnP6gM3EfgzD354n1YdKs504ySRClkKwnZO/"
+    }
+  ],
+  "shared_info": "{\"api\":\"attribution-reporting-debug\",\"attribution_destination\":\"https://conversion.test\",\"debug_mode\":\"enabled\",\"report_id\":\"21abd97f-73e8-4b88-9389-a9fee6abda5e\",\"reporting_origin\":\"https://report.test\",\"scheduled_report_time\":\"1234486400\",\"version\":\"0.1\"}",
+  "source_debug_key": "123",
+  "trigger_debug_key": "456"
+}
\ No newline at end of file
diff --git
a/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_1_cleartext_payloads.json b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_1_cleartext_payloads.json new file mode 100644 index 00000000..8dbb5d2d --- /dev/null +++ b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_1_cleartext_payloads.json @@ -0,0 +1,3 @@ +[ + "omRkYXRhgqJldmFsdWVEAAAAAmZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAaJldmFsdWVEAAAABGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAA2lvcGVyYXRpb25paGlzdG9ncmFt" +] diff --git a/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_2.json b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_2.json new file mode 100644 index 00000000..176bc05b --- /dev/null +++ b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_2.json @@ -0,0 +1,13 @@ +{ + "aggregation_coordinator_origin": "https://publickeyservice.msmt.gcp.privacysandboxservices.com", + "aggregation_service_payloads": [ + { + "debug_cleartext_payload": "omRkYXRhgqJldmFsdWVEAAAAAmZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAaJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAGlvcGVyYXRpb25paGlzdG9ncmFt", + "key_id": "example_id", + "payload": "g9IeDXvoc655PVAmt10dcqhTJyR/EU+vNIep+0ZL3CytfmTlE2a0XB7pAs+GmX8ejIl4f5PkIpLV4Bds4GS0kzLP/wPV5VFftLwJ1RKUsN0eEjE3UMS+D8NewSGaCy+AU5UP1B3rHsgg9gy4Xfw0zvKLtMAl8FGnkLKxNrvjRVOFNZWnm77CYvzahkedtqEJV1wN" + } + ], + "shared_info": "{\"api\":\"attribution-reporting-debug\",\"attribution_destination\":\"https://conversion.test\",\"debug_mode\":\"enabled\",\"report_id\":\"21abd97f-73e8-4b88-9389-a9fee6abda5e\",\"reporting_origin\":\"https://report.test\",\"scheduled_report_time\":\"1234486400\",\"version\":\"0.1\"}", + "source_debug_key": "123", + "trigger_debug_key": "456" +} diff --git a/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_2_cleartext_payloads.json b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_2_cleartext_payloads.json new file mode 100644 index 00000000..0a0d3a32 --- /dev/null +++ b/javatests/testData/aggregatable_report_goldens/latest/debug_api_report_2_cleartext_payloads.json @@ -0,0 +1,3 @@ +[ + "omRkYXRhgqJldmFsdWVEAAAAAmZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAaJldmFsdWVEAAAAAGZidWNrZXRQAAAAAAAAAAAAAAAAAAAAAGlvcGVyYXRpb25paGlzdG9ncmFt" +] diff --git a/maven_install.json b/maven_install.json index 08094264..95259341 100644 --- a/maven_install.json +++ b/maven_install.json @@ -1,8 +1,8 @@ { "dependency_tree": { - "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": -1421689655, + "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": -693403977, "conflict_resolution": { - "com.google.api:gax:2.38.0": "com.google.api:gax:2.41.0", + "com.google.api:gax:2.38.0": "com.google.api:gax:2.42.0", "com.google.auto.value:auto-value-annotations:1.7.4": "com.google.auto.value:auto-value-annotations:1.10.4", "com.google.errorprone:error_prone_annotations:2.+": "com.google.errorprone:error_prone_annotations:2.24.1" }, @@ -23,10 +23,10 @@ { "coord": "com.amazonaws:aws-java-sdk-cloudwatch:1.12.641", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", "com.fasterxml.jackson.core:jackson-databind:2.16.1", + "commons-codec:commons-codec:1.17.0", "joda-time:joda-time:2.10.8", "com.amazonaws:jmespath-java:1.12.641", "com.fasterxml.jackson.core:jackson-databind:jar:2.16.1", @@ -49,17 +49,17 @@ { "coord": "com.amazonaws:aws-java-sdk-core:1.12.641", "dependencies": [ - 
"commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", + "commons-codec:commons-codec:1.17.0", "joda-time:joda-time:2.10.8", "com.fasterxml.jackson.core:jackson-databind:jar:2.16.1", "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar:2.16.1" ], "directDependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", + "commons-codec:commons-codec:1.17.0", "joda-time:joda-time:2.10.8", "com.fasterxml.jackson.core:jackson-databind:jar:2.16.1", "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar:2.16.1" @@ -76,10 +76,10 @@ { "coord": "com.amazonaws:aws-java-sdk-kms:1.12.641", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", "com.fasterxml.jackson.core:jackson-databind:2.16.1", + "commons-codec:commons-codec:1.17.0", "joda-time:joda-time:2.10.8", "com.amazonaws:jmespath-java:1.12.641", "com.fasterxml.jackson.core:jackson-databind:jar:2.16.1", @@ -102,10 +102,10 @@ { "coord": "com.amazonaws:aws-java-sdk-s3:1.12.641", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", "com.fasterxml.jackson.core:jackson-databind:2.16.1", + "commons-codec:commons-codec:1.17.0", "joda-time:joda-time:2.10.8", "com.amazonaws:jmespath-java:1.12.641", "com.fasterxml.jackson.core:jackson-databind:jar:2.16.1", @@ -130,10 +130,10 @@ { "coord": "com.amazonaws:aws-java-sdk-sqs:1.12.641", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", "com.fasterxml.jackson.core:jackson-databind:2.16.1", + "commons-codec:commons-codec:1.17.0", "joda-time:joda-time:2.10.8", "com.amazonaws:jmespath-java:1.12.641", "com.fasterxml.jackson.core:jackson-databind:jar:2.16.1", @@ -156,10 +156,10 @@ { "coord": "com.amazonaws:aws-java-sdk-xray:1.12.641", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", "com.fasterxml.jackson.core:jackson-databind:2.16.1", + "commons-codec:commons-codec:1.17.0", "joda-time:joda-time:2.10.8", "com.amazonaws:jmespath-java:1.12.641", "com.fasterxml.jackson.core:jackson-databind:jar:2.16.1", @@ -769,8 +769,8 @@ "com.google.code.findbugs:jsr305:3.0.2", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", - "com.google.api:api-common:2.24.0", - "io.grpc:grpc-protobuf-lite:1.61.0" + "io.grpc:grpc-protobuf-lite:1.61.0", + "com.google.api:api-common:2.25.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", @@ -779,8 +779,8 @@ "com.google.code.findbugs:jsr305:3.0.2", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", - "com.google.api:api-common:2.24.0", - "io.grpc:grpc-protobuf-lite:1.61.0" + "io.grpc:grpc-protobuf-lite:1.61.0", + "com.google.api:api-common:2.25.0" ], "exclusions": [ "com.google.guava:guava", @@ -812,8 +812,8 @@ "com.google.code.findbugs:jsr305:3.0.2", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", - "com.google.api:api-common:2.24.0", - "io.grpc:grpc-protobuf-lite:1.61.0" + "io.grpc:grpc-protobuf-lite:1.61.0", + "com.google.api:api-common:2.25.0" ], 
"directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", @@ -822,8 +822,8 @@ "com.google.code.findbugs:jsr305:3.0.2", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", - "com.google.api:api-common:2.24.0", - "io.grpc:grpc-protobuf-lite:1.61.0" + "io.grpc:grpc-protobuf-lite:1.61.0", + "com.google.api:api-common:2.25.0" ], "exclusions": [ "com.google.guava:guava", @@ -853,22 +853,22 @@ "com.google.j2objc:j2objc-annotations:2.8", "com.google.guava:failureaccess:1.0.2", "com.google.code.findbugs:jsr305:3.0.2", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", - "com.google.api:api-common:2.24.0", - "io.grpc:grpc-protobuf-lite:1.61.0" + "com.google.api.grpc:proto-google-common-protos:2.34.0", + "io.grpc:grpc-protobuf-lite:1.61.0", + "com.google.api:api-common:2.25.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", "com.google.j2objc:j2objc-annotations:2.8", "com.google.guava:failureaccess:1.0.2", "com.google.code.findbugs:jsr305:3.0.2", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", - "com.google.api:api-common:2.24.0", - "io.grpc:grpc-protobuf-lite:1.61.0" + "com.google.api.grpc:proto-google-common-protos:2.34.0", + "io.grpc:grpc-protobuf-lite:1.61.0", + "com.google.api:api-common:2.25.0" ], "exclusions": [ "com.google.guava:guava", @@ -974,11 +974,11 @@ { "coord": "com.google.api.grpc:proto-google-cloud-compute-v1:1.44.0", "dependencies": [ - "com.google.api.grpc:proto-google-common-protos:2.32.0", - "com.google.protobuf:protobuf-java:3.25.2" + "com.google.protobuf:protobuf-java:3.25.2", + "com.google.api.grpc:proto-google-common-protos:2.34.0" ], "directDependencies": [ - "com.google.api.grpc:proto-google-common-protos:2.32.0", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "com.google.protobuf:protobuf-java:3.25.2" ], "file": "v1/https/repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-compute-v1/1.44.0/proto-google-cloud-compute-v1-1.44.0.jar", @@ -1034,28 +1034,28 @@ "com.google.j2objc:j2objc-annotations:2.8", "com.google.guava:failureaccess:1.0.2", "com.google.code.findbugs:jsr305:3.0.2", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "com.google.guava:guava:33.0.0-jre", "javax.annotation:javax.annotation-api:1.3.2", - "com.google.api:api-common:2.24.0", "com.google.auto.value:auto-value-annotations:1.10.4", - "com.google.protobuf:protobuf-java:3.25.2" + "com.google.protobuf:protobuf-java:3.25.2", + "com.google.api:api-common:2.25.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", "com.google.j2objc:j2objc-annotations:2.8", "com.google.guava:failureaccess:1.0.2", "com.google.code.findbugs:jsr305:3.0.2", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.errorprone:error_prone_annotations:2.24.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "com.google.guava:guava:33.0.0-jre", "javax.annotation:javax.annotation-api:1.3.2", - "com.google.api:api-common:2.24.0", 
"com.google.auto.value:auto-value-annotations:1.10.4", - "com.google.protobuf:protobuf-java:3.25.2" + "com.google.protobuf:protobuf-java:3.25.2", + "com.google.api:api-common:2.25.0" ], "file": "v1/https/repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-monitoring-v3/3.35.0/proto-google-cloud-monitoring-v3-3.35.0.jar", "mirror_urls": [ @@ -1354,21 +1354,77 @@ "url": "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-storage-v2/2.32.1-alpha/proto-google-cloud-storage-v2-2.32.1-alpha.jar" }, { - "coord": "com.google.api.grpc:proto-google-common-protos:2.32.0", + "coord": "com.google.api.grpc:proto-google-cloud-trace-v1:2.35.0", + "dependencies": [ + "com.google.protobuf:protobuf-java:3.25.2", + "com.google.api.grpc:proto-google-common-protos:2.34.0" + ], + "directDependencies": [ + "com.google.api.grpc:proto-google-common-protos:2.34.0", + "com.google.protobuf:protobuf-java:3.25.2" + ], + "file": "v1/https/repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-trace-v1/2.35.0/proto-google-cloud-trace-v1-2.35.0.jar", + "mirror_urls": [ + "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-trace-v1/2.35.0/proto-google-cloud-trace-v1-2.35.0.jar", + "https://maven.google.com/com/google/api/grpc/proto-google-cloud-trace-v1/2.35.0/proto-google-cloud-trace-v1-2.35.0.jar", + "https://jcenter.bintray.com/com/google/api/grpc/proto-google-cloud-trace-v1/2.35.0/proto-google-cloud-trace-v1-2.35.0.jar" + ], + "sha256": "249b834c059d2f229028ab0ce6892aec9b9e4cef0ddd3cc3c1a0ab9fdff6703d", + "url": "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-trace-v1/2.35.0/proto-google-cloud-trace-v1-2.35.0.jar" + }, + { + "coord": "com.google.api.grpc:proto-google-cloud-trace-v2:2.35.0", + "dependencies": [ + "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", + "com.google.j2objc:j2objc-annotations:2.8", + "com.google.guava:failureaccess:1.0.2", + "com.google.code.findbugs:jsr305:3.0.2", + "org.checkerframework:checker-qual:3.42.0", + "com.google.errorprone:error_prone_annotations:2.24.1", + "javax.annotation:javax.annotation-api:1.3.2", + "com.google.auto.value:auto-value-annotations:1.10.4" + ], + "directDependencies": [ + "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", + "com.google.j2objc:j2objc-annotations:2.8", + "com.google.guava:failureaccess:1.0.2", + "com.google.code.findbugs:jsr305:3.0.2", + "org.checkerframework:checker-qual:3.42.0", + "com.google.errorprone:error_prone_annotations:2.24.1", + "javax.annotation:javax.annotation-api:1.3.2", + "com.google.auto.value:auto-value-annotations:1.10.4" + ], + "exclusions": [ + "com.google.protobuf:protobuf-java", + "com.google.api:api-common", + "com.google.api.grpc:proto-google-common-protos", + "com.google.guava:guava" + ], + "file": "v1/https/repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-trace-v2/2.35.0/proto-google-cloud-trace-v2-2.35.0.jar", + "mirror_urls": [ + "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-trace-v2/2.35.0/proto-google-cloud-trace-v2-2.35.0.jar", + "https://maven.google.com/com/google/api/grpc/proto-google-cloud-trace-v2/2.35.0/proto-google-cloud-trace-v2-2.35.0.jar", + "https://jcenter.bintray.com/com/google/api/grpc/proto-google-cloud-trace-v2/2.35.0/proto-google-cloud-trace-v2-2.35.0.jar" + ], + "sha256": "edec01d73dc4bfad6172a3e5fc4430c2fe769e7aaa51724e3f35543a49f5be0d", + "url": 
"https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-cloud-trace-v2/2.35.0/proto-google-cloud-trace-v2-2.35.0.jar" + }, + { + "coord": "com.google.api.grpc:proto-google-common-protos:2.34.0", "dependencies": [ "com.google.protobuf:protobuf-java:3.25.2" ], "directDependencies": [ "com.google.protobuf:protobuf-java:3.25.2" ], - "file": "v1/https/repo1.maven.org/maven2/com/google/api/grpc/proto-google-common-protos/2.32.0/proto-google-common-protos-2.32.0.jar", + "file": "v1/https/repo1.maven.org/maven2/com/google/api/grpc/proto-google-common-protos/2.34.0/proto-google-common-protos-2.34.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-common-protos/2.32.0/proto-google-common-protos-2.32.0.jar", - "https://maven.google.com/com/google/api/grpc/proto-google-common-protos/2.32.0/proto-google-common-protos-2.32.0.jar", - "https://jcenter.bintray.com/com/google/api/grpc/proto-google-common-protos/2.32.0/proto-google-common-protos-2.32.0.jar" + "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-common-protos/2.34.0/proto-google-common-protos-2.34.0.jar", + "https://maven.google.com/com/google/api/grpc/proto-google-common-protos/2.34.0/proto-google-common-protos-2.34.0.jar", + "https://jcenter.bintray.com/com/google/api/grpc/proto-google-common-protos/2.34.0/proto-google-common-protos-2.34.0.jar" ], - "sha256": "863cd9c4c9075de234ac8c1d05ad477110bfe00ff1bd43ccc9c51aa6eb053226", - "url": "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-common-protos/2.32.0/proto-google-common-protos-2.32.0.jar" + "sha256": "8d2d8ccae1c0fc89dc628ce07e21611c1200a8cd673e26c8b0f44599511b6e7e", + "url": "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-common-protos/2.34.0/proto-google-common-protos-2.34.0.jar" }, { "coord": "com.google.api.grpc:proto-google-iam-v1:1.27.0", @@ -1388,7 +1444,7 @@ "url": "https://repo1.maven.org/maven2/com/google/api/grpc/proto-google-iam-v1/1.27.0/proto-google-iam-v1-1.27.0.jar" }, { - "coord": "com.google.api:api-common:2.24.0", + "coord": "com.google.api:api-common:2.25.0", "dependencies": [ "com.google.code.findbugs:jsr305:3.0.2", "com.google.errorprone:error_prone_annotations:2.24.1", @@ -1403,17 +1459,17 @@ "javax.annotation:javax.annotation-api:1.3.2", "com.google.auto.value:auto-value-annotations:1.10.4" ], - "file": "v1/https/repo1.maven.org/maven2/com/google/api/api-common/2.24.0/api-common-2.24.0.jar", + "file": "v1/https/repo1.maven.org/maven2/com/google/api/api-common/2.25.0/api-common-2.25.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/com/google/api/api-common/2.24.0/api-common-2.24.0.jar", - "https://maven.google.com/com/google/api/api-common/2.24.0/api-common-2.24.0.jar", - "https://jcenter.bintray.com/com/google/api/api-common/2.24.0/api-common-2.24.0.jar" + "https://repo1.maven.org/maven2/com/google/api/api-common/2.25.0/api-common-2.25.0.jar", + "https://maven.google.com/com/google/api/api-common/2.25.0/api-common-2.25.0.jar", + "https://jcenter.bintray.com/com/google/api/api-common/2.25.0/api-common-2.25.0.jar" ], - "sha256": "dbd43dc585ff1caca4b04ae05358bb441a0d5761e969a3a6cf19c3db1143ce2b", - "url": "https://repo1.maven.org/maven2/com/google/api/api-common/2.24.0/api-common-2.24.0.jar" + "sha256": "8e0a32aa85dc5dffa2a96200fde05068dc266c566ed56bfe48851d08f9f2d14b", + "url": "https://repo1.maven.org/maven2/com/google/api/api-common/2.25.0/api-common-2.25.0.jar" }, { - "coord": "com.google.api:gax-grpc:2.41.0", + "coord": "com.google.api:gax-grpc:2.42.0", 
"dependencies": [], "directDependencies": [], "exclusions": [ @@ -1436,17 +1492,17 @@ "com.google.auth:google-auth-library-credentials", "com.google.api.grpc:proto-google-common-protos" ], - "file": "v1/https/repo1.maven.org/maven2/com/google/api/gax-grpc/2.41.0/gax-grpc-2.41.0.jar", + "file": "v1/https/repo1.maven.org/maven2/com/google/api/gax-grpc/2.42.0/gax-grpc-2.42.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/com/google/api/gax-grpc/2.41.0/gax-grpc-2.41.0.jar", - "https://maven.google.com/com/google/api/gax-grpc/2.41.0/gax-grpc-2.41.0.jar", - "https://jcenter.bintray.com/com/google/api/gax-grpc/2.41.0/gax-grpc-2.41.0.jar" + "https://repo1.maven.org/maven2/com/google/api/gax-grpc/2.42.0/gax-grpc-2.42.0.jar", + "https://maven.google.com/com/google/api/gax-grpc/2.42.0/gax-grpc-2.42.0.jar", + "https://jcenter.bintray.com/com/google/api/gax-grpc/2.42.0/gax-grpc-2.42.0.jar" ], - "sha256": "06ed0ba87f7adcf96516f70a41eaa0ece68da60e9f8987755084d6b1a2ed0d67", - "url": "https://repo1.maven.org/maven2/com/google/api/gax-grpc/2.41.0/gax-grpc-2.41.0.jar" + "sha256": "3f6be58ce14cd18507b2da6b68d5661e1cd28a9a11c99028f9f80c8307ed3e13", + "url": "https://repo1.maven.org/maven2/com/google/api/gax-grpc/2.42.0/gax-grpc-2.42.0.jar" }, { - "coord": "com.google.api:gax-httpjson:2.41.0", + "coord": "com.google.api:gax-httpjson:2.42.0", "dependencies": [], "directDependencies": [], "exclusions": [ @@ -1465,59 +1521,59 @@ "com.google.api.grpc:proto-google-common-protos", "com.google.http-client:google-http-client" ], - "file": "v1/https/repo1.maven.org/maven2/com/google/api/gax-httpjson/2.41.0/gax-httpjson-2.41.0.jar", + "file": "v1/https/repo1.maven.org/maven2/com/google/api/gax-httpjson/2.42.0/gax-httpjson-2.42.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/com/google/api/gax-httpjson/2.41.0/gax-httpjson-2.41.0.jar", - "https://maven.google.com/com/google/api/gax-httpjson/2.41.0/gax-httpjson-2.41.0.jar", - "https://jcenter.bintray.com/com/google/api/gax-httpjson/2.41.0/gax-httpjson-2.41.0.jar" + "https://repo1.maven.org/maven2/com/google/api/gax-httpjson/2.42.0/gax-httpjson-2.42.0.jar", + "https://maven.google.com/com/google/api/gax-httpjson/2.42.0/gax-httpjson-2.42.0.jar", + "https://jcenter.bintray.com/com/google/api/gax-httpjson/2.42.0/gax-httpjson-2.42.0.jar" ], - "sha256": "72d1ef8391ec12da19492b0469a0425003f3d461b0f7ca4005de194d1a875b08", - "url": "https://repo1.maven.org/maven2/com/google/api/gax-httpjson/2.41.0/gax-httpjson-2.41.0.jar" + "sha256": "42eed0ffab063acf521b8d099c2cdc106b03ec2da204cdcd4e006c9ca50df85d", + "url": "https://repo1.maven.org/maven2/com/google/api/gax-httpjson/2.42.0/gax-httpjson-2.42.0.jar" }, { - "coord": "com.google.api:gax:2.41.0", + "coord": "com.google.api:gax:2.42.0", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "com.google.j2objc:j2objc-annotations:2.8", "org.apache.httpcomponents:httpclient:4.5.14", "com.google.http-client:google-http-client:1.43.3", "com.google.code.findbugs:jsr305:3.0.2", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", 
"javax.annotation:javax.annotation-api:1.3.2", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-context:1.61.0", "com.google.auto.value:auto-value-annotations:1.10.4", "com.google.protobuf:protobuf-java:3.25.2", "org.apache.httpcomponents:httpcore:4.4.16", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3" ], "directDependencies": [ "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "io.opencensus:opencensus-api:0.31.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", - "com.google.protobuf:protobuf-java:3.25.2" + "com.google.protobuf:protobuf-java:3.25.2", + "com.google.api:api-common:2.25.0" ], - "file": "v1/https/repo1.maven.org/maven2/com/google/api/gax/2.41.0/gax-2.41.0.jar", + "file": "v1/https/repo1.maven.org/maven2/com/google/api/gax/2.42.0/gax-2.42.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/com/google/api/gax/2.41.0/gax-2.41.0.jar", - "https://maven.google.com/com/google/api/gax/2.41.0/gax-2.41.0.jar", - "https://jcenter.bintray.com/com/google/api/gax/2.41.0/gax-2.41.0.jar" + "https://repo1.maven.org/maven2/com/google/api/gax/2.42.0/gax-2.42.0.jar", + "https://maven.google.com/com/google/api/gax/2.42.0/gax-2.42.0.jar", + "https://jcenter.bintray.com/com/google/api/gax/2.42.0/gax-2.42.0.jar" ], - "sha256": "a9f28849169e24ba095f9a43a4ddbcf6d87b212d32e709aaaa33cf8827956f0c", - "url": "https://repo1.maven.org/maven2/com/google/api/gax/2.41.0/gax-2.41.0.jar" + "sha256": "6303a610c81a65ed8d3858be2e369760137eb42e57231c5c27e9228e15765e1a", + "url": "https://repo1.maven.org/maven2/com/google/api/gax/2.42.0/gax-2.42.0.jar" }, { "coord": "com.google.apis:google-api-services-cloudkms:v1-rev20221107-2.0.0", @@ -1568,13 +1624,13 @@ { "coord": "com.google.auth:google-auth-library-oauth2-http:1.22.0", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "com.google.j2objc:j2objc-annotations:2.8", "org.apache.httpcomponents:httpclient:4.5.14", "com.google.http-client:google-http-client:1.43.3", "com.google.code.findbugs:jsr305:3.0.2", "com.google.auth:google-auth-library-credentials:1.22.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", "com.google.code.gson:gson:2.10.1", @@ -1741,8 +1797,6 @@ "coord": "com.google.cloud:google-cloud-compute:1.44.0", "dependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "com.google.j2objc:j2objc-annotations:2.8", "org.apache.httpcomponents:httpclient:4.5.14", @@ -1752,29 +1806,29 @@ "io.grpc:grpc-api:1.61.0", "com.google.code.findbugs:jsr305:3.0.2", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", + "com.google.api:gax:2.42.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", 
"io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", - "com.google.api:gax-httpjson:2.41.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-context:1.61.0", "com.google.auto.value:auto-value-annotations:1.10.4", "com.google.protobuf:protobuf-java:3.25.2", "org.apache.httpcomponents:httpcore:4.4.16", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "com.google.j2objc:j2objc-annotations:2.8", "org.apache.httpcomponents:httpclient:4.5.14", @@ -1784,23 +1838,25 @@ "io.grpc:grpc-api:1.61.0", "com.google.code.findbugs:jsr305:3.0.2", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", + "com.google.api:gax:2.42.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", - "com.google.api:gax-httpjson:2.41.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-context:1.61.0", "com.google.auto.value:auto-value-annotations:1.10.4", "com.google.protobuf:protobuf-java:3.25.2", "org.apache.httpcomponents:httpcore:4.4.16", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3" ], "file": "v1/https/repo1.maven.org/maven2/com/google/cloud/google-cloud-compute/1.44.0/google-cloud-compute-1.44.0.jar", @@ -1899,9 +1955,7 @@ "coord": "com.google.cloud:google-cloud-kms:2.37.0", "dependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "com.google.api.grpc:grpc-google-common-protos:2.31.0", "io.opencensus:opencensus-proto:0.2.0", @@ -1916,29 +1970,31 @@ "com.google.api.grpc:proto-google-cloud-kms-v1:0.128.0", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.re2j:re2j:1.7", "com.google.api.grpc:grpc-google-iam-v1:1.26.0", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", 
"com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -1946,17 +2002,15 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "com.google.api.grpc:grpc-google-common-protos:2.31.0", "io.opencensus:opencensus-proto:0.2.0", @@ -1971,29 +2025,31 @@ "com.google.api.grpc:proto-google-cloud-kms-v1:0.128.0", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.re2j:re2j:1.7", "com.google.api.grpc:grpc-google-iam-v1:1.26.0", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2001,9 +2057,9 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], @@ -2020,9 +2076,7 @@ "coord": "com.google.cloud:google-cloud-monitoring:3.35.0", "dependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", @@ -2035,27 +2089,29 @@ "com.google.code.findbugs:jsr305:3.0.2", "io.grpc:grpc-core:1.61.0", 
"com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", "com.google.api.grpc:proto-google-cloud-monitoring-v3:3.35.0", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", "com.google.re2j:re2j:1.7", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2063,17 +2119,15 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", @@ -2086,27 +2140,29 @@ "com.google.code.findbugs:jsr305:3.0.2", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", "com.google.api.grpc:proto-google-cloud-monitoring-v3:3.35.0", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", "com.google.re2j:re2j:1.7", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2114,9 +2170,9 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", 
"io.grpc:grpc-alts:1.61.0" ], @@ -2133,9 +2189,7 @@ "coord": "com.google.cloud:google-cloud-pubsub:1.126.2", "dependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", @@ -2148,29 +2202,31 @@ "com.google.code.findbugs:jsr305:3.0.2", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "com.google.api.grpc:proto-google-cloud-pubsub-v1:1.108.2", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.re2j:re2j:1.7", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2178,17 +2234,15 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", @@ -2201,29 +2255,31 @@ "com.google.code.findbugs:jsr305:3.0.2", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "com.google.api.grpc:proto-google-cloud-pubsub-v1:1.108.2", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.re2j:re2j:1.7", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", 
"io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2231,9 +2287,9 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], @@ -2250,9 +2306,7 @@ "coord": "com.google.cloud:google-cloud-secretmanager:2.34.0", "dependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", @@ -2265,30 +2319,32 @@ "com.google.code.findbugs:jsr305:3.0.2", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", "com.google.api.grpc:proto-google-cloud-secretmanager-v1:2.34.0", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "com.google.api.grpc:proto-google-cloud-secretmanager-v1beta1:2.34.0", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.re2j:re2j:1.7", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2296,17 +2352,15 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "commons-logging:commons-logging:1.3.0", "io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", @@ -2319,30 +2373,32 @@ "com.google.code.findbugs:jsr305:3.0.2", "io.grpc:grpc-core:1.61.0", 
"com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", "com.google.api.grpc:proto-google-cloud-secretmanager-v1:2.34.0", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "com.google.api.grpc:proto-google-cloud-secretmanager-v1beta1:2.34.0", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.re2j:re2j:1.7", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "com.google.auth:google-auth-library-oauth2-http:1.22.0", "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2350,9 +2406,9 @@ "com.google.protobuf:protobuf-java:3.25.2", "io.grpc:grpc-protobuf:1.61.0", "io.grpc:grpc-grpclb:1.61.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], @@ -2369,9 +2425,7 @@ "coord": "com.google.cloud:google-cloud-spanner:6.56.0", "dependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "com.google.api.grpc:proto-google-cloud-spanner-v1:6.56.0", "commons-logging:commons-logging:1.3.0", "com.google.api.grpc:grpc-google-common-protos:2.31.0", @@ -2387,20 +2441,23 @@ "com.google.api.grpc:proto-google-cloud-spanner-admin-database-v1:6.56.0", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "com.google.api.grpc:proto-google-cloud-spanner-admin-instance-v1:6.56.0", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.cloud:google-cloud-core-grpc:2.31.0", "com.google.re2j:re2j:1.7", "com.google.api.grpc:proto-google-cloud-spanner-executor-v1:6.56.0", "com.google.code.gson:gson:2.10.1", "com.google.api.grpc:grpc-google-cloud-spanner-admin-database-v1:6.56.0", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "io.grpc:grpc-rls:1.61.0", @@ -2409,12 +2466,11 @@ "io.opencensus:opencensus-contrib-grpc-util:0.31.1", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - 
"com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2426,17 +2482,15 @@ "com.google.api.grpc:grpc-google-cloud-spanner-v1:6.56.0", "io.grpc:grpc-grpclb:1.61.0", "com.google.api.grpc:grpc-google-cloud-spanner-admin-instance-v1:6.56.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "com.google.api.grpc:proto-google-cloud-spanner-v1:6.56.0", "commons-logging:commons-logging:1.3.0", "com.google.api.grpc:grpc-google-common-protos:2.31.0", @@ -2452,20 +2506,23 @@ "com.google.api.grpc:proto-google-cloud-spanner-admin-database-v1:6.56.0", "io.grpc:grpc-core:1.61.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "com.google.api.grpc:proto-google-cloud-spanner-admin-instance-v1:6.56.0", "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.cloud:google-cloud-core-grpc:2.31.0", "com.google.re2j:re2j:1.7", "com.google.api.grpc:proto-google-cloud-spanner-executor-v1:6.56.0", "com.google.code.gson:gson:2.10.1", "com.google.api.grpc:grpc-google-cloud-spanner-admin-database-v1:6.56.0", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "io.grpc:grpc-rls:1.61.0", @@ -2474,12 +2531,11 @@ "io.opencensus:opencensus-contrib-grpc-util:0.31.1", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2491,9 +2547,9 @@ "com.google.api.grpc:grpc-google-cloud-spanner-v1:6.56.0", "io.grpc:grpc-grpclb:1.61.0", "com.google.api.grpc:grpc-google-cloud-spanner-admin-instance-v1:6.56.0", - "com.google.api:gax-grpc:2.41.0", "org.apache.httpcomponents:httpcore:4.4.16", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0" ], @@ -2510,9 +2566,7 @@ "coord": "com.google.cloud:google-cloud-storage:2.32.1", "dependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", 
"io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", "com.google.guava:failureaccess:1.0.2", @@ -2526,14 +2580,16 @@ "com.google.http-client:google-http-client-jackson2:1.43.3", "com.google.apis:google-api-services-storage:v1-rev20240105-2.0.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "io.grpc:grpc-xds:1.61.0", "com.google.api.grpc:gapic-google-cloud-storage-v2:2.32.1-alpha", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:grpc-google-cloud-storage-v2:2.32.1-alpha", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.api.grpc:proto-google-cloud-storage-v2:2.32.1-alpha", @@ -2542,6 +2598,7 @@ "com.google.http-client:google-http-client-appengine:1.43.3", "com.google.api-client:google-api-client:2.2.0", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "io.grpc:grpc-rls:1.61.0", @@ -2549,12 +2606,11 @@ "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2565,17 +2621,15 @@ "com.google.cloud:google-cloud-core:2.31.0", "io.grpc:grpc-grpclb:1.61.0", "com.google.http-client:google-http-client-apache-v2:1.43.3", - "com.google.api:gax-grpc:2.41.0", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0", "com.google.oauth-client:google-oauth-client:1.35.0" ], "directDependencies": [ "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", - "commons-codec:commons-codec:1.16.0", "io.grpc:grpc-inprocess:1.61.0", - "com.google.api:gax:2.41.0", "io.opencensus:opencensus-proto:0.2.0", "com.google.j2objc:j2objc-annotations:2.8", "com.google.guava:failureaccess:1.0.2", @@ -2589,14 +2643,16 @@ "com.google.http-client:google-http-client-jackson2:1.43.3", "com.google.apis:google-api-services-storage:v1-rev20240105-2.0.0", "com.google.auth:google-auth-library-credentials:1.22.0", - "com.google.api.grpc:proto-google-common-protos:2.32.0", "org.checkerframework:checker-qual:3.42.0", "com.google.android:annotations:4.1.1.4", + "com.google.api:gax:2.42.0", "org.conscrypt:conscrypt-openjdk-uber:2.5.2", "io.grpc:grpc-xds:1.61.0", "com.google.api.grpc:gapic-google-cloud-storage-v2:2.32.1-alpha", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", "com.google.api.grpc:grpc-google-cloud-storage-v2:2.32.1-alpha", "com.google.api.grpc:proto-google-iam-v1:1.27.0", "com.google.api.grpc:proto-google-cloud-storage-v2:2.32.1-alpha", @@ -2605,6 +2661,7 @@ "com.google.http-client:google-http-client-appengine:1.43.3", 
"com.google.api-client:google-api-client:2.2.0", "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", "io.grpc:grpc-netty-shaded:1.61.0", "io.opencensus:opencensus-contrib-http-util:0.31.1", "io.grpc:grpc-rls:1.61.0", @@ -2612,12 +2669,11 @@ "com.google.guava:guava:33.0.0-jre", "io.perfmark:perfmark-api:0.27.0", "io.grpc:grpc-services:1.61.0", - "com.google.api:gax-httpjson:2.41.0", + "com.google.api:gax-grpc:2.42.0", "javax.annotation:javax.annotation-api:1.3.2", "com.google.protobuf:protobuf-java-util:3.25.2", "io.grpc:grpc-stub:1.61.0", "org.threeten:threetenbp:1.6.8", - "com.google.api:api-common:2.24.0", "io.grpc:grpc-util:1.61.0", "io.grpc:grpc-protobuf-lite:1.61.0", "io.grpc:grpc-context:1.61.0", @@ -2628,8 +2684,8 @@ "com.google.cloud:google-cloud-core:2.31.0", "io.grpc:grpc-grpclb:1.61.0", "com.google.http-client:google-http-client-apache-v2:1.43.3", - "com.google.api:gax-grpc:2.41.0", "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", "com.google.http-client:google-http-client-gson:1.43.3", "io.grpc:grpc-alts:1.61.0", "com.google.oauth-client:google-oauth-client:1.35.0" @@ -2643,6 +2699,123 @@ "sha256": "29abbea87530925a6c01bfa6778f0a601fa902791a01ff06782db97497de8337", "url": "https://repo1.maven.org/maven2/com/google/cloud/google-cloud-storage/2.32.1/google-cloud-storage-2.32.1.jar" }, + { + "coord": "com.google.cloud:google-cloud-trace:2.35.0", + "dependencies": [ + "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", + "io.grpc:grpc-inprocess:1.61.0", + "commons-logging:commons-logging:1.3.0", + "io.opencensus:opencensus-proto:0.2.0", + "com.google.j2objc:j2objc-annotations:2.8", + "org.apache.httpcomponents:httpclient:4.5.14", + "com.google.guava:failureaccess:1.0.2", + "org.codehaus.mojo:animal-sniffer-annotations:1.23", + "com.google.http-client:google-http-client:1.43.3", + "io.grpc:grpc-googleapis:1.61.0", + "io.grpc:grpc-api:1.61.0", + "com.google.code.findbugs:jsr305:3.0.2", + "io.grpc:grpc-core:1.61.0", + "com.google.auth:google-auth-library-credentials:1.22.0", + "org.checkerframework:checker-qual:3.42.0", + "com.google.android:annotations:4.1.1.4", + "com.google.api.grpc:proto-google-cloud-trace-v1:2.35.0", + "com.google.api.grpc:proto-google-cloud-trace-v2:2.35.0", + "com.google.api:gax:2.42.0", + "org.conscrypt:conscrypt-openjdk-uber:2.5.2", + "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", + "com.google.errorprone:error_prone_annotations:2.24.1", + "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", + "com.google.re2j:re2j:1.7", + "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", + "io.grpc:grpc-netty-shaded:1.61.0", + "io.opencensus:opencensus-contrib-http-util:0.31.1", + "com.google.auth:google-auth-library-oauth2-http:1.22.0", + "com.google.guava:guava:33.0.0-jre", + "io.perfmark:perfmark-api:0.27.0", + "io.grpc:grpc-services:1.61.0", + "com.google.api:gax-grpc:2.42.0", + "javax.annotation:javax.annotation-api:1.3.2", + "com.google.protobuf:protobuf-java-util:3.25.2", + "io.grpc:grpc-stub:1.61.0", + "org.threeten:threetenbp:1.6.8", + "io.grpc:grpc-util:1.61.0", + "io.grpc:grpc-protobuf-lite:1.61.0", + "io.grpc:grpc-context:1.61.0", + "com.google.auto.value:auto-value-annotations:1.10.4", + "com.google.protobuf:protobuf-java:3.25.2", + "io.grpc:grpc-protobuf:1.61.0", + "io.grpc:grpc-grpclb:1.61.0", + "org.apache.httpcomponents:httpcore:4.4.16", + "io.grpc:grpc-auth:1.61.0", + 
"com.google.api:api-common:2.25.0", + "com.google.http-client:google-http-client-gson:1.43.3", + "io.grpc:grpc-alts:1.61.0" + ], + "directDependencies": [ + "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava", + "io.grpc:grpc-inprocess:1.61.0", + "commons-logging:commons-logging:1.3.0", + "io.opencensus:opencensus-proto:0.2.0", + "com.google.j2objc:j2objc-annotations:2.8", + "org.apache.httpcomponents:httpclient:4.5.14", + "com.google.guava:failureaccess:1.0.2", + "org.codehaus.mojo:animal-sniffer-annotations:1.23", + "com.google.http-client:google-http-client:1.43.3", + "io.grpc:grpc-googleapis:1.61.0", + "io.grpc:grpc-api:1.61.0", + "com.google.code.findbugs:jsr305:3.0.2", + "io.grpc:grpc-core:1.61.0", + "com.google.auth:google-auth-library-credentials:1.22.0", + "org.checkerframework:checker-qual:3.42.0", + "com.google.android:annotations:4.1.1.4", + "com.google.api.grpc:proto-google-cloud-trace-v1:2.35.0", + "com.google.api.grpc:proto-google-cloud-trace-v2:2.35.0", + "com.google.api:gax:2.42.0", + "org.conscrypt:conscrypt-openjdk-uber:2.5.2", + "io.grpc:grpc-xds:1.61.0", + "commons-codec:commons-codec:1.17.0", + "com.google.errorprone:error_prone_annotations:2.24.1", + "io.opencensus:opencensus-api:0.31.1", + "com.google.api:gax-httpjson:2.42.0", + "com.google.re2j:re2j:1.7", + "com.google.code.gson:gson:2.10.1", + "com.google.api.grpc:proto-google-common-protos:2.34.0", + "io.grpc:grpc-netty-shaded:1.61.0", + "io.opencensus:opencensus-contrib-http-util:0.31.1", + "com.google.auth:google-auth-library-oauth2-http:1.22.0", + "com.google.guava:guava:33.0.0-jre", + "io.perfmark:perfmark-api:0.27.0", + "io.grpc:grpc-services:1.61.0", + "com.google.api:gax-grpc:2.42.0", + "javax.annotation:javax.annotation-api:1.3.2", + "com.google.protobuf:protobuf-java-util:3.25.2", + "io.grpc:grpc-stub:1.61.0", + "org.threeten:threetenbp:1.6.8", + "io.grpc:grpc-util:1.61.0", + "io.grpc:grpc-protobuf-lite:1.61.0", + "io.grpc:grpc-context:1.61.0", + "com.google.auto.value:auto-value-annotations:1.10.4", + "com.google.protobuf:protobuf-java:3.25.2", + "io.grpc:grpc-protobuf:1.61.0", + "io.grpc:grpc-grpclb:1.61.0", + "org.apache.httpcomponents:httpcore:4.4.16", + "io.grpc:grpc-auth:1.61.0", + "com.google.api:api-common:2.25.0", + "com.google.http-client:google-http-client-gson:1.43.3", + "io.grpc:grpc-alts:1.61.0" + ], + "file": "v1/https/repo1.maven.org/maven2/com/google/cloud/google-cloud-trace/2.35.0/google-cloud-trace-2.35.0.jar", + "mirror_urls": [ + "https://repo1.maven.org/maven2/com/google/cloud/google-cloud-trace/2.35.0/google-cloud-trace-2.35.0.jar", + "https://maven.google.com/com/google/cloud/google-cloud-trace/2.35.0/google-cloud-trace-2.35.0.jar", + "https://jcenter.bintray.com/com/google/cloud/google-cloud-trace/2.35.0/google-cloud-trace-2.35.0.jar" + ], + "sha256": "2a9ce0209a01b851bc661cc369529fb7f117533c621de5488ccc9daf57c0be4b", + "url": "https://repo1.maven.org/maven2/com/google/cloud/google-cloud-trace/2.35.0/google-cloud-trace-2.35.0.jar" + }, { "coord": "com.google.cloud:grpc-gcp:1.5.0", "dependencies": [], @@ -2694,12 +2867,12 @@ { "coord": "com.google.crypto.tink:tink-gcpkms:1.9.0", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpclient:4.5.14", "com.google.http-client:google-http-client:1.43.3", "com.google.code.findbugs:jsr305:3.0.2", - "com.google.crypto.tink:tink:1.12.0", + "com.google.crypto.tink:tink:1.13.0", + "commons-codec:commons-codec:1.17.0", 
"com.google.errorprone:error_prone_annotations:2.24.1", "com.google.auto.service:auto-service-annotations:1.1.1", "com.google.api-client:google-api-client:2.2.0", @@ -2714,7 +2887,7 @@ "directDependencies": [ "com.google.http-client:google-http-client:1.43.3", "com.google.code.findbugs:jsr305:3.0.2", - "com.google.crypto.tink:tink:1.12.0", + "com.google.crypto.tink:tink:1.13.0", "com.google.errorprone:error_prone_annotations:2.24.1", "com.google.auto.service:auto-service-annotations:1.1.1", "com.google.api-client:google-api-client:2.2.0", @@ -2733,7 +2906,7 @@ "url": "https://repo1.maven.org/maven2/com/google/crypto/tink/tink-gcpkms/1.9.0/tink-gcpkms-1.9.0.jar" }, { - "coord": "com.google.crypto.tink:tink:1.12.0", + "coord": "com.google.crypto.tink:tink:1.13.0", "dependencies": [ "com.google.code.gson:gson:2.10.1", "com.google.protobuf:protobuf-java:3.25.2", @@ -2746,14 +2919,14 @@ "com.google.errorprone:error_prone_annotations:2.24.1", "com.google.protobuf:protobuf-java:3.25.2" ], - "file": "v1/https/repo1.maven.org/maven2/com/google/crypto/tink/tink/1.12.0/tink-1.12.0.jar", + "file": "v1/https/repo1.maven.org/maven2/com/google/crypto/tink/tink/1.13.0/tink-1.13.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/com/google/crypto/tink/tink/1.12.0/tink-1.12.0.jar", - "https://maven.google.com/com/google/crypto/tink/tink/1.12.0/tink-1.12.0.jar", - "https://jcenter.bintray.com/com/google/crypto/tink/tink/1.12.0/tink-1.12.0.jar" + "https://repo1.maven.org/maven2/com/google/crypto/tink/tink/1.13.0/tink-1.13.0.jar", + "https://maven.google.com/com/google/crypto/tink/tink/1.13.0/tink-1.13.0.jar", + "https://jcenter.bintray.com/com/google/crypto/tink/tink/1.13.0/tink-1.13.0.jar" ], - "sha256": "b6b3338aba9aeb4379abd8fabf685d3d9873005a2667523681da1e8293846bc9", - "url": "https://repo1.maven.org/maven2/com/google/crypto/tink/tink/1.12.0/tink-1.12.0.jar" + "sha256": "e2fbd1763285c95b59abfee2deccd4ab4768a770cc0983ee89a5b0bd8f9354e8", + "url": "https://repo1.maven.org/maven2/com/google/crypto/tink/tink/1.13.0/tink-1.13.0.jar" }, { "coord": "com.google.errorprone:error_prone_annotations:2.24.1", @@ -2906,13 +3079,13 @@ { "coord": "com.google.http-client:google-http-client-jackson2:1.43.3", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "com.google.j2objc:j2objc-annotations:2.8", "org.apache.httpcomponents:httpclient:4.5.14", "com.google.http-client:google-http-client:1.43.3", "com.fasterxml.jackson.core:jackson-core:2.16.1", "com.google.code.findbugs:jsr305:3.0.2", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", "io.opencensus:opencensus-contrib-http-util:0.31.1", @@ -3075,12 +3248,12 @@ { "coord": "com.google.oauth-client:google-oauth-client:1.35.0", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "commons-logging:commons-logging:1.3.0", "com.google.j2objc:j2objc-annotations:2.8", "org.apache.httpcomponents:httpclient:4.5.14", "com.google.http-client:google-http-client:1.43.3", "com.google.code.findbugs:jsr305:3.0.2", + "commons-codec:commons-codec:1.17.0", "com.google.errorprone:error_prone_annotations:2.24.1", "io.opencensus:opencensus-api:0.31.1", "com.google.code.gson:gson:2.10.1", @@ -3458,30 +3631,30 @@ "url": "https://repo1.maven.org/maven2/com/sun/mail/mailapi/1.6.2/mailapi-1.6.2.jar" }, { - "coord": "commons-codec:commons-codec:1.16.0", + "coord": "commons-codec:commons-codec:1.17.0", "dependencies": [], "directDependencies": [], - "file": 
"v1/https/repo1.maven.org/maven2/commons-codec/commons-codec/1.16.0/commons-codec-1.16.0.jar", + "file": "v1/https/repo1.maven.org/maven2/commons-codec/commons-codec/1.17.0/commons-codec-1.17.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.16.0/commons-codec-1.16.0.jar", - "https://maven.google.com/commons-codec/commons-codec/1.16.0/commons-codec-1.16.0.jar", - "https://jcenter.bintray.com/commons-codec/commons-codec/1.16.0/commons-codec-1.16.0.jar" + "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.17.0/commons-codec-1.17.0.jar", + "https://maven.google.com/commons-codec/commons-codec/1.17.0/commons-codec-1.17.0.jar", + "https://jcenter.bintray.com/commons-codec/commons-codec/1.17.0/commons-codec-1.17.0.jar" ], - "sha256": "56595fb20b0b85bc91d0d503dad50bb7f1b9afc0eed5dffa6cbb25929000484d", - "url": "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.16.0/commons-codec-1.16.0.jar" + "sha256": "f700de80ac270d0344fdea7468201d8b9c805e5c648331c3619f2ee067ccfc59", + "url": "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.17.0/commons-codec-1.17.0.jar" }, { - "coord": "commons-io:commons-io:2.8.0", + "coord": "commons-io:commons-io:2.16.1", "dependencies": [], "directDependencies": [], - "file": "v1/https/repo1.maven.org/maven2/commons-io/commons-io/2.8.0/commons-io-2.8.0.jar", + "file": "v1/https/repo1.maven.org/maven2/commons-io/commons-io/2.16.1/commons-io-2.16.1.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/commons-io/commons-io/2.8.0/commons-io-2.8.0.jar", - "https://maven.google.com/commons-io/commons-io/2.8.0/commons-io-2.8.0.jar", - "https://jcenter.bintray.com/commons-io/commons-io/2.8.0/commons-io-2.8.0.jar" + "https://repo1.maven.org/maven2/commons-io/commons-io/2.16.1/commons-io-2.16.1.jar", + "https://maven.google.com/commons-io/commons-io/2.16.1/commons-io-2.16.1.jar", + "https://jcenter.bintray.com/commons-io/commons-io/2.16.1/commons-io-2.16.1.jar" ], - "sha256": "02f291e5d1243dc143496e3cbbb40a1ced47aa58f2d633d3e38780cd068d5074", - "url": "https://repo1.maven.org/maven2/commons-io/commons-io/2.8.0/commons-io-2.8.0.jar" + "sha256": "f41f7baacd716896447ace9758621f62c1c6b0a91d89acee488da26fc477c84f", + "url": "https://repo1.maven.org/maven2/commons-io/commons-io/2.16.1/commons-io-2.16.1.jar" }, { "coord": "commons-logging:commons-logging:1.3.0", @@ -4754,6 +4927,7 @@ "coord": "io.swagger.core.v3:swagger-core:2.1.5", "dependencies": [ "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.swagger.core.v3:swagger-annotations:2.1.5", "com.fasterxml.jackson.core:jackson-databind:2.16.1", @@ -4763,11 +4937,11 @@ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.16.1", "jakarta.validation:jakarta.validation-api:2.0.2", "com.fasterxml.jackson.core:jackson-annotations:2.16.1", - "org.apache.commons:commons-lang3:3.11", "jakarta.xml.bind:jakarta.xml.bind-api:2.3.3" ], "directDependencies": [ "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "io.swagger.core.v3:swagger-annotations:2.1.5", "com.fasterxml.jackson.core:jackson-databind:2.16.1", "org.slf4j:slf4j-api:2.0.11", @@ -4775,7 +4949,6 @@ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.16.1", "jakarta.validation:jakarta.validation-api:2.0.2", "com.fasterxml.jackson.core:jackson-annotations:2.16.1", - "org.apache.commons:commons-lang3:3.11", 
"jakarta.xml.bind:jakarta.xml.bind-api:2.3.3" ], "exclusions": [ @@ -4838,13 +5011,13 @@ "io.swagger:swagger-compat-spec-parser:1.0.52", "io.swagger:swagger-parser:1.0.52", "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.swagger.core.v3:swagger-annotations:2.1.5", "com.fasterxml.jackson.core:jackson-databind:2.16.1", "org.yaml:snakeyaml:2.0", "io.swagger:swagger-annotations:1.6.2", "io.swagger:swagger-core:1.6.2", - "commons-io:commons-io:2.8.0", "org.slf4j:slf4j-api:2.0.11", "io.swagger.core.v3:swagger-core:2.1.5", "io.swagger.parser.v3:swagger-parser-v3:2.0.23", @@ -4856,8 +5029,8 @@ "com.fasterxml.jackson.core:jackson-annotations:2.16.1", "io.swagger.parser.v3:swagger-parser-core:2.0.23", "org.slf4j:slf4j-ext:1.7.28", - "org.apache.commons:commons-lang3:3.11", "javax.validation:validation-api:1.1.0.Final", + "commons-io:commons-io:2.16.1", "io.swagger:swagger-models:1.6.2", "jakarta.xml.bind:jakarta.xml.bind-api:2.3.3" ], @@ -4885,11 +5058,11 @@ "coord": "io.swagger.parser.v3:swagger-parser-v3:2.0.23", "dependencies": [ "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.swagger.core.v3:swagger-annotations:2.1.5", "com.fasterxml.jackson.core:jackson-databind:2.16.1", "org.yaml:snakeyaml:2.0", - "commons-io:commons-io:2.8.0", "org.slf4j:slf4j-api:2.0.11", "io.swagger.core.v3:swagger-core:2.1.5", "io.swagger.core.v3:swagger-models:2.1.5", @@ -4897,17 +5070,17 @@ "jakarta.validation:jakarta.validation-api:2.0.2", "com.fasterxml.jackson.core:jackson-annotations:2.16.1", "io.swagger.parser.v3:swagger-parser-core:2.0.23", - "org.apache.commons:commons-lang3:3.11", + "commons-io:commons-io:2.16.1", "jakarta.xml.bind:jakarta.xml.bind-api:2.3.3" ], "directDependencies": [ "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", "com.fasterxml.jackson.core:jackson-databind:2.16.1", - "commons-io:commons-io:2.8.0", "io.swagger.core.v3:swagger-core:2.1.5", "io.swagger.core.v3:swagger-models:2.1.5", "com.fasterxml.jackson.core:jackson-annotations:2.16.1", - "io.swagger.parser.v3:swagger-parser-core:2.0.23" + "io.swagger.parser.v3:swagger-parser-core:2.0.23", + "commons-io:commons-io:2.16.1" ], "exclusions": [ "com.github.fge:json-patch" @@ -4928,6 +5101,7 @@ "io.swagger:swagger-compat-spec-parser:1.0.52", "io.swagger:swagger-parser:1.0.52", "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.swagger.core.v3:swagger-annotations:2.1.5", "com.fasterxml.jackson.core:jackson-databind:2.16.1", @@ -4935,7 +5109,6 @@ "org.yaml:snakeyaml:2.0", "io.swagger:swagger-annotations:1.6.2", "io.swagger:swagger-core:1.6.2", - "commons-io:commons-io:2.8.0", "org.slf4j:slf4j-api:2.0.11", "io.swagger.core.v3:swagger-core:2.1.5", "io.swagger.parser.v3:swagger-parser-v3:2.0.23", @@ -4947,13 +5120,13 @@ "com.fasterxml.jackson.core:jackson-annotations:2.16.1", "io.swagger.parser.v3:swagger-parser-core:2.0.23", "org.slf4j:slf4j-ext:1.7.28", - "org.apache.commons:commons-lang3:3.11", "javax.validation:validation-api:1.1.0.Final", + "commons-io:commons-io:2.16.1", "io.swagger:swagger-models:1.6.2", "jakarta.xml.bind:jakarta.xml.bind-api:2.3.3" ], "directDependencies": [ - "commons-io:commons-io:2.8.0", + "commons-io:commons-io:2.16.1", 
"io.swagger.parser.v3:swagger-parser-v2-converter:2.0.23", "io.swagger.parser.v3:swagger-parser-v3:2.0.23" ], @@ -4991,10 +5164,10 @@ "org.apache.httpcomponents:httpclient:4.5.14", "io.swagger:swagger-parser:1.0.52", "io.swagger:swagger-core:1.6.2", - "commons-io:commons-io:2.8.0", "org.slf4j:slf4j-api:2.0.11", "com.github.java-json-tools:json-schema-validator:2.2.14", - "org.slf4j:slf4j-ext:1.7.28" + "org.slf4j:slf4j-ext:1.7.28", + "commons-io:commons-io:2.16.1" ], "directDependencies": [ "com.github.java-json-tools:json-schema-validator:2.2.14", @@ -5018,6 +5191,7 @@ "coord": "io.swagger:swagger-core:1.6.2", "dependencies": [ "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "com.fasterxml.jackson.core:jackson-core:2.16.1", "com.fasterxml.jackson.core:jackson-databind:2.16.1", "org.yaml:snakeyaml:2.0", @@ -5025,17 +5199,16 @@ "org.slf4j:slf4j-api:2.0.11", "com.google.guava:guava:33.0.0-jre", "com.fasterxml.jackson.core:jackson-annotations:2.16.1", - "org.apache.commons:commons-lang3:3.11", "javax.validation:validation-api:1.1.0.Final", "io.swagger:swagger-models:1.6.2" ], "directDependencies": [ "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "com.fasterxml.jackson.core:jackson-databind:2.16.1", "org.slf4j:slf4j-api:2.0.11", "com.google.guava:guava:33.0.0-jre", "com.fasterxml.jackson.core:jackson-annotations:2.16.1", - "org.apache.commons:commons-lang3:3.11", "javax.validation:validation-api:1.1.0.Final", "io.swagger:swagger-models:1.6.2" ], @@ -5078,13 +5251,13 @@ { "coord": "io.swagger:swagger-parser:1.0.52", "dependencies": [ - "commons-io:commons-io:2.8.0", "io.swagger:swagger-core:1.6.2", "org.slf4j:slf4j-ext:1.7.28", - "org.slf4j:slf4j-api:2.0.11" + "org.slf4j:slf4j-api:2.0.11", + "commons-io:commons-io:2.16.1" ], "directDependencies": [ - "commons-io:commons-io:2.8.0", + "commons-io:commons-io:2.16.1", "io.swagger:swagger-core:1.6.2", "org.slf4j:slf4j-api:2.0.11", "org.slf4j:slf4j-ext:1.7.28" @@ -5416,15 +5589,15 @@ { "coord": "org.apache.avro:avro:1.11.3", "dependencies": [ + "org.apache.commons:commons-compress:1.26.2", "com.fasterxml.jackson.core:jackson-core:2.16.1", "org.slf4j:slf4j-api:2.0.11", - "com.fasterxml.jackson.core:jackson-databind:2.16.1", - "org.apache.commons:commons-compress:1.24.0" + "com.fasterxml.jackson.core:jackson-databind:2.16.1" ], "directDependencies": [ "com.fasterxml.jackson.core:jackson-core:2.16.1", "com.fasterxml.jackson.core:jackson-databind:2.16.1", - "org.apache.commons:commons-compress:1.24.0", + "org.apache.commons:commons-compress:1.26.2", "org.slf4j:slf4j-api:2.0.11" ], "file": "v1/https/repo1.maven.org/maven2/org/apache/avro/avro/1.11.3/avro-1.11.3.jar", @@ -5437,30 +5610,38 @@ "url": "https://repo1.maven.org/maven2/org/apache/avro/avro/1.11.3/avro-1.11.3.jar" }, { - "coord": "org.apache.commons:commons-compress:1.24.0", - "dependencies": [], - "directDependencies": [], - "file": "v1/https/repo1.maven.org/maven2/org/apache/commons/commons-compress/1.24.0/commons-compress-1.24.0.jar", + "coord": "org.apache.commons:commons-compress:1.26.2", + "dependencies": [ + "org.apache.commons:commons-lang3:3.14.0", + "commons-io:commons-io:2.16.1", + "commons-codec:commons-codec:1.17.0" + ], + "directDependencies": [ + "commons-codec:commons-codec:1.17.0", + "commons-io:commons-io:2.16.1", + "org.apache.commons:commons-lang3:3.14.0" + ], + "file": 
"v1/https/repo1.maven.org/maven2/org/apache/commons/commons-compress/1.26.2/commons-compress-1.26.2.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/org/apache/commons/commons-compress/1.24.0/commons-compress-1.24.0.jar", - "https://maven.google.com/org/apache/commons/commons-compress/1.24.0/commons-compress-1.24.0.jar", - "https://jcenter.bintray.com/org/apache/commons/commons-compress/1.24.0/commons-compress-1.24.0.jar" + "https://repo1.maven.org/maven2/org/apache/commons/commons-compress/1.26.2/commons-compress-1.26.2.jar", + "https://maven.google.com/org/apache/commons/commons-compress/1.26.2/commons-compress-1.26.2.jar", + "https://jcenter.bintray.com/org/apache/commons/commons-compress/1.26.2/commons-compress-1.26.2.jar" ], - "sha256": "fbf2c5d275e513c08f4e4fb539726dbf3ac2142af956ba93efb983244f0c36bd", - "url": "https://repo1.maven.org/maven2/org/apache/commons/commons-compress/1.24.0/commons-compress-1.24.0.jar" + "sha256": "9168a03141d8fc7eda21a2360d83cc0412bcbb1d6204d992bd48c2573cb3c6b8", + "url": "https://repo1.maven.org/maven2/org/apache/commons/commons-compress/1.26.2/commons-compress-1.26.2.jar" }, { - "coord": "org.apache.commons:commons-lang3:3.11", + "coord": "org.apache.commons:commons-lang3:3.14.0", "dependencies": [], "directDependencies": [], - "file": "v1/https/repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11.jar", + "file": "v1/https/repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.14.0/commons-lang3-3.14.0.jar", "mirror_urls": [ - "https://repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11.jar", - "https://maven.google.com/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11.jar", - "https://jcenter.bintray.com/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11.jar" + "https://repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.14.0/commons-lang3-3.14.0.jar", + "https://maven.google.com/org/apache/commons/commons-lang3/3.14.0/commons-lang3-3.14.0.jar", + "https://jcenter.bintray.com/org/apache/commons/commons-lang3/3.14.0/commons-lang3-3.14.0.jar" ], - "sha256": "4ee380259c068d1dbe9e84ab52186f2acd65de067ec09beff731fca1697fdb16", - "url": "https://repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11.jar" + "sha256": "7b96bf3ee68949abb5bc465559ac270e0551596fa34523fddf890ec418dde13c", + "url": "https://repo1.maven.org/maven2/org/apache/commons/commons-lang3/3.14.0/commons-lang3-3.14.0.jar" }, { "coord": "org.apache.commons:commons-math3:3.6.1", @@ -5546,11 +5727,11 @@ "coord": "org.apache.httpcomponents:httpclient:4.5.14", "dependencies": [ "org.apache.httpcomponents:httpcore:4.4.16", - "commons-codec:commons-codec:1.16.0", + "commons-codec:commons-codec:1.17.0", "commons-logging:commons-logging:1.3.0" ], "directDependencies": [ - "commons-codec:commons-codec:1.16.0", + "commons-codec:commons-codec:1.17.0", "commons-logging:commons-logging:1.3.0", "org.apache.httpcomponents:httpcore:4.4.16" ], @@ -5626,11 +5807,11 @@ { "coord": "org.apache.velocity:velocity-engine-core:2.2", "dependencies": [ - "org.slf4j:slf4j-api:2.0.11", - "org.apache.commons:commons-lang3:3.11" + "org.apache.commons:commons-lang3:3.14.0", + "org.slf4j:slf4j-api:2.0.11" ], "directDependencies": [ - "org.apache.commons:commons-lang3:3.11", + "org.apache.commons:commons-lang3:3.14.0", "org.slf4j:slf4j-api:2.0.11" ], "file": "v1/https/repo1.maven.org/maven2/org/apache/velocity/velocity-engine-core/2.2/velocity-engine-core-2.2.jar", @@ -5646,8 +5827,8 @@ "coord": 
"org.apache.velocity:velocity-engine-scripting:2.2", "dependencies": [ "org.apache.velocity:velocity-engine-core:2.2", - "org.slf4j:slf4j-api:2.0.11", - "org.apache.commons:commons-lang3:3.11" + "org.apache.commons:commons-lang3:3.14.0", + "org.slf4j:slf4j-api:2.0.11" ], "directDependencies": [ "org.apache.velocity:velocity-engine-core:2.2" @@ -5857,7 +6038,6 @@ { "coord": "org.mock-server:mockserver-client-java:5.11.2", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "com.jcraft:jzlib:1.1.3", "com.github.java-json-tools:jackson-coreutils:2.0", "org.hamcrest:hamcrest:2.2", @@ -5867,6 +6047,7 @@ "io.swagger:swagger-parser:1.0.52", "com.sun.mail:mailapi:1.6.2", "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "io.netty:netty-codec-socks:4.1.53.Final", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.netty:netty-handler:4.1.100.Final", @@ -5882,9 +6063,9 @@ "org.mock-server:mockserver-core:5.11.2", "io.swagger:swagger-annotations:1.6.2", "io.swagger:swagger-core:1.6.2", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", - "commons-io:commons-io:2.8.0", "io.netty:netty-codec-http:4.1.100.Final", "joda-time:joda-time:2.10.8", "io.netty:netty-common:4.1.100.Final", @@ -5911,13 +6092,13 @@ "io.swagger.parser.v3:swagger-parser:2.0.23", "org.slf4j:slf4j-ext:1.7.28", "org.mozilla:rhino:1.7.7.2", - "org.apache.commons:commons-lang3:3.11", "com.github.java-json-tools:json-schema-core:1.2.14", "org.xmlunit:xmlunit-core:2.8.0", "io.netty:netty-handler-proxy:4.1.53.Final", "net.javacrumbs.json-unit:json-unit-core:2.19.0", "jakarta.activation:jakarta.activation-api:1.2.2", "javax.validation:validation-api:1.1.0.Final", + "commons-io:commons-io:2.16.1", "io.netty:netty-codec:4.1.100.Final", "javax.servlet:javax.servlet-api:4.0.1", "io.swagger:swagger-models:1.6.2", @@ -5927,7 +6108,7 @@ ], "directDependencies": [ "com.google.guava:guava:33.0.0-jre", - "org.apache.commons:commons-lang3:3.11", + "org.apache.commons:commons-lang3:3.14.0", "org.mock-server:mockserver-core:5.11.2", "org.slf4j:slf4j-api:2.0.11" ], @@ -5943,7 +6124,6 @@ { "coord": "org.mock-server:mockserver-core:5.11.2", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "com.jcraft:jzlib:1.1.3", "com.github.java-json-tools:jackson-coreutils:2.0", "org.hamcrest:hamcrest:2.2", @@ -5953,6 +6133,7 @@ "io.swagger:swagger-parser:1.0.52", "com.sun.mail:mailapi:1.6.2", "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "io.netty:netty-codec-socks:4.1.53.Final", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.netty:netty-handler:4.1.100.Final", @@ -5967,9 +6148,9 @@ "org.yaml:snakeyaml:2.0", "io.swagger:swagger-annotations:1.6.2", "io.swagger:swagger-core:1.6.2", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", - "commons-io:commons-io:2.8.0", "io.netty:netty-codec-http:4.1.100.Final", "joda-time:joda-time:2.10.8", "io.netty:netty-common:4.1.100.Final", @@ -5996,13 +6177,13 @@ "io.swagger.parser.v3:swagger-parser:2.0.23", "org.slf4j:slf4j-ext:1.7.28", "org.mozilla:rhino:1.7.7.2", - "org.apache.commons:commons-lang3:3.11", "com.github.java-json-tools:json-schema-core:1.2.14", "org.xmlunit:xmlunit-core:2.8.0", "io.netty:netty-handler-proxy:4.1.53.Final", "net.javacrumbs.json-unit:json-unit-core:2.19.0", "jakarta.activation:jakarta.activation-api:1.2.2", 
"javax.validation:validation-api:1.1.0.Final", + "commons-io:commons-io:2.16.1", "io.netty:netty-codec:4.1.100.Final", "javax.servlet:javax.servlet-api:4.0.1", "io.swagger:swagger-models:1.6.2", @@ -6011,17 +6192,17 @@ "jakarta.xml.bind:jakarta.xml.bind-api:2.3.3" ], "directDependencies": [ - "commons-codec:commons-codec:1.16.0", "com.jcraft:jzlib:1.1.3", "com.lmax:disruptor:3.4.2", + "org.apache.commons:commons-lang3:3.14.0", "io.netty:netty-codec-socks:4.1.53.Final", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.netty:netty-handler:4.1.100.Final", "org.apache.velocity:velocity-engine-scripting:2.2", "com.fasterxml.jackson.core:jackson-databind:2.16.1", "com.fasterxml.uuid:java-uuid-generator:4.0.1", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-buffer:4.1.100.Final", - "commons-io:commons-io:2.8.0", "io.netty:netty-codec-http:4.1.100.Final", "org.apache.velocity:velocity-engine-core:2.2", "org.slf4j:slf4j-api:2.0.11", @@ -6033,10 +6214,10 @@ "org.apache.commons:commons-text:1.9", "com.fasterxml.jackson.core:jackson-annotations:2.16.1", "io.swagger.parser.v3:swagger-parser:2.0.23", - "org.apache.commons:commons-lang3:3.11", "org.xmlunit:xmlunit-core:2.8.0", "io.netty:netty-handler-proxy:4.1.53.Final", "net.javacrumbs.json-unit:json-unit-core:2.19.0", + "commons-io:commons-io:2.16.1", "io.netty:netty-codec:4.1.100.Final", "javax.servlet:javax.servlet-api:4.0.1", "io.netty:netty-transport:4.1.100.Final" @@ -6053,7 +6234,6 @@ { "coord": "org.mock-server:mockserver-junit-rule:5.11.2", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "com.jcraft:jzlib:1.1.3", "com.github.java-json-tools:jackson-coreutils:2.0", "org.hamcrest:hamcrest:2.2", @@ -6063,6 +6243,7 @@ "io.swagger:swagger-parser:1.0.52", "com.sun.mail:mailapi:1.6.2", "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "io.netty:netty-codec-socks:4.1.53.Final", "com.fasterxml.jackson.core:jackson-core:2.16.1", "io.netty:netty-handler:4.1.100.Final", @@ -6079,9 +6260,9 @@ "io.swagger:swagger-annotations:1.6.2", "org.mock-server:mockserver-client-java:5.11.2", "io.swagger:swagger-core:1.6.2", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", - "commons-io:commons-io:2.8.0", "io.netty:netty-codec-http:4.1.100.Final", "joda-time:joda-time:2.10.8", "io.netty:netty-common:4.1.100.Final", @@ -6109,13 +6290,13 @@ "io.swagger.parser.v3:swagger-parser:2.0.23", "org.slf4j:slf4j-ext:1.7.28", "org.mozilla:rhino:1.7.7.2", - "org.apache.commons:commons-lang3:3.11", "com.github.java-json-tools:json-schema-core:1.2.14", "org.xmlunit:xmlunit-core:2.8.0", "io.netty:netty-handler-proxy:4.1.53.Final", "net.javacrumbs.json-unit:json-unit-core:2.19.0", "jakarta.activation:jakarta.activation-api:1.2.2", "javax.validation:validation-api:1.1.0.Final", + "commons-io:commons-io:2.16.1", "junit:junit:4.13.2", "io.netty:netty-codec:4.1.100.Final", "javax.servlet:javax.servlet-api:4.0.1", @@ -6141,7 +6322,6 @@ { "coord": "org.mock-server:mockserver-netty:5.11.2", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "com.jcraft:jzlib:1.1.3", "com.github.java-json-tools:jackson-coreutils:2.0", "org.hamcrest:hamcrest:2.2", @@ -6151,6 +6331,7 @@ "io.swagger:swagger-parser:1.0.52", "com.sun.mail:mailapi:1.6.2", "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1", + "org.apache.commons:commons-lang3:3.14.0", "io.netty:netty-codec-socks:4.1.53.Final", "com.fasterxml.jackson.core:jackson-core:2.16.1", 
"io.netty:netty-handler:4.1.100.Final", @@ -6167,9 +6348,9 @@ "io.swagger:swagger-annotations:1.6.2", "org.mock-server:mockserver-client-java:5.11.2", "io.swagger:swagger-core:1.6.2", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", - "commons-io:commons-io:2.8.0", "io.netty:netty-codec-http:4.1.100.Final", "joda-time:joda-time:2.10.8", "io.netty:netty-common:4.1.100.Final", @@ -6196,13 +6377,13 @@ "io.swagger.parser.v3:swagger-parser:2.0.23", "org.slf4j:slf4j-ext:1.7.28", "org.mozilla:rhino:1.7.7.2", - "org.apache.commons:commons-lang3:3.11", "com.github.java-json-tools:json-schema-core:1.2.14", "org.xmlunit:xmlunit-core:2.8.0", "io.netty:netty-handler-proxy:4.1.53.Final", "net.javacrumbs.json-unit:json-unit-core:2.19.0", "jakarta.activation:jakarta.activation-api:1.2.2", "javax.validation:validation-api:1.1.0.Final", + "commons-io:commons-io:2.16.1", "io.netty:netty-codec:4.1.100.Final", "javax.servlet:javax.servlet-api:4.0.1", "io.swagger:swagger-models:1.6.2", @@ -6215,11 +6396,11 @@ "org.mock-server:mockserver-core:5.11.2", "org.mock-server:mockserver-client-java:5.11.2", "io.netty:netty-buffer:4.1.100.Final", - "commons-io:commons-io:2.8.0", "io.netty:netty-codec-http:4.1.100.Final", "io.netty:netty-common:4.1.100.Final", "org.slf4j:slf4j-api:2.0.11", "com.google.guava:guava:33.0.0-jre", + "commons-io:commons-io:2.16.1", "io.netty:netty-codec:4.1.100.Final", "io.netty:netty-transport:4.1.100.Final" ], @@ -6398,6 +6579,7 @@ { "coord": "org.testcontainers:localstack:1.19.3", "dependencies": [ + "org.apache.commons:commons-compress:1.26.2", "com.github.docker-java:docker-java-transport-zerodep:3.3.4", "com.github.docker-java:docker-java-api:3.3.4", "net.java.dev.jna:jna:5.13.0", @@ -6408,8 +6590,7 @@ "com.fasterxml.jackson.core:jackson-annotations:2.16.1", "org.jetbrains:annotations:17.0.0", "org.rnorth.duct-tape:duct-tape:1.0.8", - "junit:junit:4.13.2", - "org.apache.commons:commons-compress:1.24.0" + "junit:junit:4.13.2" ], "directDependencies": [ "org.testcontainers:testcontainers:1.19.3" @@ -6426,6 +6607,7 @@ { "coord": "org.testcontainers:testcontainers:1.19.3", "dependencies": [ + "org.apache.commons:commons-compress:1.26.2", "com.github.docker-java:docker-java-transport-zerodep:3.3.4", "com.github.docker-java:docker-java-api:3.3.4", "net.java.dev.jna:jna:5.13.0", @@ -6435,16 +6617,15 @@ "com.fasterxml.jackson.core:jackson-annotations:2.16.1", "org.jetbrains:annotations:17.0.0", "org.rnorth.duct-tape:duct-tape:1.0.8", - "junit:junit:4.13.2", - "org.apache.commons:commons-compress:1.24.0" + "junit:junit:4.13.2" ], "directDependencies": [ + "org.apache.commons:commons-compress:1.26.2", "com.github.docker-java:docker-java-transport-zerodep:3.3.4", "com.github.docker-java:docker-java-api:3.3.4", "org.slf4j:slf4j-api:2.0.11", "org.rnorth.duct-tape:duct-tape:1.0.8", - "junit:junit:4.13.2", - "org.apache.commons:commons-compress:1.24.0" + "junit:junit:4.13.2" ], "file": "v1/https/repo1.maven.org/maven2/org/testcontainers/testcontainers/1.19.3/testcontainers-1.19.3.jar", "mirror_urls": [ @@ -6521,7 +6702,6 @@ { "coord": "software.amazon.awssdk:accessanalyzer:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -6533,6 +6713,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + 
"commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -6593,7 +6774,6 @@ { "coord": "software.amazon.awssdk:account:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -6605,6 +6785,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -6665,7 +6846,6 @@ { "coord": "software.amazon.awssdk:acm:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -6677,6 +6857,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -6737,7 +6918,6 @@ { "coord": "software.amazon.awssdk:acmpca:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -6749,6 +6929,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -6809,7 +6990,6 @@ { "coord": "software.amazon.awssdk:alexaforbusiness:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -6821,6 +7001,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -6881,7 +7062,6 @@ { "coord": "software.amazon.awssdk:amp:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -6893,6 +7073,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -6953,7 +7134,6 @@ { "coord": "software.amazon.awssdk:amplify:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -6965,6 +7145,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + 
"commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7025,7 +7206,6 @@ { "coord": "software.amazon.awssdk:amplifybackend:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7037,6 +7217,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7097,7 +7278,6 @@ { "coord": "software.amazon.awssdk:amplifyuibuilder:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7109,6 +7289,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7182,9 +7363,9 @@ { "coord": "software.amazon.awssdk:apache-client:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "org.apache.httpcomponents:httpclient:4.5.14", "org.reactivestreams:reactive-streams:1.0.4", + "commons-codec:commons-codec:1.17.0", "org.slf4j:slf4j-api:2.0.11", "software.amazon.awssdk:utils:2.21.16", "software.amazon.awssdk:annotations:2.21.16", @@ -7193,8 +7374,8 @@ "software.amazon.awssdk:metrics-spi:2.21.16" ], "directDependencies": [ - "commons-codec:commons-codec:1.16.0", "org.apache.httpcomponents:httpclient:4.5.14", + "commons-codec:commons-codec:1.17.0", "software.amazon.awssdk:utils:2.21.16", "software.amazon.awssdk:annotations:2.21.16", "software.amazon.awssdk:http-client-spi:2.21.16", @@ -7213,7 +7394,6 @@ { "coord": "software.amazon.awssdk:apigateway:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7225,6 +7405,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7285,7 +7466,6 @@ { "coord": "software.amazon.awssdk:apigatewaymanagementapi:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7297,6 +7477,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7357,7 +7538,6 @@ { "coord": "software.amazon.awssdk:apigatewayv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", 
"software.amazon.awssdk:protocol-core:2.21.16", @@ -7369,6 +7549,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7429,7 +7610,6 @@ { "coord": "software.amazon.awssdk:appconfig:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7441,6 +7621,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7501,7 +7682,6 @@ { "coord": "software.amazon.awssdk:appconfigdata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7513,6 +7693,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7573,7 +7754,6 @@ { "coord": "software.amazon.awssdk:appfabric:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7585,6 +7765,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7645,7 +7826,6 @@ { "coord": "software.amazon.awssdk:appflow:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7657,6 +7837,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7717,7 +7898,6 @@ { "coord": "software.amazon.awssdk:appintegrations:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7729,6 +7909,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7789,7 +7970,6 @@ { "coord": "software.amazon.awssdk:applicationautoscaling:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", 
"software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7801,6 +7981,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7861,7 +8042,6 @@ { "coord": "software.amazon.awssdk:applicationcostprofiler:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7873,6 +8053,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -7933,7 +8114,6 @@ { "coord": "software.amazon.awssdk:applicationdiscovery:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -7945,6 +8125,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8005,7 +8186,6 @@ { "coord": "software.amazon.awssdk:applicationinsights:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8017,6 +8197,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8077,7 +8258,6 @@ { "coord": "software.amazon.awssdk:appmesh:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8089,6 +8269,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8149,7 +8330,6 @@ { "coord": "software.amazon.awssdk:apprunner:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8161,6 +8341,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8221,7 +8402,6 @@ { "coord": "software.amazon.awssdk:appstream:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", 
"software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8233,6 +8413,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8293,7 +8474,6 @@ { "coord": "software.amazon.awssdk:appsync:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8305,6 +8485,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8365,7 +8546,6 @@ { "coord": "software.amazon.awssdk:arczonalshift:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8377,6 +8557,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8458,7 +8639,6 @@ { "coord": "software.amazon.awssdk:athena:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8470,6 +8650,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8530,7 +8711,6 @@ { "coord": "software.amazon.awssdk:auditmanager:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8542,6 +8722,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8648,7 +8829,6 @@ { "coord": "software.amazon.awssdk:autoscaling:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8659,6 +8839,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8720,7 +8901,6 @@ { "coord": "software.amazon.awssdk:autoscalingplans:2.21.16", "dependencies": [ - 
"commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -8732,6 +8912,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -8973,7 +9154,6 @@ { "coord": "software.amazon.awssdk:aws-sdk-java:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:waf:2.21.16", "software.amazon.awssdk:budgets:2.21.16", "software.amazon.awssdk:datasync:2.21.16", @@ -9127,6 +9307,7 @@ "software.amazon.awssdk:iotanalytics:2.21.16", "software.amazon.awssdk:launchwizard:2.21.16", "software.amazon.awssdk:ssm:2.21.16", + "commons-codec:commons-codec:1.17.0", "software.amazon.awssdk:computeoptimizer:2.21.16", "software.amazon.awssdk:codepipeline:2.21.16", "software.amazon.awssdk:cloudsearch:2.21.16", @@ -9794,7 +9975,6 @@ { "coord": "software.amazon.awssdk:backup:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -9806,6 +9986,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -9866,7 +10047,6 @@ { "coord": "software.amazon.awssdk:backupgateway:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -9878,6 +10058,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -9938,7 +10119,6 @@ { "coord": "software.amazon.awssdk:backupstorage:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -9950,6 +10130,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10010,7 +10191,6 @@ { "coord": "software.amazon.awssdk:batch:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10022,6 +10202,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10082,7 +10263,6 @@ { "coord": "software.amazon.awssdk:bedrock:2.21.16", 
"dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10094,6 +10274,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10154,7 +10335,6 @@ { "coord": "software.amazon.awssdk:bedrockruntime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10166,6 +10346,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10226,7 +10407,6 @@ { "coord": "software.amazon.awssdk:billingconductor:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10238,6 +10418,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10298,7 +10479,6 @@ { "coord": "software.amazon.awssdk:braket:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10310,6 +10490,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10370,7 +10551,6 @@ { "coord": "software.amazon.awssdk:budgets:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10382,6 +10562,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10478,7 +10659,6 @@ { "coord": "software.amazon.awssdk:chime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10490,6 +10670,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10550,7 +10731,6 @@ { "coord": 
"software.amazon.awssdk:chimesdkidentity:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10562,6 +10742,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10622,7 +10803,6 @@ { "coord": "software.amazon.awssdk:chimesdkmediapipelines:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10634,6 +10814,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10694,7 +10875,6 @@ { "coord": "software.amazon.awssdk:chimesdkmeetings:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10706,6 +10886,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10766,7 +10947,6 @@ { "coord": "software.amazon.awssdk:chimesdkmessaging:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10778,6 +10958,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10838,7 +11019,6 @@ { "coord": "software.amazon.awssdk:chimesdkvoice:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10850,6 +11030,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -10910,7 +11091,6 @@ { "coord": "software.amazon.awssdk:cleanrooms:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10922,6 +11102,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", 
"io.netty:netty-codec-http:4.1.100.Final", @@ -10982,7 +11163,6 @@ { "coord": "software.amazon.awssdk:cloud9:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -10994,6 +11174,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11054,7 +11235,6 @@ { "coord": "software.amazon.awssdk:cloudcontrol:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11066,6 +11246,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11126,7 +11307,6 @@ { "coord": "software.amazon.awssdk:clouddirectory:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11138,6 +11318,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11198,7 +11379,6 @@ { "coord": "software.amazon.awssdk:cloudformation:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11209,6 +11389,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11270,7 +11451,6 @@ { "coord": "software.amazon.awssdk:cloudfront:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:aws-xml-protocol:2.21.16", @@ -11282,6 +11462,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11343,7 +11524,6 @@ { "coord": "software.amazon.awssdk:cloudhsm:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11355,6 +11535,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", 
"io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11415,7 +11596,6 @@ { "coord": "software.amazon.awssdk:cloudhsmv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11427,6 +11607,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11487,7 +11668,6 @@ { "coord": "software.amazon.awssdk:cloudsearch:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11498,6 +11678,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11559,7 +11740,6 @@ { "coord": "software.amazon.awssdk:cloudsearchdomain:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11571,6 +11751,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11631,7 +11812,6 @@ { "coord": "software.amazon.awssdk:cloudtrail:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11643,6 +11823,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11703,7 +11884,6 @@ { "coord": "software.amazon.awssdk:cloudtraildata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11715,6 +11895,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11775,7 +11956,6 @@ { "coord": "software.amazon.awssdk:cloudwatch-metric-publisher:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11787,6 +11967,7 @@ "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", "software.amazon.awssdk:cloudwatch:2.21.16", + 
"commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11836,7 +12017,6 @@ { "coord": "software.amazon.awssdk:cloudwatch:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11847,6 +12027,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11908,7 +12089,6 @@ { "coord": "software.amazon.awssdk:cloudwatchevents:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11920,6 +12100,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -11980,7 +12161,6 @@ { "coord": "software.amazon.awssdk:cloudwatchlogs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -11992,6 +12172,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12052,7 +12233,6 @@ { "coord": "software.amazon.awssdk:codeartifact:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12064,6 +12244,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12124,7 +12305,6 @@ { "coord": "software.amazon.awssdk:codebuild:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12136,6 +12316,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12196,7 +12377,6 @@ { "coord": "software.amazon.awssdk:codecatalyst:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12208,6 +12388,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", 
"io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12267,7 +12448,6 @@ { "coord": "software.amazon.awssdk:codecommit:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12279,6 +12459,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12339,7 +12520,6 @@ { "coord": "software.amazon.awssdk:codedeploy:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12351,6 +12531,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12411,7 +12592,6 @@ { "coord": "software.amazon.awssdk:codeguruprofiler:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12423,6 +12603,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12483,7 +12664,6 @@ { "coord": "software.amazon.awssdk:codegurureviewer:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12495,6 +12675,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12555,7 +12736,6 @@ { "coord": "software.amazon.awssdk:codegurusecurity:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12567,6 +12747,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12627,7 +12808,6 @@ { "coord": "software.amazon.awssdk:codepipeline:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12639,6 +12819,7 @@ 
"io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12699,7 +12880,6 @@ { "coord": "software.amazon.awssdk:codestar:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12711,6 +12891,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12771,7 +12952,6 @@ { "coord": "software.amazon.awssdk:codestarconnections:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12783,6 +12963,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12843,7 +13024,6 @@ { "coord": "software.amazon.awssdk:codestarnotifications:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12855,6 +13035,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12915,7 +13096,6 @@ { "coord": "software.amazon.awssdk:cognitoidentity:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12927,6 +13107,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -12987,7 +13168,6 @@ { "coord": "software.amazon.awssdk:cognitoidentityprovider:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -12999,6 +13179,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13059,7 +13240,6 @@ { "coord": "software.amazon.awssdk:cognitosync:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", 
"software.amazon.awssdk:protocol-core:2.21.16", @@ -13071,6 +13251,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13131,7 +13312,6 @@ { "coord": "software.amazon.awssdk:comprehend:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13143,6 +13323,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13203,7 +13384,6 @@ { "coord": "software.amazon.awssdk:comprehendmedical:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13215,6 +13395,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13275,7 +13456,6 @@ { "coord": "software.amazon.awssdk:computeoptimizer:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13287,6 +13467,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13347,7 +13528,6 @@ { "coord": "software.amazon.awssdk:config:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13359,6 +13539,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13419,7 +13600,6 @@ { "coord": "software.amazon.awssdk:connect:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13431,6 +13611,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13491,7 +13672,6 @@ { "coord": "software.amazon.awssdk:connectcampaigns:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", 
"software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13503,6 +13683,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13563,7 +13744,6 @@ { "coord": "software.amazon.awssdk:connectcases:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13575,6 +13755,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13635,7 +13816,6 @@ { "coord": "software.amazon.awssdk:connectcontactlens:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13647,6 +13827,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13707,7 +13888,6 @@ { "coord": "software.amazon.awssdk:connectparticipant:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13719,6 +13899,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13779,7 +13960,6 @@ { "coord": "software.amazon.awssdk:controltower:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13791,6 +13971,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13851,7 +14032,6 @@ { "coord": "software.amazon.awssdk:costandusagereport:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13863,6 +14043,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -13923,7 +14104,6 @@ { "coord": "software.amazon.awssdk:costexplorer:2.21.16", "dependencies": [ - 
"commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -13935,6 +14115,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14016,7 +14197,6 @@ { "coord": "software.amazon.awssdk:customerprofiles:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14028,6 +14208,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14088,7 +14269,6 @@ { "coord": "software.amazon.awssdk:databasemigration:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14100,6 +14280,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14160,7 +14341,6 @@ { "coord": "software.amazon.awssdk:databrew:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14172,6 +14352,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14232,7 +14413,6 @@ { "coord": "software.amazon.awssdk:dataexchange:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14244,6 +14424,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14304,7 +14485,6 @@ { "coord": "software.amazon.awssdk:datapipeline:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14316,6 +14496,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14376,7 +14557,6 @@ { "coord": 
"software.amazon.awssdk:datasync:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14388,6 +14568,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14448,7 +14629,6 @@ { "coord": "software.amazon.awssdk:datazone:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14460,6 +14640,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14520,7 +14701,6 @@ { "coord": "software.amazon.awssdk:dax:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14532,6 +14712,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14592,7 +14773,6 @@ { "coord": "software.amazon.awssdk:detective:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14604,6 +14784,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14664,7 +14845,6 @@ { "coord": "software.amazon.awssdk:devicefarm:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14676,6 +14856,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14736,7 +14917,6 @@ { "coord": "software.amazon.awssdk:devopsguru:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14748,6 +14928,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14808,7 
+14989,6 @@ { "coord": "software.amazon.awssdk:directconnect:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14820,6 +15000,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14880,7 +15061,6 @@ { "coord": "software.amazon.awssdk:directory:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14892,6 +15072,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -14952,7 +15133,6 @@ { "coord": "software.amazon.awssdk:dlm:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -14964,6 +15144,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15024,7 +15205,6 @@ { "coord": "software.amazon.awssdk:docdb:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15035,6 +15215,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15096,7 +15277,6 @@ { "coord": "software.amazon.awssdk:docdbelastic:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15108,6 +15288,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15168,7 +15349,6 @@ { "coord": "software.amazon.awssdk:drs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15180,6 +15360,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", 
@@ -15240,7 +15421,6 @@ { "coord": "software.amazon.awssdk:dynamodb-enhanced:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15252,6 +15432,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15303,7 +15484,6 @@ { "coord": "software.amazon.awssdk:dynamodb:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15315,6 +15495,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15375,7 +15556,6 @@ { "coord": "software.amazon.awssdk:ebs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15387,6 +15567,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15447,7 +15628,6 @@ { "coord": "software.amazon.awssdk:ec2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15458,6 +15638,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15519,7 +15700,6 @@ { "coord": "software.amazon.awssdk:ec2instanceconnect:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15531,6 +15711,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15591,7 +15772,6 @@ { "coord": "software.amazon.awssdk:ecr:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15603,6 +15783,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", 
"io.netty:netty-codec-http:4.1.100.Final", @@ -15663,7 +15844,6 @@ { "coord": "software.amazon.awssdk:ecrpublic:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15675,6 +15855,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15735,7 +15916,6 @@ { "coord": "software.amazon.awssdk:ecs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15747,6 +15927,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15807,7 +15988,6 @@ { "coord": "software.amazon.awssdk:efs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15819,6 +15999,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15879,7 +16060,6 @@ { "coord": "software.amazon.awssdk:eks:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15891,6 +16071,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -15951,7 +16132,6 @@ { "coord": "software.amazon.awssdk:elasticache:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -15962,6 +16142,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16023,7 +16204,6 @@ { "coord": "software.amazon.awssdk:elasticbeanstalk:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16034,6 +16214,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", 
"io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16095,7 +16276,6 @@ { "coord": "software.amazon.awssdk:elasticinference:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16107,6 +16287,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16167,7 +16348,6 @@ { "coord": "software.amazon.awssdk:elasticloadbalancing:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16178,6 +16358,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16239,7 +16420,6 @@ { "coord": "software.amazon.awssdk:elasticloadbalancingv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16250,6 +16430,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16311,7 +16492,6 @@ { "coord": "software.amazon.awssdk:elasticsearch:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16323,6 +16503,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16383,7 +16564,6 @@ { "coord": "software.amazon.awssdk:elastictranscoder:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16395,6 +16575,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16455,7 +16636,6 @@ { "coord": "software.amazon.awssdk:emr:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16467,6 +16647,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + 
"commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16527,7 +16708,6 @@ { "coord": "software.amazon.awssdk:emrcontainers:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16539,6 +16719,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16599,7 +16780,6 @@ { "coord": "software.amazon.awssdk:emrserverless:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16611,6 +16791,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16688,7 +16869,6 @@ { "coord": "software.amazon.awssdk:entityresolution:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16700,6 +16880,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16760,7 +16941,6 @@ { "coord": "software.amazon.awssdk:eventbridge:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16772,6 +16952,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16832,7 +17013,6 @@ { "coord": "software.amazon.awssdk:evidently:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16844,6 +17024,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16904,7 +17085,6 @@ { "coord": "software.amazon.awssdk:finspace:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16916,6 +17096,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", 
"io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -16976,7 +17157,6 @@ { "coord": "software.amazon.awssdk:finspacedata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -16988,6 +17168,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17048,7 +17229,6 @@ { "coord": "software.amazon.awssdk:firehose:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17060,6 +17240,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17120,7 +17301,6 @@ { "coord": "software.amazon.awssdk:fis:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17132,6 +17312,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17192,7 +17373,6 @@ { "coord": "software.amazon.awssdk:fms:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17204,6 +17384,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17264,7 +17445,6 @@ { "coord": "software.amazon.awssdk:forecast:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17276,6 +17456,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17336,7 +17517,6 @@ { "coord": "software.amazon.awssdk:forecastquery:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17348,6 +17528,7 @@ "io.netty:netty-handler:4.1.100.Final", 
"org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17408,7 +17589,6 @@ { "coord": "software.amazon.awssdk:frauddetector:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17420,6 +17600,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17480,7 +17661,6 @@ { "coord": "software.amazon.awssdk:fsx:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17492,6 +17672,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17552,7 +17733,6 @@ { "coord": "software.amazon.awssdk:gamelift:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17564,6 +17744,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17624,7 +17805,6 @@ { "coord": "software.amazon.awssdk:glacier:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17636,6 +17816,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17696,7 +17877,6 @@ { "coord": "software.amazon.awssdk:globalaccelerator:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17708,6 +17888,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17768,7 +17949,6 @@ { "coord": "software.amazon.awssdk:glue:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17780,6 +17960,7 @@ 
"io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17840,7 +18021,6 @@ { "coord": "software.amazon.awssdk:grafana:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17852,6 +18032,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17912,7 +18093,6 @@ { "coord": "software.amazon.awssdk:greengrass:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17924,6 +18104,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -17984,7 +18165,6 @@ { "coord": "software.amazon.awssdk:greengrassv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -17996,6 +18176,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18056,7 +18237,6 @@ { "coord": "software.amazon.awssdk:groundstation:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18068,6 +18248,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18128,7 +18309,6 @@ { "coord": "software.amazon.awssdk:guardduty:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18140,6 +18320,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18200,7 +18381,6 @@ { "coord": "software.amazon.awssdk:health:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", 
@@ -18212,6 +18392,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18272,7 +18453,6 @@ { "coord": "software.amazon.awssdk:healthlake:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18284,6 +18464,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18344,7 +18525,6 @@ { "coord": "software.amazon.awssdk:honeycode:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18356,6 +18536,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18571,7 +18752,6 @@ { "coord": "software.amazon.awssdk:iam:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18582,6 +18762,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18664,7 +18845,6 @@ { "coord": "software.amazon.awssdk:identitystore:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18676,6 +18856,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18736,7 +18917,6 @@ { "coord": "software.amazon.awssdk:imagebuilder:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18748,6 +18928,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18808,7 +18989,6 @@ { "coord": "software.amazon.awssdk:inspector2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", 
"software.amazon.awssdk:protocol-core:2.21.16", @@ -18820,6 +19000,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18880,7 +19061,6 @@ { "coord": "software.amazon.awssdk:inspector:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18892,6 +19072,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -18952,7 +19133,6 @@ { "coord": "software.amazon.awssdk:internetmonitor:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -18964,6 +19144,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19024,7 +19205,6 @@ { "coord": "software.amazon.awssdk:iot1clickdevices:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19036,6 +19216,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19096,7 +19277,6 @@ { "coord": "software.amazon.awssdk:iot1clickprojects:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19108,6 +19288,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19168,7 +19349,6 @@ { "coord": "software.amazon.awssdk:iot:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19180,6 +19360,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19240,7 +19421,6 @@ { "coord": "software.amazon.awssdk:iotanalytics:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", 
"software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19252,6 +19432,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19312,7 +19493,6 @@ { "coord": "software.amazon.awssdk:iotdataplane:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19324,6 +19504,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19384,7 +19565,6 @@ { "coord": "software.amazon.awssdk:iotdeviceadvisor:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19396,6 +19576,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19456,7 +19637,6 @@ { "coord": "software.amazon.awssdk:iotevents:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19468,6 +19648,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19528,7 +19709,6 @@ { "coord": "software.amazon.awssdk:ioteventsdata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19540,6 +19720,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19600,7 +19781,6 @@ { "coord": "software.amazon.awssdk:iotfleethub:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19612,6 +19792,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19672,7 +19853,6 @@ { "coord": "software.amazon.awssdk:iotfleetwise:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", 
"software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19684,6 +19864,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19744,7 +19925,6 @@ { "coord": "software.amazon.awssdk:iotjobsdataplane:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19756,6 +19936,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19816,7 +19997,6 @@ { "coord": "software.amazon.awssdk:iotroborunner:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19828,6 +20008,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19888,7 +20069,6 @@ { "coord": "software.amazon.awssdk:iotsecuretunneling:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19900,6 +20080,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -19960,7 +20141,6 @@ { "coord": "software.amazon.awssdk:iotsitewise:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -19972,6 +20152,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20032,7 +20213,6 @@ { "coord": "software.amazon.awssdk:iotthingsgraph:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20044,6 +20224,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20104,7 +20285,6 @@ { "coord": "software.amazon.awssdk:iottwinmaker:2.21.16", 
"dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20116,6 +20296,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20176,7 +20357,6 @@ { "coord": "software.amazon.awssdk:iotwireless:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20188,6 +20368,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20248,7 +20429,6 @@ { "coord": "software.amazon.awssdk:ivs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20260,6 +20440,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20320,7 +20501,6 @@ { "coord": "software.amazon.awssdk:ivschat:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20332,6 +20512,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20392,7 +20573,6 @@ { "coord": "software.amazon.awssdk:ivsrealtime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20404,6 +20584,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20487,7 +20668,6 @@ { "coord": "software.amazon.awssdk:kafka:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20499,6 +20679,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20559,7 +20740,6 @@ { "coord": 
"software.amazon.awssdk:kafkaconnect:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20571,6 +20751,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20631,7 +20812,6 @@ { "coord": "software.amazon.awssdk:kendra:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20643,6 +20823,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20703,7 +20884,6 @@ { "coord": "software.amazon.awssdk:kendraranking:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20715,6 +20895,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20775,7 +20956,6 @@ { "coord": "software.amazon.awssdk:keyspaces:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20787,6 +20967,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20847,7 +21028,6 @@ { "coord": "software.amazon.awssdk:kinesis:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20860,6 +21040,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -20922,7 +21103,6 @@ { "coord": "software.amazon.awssdk:kinesisanalytics:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -20934,6 +21114,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ 
-20994,7 +21175,6 @@ { "coord": "software.amazon.awssdk:kinesisanalyticsv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21006,6 +21186,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21066,7 +21247,6 @@ { "coord": "software.amazon.awssdk:kinesisvideo:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21078,6 +21258,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21138,7 +21319,6 @@ { "coord": "software.amazon.awssdk:kinesisvideoarchivedmedia:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21150,6 +21330,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21210,7 +21391,6 @@ { "coord": "software.amazon.awssdk:kinesisvideomedia:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21222,6 +21402,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21282,7 +21463,6 @@ { "coord": "software.amazon.awssdk:kinesisvideosignaling:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21294,6 +21474,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21354,7 +21535,6 @@ { "coord": "software.amazon.awssdk:kinesisvideowebrtcstorage:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21366,6 +21546,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", 
"io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21426,7 +21607,6 @@ { "coord": "software.amazon.awssdk:kms:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21438,6 +21618,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21498,7 +21679,6 @@ { "coord": "software.amazon.awssdk:lakeformation:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21510,6 +21690,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21570,7 +21751,6 @@ { "coord": "software.amazon.awssdk:lambda:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21582,6 +21762,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21642,7 +21823,6 @@ { "coord": "software.amazon.awssdk:launchwizard:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21654,6 +21834,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21714,7 +21895,6 @@ { "coord": "software.amazon.awssdk:lexmodelbuilding:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21726,6 +21906,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21786,7 +21967,6 @@ { "coord": "software.amazon.awssdk:lexmodelsv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21798,6 +21978,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", 
"io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21858,7 +22039,6 @@ { "coord": "software.amazon.awssdk:lexruntime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21870,6 +22050,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -21930,7 +22111,6 @@ { "coord": "software.amazon.awssdk:lexruntimev2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -21942,6 +22122,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22002,7 +22183,6 @@ { "coord": "software.amazon.awssdk:licensemanager:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22014,6 +22194,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22074,7 +22255,6 @@ { "coord": "software.amazon.awssdk:licensemanagerlinuxsubscriptions:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22086,6 +22266,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22146,7 +22327,6 @@ { "coord": "software.amazon.awssdk:licensemanagerusersubscriptions:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22158,6 +22338,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22218,7 +22399,6 @@ { "coord": "software.amazon.awssdk:lightsail:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22230,6 +22410,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", 
"io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22290,7 +22471,6 @@ { "coord": "software.amazon.awssdk:location:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22302,6 +22482,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22362,7 +22543,6 @@ { "coord": "software.amazon.awssdk:lookoutequipment:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22374,6 +22554,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22434,7 +22615,6 @@ { "coord": "software.amazon.awssdk:lookoutmetrics:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22446,6 +22626,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22506,7 +22687,6 @@ { "coord": "software.amazon.awssdk:lookoutvision:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22518,6 +22698,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22578,7 +22759,6 @@ { "coord": "software.amazon.awssdk:m2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22590,6 +22770,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22650,7 +22831,6 @@ { "coord": "software.amazon.awssdk:machinelearning:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22662,6 +22842,7 @@ "io.netty:netty-handler:4.1.100.Final", 
"org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22722,7 +22903,6 @@ { "coord": "software.amazon.awssdk:macie2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22734,6 +22914,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22794,7 +22975,6 @@ { "coord": "software.amazon.awssdk:macie:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22806,6 +22986,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22866,7 +23047,6 @@ { "coord": "software.amazon.awssdk:managedblockchain:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22878,6 +23058,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -22938,7 +23119,6 @@ { "coord": "software.amazon.awssdk:managedblockchainquery:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -22950,6 +23130,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23010,7 +23191,6 @@ { "coord": "software.amazon.awssdk:marketplacecatalog:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23022,6 +23202,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23082,7 +23263,6 @@ { "coord": "software.amazon.awssdk:marketplacecommerceanalytics:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", 
@@ -23094,6 +23274,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23154,7 +23335,6 @@ { "coord": "software.amazon.awssdk:marketplaceentitlement:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23166,6 +23346,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23226,7 +23407,6 @@ { "coord": "software.amazon.awssdk:marketplacemetering:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23238,6 +23418,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23298,7 +23479,6 @@ { "coord": "software.amazon.awssdk:mediaconnect:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23310,6 +23490,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23370,7 +23551,6 @@ { "coord": "software.amazon.awssdk:mediaconvert:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23382,6 +23562,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23442,7 +23623,6 @@ { "coord": "software.amazon.awssdk:medialive:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23454,6 +23634,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23514,7 +23695,6 @@ { "coord": "software.amazon.awssdk:mediapackage:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", 
"software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23526,6 +23706,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23586,7 +23767,6 @@ { "coord": "software.amazon.awssdk:mediapackagev2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23598,6 +23778,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23658,7 +23839,6 @@ { "coord": "software.amazon.awssdk:mediapackagevod:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23670,6 +23850,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23730,7 +23911,6 @@ { "coord": "software.amazon.awssdk:mediastore:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23742,6 +23922,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23802,7 +23983,6 @@ { "coord": "software.amazon.awssdk:mediastoredata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23814,6 +23994,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23874,7 +24055,6 @@ { "coord": "software.amazon.awssdk:mediatailor:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23886,6 +24066,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -23946,7 +24127,6 @@ { "coord": "software.amazon.awssdk:medicalimaging:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", 
"software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -23958,6 +24138,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24018,7 +24199,6 @@ { "coord": "software.amazon.awssdk:memorydb:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24030,6 +24210,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24111,7 +24292,6 @@ { "coord": "software.amazon.awssdk:mgn:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24123,6 +24303,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24183,7 +24364,6 @@ { "coord": "software.amazon.awssdk:migrationhub:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24195,6 +24375,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24255,7 +24436,6 @@ { "coord": "software.amazon.awssdk:migrationhubconfig:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24267,6 +24447,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24327,7 +24508,6 @@ { "coord": "software.amazon.awssdk:migrationhuborchestrator:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24339,6 +24519,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24399,7 +24580,6 @@ { "coord": "software.amazon.awssdk:migrationhubrefactorspaces:2.21.16", 
"dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24411,6 +24591,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24471,7 +24652,6 @@ { "coord": "software.amazon.awssdk:migrationhubstrategy:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24483,6 +24663,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24543,7 +24724,6 @@ { "coord": "software.amazon.awssdk:mobile:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24555,6 +24735,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24615,7 +24796,6 @@ { "coord": "software.amazon.awssdk:mq:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24627,6 +24807,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24687,7 +24868,6 @@ { "coord": "software.amazon.awssdk:mturk:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24699,6 +24879,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24759,7 +24940,6 @@ { "coord": "software.amazon.awssdk:mwaa:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24771,6 +24951,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24831,7 +25012,6 @@ { "coord": 
"software.amazon.awssdk:neptune:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24842,6 +25022,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -24903,7 +25084,6 @@ { "coord": "software.amazon.awssdk:neptunedata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -24915,6 +25095,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25021,7 +25202,6 @@ { "coord": "software.amazon.awssdk:networkfirewall:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25033,6 +25213,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25093,7 +25274,6 @@ { "coord": "software.amazon.awssdk:networkmanager:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25105,6 +25285,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25165,7 +25346,6 @@ { "coord": "software.amazon.awssdk:nimble:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25177,6 +25357,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25237,7 +25418,6 @@ { "coord": "software.amazon.awssdk:oam:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25249,6 +25429,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25309,7 
+25490,6 @@ { "coord": "software.amazon.awssdk:omics:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25321,6 +25501,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25381,7 +25562,6 @@ { "coord": "software.amazon.awssdk:opensearch:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25393,6 +25573,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25453,7 +25634,6 @@ { "coord": "software.amazon.awssdk:opensearchserverless:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25465,6 +25645,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25525,7 +25706,6 @@ { "coord": "software.amazon.awssdk:opsworks:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25537,6 +25717,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25597,7 +25778,6 @@ { "coord": "software.amazon.awssdk:opsworkscm:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25609,6 +25789,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25669,7 +25850,6 @@ { "coord": "software.amazon.awssdk:organizations:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25681,6 +25861,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", 
"io.netty:netty-codec-http:4.1.100.Final", @@ -25741,7 +25922,6 @@ { "coord": "software.amazon.awssdk:osis:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25753,6 +25933,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25813,7 +25994,6 @@ { "coord": "software.amazon.awssdk:outposts:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25825,6 +26005,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25885,7 +26066,6 @@ { "coord": "software.amazon.awssdk:panorama:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25897,6 +26077,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -25957,7 +26138,6 @@ { "coord": "software.amazon.awssdk:paymentcryptography:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -25969,6 +26149,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26029,7 +26210,6 @@ { "coord": "software.amazon.awssdk:paymentcryptographydata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26041,6 +26221,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26101,7 +26282,6 @@ { "coord": "software.amazon.awssdk:pcaconnectorad:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26113,6 +26293,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", 
"io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26173,7 +26354,6 @@ { "coord": "software.amazon.awssdk:personalize:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26185,6 +26365,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26245,7 +26426,6 @@ { "coord": "software.amazon.awssdk:personalizeevents:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26257,6 +26437,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26317,7 +26498,6 @@ { "coord": "software.amazon.awssdk:personalizeruntime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26329,6 +26509,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26389,7 +26570,6 @@ { "coord": "software.amazon.awssdk:pi:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26401,6 +26581,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26461,7 +26642,6 @@ { "coord": "software.amazon.awssdk:pinpoint:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26473,6 +26653,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26533,7 +26714,6 @@ { "coord": "software.amazon.awssdk:pinpointemail:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26545,6 +26725,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + 
"commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26605,7 +26786,6 @@ { "coord": "software.amazon.awssdk:pinpointsmsvoice:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26617,6 +26797,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26677,7 +26858,6 @@ { "coord": "software.amazon.awssdk:pinpointsmsvoicev2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26689,6 +26869,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26749,7 +26930,6 @@ { "coord": "software.amazon.awssdk:pipes:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26761,6 +26941,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26821,7 +27002,6 @@ { "coord": "software.amazon.awssdk:polly:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26833,6 +27013,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26893,7 +27074,6 @@ { "coord": "software.amazon.awssdk:pricing:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26905,6 +27085,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -26965,7 +27146,6 @@ { "coord": "software.amazon.awssdk:privatenetworks:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -26977,6 +27157,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", 
"io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27091,7 +27272,6 @@ { "coord": "software.amazon.awssdk:proton:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27103,6 +27283,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27163,7 +27344,6 @@ { "coord": "software.amazon.awssdk:qldb:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27175,6 +27355,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27235,7 +27416,6 @@ { "coord": "software.amazon.awssdk:qldbsession:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27247,6 +27427,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27307,7 +27488,6 @@ { "coord": "software.amazon.awssdk:quicksight:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27319,6 +27499,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27379,7 +27560,6 @@ { "coord": "software.amazon.awssdk:ram:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27391,6 +27571,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27451,7 +27632,6 @@ { "coord": "software.amazon.awssdk:rbin:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27463,6 +27643,7 @@ "io.netty:netty-handler:4.1.100.Final", 
"org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27523,7 +27704,6 @@ { "coord": "software.amazon.awssdk:rds:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27534,6 +27714,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27595,7 +27776,6 @@ { "coord": "software.amazon.awssdk:rdsdata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27607,6 +27787,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27667,7 +27848,6 @@ { "coord": "software.amazon.awssdk:redshift:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27678,6 +27858,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27739,7 +27920,6 @@ { "coord": "software.amazon.awssdk:redshiftdata:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27751,6 +27931,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27811,7 +27992,6 @@ { "coord": "software.amazon.awssdk:redshiftserverless:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27823,6 +28003,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27920,7 +28101,6 @@ { "coord": "software.amazon.awssdk:rekognition:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -27932,6 +28112,7 @@ 
"io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -27992,7 +28173,6 @@ { "coord": "software.amazon.awssdk:resiliencehub:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28004,6 +28184,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28064,7 +28245,6 @@ { "coord": "software.amazon.awssdk:resourceexplorer2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28076,6 +28256,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28136,7 +28317,6 @@ { "coord": "software.amazon.awssdk:resourcegroups:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28148,6 +28328,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28208,7 +28389,6 @@ { "coord": "software.amazon.awssdk:resourcegroupstaggingapi:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28220,6 +28400,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28280,7 +28461,6 @@ { "coord": "software.amazon.awssdk:robomaker:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28292,6 +28472,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28352,7 +28533,6 @@ { "coord": "software.amazon.awssdk:rolesanywhere:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", 
"software.amazon.awssdk:protocol-core:2.21.16", @@ -28364,6 +28544,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28424,7 +28605,6 @@ { "coord": "software.amazon.awssdk:route53:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:aws-xml-protocol:2.21.16", @@ -28436,6 +28616,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28497,7 +28678,6 @@ { "coord": "software.amazon.awssdk:route53domains:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28509,6 +28689,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28569,7 +28750,6 @@ { "coord": "software.amazon.awssdk:route53recoverycluster:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28581,6 +28761,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28641,7 +28822,6 @@ { "coord": "software.amazon.awssdk:route53recoverycontrolconfig:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28653,6 +28833,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28713,7 +28894,6 @@ { "coord": "software.amazon.awssdk:route53recoveryreadiness:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28725,6 +28905,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28785,7 +28966,6 @@ { "coord": "software.amazon.awssdk:route53resolver:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", 
"software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28797,6 +28977,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28857,7 +29038,6 @@ { "coord": "software.amazon.awssdk:rum:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -28869,6 +29049,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28929,7 +29110,6 @@ { "coord": "software.amazon.awssdk:s3-transfer-manager:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:aws-xml-protocol:2.21.16", @@ -28943,6 +29123,7 @@ "io.netty:netty-codec-http2:4.1.100.Final", "software.amazon.awssdk:s3:2.21.16", "software.amazon.awssdk:crt-core:2.21.16", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -28997,7 +29178,6 @@ { "coord": "software.amazon.awssdk:s3:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:aws-xml-protocol:2.21.16", @@ -29010,6 +29190,7 @@ "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", "software.amazon.awssdk:crt-core:2.21.16", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29074,7 +29255,6 @@ { "coord": "software.amazon.awssdk:s3control:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:aws-xml-protocol:2.21.16", @@ -29088,6 +29268,7 @@ "io.netty:netty-codec-http2:4.1.100.Final", "software.amazon.awssdk:s3:2.21.16", "software.amazon.awssdk:crt-core:2.21.16", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29152,7 +29333,6 @@ { "coord": "software.amazon.awssdk:s3outposts:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29164,6 +29344,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29224,7 +29405,6 @@ { "coord": "software.amazon.awssdk:sagemaker:2.21.16", "dependencies": [ - 
"commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29236,6 +29416,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29296,7 +29477,6 @@ { "coord": "software.amazon.awssdk:sagemakera2iruntime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29308,6 +29488,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29368,7 +29549,6 @@ { "coord": "software.amazon.awssdk:sagemakeredge:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29380,6 +29560,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29440,7 +29621,6 @@ { "coord": "software.amazon.awssdk:sagemakerfeaturestoreruntime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29452,6 +29632,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29512,7 +29693,6 @@ { "coord": "software.amazon.awssdk:sagemakergeospatial:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29524,6 +29704,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29584,7 +29765,6 @@ { "coord": "software.amazon.awssdk:sagemakermetrics:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29596,6 +29776,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29656,7 +29837,6 @@ { 
"coord": "software.amazon.awssdk:sagemakerruntime:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29668,6 +29848,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29728,7 +29909,6 @@ { "coord": "software.amazon.awssdk:savingsplans:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29740,6 +29920,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29800,7 +29981,6 @@ { "coord": "software.amazon.awssdk:scheduler:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29812,6 +29992,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29872,7 +30053,6 @@ { "coord": "software.amazon.awssdk:schemas:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29884,6 +30064,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -29985,7 +30166,6 @@ { "coord": "software.amazon.awssdk:secretsmanager:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -29997,6 +30177,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30057,7 +30238,6 @@ { "coord": "software.amazon.awssdk:securityhub:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30069,6 +30249,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", 
"io.netty:netty-codec-http:4.1.100.Final", @@ -30129,7 +30310,6 @@ { "coord": "software.amazon.awssdk:securitylake:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30141,6 +30321,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30201,7 +30382,6 @@ { "coord": "software.amazon.awssdk:serverlessapplicationrepository:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30213,6 +30393,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30273,7 +30454,6 @@ { "coord": "software.amazon.awssdk:servicecatalog:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30285,6 +30465,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30345,7 +30526,6 @@ { "coord": "software.amazon.awssdk:servicecatalogappregistry:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30357,6 +30537,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30417,7 +30598,6 @@ { "coord": "software.amazon.awssdk:servicediscovery:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30429,6 +30609,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30489,7 +30670,6 @@ { "coord": "software.amazon.awssdk:servicequotas:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30501,6 +30681,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", 
"io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30561,7 +30742,6 @@ { "coord": "software.amazon.awssdk:ses:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30572,6 +30752,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30633,7 +30814,6 @@ { "coord": "software.amazon.awssdk:sesv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30645,6 +30825,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30705,7 +30886,6 @@ { "coord": "software.amazon.awssdk:sfn:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30717,6 +30897,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30777,7 +30958,6 @@ { "coord": "software.amazon.awssdk:shield:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30789,6 +30969,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30849,7 +31030,6 @@ { "coord": "software.amazon.awssdk:signer:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30861,6 +31041,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30921,7 +31102,6 @@ { "coord": "software.amazon.awssdk:simspaceweaver:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -30933,6 +31113,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + 
"commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -30993,7 +31174,6 @@ { "coord": "software.amazon.awssdk:sms:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31005,6 +31185,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31065,7 +31246,6 @@ { "coord": "software.amazon.awssdk:snowball:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31077,6 +31257,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31137,7 +31318,6 @@ { "coord": "software.amazon.awssdk:snowdevicemanagement:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31149,6 +31329,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31209,7 +31390,6 @@ { "coord": "software.amazon.awssdk:sns:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31220,6 +31400,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31281,7 +31462,6 @@ { "coord": "software.amazon.awssdk:sqs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31292,6 +31472,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31353,7 +31534,6 @@ { "coord": "software.amazon.awssdk:ssm:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31365,6 +31545,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", 
"io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31425,7 +31606,6 @@ { "coord": "software.amazon.awssdk:ssmcontacts:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31437,6 +31617,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31497,7 +31678,6 @@ { "coord": "software.amazon.awssdk:ssmincidents:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31509,6 +31689,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31569,7 +31750,6 @@ { "coord": "software.amazon.awssdk:ssmsap:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31581,6 +31761,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31641,7 +31822,6 @@ { "coord": "software.amazon.awssdk:sso:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31653,6 +31833,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31713,7 +31894,6 @@ { "coord": "software.amazon.awssdk:ssoadmin:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31725,6 +31905,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31785,7 +31966,6 @@ { "coord": "software.amazon.awssdk:ssooidc:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31797,6 +31977,7 @@ "io.netty:netty-handler:4.1.100.Final", 
"org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31858,7 +32039,6 @@ { "coord": "software.amazon.awssdk:storagegateway:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31870,6 +32050,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -31930,7 +32111,6 @@ { "coord": "software.amazon.awssdk:sts:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -31941,6 +32121,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32002,7 +32183,6 @@ { "coord": "software.amazon.awssdk:support:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32014,6 +32194,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32074,7 +32255,6 @@ { "coord": "software.amazon.awssdk:supportapp:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32086,6 +32266,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32146,7 +32327,6 @@ { "coord": "software.amazon.awssdk:swf:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32158,6 +32338,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32218,7 +32399,6 @@ { "coord": "software.amazon.awssdk:synthetics:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32230,6 +32410,7 @@ 
"io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32290,7 +32471,6 @@ { "coord": "software.amazon.awssdk:textract:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32302,6 +32482,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32392,7 +32573,6 @@ { "coord": "software.amazon.awssdk:timestreamquery:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32404,6 +32584,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32464,7 +32645,6 @@ { "coord": "software.amazon.awssdk:timestreamwrite:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32476,6 +32656,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32536,7 +32717,6 @@ { "coord": "software.amazon.awssdk:tnb:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32548,6 +32728,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32608,7 +32789,6 @@ { "coord": "software.amazon.awssdk:transcribe:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32620,6 +32800,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32680,7 +32861,6 @@ { "coord": "software.amazon.awssdk:transcribestreaming:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", 
"software.amazon.awssdk:protocol-core:2.21.16", @@ -32692,6 +32872,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32754,7 +32935,6 @@ { "coord": "software.amazon.awssdk:transfer:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32766,6 +32946,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32826,7 +33007,6 @@ { "coord": "software.amazon.awssdk:translate:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32838,6 +33018,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -32943,7 +33124,6 @@ { "coord": "software.amazon.awssdk:verifiedpermissions:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -32955,6 +33135,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33015,7 +33196,6 @@ { "coord": "software.amazon.awssdk:voiceid:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33027,6 +33207,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33087,7 +33268,6 @@ { "coord": "software.amazon.awssdk:vpclattice:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33099,6 +33279,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33159,7 +33340,6 @@ { "coord": "software.amazon.awssdk:waf:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", 
"software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33171,6 +33351,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33231,7 +33412,6 @@ { "coord": "software.amazon.awssdk:wafv2:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33243,6 +33423,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33303,7 +33484,6 @@ { "coord": "software.amazon.awssdk:wellarchitected:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33315,6 +33495,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33375,7 +33556,6 @@ { "coord": "software.amazon.awssdk:wisdom:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33387,6 +33567,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33447,7 +33628,6 @@ { "coord": "software.amazon.awssdk:workdocs:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33459,6 +33639,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33519,7 +33700,6 @@ { "coord": "software.amazon.awssdk:worklink:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33531,6 +33711,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33591,7 +33772,6 @@ { "coord": "software.amazon.awssdk:workmail:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", 
"software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33603,6 +33783,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33663,7 +33844,6 @@ { "coord": "software.amazon.awssdk:workmailmessageflow:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33675,6 +33855,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33735,7 +33916,6 @@ { "coord": "software.amazon.awssdk:workspaces:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33747,6 +33927,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33807,7 +33988,6 @@ { "coord": "software.amazon.awssdk:workspacesweb:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33819,6 +33999,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", @@ -33879,7 +34060,6 @@ { "coord": "software.amazon.awssdk:xray:2.21.16", "dependencies": [ - "commons-codec:commons-codec:1.16.0", "software.amazon.awssdk:netty-nio-client:2.21.16", "software.amazon.awssdk:aws-core:2.21.16", "software.amazon.awssdk:protocol-core:2.21.16", @@ -33891,6 +34071,7 @@ "io.netty:netty-handler:4.1.100.Final", "org.reactivestreams:reactive-streams:1.0.4", "io.netty:netty-codec-http2:4.1.100.Final", + "commons-codec:commons-codec:1.17.0", "io.netty:netty-resolver:4.1.100.Final", "io.netty:netty-buffer:4.1.100.Final", "io.netty:netty-codec-http:4.1.100.Final", diff --git a/sampledata/aggregate_result.json b/sampledata/aggregate_result.json index 77d2866c..8bf3c062 100644 --- a/sampledata/aggregate_result.json +++ b/sampledata/aggregate_result.json @@ -1,4 +1,8 @@ [{ - "bucket" : "BVk=", - "metric" : 32768 + "bucket":"<øg\u0090?»sì&Õ\u0018À\u0096\u008c)Ü", + "metric":32768 +}, +{ + "bucket":"$Reô2ñns&Õ\u0018À\u0096\u008c)Ü", + "metric":4400 }] diff --git a/sampledata/output_debug_reports.avro b/sampledata/output_debug_reports.avro index a43dd74b..9fe75d10 100644 Binary files a/sampledata/output_debug_reports.avro and b/sampledata/output_debug_reports.avro differ diff --git a/sampledata/output_domain.avro b/sampledata/output_domain.avro index 
2c4f1fd5..3d6b4c90 100644 Binary files a/sampledata/output_domain.avro and b/sampledata/output_domain.avro differ diff --git a/sampledata/output_reports.avro b/sampledata/output_reports.avro index 7ab60726..d38cb128 100644 Binary files a/sampledata/output_reports.avro and b/sampledata/output_reports.avro differ diff --git a/telemetry/common/java/com/google/privacysandbox/otel/BUILD b/telemetry/common/java/com/google/privacysandbox/otel/BUILD index c814c98a..5384893b 100644 --- a/telemetry/common/java/com/google/privacysandbox/otel/BUILD +++ b/telemetry/common/java/com/google/privacysandbox/otel/BUILD @@ -22,6 +22,7 @@ java_library( javacopts = ["-Xep:Var:ERROR"], deps = [ "//java/external:error_prone_anntotations", + "//java/external:guava", "//java/external:guice", "//java/external:opentelemetry_api", "//java/external:opentelemetry_context", diff --git a/telemetry/common/java/com/google/privacysandbox/otel/OTelConfiguration.java b/telemetry/common/java/com/google/privacysandbox/otel/OTelConfiguration.java index 9e85397d..6e4cbd6e 100644 --- a/telemetry/common/java/com/google/privacysandbox/otel/OTelConfiguration.java +++ b/telemetry/common/java/com/google/privacysandbox/otel/OTelConfiguration.java @@ -98,6 +98,16 @@ public interface OTelConfiguration { */ Timer createProdTimerStarted(String name, String jobID); + /** + * Creates a {@link Timer} with the given name in both debug and prod environments. + * + * @param name {@link String} + * @param jobID {@link String} + * @param timeUnit {@link TimerUnit} + * @return {@link Timer} + */ + Timer createProdTimerStarted(String name, String jobID, TimerUnit timeUnit); + /** * Creates a {@link Timer} given name in both prod environments only. Add attributes to its span * attributes. diff --git a/telemetry/common/java/com/google/privacysandbox/otel/OTelConfigurationImplHelper.java b/telemetry/common/java/com/google/privacysandbox/otel/OTelConfigurationImplHelper.java index 4221b557..a4d2e974 100644 --- a/telemetry/common/java/com/google/privacysandbox/otel/OTelConfigurationImplHelper.java +++ b/telemetry/common/java/com/google/privacysandbox/otel/OTelConfigurationImplHelper.java @@ -16,6 +16,7 @@ package com.google.privacysandbox.otel; +import com.google.common.annotations.VisibleForTesting; import com.google.errorprone.annotations.MustBeClosed; import com.sun.management.OperatingSystemMXBean; import io.opentelemetry.api.common.Attributes; @@ -28,7 +29,7 @@ import java.util.Map; /** Implements helper methods for {@link OTelConfiguration} implementations */ -public final class OTelConfigurationImplHelper { +public class OTelConfigurationImplHelper { private final Meter meter; private final Tracer tracer; @@ -45,13 +46,21 @@ public void createMemoryUtilizationRatioGauge() { .setUnit("percent") .buildWithCallback( measurement -> { - double usedMemory = - (double) (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()); - double ratio = usedMemory / Runtime.getRuntime().maxMemory(); - measurement.record((ratio) * 100.0); + double ratio = getUsedMemoryRatio(); + // This rounds 14 to 10 and 15 to 20. + int ratioRoundToTen = (int) Math.round(ratio / 10.0) * 10; + // Clamp the ratio at 90.
+ measurement.record(Math.min(ratioRoundToTen, 90)); }); } + @VisibleForTesting + double getUsedMemoryRatio() { + double usedMemory = + (double) (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()); + return usedMemory / Runtime.getRuntime().maxMemory() * 100.0; + } + /** Creates a gauge meter that periodically exports memory utilization */ public void createMemoryUtilizationGauge() { meter @@ -76,8 +85,8 @@ public void createCPUUtilizationGauge() { measurement -> { OperatingSystemMXBean osBean = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class); - Double CpuUsage = osBean.getProcessCpuLoad() * 100; - measurement.record(CpuUsage); + int cpuUsage = (int) (osBean.getProcessCpuLoad() * 100); + measurement.record(cpuUsage); }); } @@ -113,7 +122,21 @@ public Timer createTimerStarted(String name) { @MustBeClosed public Timer createTimerStarted(String name, String jobID) { SpanBuilder sp = tracer.spanBuilder(name).setNoParent(); - return new TimerImpl(sp, jobID); + return new TimerImpl(sp, jobID, TimerUnit.NANOSECONDS); + } + + /** + * Creates a {@link Timer} and adds jobID to the span attributes + * + * @param name {@link String} + * @param jobID {@link String} + * @param timeUnit {@link TimerUnit} + * @return {@link Timer} + */ + @MustBeClosed + public Timer createTimerStarted(String name, String jobID, TimerUnit timeUnit) { + SpanBuilder sp = tracer.spanBuilder(name).setNoParent(); + return new TimerImpl(sp, jobID, timeUnit); } /** diff --git a/telemetry/common/java/com/google/privacysandbox/otel/TimerImpl.java b/telemetry/common/java/com/google/privacysandbox/otel/TimerImpl.java index 207822b2..d8ab81e4 100644 --- a/telemetry/common/java/com/google/privacysandbox/otel/TimerImpl.java +++ b/telemetry/common/java/com/google/privacysandbox/otel/TimerImpl.java @@ -20,6 +20,8 @@ import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.context.Scope; +import java.time.Instant; +import java.util.concurrent.TimeUnit; /** * Manages {@link Span}. 
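The createMemoryUtilizationRatioGauge() change above coarsens the reported value: the measured percentage is rounded to the nearest multiple of ten and clamped at 90 before it is recorded. A minimal sketch of that arithmetic, assuming only the JDK (RoundingDemo is a hypothetical class, not part of this change):

public final class RoundingDemo {
  // Rounds a percentage to the nearest multiple of 10, then clamps it at 90,
  // mirroring the logic added to createMemoryUtilizationRatioGauge().
  static int roundAndClamp(double ratioPercent) {
    int ratioRoundToTen = (int) Math.round(ratioPercent / 10.0) * 10;
    return Math.min(ratioRoundToTen, 90);
  }

  public static void main(String[] args) {
    // Prints: 1.0 -> 0, 14.0 -> 10, 15.0 -> 20, 91.0 -> 90, 95.0 -> 90, 100.0 -> 90
    for (double value : new double[] {1.0, 14.0, 15.0, 91.0, 95.0, 100.0}) {
      System.out.println(value + " -> " + roundAndClamp(value));
    }
  }
}

These are the same input/output pairs exercised by the createProdMemoryUtilizationRatioGauge_returnCorrectValues tests added further below.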
@@ -40,21 +42,30 @@ public class TimerImpl implements Timer { private final Span span; private final Scope scope; + private final TimerUnit timeUnit; @SuppressWarnings("MustBeClosedChecker") TimerImpl(SpanBuilder sb) { + timeUnit = TimerUnit.NANOSECONDS; span = sb.startSpan(); scope = span.makeCurrent(); } @SuppressWarnings("MustBeClosedChecker") - TimerImpl(SpanBuilder sb, String jobID) { + TimerImpl(SpanBuilder sb, String jobID, TimerUnit unit) { + timeUnit = unit; + if (timeUnit.equals(TimerUnit.SECONDS)) { + long timeMillis = System.currentTimeMillis(); + long timeSecond = TimeUnit.SECONDS.convert(timeMillis, TimeUnit.MILLISECONDS); + sb.setStartTimestamp(timeSecond, TimeUnit.SECONDS); + } span = sb.startSpan().setAttribute("job-id", jobID); scope = span.makeCurrent(); } @SuppressWarnings("MustBeClosedChecker") TimerImpl(SpanBuilder sb, Attributes attributes) { + timeUnit = TimerUnit.NANOSECONDS; span = sb.startSpan().setAllAttributes(attributes); scope = span.makeCurrent(); } @@ -72,6 +83,12 @@ public void addEvent(String text) { @Override public void close() { scope.close(); - span.end(); + if (timeUnit.equals(TimerUnit.SECONDS)) { + long timeMillis = System.currentTimeMillis(); + long timeSecond = TimeUnit.SECONDS.convert(timeMillis, TimeUnit.MILLISECONDS); + span.end(Instant.ofEpochSecond(timeSecond)); + } else { + span.end(); + } } } diff --git a/java/com/google/aggregate/adtech/worker/testing/FakePrivateKeyFetchingService.java b/telemetry/common/java/com/google/privacysandbox/otel/TimerUnit.java similarity index 50% rename from java/com/google/aggregate/adtech/worker/testing/FakePrivateKeyFetchingService.java rename to telemetry/common/java/com/google/privacysandbox/otel/TimerUnit.java index 9a11a9c9..a6ce2979 100644 --- a/java/com/google/aggregate/adtech/worker/testing/FakePrivateKeyFetchingService.java +++ b/telemetry/common/java/com/google/privacysandbox/otel/TimerUnit.java @@ -1,5 +1,5 @@ /* - * Copyright 2022 Google LLC + * Copyright 2024 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,20 +14,10 @@ * limitations under the License. */ -package com.google.aggregate.adtech.worker.testing; +package com.google.privacysandbox.otel; -import com.google.scp.operator.cpio.cryptoclient.PrivateKeyFetchingService; - -/** Fake PrivateKeyFetchingService which returns a preconfigured response. 
*/ -public final class FakePrivateKeyFetchingService implements PrivateKeyFetchingService { - - private String response = ""; - - public String fetchKeyCiphertext(String keyId) throws PrivateKeyFetchingServiceException { - return response; - } - - public void setResponse(String newResponse) { - response = newResponse; - } +/** enum to select the unit of timer */ +public enum TimerUnit { + SECONDS, + NANOSECONDS } diff --git a/telemetry/debug/java/com/google/privacysandbox/otel/BUILD b/telemetry/debug/java/com/google/privacysandbox/otel/BUILD index c56ef50f..29f2987f 100644 --- a/telemetry/debug/java/com/google/privacysandbox/otel/BUILD +++ b/telemetry/debug/java/com/google/privacysandbox/otel/BUILD @@ -22,6 +22,7 @@ java_library( javacopts = ["-Xep:Var:ERROR"], deps = [ "//java/external:error_prone_anntotations", + "//java/external:guava", "//java/external:guice", "//java/external:opentelemetry_api", "//java/external:opentelemetry_contrib_aws_xray", diff --git a/telemetry/debug/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java b/telemetry/debug/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java index 1d349239..d3a397b0 100644 --- a/telemetry/debug/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java +++ b/telemetry/debug/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java @@ -16,6 +16,7 @@ package com.google.privacysandbox.otel; +import com.google.common.annotations.VisibleForTesting; import com.google.errorprone.annotations.MustBeClosed; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.metrics.LongCounter; @@ -36,6 +37,11 @@ public final class OTelConfigurationImpl implements OTelConfiguration { oTel.getTracer(OTelConfigurationImpl.class.getName())); } + @VisibleForTesting + OTelConfigurationImpl(OTelConfigurationImplHelper oTelConfigurationImplHelper) { + this.oTelConfigurationImplHelper = oTelConfigurationImplHelper; + } + @Override public void createProdMemoryUtilizationRatioGauge() { oTelConfigurationImplHelper.createMemoryUtilizationRatioGauge(); @@ -88,7 +94,13 @@ public Timer createDebugTimerStarted(String name, Map attributeMap) { @Override @MustBeClosed public Timer createProdTimerStarted(String name, String jobID) { - return oTelConfigurationImplHelper.createTimerStarted(name, jobID); + return oTelConfigurationImplHelper.createTimerStarted(name, jobID, TimerUnit.NANOSECONDS); + } + + @Override + @MustBeClosed + public Timer createProdTimerStarted(String name, String jobID, TimerUnit timeUnit) { + return oTelConfigurationImplHelper.createTimerStarted(name, jobID, timeUnit); } @Override diff --git a/telemetry/debug/javatests/com/google/privacysandbox/otel/BUILD b/telemetry/debug/javatests/com/google/privacysandbox/otel/BUILD index d2d32b9f..85d8fe3b 100644 --- a/telemetry/debug/javatests/com/google/privacysandbox/otel/BUILD +++ b/telemetry/debug/javatests/com/google/privacysandbox/otel/BUILD @@ -22,6 +22,7 @@ java_test( deps = [ "//java/external:google_truth", "//java/external:guava", + "//java/external:mockito", "//java/external:opentelemetry_api", "//java/external:opentelemetry_sdk", "//java/external:opentelemetry_sdk_common", diff --git a/telemetry/debug/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java b/telemetry/debug/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java index fd51b590..14286fc0 100644 --- a/telemetry/debug/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java +++ 
b/telemetry/debug/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java @@ -18,6 +18,8 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.truth.Truth.assertThat; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -55,6 +57,7 @@ public final class OTelConfigurationImplTest { private InMemorySpanExporter spanExporter; private InMemoryMetricReader metricReader; private OTelConfiguration oTelConfigurationImpl; + private OpenTelemetry openTelemetry; @Before public void setUp() { @@ -78,7 +81,7 @@ public void setUp() { .build(); // Setup OpenTelemetry object - OpenTelemetry openTelemetry = + openTelemetry = OpenTelemetrySdk.builder() .setTracerProvider(sdkTracerProvider) .setMeterProvider(sdkMeterProvider) @@ -155,6 +158,41 @@ public void createProdCounter_ensuresNoDecrement() { assertCounterValue(counterName, counterValue); } + @Test + public void createProdMemoryUtilizationRatioGauge_returnCorrectValues() { + ImmutableList<Double> getMemoryValues = ImmutableList.of(1.0, 14.0, 15.0, 91.0, 95.0, 100.0); + ImmutableList<Integer> expectedMemoryRatio = ImmutableList.of(0, 10, 20, 90, 90, 90); + String gaugeName = "process.runtime.jvm.memory.utilization_ratio"; + + for (int i = 0; i < expectedMemoryRatio.size(); i++) { + metricReader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .setResource(RESOURCE) + .setClock(CLOCK) + .registerMetricReader(metricReader) + .build(); + openTelemetry = OpenTelemetrySdk.builder().setMeterProvider(sdkMeterProvider).build(); + OTelConfigurationImplHelper helper = + spy( + new OTelConfigurationImplHelper( + openTelemetry.getMeter(OTelConfigurationImpl.class.getName()), + openTelemetry.getTracer(OTelConfigurationImpl.class.getName()))); + when(helper.getUsedMemoryRatio()).thenReturn(getMemoryValues.get(i)); + OTelConfigurationImpl mockOTelConfigurationImpl = new OTelConfigurationImpl(helper); + mockOTelConfigurationImpl.createProdMemoryUtilizationRatioGauge(); + MetricData metric = + metricReader.collectAllMetrics().stream().collect(toImmutableList()).get(0); + DoublePointData point = + (DoublePointData) metric.getData().getPoints().stream().collect(toImmutableList()).get(0); + + assertThat(metric.getName()).isEqualTo(gaugeName); + assertThat(metric.getUnit()).isEqualTo("percent"); + assertThat(point.getValue()).isNotNull(); + assertThat(point.getValue()).isEqualTo(expectedMemoryRatio.get(i)); + } + } + private void assertGaugeNonNull(String name, String unit) { MetricData metric = metricReader.collectAllMetrics().stream().collect(toImmutableList()).get(0); DoublePointData point = @@ -174,15 +212,6 @@ public void createProdMemoryUtilizationGauge_isNotNull() { assertGaugeNonNull(gaugeName, "MiB"); } - @Test - public void createProdMemoryUtilizationRatioGauge_isNotNull() { - String gaugeName = "process.runtime.jvm.memory.utilization_ratio"; - - oTelConfigurationImpl.createProdMemoryUtilizationRatioGauge(); - - assertGaugeNonNull(gaugeName, "percent"); - } - @Test public void createProdCPUUtilizationGauge_isNotNull() { String gaugeName = "process.runtime.jvm.CPU.utilization"; @@ -324,4 +353,29 @@ public void createProdTimerStarted_addEventSucceeds() { assertThat(spanData.getName()).isEqualTo(timerName); assertThat(spanData.getEvents().get(0).getName()).isEqualTo(eventName); } + + @Test + public void
createProdTimerStart_setTimeUnitSeconds() { + String timerName = "prodTimer"; + TimerUnit timeUnit = TimerUnit.SECONDS; + String jobID = "job1"; + + try (Timer timer = oTelConfigurationImpl.createProdTimerStarted(timerName, jobID, timeUnit)) { + Thread.sleep(1234); // Add delay to make startEpoch and endEpoch different + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + List<SpanData> spanItems = spanExporter.getFinishedSpanItems(); + + assertThat(spanItems).isNotNull(); + + ImmutableList<Long> startEpoch = + spanItems.stream().map(SpanData::getStartEpochNanos).collect(toImmutableList()); + ImmutableList<Long> endEpoch = + spanItems.stream().map(SpanData::getEndEpochNanos).collect(toImmutableList()); + + // Start and end epochs should end in zeros because second-granularity timestamps are converted to nanoseconds. + assertThat((long) startEpoch.get(0) % 1000000000).isEqualTo(0); + assertThat((long) endEpoch.get(0) % 1000000000).isEqualTo(0); + } } diff --git a/telemetry/noop/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java b/telemetry/noop/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java index e4401e4e..82146f07 100644 --- a/telemetry/noop/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java +++ b/telemetry/noop/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java @@ -78,6 +78,12 @@ public Timer createProdTimerStarted(String name, String jobID) { return NOOP_TIMER; } + @Override + @MustBeClosed + public Timer createProdTimerStarted(String name, String jobID, TimerUnit unit) { + return NOOP_TIMER; + } + @Override @MustBeClosed public Timer createProdTimerStarted(String name, Map<String, String> attributeMap) { diff --git a/telemetry/noop/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java b/telemetry/noop/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java index 146cfba9..85738be8 100644 --- a/telemetry/noop/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java +++ b/telemetry/noop/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java @@ -246,4 +246,16 @@ private void assertGaugeIsNull() { assertThat(metric.size()).isEqualTo(0); } + + @Test + public void createProdTimerStart_setTimeUnitSeconds() { + String timerName = "prodTimer"; + TimerUnit timeUnit = TimerUnit.SECONDS; + String jobID = "job1"; + + try (Timer timer = oTelConfigurationImpl.createProdTimerStarted(timerName, jobID, timeUnit)) {} + List<SpanData> spanItems = spanExporter.getFinishedSpanItems(); + + assertThat(spanItems).isEmpty(); + } } diff --git a/telemetry/prod/java/com/google/privacysandbox/otel/BUILD b/telemetry/prod/java/com/google/privacysandbox/otel/BUILD index cc0b1904..f4c99492 100644 --- a/telemetry/prod/java/com/google/privacysandbox/otel/BUILD +++ b/telemetry/prod/java/com/google/privacysandbox/otel/BUILD @@ -22,6 +22,7 @@ java_library( javacopts = ["-Xep:Var:ERROR"], deps = [ "//java/external:error_prone_anntotations", + "//java/external:guava", "//java/external:guice", "//java/external:opentelemetry_api", "//java/external:opentelemetry_context", diff --git a/telemetry/prod/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java b/telemetry/prod/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java index 0fdf2937..c2d2a605 100644 --- a/telemetry/prod/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java +++ b/telemetry/prod/java/com/google/privacysandbox/otel/OTelConfigurationImpl.java @@ -16,6 +16,7 @@ package com.google.privacysandbox.otel; +import com.google.common.annotations.VisibleForTesting; import
com.google.errorprone.annotations.MustBeClosed; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.metrics.LongCounter; @@ -39,6 +40,11 @@ public final class OTelConfigurationImpl implements OTelConfiguration { oTel.getTracer(OTelConfigurationImpl.class.getName())); } + @VisibleForTesting + OTelConfigurationImpl(OTelConfigurationImplHelper oTelConfigurationImplHelper) { + this.oTelConfigurationImplHelper = oTelConfigurationImplHelper; + } + @Override public void createProdMemoryUtilizationRatioGauge() { oTelConfigurationImplHelper.createMemoryUtilizationRatioGauge(); @@ -91,7 +97,13 @@ public Timer createDebugTimerStarted(String name, Map<String, String> attributeMap) { @Override @MustBeClosed public Timer createProdTimerStarted(String name, String jobID) { - return oTelConfigurationImplHelper.createTimerStarted(name, jobID); + return oTelConfigurationImplHelper.createTimerStarted(name, jobID, TimerUnit.NANOSECONDS); + } + + @Override + @MustBeClosed + public Timer createProdTimerStarted(String name, String jobID, TimerUnit timeUnit) { + return oTelConfigurationImplHelper.createTimerStarted(name, jobID, timeUnit); + } @Override diff --git a/telemetry/prod/javatests/com/google/privacysandbox/otel/BUILD b/telemetry/prod/javatests/com/google/privacysandbox/otel/BUILD index 6be20c76..2426ad84 100644 --- a/telemetry/prod/javatests/com/google/privacysandbox/otel/BUILD +++ b/telemetry/prod/javatests/com/google/privacysandbox/otel/BUILD @@ -22,6 +22,7 @@ java_test( deps = [ "//java/external:google_truth", "//java/external:guava", + "//java/external:mockito", "//java/external:opentelemetry_api", "//java/external:opentelemetry_sdk", "//java/external:opentelemetry_sdk_common", diff --git a/telemetry/prod/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java b/telemetry/prod/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java index 016fb463..efe0f8fc 100644 --- a/telemetry/prod/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java +++ b/telemetry/prod/javatests/com/google/privacysandbox/otel/OTelConfigurationImplTest.java @@ -18,6 +18,8 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.truth.Truth.assertThat; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -55,6 +57,7 @@ public final class OTelConfigurationImplTest { private InMemorySpanExporter spanExporter; private InMemoryMetricReader metricReader; private OTelConfiguration oTelConfigurationImpl; + private OpenTelemetry openTelemetry; @Before public void setUp() { @@ -80,7 +83,7 @@ public void setUp() { .build(); // Setup OpenTelemetry object - OpenTelemetry openTelemetry = + openTelemetry = OpenTelemetrySdk.builder() .setTracerProvider(sdkTracerProvider) .setMeterProvider(sdkMeterProvider) @@ -144,6 +147,41 @@ public void createProdCounter_ensuresNoDecrement() { assertCounterValue(counterName, counterValue); } + @Test + public void createProdMemoryUtilizationRatioGauge_returnCorrectValues() { + ImmutableList<Double> getMemoryValues = ImmutableList.of(1.0, 14.0, 15.0, 91.0, 95.0, 100.0); + ImmutableList<Integer> expectedMemoryRatio = ImmutableList.of(0, 10, 20, 90, 90, 90); + String gaugeName = "process.runtime.jvm.memory.utilization_ratio"; + + for (int i = 0; i < expectedMemoryRatio.size(); i++) { + metricReader = InMemoryMetricReader.create(); + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() +
.setResource(RESOURCE) + .setClock(CLOCK) + .registerMetricReader(metricReader) + .build(); + openTelemetry = OpenTelemetrySdk.builder().setMeterProvider(sdkMeterProvider).build(); + OTelConfigurationImplHelper helper = + spy( + new OTelConfigurationImplHelper( + openTelemetry.getMeter(OTelConfigurationImpl.class.getName()), + openTelemetry.getTracer(OTelConfigurationImpl.class.getName()))); + when(helper.getUsedMemoryRatio()).thenReturn(getMemoryValues.get(i)); + OTelConfigurationImpl mockOTelConfigurationImpl = new OTelConfigurationImpl(helper); + mockOTelConfigurationImpl.createProdMemoryUtilizationRatioGauge(); + MetricData metric = + metricReader.collectAllMetrics().stream().collect(toImmutableList()).get(0); + DoublePointData point = + (DoublePointData) metric.getData().getPoints().stream().collect(toImmutableList()).get(0); + + assertThat(metric.getName()).isEqualTo(gaugeName); + assertThat(metric.getUnit()).isEqualTo("percent"); + assertThat(point.getValue()).isNotNull(); + assertThat(point.getValue()).isEqualTo(expectedMemoryRatio.get(i)); + } + } + private void assertGaugeNonNull(String name, String unit) { MetricData metric = metricReader.collectAllMetrics().stream().collect(toImmutableList()).get(0); DoublePointData point = @@ -163,15 +201,6 @@ public void createProdMemoryUtilizationGauge_isNotNull() { assertGaugeNonNull(gaugeName, "MiB"); } - @Test - public void createProdMemoryUtilizationRatioGauge_isNotNull() { - String gaugeName = "process.runtime.jvm.memory.utilization_ratio"; - - oTelConfigurationImpl.createProdMemoryUtilizationRatioGauge(); - - assertGaugeNonNull(gaugeName, "percent"); - } - @Test public void createProdCPUUtilizationGauge_isNotNull() { String gaugeName = "process.runtime.jvm.CPU.utilization"; @@ -310,4 +339,30 @@ public void createProdTimerStarted_addEventSucceeds() { assertThat(spanData.getName()).isEqualTo(timerName); assertThat(spanData.getEvents().get(0).getName()).isEqualTo(eventName); } + + @Test + public void createProdTimerStart_setTimeUnitSeconds() { + String timerName = "prodTimer"; + TimerUnit timeUnit = TimerUnit.SECONDS; + String jobID = "job1"; + + try (Timer timer = oTelConfigurationImpl.createProdTimerStarted(timerName, jobID, timeUnit)) { + Thread.sleep(1234); // Add delay to make startEpoch and endEpoch different + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + + List<SpanData> spanItems = spanExporter.getFinishedSpanItems(); + + assertThat(spanItems).isNotNull(); + + ImmutableList<Long> startEpoch = + spanItems.stream().map(SpanData::getStartEpochNanos).collect(toImmutableList()); + ImmutableList<Long> endEpoch = + spanItems.stream().map(SpanData::getEndEpochNanos).collect(toImmutableList()); + + // Start and end epochs should end in zeros because second-granularity timestamps are converted to nanoseconds.
+ assertThat((long) startEpoch.get(0) % 1000000000).isEqualTo(0); + assertThat((long) endEpoch.get(0) % 1000000000).isEqualTo(0); + } } diff --git a/terraform/aws/BUILD b/terraform/aws/BUILD index 88368253..69e339e9 100644 --- a/terraform/aws/BUILD +++ b/terraform/aws/BUILD @@ -74,3 +74,11 @@ s3_jar_release( release_bucket = ":bucket_flag", release_key = ":bucket_path_flag", ) + +s3_jar_release( + name = "privacy_budget_unit_extraction_tool_release", + artifact_base_name = "PrivacyBudgetUnitExtraction_{VERSION}.jar", + jar_target = "//java/com/google/aggregate/tools/privacybudgetutil/aws:AwsPrivacyBudgetUnitExtractionDeploy", + release_bucket = ":bucket_flag", + release_key = ":bucket_path_flag", +) diff --git a/terraform/gcp/BUILD b/terraform/gcp/BUILD index 09571361..2f3f0684 100644 --- a/terraform/gcp/BUILD +++ b/terraform/gcp/BUILD @@ -50,3 +50,11 @@ gcs_jar_release( release_bucket = ":bucket_flag", release_key = ":bucket_path_flag", ) + +gcs_jar_release( + name = "privacy_budget_unit_extraction_tool_release", + artifact_base_name = "PrivacyBudgetUnitExtractionTool_{VERSION}.jar", + jar_target = "//java/com/google/aggregate/tools/privacybudgetutil/gcp:GcpPrivacyBudgetUnitExtractionDeploy", + release_bucket = ":bucket_flag", + release_key = ":bucket_path_flag", +) diff --git a/terraform/gcp/fetch_terraform.sh b/terraform/gcp/fetch_terraform.sh index 01ffc7a0..52b0a7b3 100644 --- a/terraform/gcp/fetch_terraform.sh +++ b/terraform/gcp/fetch_terraform.sh @@ -80,8 +80,8 @@ frontend_service_jar = "../../jars/FrontendServiceHttpCloudFunction_${VERSION}.j worker_scale_in_jar = "../../jars/WorkerScaleInCloudFunction_${VERSION}.jar" # Coordinator service accounts to impersonate for authorization and authentication -coordinator_a_impersonate_service_account = "a-opallowedusr@ps-msmt-coord-prd-gg-svcacc.iam.gserviceaccount.com" -coordinator_b_impersonate_service_account = "b-opallowedusr@ps-msmt-coord-prd-gg-svcacc.iam.gserviceaccount.com" +coordinator_a_impersonate_service_account = "a-opallowedusr@ps-msmt-coord-prd-g3p-svcacc.iam.gserviceaccount.com" +coordinator_b_impersonate_service_account = "b-opallowedusr@ps-prod-msmt-type2-e541.iam.gserviceaccount.com" EOT diff --git a/worker/aws/BUILD b/worker/aws/BUILD index a30012d9..52ccccfd 100644 --- a/worker/aws/BUILD +++ b/worker/aws/BUILD @@ -53,12 +53,10 @@ DEFAULT_ENCLAVE_ARGS = [ "--result_logger", "LOCAL_TO_CLOUD", # coord-staging environment - "--private_key_service_base_url", - "https://il9t0ajpsa.execute-api.us-west-2.amazonaws.com/stage/v1alpha", "--result_working_directory_path", "/", "--decryption_key_service", - "AWS_ENCLAVE_CLI_DECRYPTION_KEY_SERVICE", + "AWS_ENCLAVE_CLI_MULTI_PARTY_DECRYPTION_KEY_SERVICE", "--param_client", "AWS", # getting parameters from AWS parameter store "--lifecycle_client", @@ -108,7 +106,7 @@ PROD_ENCLAVE_ARGS = [ "--result_logger", "LOCAL_TO_CLOUD", "--decryption_key_service", - "AWS_ENCLAVE_CLI_DECRYPTION_KEY_SERVICE", + "AWS_ENCLAVE_CLI_MULTI_PARTY_DECRYPTION_KEY_SERVICE", "--trusted_party_region_override", "us-east-1", "--param_client", @@ -120,8 +118,10 @@ PROD_ENCLAVE_ARGS = [ "--noising", "DP_NOISING", "--return_stack_trace", - "--parallel-summary-upload", - "--streaming-output-domain-processing", + "--parallel_summary_upload_enabled", + "--streaming_output_domain_processing_enabled", + "--parallel_fact_noising_enabled", + "--labeled_privacy_budget_keys_enabled", ] # Check the documentation for the worker_aws_deployment macro for details on # what is available in this package. 
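The createProdTimerStart_setTimeUnitSeconds tests above expect start and end epoch nanos that are whole multiples of one second when TimerUnit.SECONDS is selected. A minimal sketch of the underlying truncation, assuming only the JDK (SecondsTruncationDemo is a hypothetical class, not project code):

import java.time.Instant;
import java.util.concurrent.TimeUnit;

public final class SecondsTruncationDemo {
  public static void main(String[] args) {
    long timeMillis = System.currentTimeMillis();
    // Same conversion TimerImpl applies for TimerUnit.SECONDS: drop sub-second precision.
    long timeSecond = TimeUnit.SECONDS.convert(timeMillis, TimeUnit.MILLISECONDS);
    Instant truncated = Instant.ofEpochSecond(timeSecond);
    // The nanosecond remainder is always zero after truncation, which is what the
    // tests assert via getStartEpochNanos() % 1000000000 == 0.
    long epochNanos = truncated.getEpochSecond() * 1_000_000_000L + truncated.getNano();
    System.out.println(epochNanos % 1_000_000_000L); // prints 0
  }
}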
diff --git a/worker/gcp/BUILD b/worker/gcp/BUILD index 3803175b..a850533d 100644 --- a/worker/gcp/BUILD +++ b/worker/gcp/BUILD @@ -16,6 +16,74 @@ load("//build_defs/worker/gcp:deploy.bzl", "worker_gcp_deployment") package(default_visibility = ["//visibility:public"]) +worker_gcp_deployment( + name = "worker_mp_gcp_g3p_prod", + cmd = [ + "WorkerRunner_prod_deploy.jar", + "--client_config_env", + "GCP", + "--job_client", + "GCP", + "--blob_storage_client", + "GCP_CS_CLIENT", + "--decryption_key_service", + "GCP_KMS_MULTI_PARTY_DECRYPTION_KEY_SERVICE", + "--primary_encryption_key_service_base_url", + "https://privatekeyservice-a.msmt-3.gcp.privacysandboxservices.com/v1alpha", + "--secondary_encryption_key_service_base_url", + "https://privatekeyservice-b.msmt-4.gcp.privacysandboxservices.com/v1alpha", + "--primary_encryption_key_service_cloudfunction_url", + "https://a-us-central1-encryption-key-service-cloudfunctio-zihnau4cbq-uc.a.run.app", + "--secondary_encryption_key_service_cloudfunction_url", + "https://b-us-central1-encryption-key-service-cloudfunctio-mnlu5dzbga-uc.a.run.app", + "--coordinator_a_kms_key", + "gcp-kms://projects/ps-msmt-a-coord-prd-g3p/locations/us/keyRings/a_key_encryption_ring/cryptoKeys/a_key_encryption_key", + "--coordinator_b_kms_key", + "gcp-kms://projects/ps-prod-msmt-type2-e541/locations/us/keyRings/b_key_encryption_ring/cryptoKeys/b_key_encryption_key", + "--coordinator_a_wip_provider", + "projects/306633382134/locations/global/workloadIdentityPools/a-opwip/providers/a-opwip-pvdr", + "--coordinator_a_sa", + "a-opverifiedusr@ps-msmt-coord-prd-g3p-wif.iam.gserviceaccount.com", + "--coordinator_b_wip_provider", + "projects/364328752810/locations/global/workloadIdentityPools/b-opwip/providers/b-opwip-pvdr", + "--coordinator_b_sa", + "b-opverifiedusr@ps-prod-msmt-type2-e541.iam.gserviceaccount.com", + "--coordinator_a_privacy_budgeting_service_base_url", + "https://mp-pbs-a.msmt-3.gcp.privacysandboxservices.com/v1", + "--coordinator_a_privacy_budgeting_service_auth_endpoint", + "https://a-us-central1-pbs-auth-cloudfunction-zihnau4cbq-uc.a.run.app", + "--coordinator_b_privacy_budgeting_service_base_url", + "https://mp-pbs-b.msmt-4.gcp.privacysandboxservices.com/v1", + "--coordinator_b_privacy_budgeting_service_auth_endpoint", + "https://b-us-central1-pbs-auth-cloudfunction-mnlu5dzbga-uc.a.run.app", + "--privacy_budgeting", + "HTTP", + "--param_client", + "GCP", + "--metric_client", + "GCP", + "--lifecycle_client", + "GCP", + "--pbs_client", + "GCP", + "--noising", + "DP_NOISING", + "--return_stack_trace", + "--parallel_summary_upload_enabled", + "--streaming_output_domain_processing_enabled", + "--parallel_fact_noising_enabled", + "--labeled_privacy_budget_keys_enabled", + ], + entrypoint = [ + "/usr/bin/java", + "-XX:+ExitOnOutOfMemoryError", + "-XX:MaxRAMPercentage=75.0", + "-jar", + ], + files = ["//java/com/google/aggregate/adtech/worker/gcp:WorkerRunner_prod_deploy.jar"], + labels = {"tee.launch_policy.allow_cmd_override": "false"}, +) + worker_gcp_deployment( name = "worker_mp_gcp_prod", cmd = [ @@ -69,6 +137,10 @@ worker_gcp_deployment( "--noising", "DP_NOISING", "--return_stack_trace", + "--parallel_summary_upload_enabled", + "--streaming_output_domain_processing_enabled", + "--parallel_fact_noising_enabled", + "--labeled_privacy_budget_keys_enabled", ], entrypoint = [ "/usr/bin/java", diff --git a/worker/testing/data/library/expected_output_set_1/output.json b/worker/testing/data/library/expected_output_set_1/output.json index 42aa2a23..fc50282f 100644 --- 
a/worker/testing/data/library/expected_output_set_1/output.json +++ b/worker/testing/data/library/expected_output_set_1/output.json @@ -1,19 +1,19 @@ [ { - "bucket" : "a2V5NQ==", + "bucket" : "key5", "metric" : 2000000 }, { - "bucket" : "a2V5NA==", + "bucket" : "key4", "metric" : 2000000 }, { - "bucket" : "a2V5Ng==", + "bucket" : "key6", "metric" : 8000000 }, { - "bucket" : "a2V5MQ==", + "bucket" : "key1", "metric" : 12000000 }, { - "bucket" : "a2V5Mw==", + "bucket" : "key3", "metric" : 2000000 }, { - "bucket" : "a2V5Mg==", + "bucket" : "key2", "metric" : 13000000 } ] diff --git a/worker/testing/data/library/expected_output_set_3/output.json b/worker/testing/data/library/expected_output_set_3/output.json index 29f9552b..01c91777 100644 --- a/worker/testing/data/library/expected_output_set_3/output.json +++ b/worker/testing/data/library/expected_output_set_3/output.json @@ -1,19 +1,19 @@ [ { - "bucket" : "a2V5NA==", + "bucket" : "key4", "metric" : 2000000 }, { - "bucket" : "a2V5MQ==", + "bucket" : "key1", "metric" : 12000000 }, { - "bucket" : "a2V5Mw==", + "bucket" : "key3", "metric" : 2000000 }, { - "bucket" : "a2V5Mg==", + "bucket" : "key2", "metric" : 13000000 }, { - "bucket" : "a2V5MTE=", + "bucket" : "key11", "metric" : 0 }, { - "bucket" : "a2V5MTI=", + "bucket" : "key12", "metric" : 0 } ] diff --git a/worker/testing/data/library/expected_output_set_4/output.json b/worker/testing/data/library/expected_output_set_4/output.json index 55e0e799..a37f64ff 100644 --- a/worker/testing/data/library/expected_output_set_4/output.json +++ b/worker/testing/data/library/expected_output_set_4/output.json @@ -1,19 +1,19 @@ [ { - "bucket" : "a2V5MTM=", + "bucket" : "key13", "metric" : 0 }, { - "bucket" : "a2V5MTQ=", + "bucket" : "key14", "metric" : 0 }, { - "bucket" : "a2V5MTU=", + "bucket" : "key15", "metric" : 0 }, { - "bucket" : "a2V5MTY=", + "bucket" : "key16", "metric" : 0 }, { - "bucket" : "a2V5MTE=", + "bucket" : "key11", "metric" : 0 }, { - "bucket" : "a2V5MTI=", + "bucket" : "key12", "metric" : 0 } ] diff --git a/worker/testing/data/library/expected_output_set_thresholding/output.json b/worker/testing/data/library/expected_output_set_thresholding/output.json index d02b29e7..0df7a190 100644 --- a/worker/testing/data/library/expected_output_set_thresholding/output.json +++ b/worker/testing/data/library/expected_output_set_thresholding/output.json @@ -1,13 +1,13 @@ [ { - "bucket" : "MjI=", + "bucket" : "22", "metric" : 1 }, { - "bucket" : "MzM=", + "bucket" : "33", "metric" : 2 }, { - "bucket" : "NDQ=", + "bucket" : "44", "metric" : 3 }, { - "bucket" : "NTU=", + "bucket" : "55", "metric" : 1 } ]
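The expected-output fixtures above switch the bucket field from its base64-encoded form to the decoded string; for example, "a2V5NQ==" is the base64 encoding of "key5" and "MjI=" of "22". A small sketch, assuming only the JDK, that checks a few of the old and new fixture values agree (BucketEncodingDemo is a hypothetical class):

import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Map;

public final class BucketEncodingDemo {
  public static void main(String[] args) {
    // Old base64 fixture value -> new plain-text fixture value, taken from the diff above.
    Map<String, String> oldToNew =
        Map.of("a2V5NQ==", "key5", "a2V5MTE=", "key11", "MjI=", "22", "NTU=", "55");
    oldToNew.forEach(
        (encoded, plain) -> {
          String decoded =
              new String(Base64.getDecoder().decode(encoded), StandardCharsets.UTF_8);
          System.out.println(encoded + " -> " + decoded + " (fixture value: " + plain + ")");
        });
  }
}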