diff --git a/plugins/modules/s3_bucket.py b/plugins/modules/s3_bucket.py index d259286f9c0..b9a473c16d3 100644 --- a/plugins/modules/s3_bucket.py +++ b/plugins/modules/s3_bucket.py @@ -166,7 +166,62 @@ type: bool default: false version_added: 6.0.0 - + inventory: + description: + - Enable S3 Inventory, saving list of the objects and their corresponding + metadata on a daily or weekly basis for an S3 bucket. + type: list + elements: dict + default: [] + suboptions: + destination: + description: Contains information about where to publish the inventory results. + type: dict + required: True + suboptions: + account_id: + description: The account ID that owns the destination S3 bucket. If no account ID is provided, the owner is not validated before exporting data. + type: str + bucket: + description: The Amazon Resource Name (ARN) of the bucket where inventory results will be published. + type: str + required: True + format: + description: Specifies the output format of the inventory results. + type: str + choices: [ 'CSV', 'ORC', 'Parquet' ] + default: CSV + prefix: + description: The prefix that is prepended to all inventory results. + type: str + filter: + description: Specifies an inventory filter. The inventory only includes objects that meet the filter's criteria. + type: str + id: + description: The ID used to identify the inventory configuration + type: str + required: True + schedule: + description: Specifies the schedule for generating inventory results. + type: str + default: Weekly + choices: [ 'Daily', 'Weekly' ] + included_object_versions: + description: Object versions to include in the inventory list. If set to All, the list includes all the object versions, which adds the version-related fields VersionId, IsLatest, and DeleteMarker to the list. If set to Current, the list does not contain these version-related fields. 
+ type: str + default: All + choices: [ 'All', 'Current' ] + optional_fields: + description: Contains the optional fields that are included in the inventory results. + type: list + elements: str + default: [] + choices: [ "Size", "LastModifiedDate", "StorageClass", "ETag", + "IsMultipartUploaded", "ReplicationStatus", "EncryptionStatus", + "ObjectLockRetainUntilDate", "ObjectLockMode", + "ObjectLockLegalHoldStatus", "IntelligentTieringAccessTier", + "BucketKeyStatus", "ChecksumAlgorithm", "ObjectAccessControlList", + "ObjectOwner" ] extends_documentation_fragment: - amazon.aws.common.modules - amazon.aws.region.modules @@ -350,6 +405,7 @@ sample: 'public-read' """ +from email.policy import default import json import time from typing import Iterator @@ -832,6 +888,80 @@ def handle_bucket_object_lock(s3_client, module: AnsibleAWSModule, name: str) -> return object_lock_result +def handle_bucket_inventory(s3_client, module: AnsibleAWSModule, name: str) -> tuple[bool, dict]: + """ + Manage inventory configuration for an S3 bucket. + Parameters: + s3_client (boto3.client): The Boto3 S3 client object. + module (AnsibleAWSModule): The Ansible module object. + name (str): The name of the bucket to handle inventory for. + Returns: + A tuple containing a boolean indicating whether inventory settings were changed + and a dictionary containing the updated inventory. 
+ """ + declared_inventories = module.params.get("inventory") + results = [] + bucket_changed = False + + try: + present_inventories = get_bucket_inventories(s3_client, name) + except is_boto3_error_code(["NotImplemented", "XNotImplemented"]) as e: + if declared_inventories is not None: + module.fail_json(msg="Fetching bucket inventories is not supported") + except is_boto3_error_code("AccessDenied") as e: # pylint: disable=duplicate-except + if declared_inventories is not None: + module.fail_json(msg="Permission denied fetching bucket inventories") + except ( + botocore.exceptions.BotoCoreError, + botocore.exceptions.ClientError, + ) as e: # pylint: disable=duplicate-except + module.fail_json_aws(e, msg="Failed to fetch bucket inventories") + else: + if not declared_inventories and present_inventories != []: + for present_inventory in present_inventories: + inventory_id = present_inventory.get("Id", "") + if inventory_id != "": + delete_bucket_inventory(s3_client, name, inventory_id) + bucket_changed = True + if declared_inventories: + for declared_inventory in declared_inventories: + camel_destination = snake_dict_to_camel_dict(declared_inventory.get("destination", {}), True) + declared_inventory_api = { + "IsEnabled": True, + "Id": declared_inventory.get("id"), + "Destination": {"S3BucketDestination": {k: v for k, v in camel_destination.items() if v is not None}}, + "IncludedObjectVersions": declared_inventory.get("included_object_versions"), + "Schedule": {"Frequency": declared_inventory.get("schedule")}, + } + + for field in declared_inventory.get("optional_fields", []): + declared_inventory_api["OptionalFields"].append({"Field": field}) + if declared_inventory.get("filter") is not None: + declared_inventory_api["Filter"] = {"Prefix": declared_inventory.get("filter")} + + update = True + for present_inventory in present_inventories: + if present_inventory.get("Id", "") == declared_inventory_api["Id"]: + if declared_inventory_api == present_inventory: + update 
= False + + if update: + try: + put_bucket_inventory(s3_client, name, declared_inventory_api) + bucket_changed = True + except is_boto3_error_code("InvalidS3DestinationBucket"): + module.fail_json(msg="Invalid destibation bucket ARN") + except ( + botocore.exceptions.BotoCoreError, + botocore.exceptions.ClientError, + ) as e: # pylint: disable=duplicate-except + module.fail_json_aws(e, msg="Failed to set bucket inventory setting") + + results.append(declared_inventory_api) + + return bucket_changed, results + + def create_or_update_bucket(s3_client, module: AnsibleAWSModule): """ Create or update an S3 bucket along with its associated configurations. @@ -908,6 +1038,10 @@ def create_or_update_bucket(s3_client, module: AnsibleAWSModule): bucket_object_lock_result = handle_bucket_object_lock(s3_client, module, name) result["object_lock_enabled"] = bucket_object_lock_result + # -- Inventory + bucket_inventory_changed, bucket_inventory_result = handle_bucket_inventory(s3_client, module, name) + result["bucket_inventory"] = bucket_inventory_result + # Module exit changed = ( changed @@ -919,6 +1053,7 @@ def create_or_update_bucket(s3_client, module: AnsibleAWSModule): or encryption_changed or bucket_ownership_changed or bucket_acl_changed + or bucket_inventory_changed ) module.exit_json(changed=changed, name=name, **result) @@ -973,6 +1108,22 @@ def create_bucket(s3_client, bucket_name: str, location: str, object_lock_enable return False +@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"]) +def put_bucket_inventory(s3_client, bucket_name: str, inventory: dict): + """ + Set inventory settings for an S3 bucket. + Parameters: + s3_client (boto3.client): The Boto3 S3 client object. + bucket_name (str): The name of the S3 bucket. + tags (dict): A dictionary containing the inventory settings to be set on the bucket. 
+ Returns: + None + """ + s3_client.put_bucket_inventory_configuration( + Bucket=bucket_name, InventoryConfiguration=inventory, Id=inventory.get("Id") + ) + + @AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"]) def put_bucket_tagging(s3_client, bucket_name: str, tags: dict): """ @@ -987,6 +1138,37 @@ def put_bucket_tagging(s3_client, bucket_name: str, tags: dict): s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging={"TagSet": ansible_dict_to_boto3_tag_list(tags)}) +@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"]) +def delete_bucket_inventory(s3_client, bucket_name: str, id: str): + """ + Delete the inventory settings for an S3 bucket. + Parameters: + s3_client (boto3.client): The Boto3 S3 client object. + bucket_name (str): The name of the S3 bucket. + Returns: + None + """ + s3_client.delete_bucket_inventory_configuration(Bucket=bucket_name, Id=id) + + +@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"]) +def get_bucket_inventories(s3_client, bucket_name: str) -> list: + """ + Get the inventory settings for an S3 bucket. + Parameters: + s3_client (boto3.client): The Boto3 S3 client object. + bucket_name (str): The name of the S3 bucket. + Returns: + Current inventory settings. 
+    try:
+        inventory_list = []
+        params = {"Bucket": bucket_name}
+        # list_bucket_inventory_configurations returns at most 100 entries per call;
+        # follow the continuation token so buckets with many configurations are
+        # fully enumerated (otherwise idempotency and deletion silently miss some).
+        while True:
+            result = s3_client.list_bucket_inventory_configurations(**params)
+            inventory_list.extend(result.get("InventoryConfigurationList", []))
+            if not result.get("IsTruncated"):
+                break
+            params["ContinuationToken"] = result["NextContinuationToken"]
+        return inventory_list
+    except is_boto3_error_code("NoSuchConfiguration"):
+        return []
+inventory [all:vars] ansible_connection=local diff --git a/tests/integration/targets/s3_bucket/roles/s3_bucket/tasks/inventory.yml b/tests/integration/targets/s3_bucket/roles/s3_bucket/tasks/inventory.yml new file mode 100644 index 00000000000..7bbcb28145c --- /dev/null +++ b/tests/integration/targets/s3_bucket/roles/s3_bucket/tasks/inventory.yml @@ -0,0 +1,129 @@ +--- +- module_defaults: + group/aws: + access_key: "{{ aws_access_key }}" + secret_key: "{{ aws_secret_key }}" + session_token: "{{ security_token | default(omit) }}" + region: "{{ aws_region }}" + block: + - ansible.builtin.set_fact: + local_bucket_name: "{{ bucket_name | hash('md5')}}-inventory-source" + local_dest_bucket_name: "{{ bucket_name | hash('md5')}}-inventory-target" + # ============================================================ + - name: Create a simple bucket to be inventory destination + amazon.aws.s3_bucket: + name: "{{ local_dest_bucket_name }}" + state: present + register: output + + - ansible.builtin.assert: + that: + - output.changed + + - name: Create a simple bucket with minimal inventory configuration + amazon.aws.s3_bucket: + name: "{{ local_bucket_name }}" + state: present + inventory: + - id: "{{ local_dest_bucket_name }}" + destination: + bucket: "arn:aws:s3:::{{ local_dest_bucket_name }}" + register: output + + - ansible.builtin.assert: + that: + - output.changed + - output.bucket_inventory + + - name: Re-configure inventory configuration + amazon.aws.s3_bucket: + name: "{{ local_bucket_name }}" + state: present + inventory: + - id: "{{ local_dest_bucket_name }}" + destination: + bucket: "arn:aws:s3:::{{ local_dest_bucket_name }}" + schedule: "Daily" + register: output + + - ansible.builtin.assert: + that: + - output.changed + - output.bucket_inventory + + - name: Re-configure inventory configuration (idempotency) + amazon.aws.s3_bucket: + name: "{{ local_bucket_name }}" + state: present + inventory: + - id: "{{ local_dest_bucket_name }}" + destination: + bucket: 
"arn:aws:s3:::{{ local_dest_bucket_name }}" + schedule: "Daily" + register: output + + - ansible.builtin.assert: + that: + - output is not changed + - output.bucket_inventory + + + - name: Delete inventory configuration + amazon.aws.s3_bucket: + name: "{{ local_bucket_name }}" + state: present + register: output + + - ansible.builtin.assert: + that: + - output is changed + - not output.bucket_inventory|bool + + - name: Delete inventory configuration (idempotency) + amazon.aws.s3_bucket: + name: "{{ local_bucket_name }}" + state: present + register: output + + - ansible.builtin.assert: + that: + - output is not changed + - not output.bucket_inventory|bool + + # ============================================================ + + - name: Delete testing s3 bucket + amazon.aws.s3_bucket: + name: "{{ local_bucket_name }}" + state: absent + register: output + + - ansible.builtin.assert: + that: + - output.changed + + + - name: Delete testing inventory s3 bucket + amazon.aws.s3_bucket: + name: "{{ local_dest_bucket_name }}" + state: absent + register: output + + - ansible.builtin.assert: + that: + - output.changed + + + # ============================================================ + always: + - name: Ensure all buckets are deleted + amazon.aws.s3_bucket: + name: "{{ local_bucket_name }}" + state: absent + ignore_errors: true + + - name: Ensure all buckets are deleted + amazon.aws.s3_bucket: + name: "{{ local_dest_bucket_name }}" + state: absent + ignore_errors: true