From afb18bd8e92c1b4ff4d094149ffd6228dbfbb6b2 Mon Sep 17 00:00:00 2001
From: Alexander Braverman Masis
Date: Wed, 1 May 2024 12:40:20 -0400
Subject: [PATCH] s3_bucket: support S3 Inventory configuration

---
 plugins/modules/s3_bucket.py                  | 180 +++++++++++++++++-
 tests/integration/targets/s3_bucket/inventory |   1 +
 .../roles/s3_bucket/tasks/inventory.yml       | 129 +++++++++++++
 3 files changed, 309 insertions(+), 1 deletion(-)
 create mode 100644 tests/integration/targets/s3_bucket/roles/s3_bucket/tasks/inventory.yml

diff --git a/plugins/modules/s3_bucket.py b/plugins/modules/s3_bucket.py
index d259286f9c..8051828ff3 100644
--- a/plugins/modules/s3_bucket.py
+++ b/plugins/modules/s3_bucket.py
@@ -166,7 +166,50 @@
     type: bool
     default: false
     version_added: 6.0.0
-
+  inventory:
+    description:
+      - Enable S3 Inventory, which produces a list of the objects in the bucket and their
+        corresponding metadata on a daily or weekly basis.
+      - If this option is not specified, any existing inventory configuration is removed.
+    type: dict
+    suboptions:
+      destination:
+        description: Information about where to publish the inventory results.
+        type: dict
+        required: true
+        suboptions:
+          account_id:
+            description: The account ID that owns the destination S3 bucket.
+            type: str
+          bucket:
+            description: The ARN of the bucket where inventory results are published.
+            type: str
+            required: true
+          format:
+            description: The output format of the inventory results.
+            type: str
+            choices: ['CSV', 'ORC', 'Parquet']
+            default: CSV
+          prefix:
+            description: The prefix that is prepended to all inventory results.
+            type: str
+      filter:
+        description: Only include objects whose keys begin with this prefix in the inventory.
+        type: str
+      id:
+        description: The ID used to identify the inventory configuration.
+        type: str
+        required: true
+      schedule:
+        description: How frequently inventory results are produced.
+        type: str
+        default: Weekly
+        choices: ['Daily', 'Weekly']
+      included_object_versions:
+        description: Whether the inventory includes all object versions or only the current versions.
+        type: str
+        default: All
+        choices: ['All', 'Current']
+      optional_fields:
+        description: Additional metadata fields to include in the inventory results.
+        type: list
+        elements: str
+        default: []
+        choices: ["Size", "LastModifiedDate", "StorageClass", "ETag",
+                  "IsMultipartUploaded", "ReplicationStatus", "EncryptionStatus",
+                  "ObjectLockRetainUntilDate", "ObjectLockMode",
+                  "ObjectLockLegalHoldStatus", "IntelligentTieringAccessTier",
+                  "BucketKeyStatus", "ChecksumAlgorithm", "ObjectAccessControlList",
+                  "ObjectOwner"]
+
 extends_documentation_fragment:
   - amazon.aws.common.modules
   - amazon.aws.region.modules
@@ -832,6 +875,50 @@ def handle_bucket_object_lock(s3_client, module: AnsibleAWSModule, name: str) ->
     return object_lock_result
 
 
+def handle_bucket_inventory(s3_client, module: AnsibleAWSModule, name: str) -> tuple[bool, dict]:
+    """
+    Manage the inventory configuration for an S3 bucket.
+
+    Parameters:
+        s3_client (boto3.client): The Boto3 S3 client object.
+        module (AnsibleAWSModule): The Ansible module object.
+        name (str): The name of the bucket to handle inventory for.
+
+    Returns:
+        A tuple containing a boolean indicating whether the inventory configuration was changed
+        and a dictionary describing the resulting inventory configuration.
+ """ + bucket_inventory_settings = module.params.get("inventory") + bucket_inventory_result = {} + bucket_inventory_changed = False + + try: + bucket_inventory_status = get_bucket_inventory(s3_client, name) + bucket_inventory_result = bucket_inventory_status + except is_boto3_error_code(["NotImplemented", "XNotImplemented"]) as e: + if bucket_inventory_settings is not None: + module.fail_json(msg="Fetching bucket inventory state is not supported") + except is_boto3_error_code("ObjectLockConfigurationNotFoundError"): # pylint: disable=duplicate-except + if bucket_inventory_settings: + module.fail_json(msg="Enabling object lock for existing buckets is not supported") + object_lock_result = False + except is_boto3_error_code("AccessDenied") as e: # pylint: disable=duplicate-except + if bucket_inventory_settings is not None: + module.fail_json(msg="Permission denied fetching object lock state for bucket") + except ( + botocore.exceptions.BotoCoreError, + botocore.exceptions.ClientError, + ) as e: # pylint: disable=duplicate-except + module.fail_json_aws(e, msg="Failed to fetch bucket object lock state") + else: + if bucket_inventory_status is not None: + if not bucket_inventory_settings and bucket_inventory_status: + delete_bucket_inventory(s3_client, name) + bucket_inventory_changed = True + if bucket_inventory_settings and not bucket_inventory_status: + put_bucket_inventory(s3_client, name, bucket_inventory_settings) + bucket_inventory_changed = True + + return bucket_inventory_changed, bucket_inventory_result + def create_or_update_bucket(s3_client, module: AnsibleAWSModule): """ Create or update an S3 bucket along with its associated configurations. @@ -908,6 +995,10 @@ def create_or_update_bucket(s3_client, module: AnsibleAWSModule): bucket_object_lock_result = handle_bucket_object_lock(s3_client, module, name) result["object_lock_enabled"] = bucket_object_lock_result + # -- Inventory + bucket_inventory_changed, bucket_inventory_result = handle_bucket_inventory(s3_client, module, name) + result["bucket_inventory"] = bucket_inventory_result + # Module exit changed = ( changed @@ -919,6 +1010,7 @@ def create_or_update_bucket(s3_client, module: AnsibleAWSModule): or encryption_changed or bucket_ownership_changed or bucket_acl_changed + or bucket_inventory_changed ) module.exit_json(changed=changed, name=name, **result) @@ -973,6 +1065,20 @@ def create_bucket(s3_client, bucket_name: str, location: str, object_lock_enable return False +@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"]) +def put_bucket_inventory(s3_client: S3Client, bucket_name: str, inventory: dict): + """ + Set inventory settings for an S3 bucket. + Parameters: + s3_client (boto3.client): The Boto3 S3 client object. + bucket_name (str): The name of the S3 bucket. + tags (dict): A dictionary containing the inventory settings to be set on the bucket. 
+    Returns:
+        None
+    """
+    s3_client.put_bucket_inventory_configuration(
+        Bucket=bucket_name, Id=inventory["Id"], InventoryConfiguration=inventory
+    )
+
+
 @AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
 def put_bucket_tagging(s3_client, bucket_name: str, tags: dict):
     """
@@ -987,6 +1093,37 @@ def put_bucket_tagging(s3_client, bucket_name: str, tags: dict):
     s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging={"TagSet": ansible_dict_to_boto3_tag_list(tags)})
 
 
+@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
+def delete_bucket_inventory(s3_client, bucket_name: str, inventory_id: str):
+    """
+    Delete the inventory configuration for an S3 bucket.
+
+    Parameters:
+        s3_client (boto3.client): The Boto3 S3 client object.
+        bucket_name (str): The name of the S3 bucket.
+        inventory_id (str): The ID of the inventory configuration to delete.
+
+    Returns:
+        None
+    """
+    s3_client.delete_bucket_inventory_configuration(Bucket=bucket_name, Id=inventory_id)
+
+
+@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
+def get_bucket_inventory(s3_client, bucket_name: str) -> dict:
+    """
+    Get the inventory configuration for an S3 bucket.
+
+    Parameters:
+        s3_client (boto3.client): The Boto3 S3 client object.
+        bucket_name (str): The name of the S3 bucket.
+
+    Returns:
+        The current inventory configuration, or an empty dict if none is set.
+    """
+    try:
+        result = s3_client.list_bucket_inventory_configurations(Bucket=bucket_name)
+        inventory_list = result.get("InventoryConfigurationList", [])
+        return inventory_list[0] if inventory_list else {}
+    except is_boto3_error_code("NoSuchConfiguration"):
+        return {}
+
+
 @AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
 def put_bucket_policy(s3_client, bucket_name: str, policy: dict):
     """
@@ -1733,6 +1870,47 @@ def main():
         validate_bucket_name=dict(type="bool", default=True),
         dualstack=dict(default=False, type="bool"),
         object_lock_enabled=dict(type="bool"),
+        inventory=dict(
+            type="dict",
+            options=dict(
+                destination=dict(
+                    type="dict",
+                    options=dict(
+                        account_id=dict(type="str"),
+                        bucket=dict(type="str", required=True),
+                        format=dict(type="str", choices=["CSV", "ORC", "Parquet"], default="CSV"),
+                        prefix=dict(type="str"),
+                    ),
+                    required=True,
+                ),
+                filter=dict(type="str"),
+                optional_fields=dict(
+                    type="list",
+                    elements="str",
+                    choices=[
+                        "Size",
+                        "LastModifiedDate",
+                        "StorageClass",
+                        "ETag",
+                        "IsMultipartUploaded",
+                        "ReplicationStatus",
+                        "EncryptionStatus",
+                        "ObjectLockRetainUntilDate",
+                        "ObjectLockMode",
+                        "ObjectLockLegalHoldStatus",
+                        "IntelligentTieringAccessTier",
+                        "BucketKeyStatus",
+                        "ChecksumAlgorithm",
+                        "ObjectAccessControlList",
+                        "ObjectOwner",
+                    ],
+                    default=[],
+                ),
+                id=dict(type="str", required=True),
+                schedule=dict(type="str", default="Weekly", choices=["Daily", "Weekly"]),
+                included_object_versions=dict(type="str", default="All", choices=["All", "Current"]),
+            ),
+        ),
     )
 
     required_by = dict(
diff --git a/tests/integration/targets/s3_bucket/inventory b/tests/integration/targets/s3_bucket/inventory
index db31e4a9b3..2bbe784280 100644
--- a/tests/integration/targets/s3_bucket/inventory
+++ b/tests/integration/targets/s3_bucket/inventory
@@ -11,6 +11,7 @@ encryption_sse
 public_access
 acl
 object_lock
+inventory
 
 [all:vars]
 ansible_connection=local
diff --git a/tests/integration/targets/s3_bucket/roles/s3_bucket/tasks/inventory.yml b/tests/integration/targets/s3_bucket/roles/s3_bucket/tasks/inventory.yml
new file mode 100644
index 0000000000..7724848046
--- /dev/null
+++ b/tests/integration/targets/s3_bucket/roles/s3_bucket/tasks/inventory.yml
@@ -0,0 +1,129 @@
+---
+- module_defaults:
+    group/aws:
+      access_key: "{{ aws_access_key }}"
+      secret_key: "{{ aws_secret_key }}"
+      session_token: "{{ security_token | default(omit) }}"
+      region: "{{ aws_region }}"
+  block:
+    - ansible.builtin.set_fact:
+        local_bucket_name: "{{ bucket_name | hash('md5') }}-inventory-source"
+        local_dest_bucket_name: "{{ bucket_name | hash('md5') }}-inventory-target"
+    # ============================================================
+    - name: Create a simple bucket to be the inventory destination
+      amazon.aws.s3_bucket:
+        name: "{{ local_dest_bucket_name }}"
+        state: present
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output.changed
+
+    - name: Create a simple bucket with minimal inventory configuration
+      amazon.aws.s3_bucket:
+        name: "{{ local_bucket_name }}"
+        state: present
+        inventory:
+          id: "{{ local_dest_bucket_name }}"
+          destination:
+            bucket: "arn:aws:s3:::{{ local_dest_bucket_name }}"
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output.changed
+          - output.bucket_inventory
+
+    - name: Re-configure inventory configuration
+      amazon.aws.s3_bucket:
+        name: "{{ local_bucket_name }}"
+        state: present
+        inventory:
+          id: "{{ local_dest_bucket_name }}"
+          destination:
+            bucket: "arn:aws:s3:::{{ local_dest_bucket_name }}"
+          schedule: "Daily"
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output.changed
+          - output.bucket_inventory
+
+    - name: Re-configure inventory configuration (idempotency)
+      amazon.aws.s3_bucket:
+        name: "{{ local_bucket_name }}"
+        state: present
+        inventory:
+          id: "{{ local_dest_bucket_name }}"
+          destination:
+            bucket: "arn:aws:s3:::{{ local_dest_bucket_name }}"
+          schedule: "Daily"
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output is not changed
+          - output.bucket_inventory
+
+    - name: Delete inventory configuration
+      amazon.aws.s3_bucket:
+        name: "{{ local_bucket_name }}"
+        state: present
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output is changed
+          - not output.bucket_inventory
+
+    - name: Delete inventory configuration (idempotency)
+      amazon.aws.s3_bucket:
+        name: "{{ local_bucket_name }}"
+        state: present
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output is not changed
+          - not output.bucket_inventory
+
+    # ============================================================
+    - name: Delete testing s3 bucket
+      amazon.aws.s3_bucket:
+        name: "{{ local_bucket_name }}"
+        state: absent
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output.changed
+
+    - name: Delete testing inventory s3 bucket
+      amazon.aws.s3_bucket:
+        name: "{{ local_dest_bucket_name }}"
+        state: absent
+      register: output
+
+    - ansible.builtin.assert:
+        that:
+          - output.changed
+
+  # ============================================================
+  always:
+    - name: Ensure all buckets are deleted
+      amazon.aws.s3_bucket:
+        name: "{{ local_bucket_name }}"
+        state: absent
+      ignore_errors: true
+
+    - name: Ensure all buckets are deleted
+      amazon.aws.s3_bucket:
+        name: "{{ local_dest_bucket_name }}"
+        state: absent
+      ignore_errors: true
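
Reviewer note: for anyone who wants to exercise the underlying S3 Inventory calls outside of Ansible, the following is a minimal boto3 sketch of the put/list/delete cycle that the new module functions wrap. The bucket names and the configuration Id are placeholders, not values from the patch; only the boto3 API names and request shapes are taken from the S3 API itself.

    # Standalone sketch of the S3 Inventory API calls wrapped by this patch.
    # Bucket names and the configuration Id below are placeholders.
    import boto3

    s3 = boto3.client("s3")
    source_bucket = "example-inventory-source"                  # placeholder
    destination_arn = "arn:aws:s3:::example-inventory-target"   # placeholder
    inventory_id = "example-inventory-id"                       # placeholder

    # Shape expected by put_bucket_inventory_configuration; this mirrors what
    # handle_bucket_inventory builds from the module parameters.
    configuration = {
        "Id": inventory_id,
        "IsEnabled": True,
        "Destination": {"S3BucketDestination": {"Bucket": destination_arn, "Format": "CSV"}},
        "Schedule": {"Frequency": "Weekly"},
        "IncludedObjectVersions": "All",
    }

    s3.put_bucket_inventory_configuration(
        Bucket=source_bucket, Id=inventory_id, InventoryConfiguration=configuration
    )

    # list_bucket_inventory_configurations is what get_bucket_inventory uses to read state back.
    current = s3.list_bucket_inventory_configurations(Bucket=source_bucket)
    print(current.get("InventoryConfigurationList", []))

    # delete_bucket_inventory_configuration removes the configuration by Id.
    s3.delete_bucket_inventory_configuration(Bucket=source_bucket, Id=inventory_id)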
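
After the "Re-configure inventory configuration" task in the new integration tests, the applied schedule can also be confirmed directly against S3. A small sketch, assuming the source bucket from the run still exists and reusing the placeholder names from the snippet above rather than the hashed names the tests generate:

    # Confirm the schedule applied by the "Re-configure inventory configuration" task.
    # Bucket name and Id are placeholders, not the hashed values used by the tests.
    import boto3

    s3 = boto3.client("s3")
    response = s3.get_bucket_inventory_configuration(
        Bucket="example-inventory-source", Id="example-inventory-id"
    )
    assert response["InventoryConfiguration"]["Schedule"]["Frequency"] == "Daily"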