diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py
index 2fbbb68f00c..c6efec0643a 100644
--- a/aws_lambda_powertools/utilities/parser/models/__init__.py
+++ b/aws_lambda_powertools/utilities/parser/models/__init__.py
@@ -2,6 +2,7 @@
 from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel
 from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel
 from .event_bridge import EventBridgeModel
+from .s3 import S3Model, S3RecordModel
 from .ses import SesModel, SesRecordModel
 from .sns import SnsModel, SnsNotificationModel, SnsRecordModel
 from .sqs import SqsModel, SqsRecordModel
@@ -18,6 +19,8 @@
     "EventBridgeModel",
     "DynamoDBStreamChangedRecordModel",
     "DynamoDBStreamRecordModel",
+    "S3Model",
+    "S3RecordModel",
     "SesModel",
     "SesRecordModel",
     "SnsModel",
diff --git a/aws_lambda_powertools/utilities/parser/models/s3.py b/aws_lambda_powertools/utilities/parser/models/s3.py
new file mode 100644
index 00000000000..14ea250b35b
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/models/s3.py
@@ -0,0 +1,72 @@
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+from pydantic.fields import Field
+from pydantic.networks import IPvAnyNetwork
+from pydantic.types import PositiveInt
+from typing_extensions import Literal
+
+
+class S3EventRecordGlacierRestoreEventData(BaseModel):
+    lifecycleRestorationExpiryTime: datetime
+    lifecycleRestoreStorageClass: str
+
+
+class S3EventRecordGlacierEventData(BaseModel):
+    restoreEventData: S3EventRecordGlacierRestoreEventData
+
+
+class S3Identity(BaseModel):
+    principalId: str
+
+
+class S3RequestParameters(BaseModel):
+    sourceIPAddress: IPvAnyNetwork
+
+
+class S3ResponseElements(BaseModel):
+    x_amz_request_id: str = Field(None, alias="x-amz-request-id")
+    x_amz_id_2: str = Field(None, alias="x-amz-id-2")
+
+
+class S3OwnerIdentify(BaseModel):
+    principalId: str
+
+
+class S3Bucket(BaseModel):
+    name: str
+    ownerIdentity: S3OwnerIdentify
+    arn: str
+
+
+class S3Object(BaseModel):
+    key: str
+    size: PositiveInt
+    eTag: str
+    sequencer: str
+    versionId: Optional[str]
+
+
+class S3Message(BaseModel):
+    s3SchemaVersion: str
+    configurationId: str
+    bucket: S3Bucket
+    object: S3Object  # noqa: A003,VNE003
+
+
+class S3RecordModel(BaseModel):
+    eventVersion: str
+    eventSource: Literal["aws:s3"]
+    awsRegion: str
+    eventTime: datetime
+    eventName: str
+    userIdentity: S3Identity
+    requestParameters: S3RequestParameters
+    responseElements: S3ResponseElements
+    s3: S3Message
+    glacierEventData: Optional[S3EventRecordGlacierEventData]
+
+
+class S3Model(BaseModel):
+    Records: List[S3RecordModel]
diff --git a/docs/content/utilities/parser.mdx b/docs/content/utilities/parser.mdx
index 29f81336d5f..b620f0eb18f 100644
--- a/docs/content/utilities/parser.mdx
+++ b/docs/content/utilities/parser.mdx
@@ -158,6 +158,7 @@ Model name | Description
 **SqsModel** | Lambda Event Source payload for Amazon SQS
 **AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer
 **CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs
+**S3Model** | Lambda Event Source payload for Amazon S3
 
 You can extend them to include your own models, and yet have all other known fields parsed along the way.
diff --git a/tests/events/s3EventGlacier.json b/tests/events/s3EventGlacier.json
new file mode 100644
index 00000000000..2fbc447b308
--- /dev/null
+++ b/tests/events/s3EventGlacier.json
@@ -0,0 +1,44 @@
+{
+  "Records": [
+    {
+      "eventVersion": "2.1",
+      "eventSource": "aws:s3",
+      "awsRegion": "us-east-2",
+      "eventTime": "2019-09-03T19:37:27.192Z",
+      "eventName": "ObjectCreated:Put",
+      "userIdentity": {
+        "principalId": "AWS:AIDAINPONIXQXHT3IKHL2"
+      },
+      "requestParameters": {
+        "sourceIPAddress": "205.255.255.255"
+      },
+      "responseElements": {
+        "x-amz-request-id": "D82B88E5F771F645",
+        "x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
+      },
+      "s3": {
+        "s3SchemaVersion": "1.0",
+        "configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1",
+        "bucket": {
+          "name": "lambda-artifacts-deafc19498e3f2df",
+          "ownerIdentity": {
+            "principalId": "A3I5XTEXAMAI3E"
+          },
+          "arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
+        },
+        "object": {
+          "key": "b21b84d653bb07b05b1e6b33684dc11b",
+          "size": 1305107,
+          "eTag": "b21b84d653bb07b05b1e6b33684dc11b",
+          "sequencer": "0C0F6F405D6ED209E1"
+        }
+      },
+      "glacierEventData": {
+        "restoreEventData": {
+          "lifecycleRestorationExpiryTime": "1970-01-01T00:01:00.000Z",
+          "lifecycleRestoreStorageClass": "standard"
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file
diff --git a/tests/functional/parser/test_s3.py b/tests/functional/parser/test_s3.py
new file mode 100644
index 00000000000..5d8a19a933e
--- /dev/null
+++ b/tests/functional/parser/test_s3.py
@@ -0,0 +1,89 @@
+from aws_lambda_powertools.utilities.parser import event_parser
+from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
+from aws_lambda_powertools.utilities.typing import LambdaContext
+from tests.functional.parser.utils import load_event
+
+
+@event_parser(model=S3Model)
+def handle_s3(event: S3Model, _: LambdaContext):
+    records = list(event.Records)
+    assert len(records) == 1
+    record: S3RecordModel = records[0]
+    assert record.eventVersion == "2.1"
+    assert record.eventSource == "aws:s3"
+    assert record.awsRegion == "us-east-2"
+    convert_time = int(round(record.eventTime.timestamp() * 1000))
+    assert convert_time == 1567539447192
+    assert record.eventName == "ObjectCreated:Put"
+    user_identity = record.userIdentity
+    assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
+    request_parameters = record.requestParameters
+    assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32"
+    assert record.responseElements.x_amz_request_id == "D82B88E5F771F645"
+    assert (
+        record.responseElements.x_amz_id_2
+        == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
+    )
+    s3 = record.s3
+    assert s3.s3SchemaVersion == "1.0"
+    assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
+    bucket = s3.bucket
+    assert bucket.name == "lambda-artifacts-deafc19498e3f2df"
+    assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
+    assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
+    assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b"
+    assert s3.object.size == 1305107
+    assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b"
+    assert s3.object.versionId is None
+    assert s3.object.sequencer == "0C0F6F405D6ED209E1"
+    assert record.glacierEventData is None
+
+
+@event_parser(model=S3Model)
+def handle_s3_glacier(event: S3Model, _: LambdaContext):
+    records = list(event.Records)
+    assert len(records) == 1
+    record: S3RecordModel = records[0]
+    assert record.eventVersion == "2.1"
+    assert record.eventSource == "aws:s3"
+    assert record.awsRegion == "us-east-2"
+    convert_time = int(round(record.eventTime.timestamp() * 1000))
+    assert convert_time == 1567539447192
+    assert record.eventName == "ObjectCreated:Put"
+    user_identity = record.userIdentity
+    assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
+    request_parameters = record.requestParameters
+    assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32"
+    assert record.responseElements.x_amz_request_id == "D82B88E5F771F645"
+    assert (
+        record.responseElements.x_amz_id_2
+        == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
+    )
+    s3 = record.s3
+    assert s3.s3SchemaVersion == "1.0"
+    assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
+    bucket = s3.bucket
+    assert bucket.name == "lambda-artifacts-deafc19498e3f2df"
+    assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
+    assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
+    assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b"
+    assert s3.object.size == 1305107
+    assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b"
+    assert s3.object.versionId is None
+    assert s3.object.sequencer == "0C0F6F405D6ED209E1"
+    assert record.glacierEventData is not None
+    convert_time = int(
+        round(record.glacierEventData.restoreEventData.lifecycleRestorationExpiryTime.timestamp() * 1000)
+    )
+    assert convert_time == 60000
+    assert record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass == "standard"
+
+
+def test_s3_trigger_event():
+    event_dict = load_event("s3Event.json")
+    handle_s3(event_dict, LambdaContext())
+
+
+def test_s3_glacier_trigger_event():
+    event_dict = load_event("s3EventGlacier.json")
+    handle_s3_glacier(event_dict, LambdaContext())