Merge pull request #225 from risenberg-cyberark/s3
feat: Add S3 lambda event support to Parser utility #224
heitorlessa authored Dec 4, 2020
2 parents 88bd2e0 + 0d21704 commit b82ea7a
Showing 5 changed files with 209 additions and 0 deletions.
3 changes: 3 additions & 0 deletions aws_lambda_powertools/utilities/parser/models/__init__.py
@@ -2,6 +2,7 @@
from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel
from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel
from .event_bridge import EventBridgeModel
from .s3 import S3Model, S3RecordModel
from .ses import SesModel, SesRecordModel
from .sns import SnsModel, SnsNotificationModel, SnsRecordModel
from .sqs import SqsModel, SqsRecordModel
@@ -18,6 +19,8 @@
"EventBridgeModel",
"DynamoDBStreamChangedRecordModel",
"DynamoDBStreamRecordModel",
"S3Model",
"S3RecordModel",
"SesModel",
"SesRecordModel",
"SnsModel",
72 changes: 72 additions & 0 deletions aws_lambda_powertools/utilities/parser/models/s3.py
@@ -0,0 +1,72 @@
from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel
from pydantic.fields import Field
from pydantic.networks import IPvAnyNetwork
from pydantic.types import PositiveInt
from typing_extensions import Literal


class S3EventRecordGlacierRestoreEventData(BaseModel):
lifecycleRestorationExpiryTime: datetime
lifecycleRestoreStorageClass: str


class S3EventRecordGlacierEventData(BaseModel):
restoreEventData: S3EventRecordGlacierRestoreEventData


class S3Identity(BaseModel):
principalId: str


class S3RequestParameters(BaseModel):
sourceIPAddress: IPvAnyNetwork


class S3ResponseElements(BaseModel):
x_amz_request_id: str = Field(None, alias="x-amz-request-id")
x_amz_id_2: str = Field(None, alias="x-amz-id-2")


class S3OwnerIdentify(BaseModel):
principalId: str


class S3Bucket(BaseModel):
name: str
ownerIdentity: S3OwnerIdentify
arn: str


class S3Object(BaseModel):
key: str
size: PositiveInt
eTag: str
sequencer: str
versionId: Optional[str]


class S3Message(BaseModel):
s3SchemaVersion: str
configurationId: str
bucket: S3Bucket
object: S3Object # noqa: A003,VNE003


class S3RecordModel(BaseModel):
eventVersion: str
eventSource: Literal["aws:s3"]
awsRegion: str
eventTime: datetime
eventName: str
userIdentity: S3Identity
requestParameters: S3RequestParameters
responseElements: S3ResponseElements
s3: S3Message
glacierEventData: Optional[S3EventRecordGlacierEventData]


class S3Model(BaseModel):
Records: List[S3RecordModel]
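
For orientation, here is a minimal sketch of how these models are consumed once merged. It assumes the parse function exported by aws_lambda_powertools.utilities.parser and reuses the sample payload added in this PR; it is an illustration, not part of the diff.

import json

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import S3Model

# Load the sample event added in this PR (path relative to the repo root).
with open("tests/events/s3EventGlacier.json") as f:
    event = json.load(f)

# parse() validates the raw dict against S3Model and returns typed data.
model = parse(event=event, model=S3Model)

record = model.Records[0]
print(record.s3.bucket.name)  # lambda-artifacts-deafc19498e3f2df
print(record.s3.object.key)   # b21b84d653bb07b05b1e6b33684dc11b
print(record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass)  # standard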
1 change: 1 addition & 0 deletions docs/content/utilities/parser.mdx
@@ -158,6 +158,7 @@ Model name | Description
**SqsModel** | Lambda Event Source payload for Amazon SQS
**AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer
**CloudWatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs
**S3Model** | Lambda Event Source payload for Amazon S3

You can extend them with your own models and still have all other known fields parsed along the way, as sketched below.
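
As a sketch of that extension pattern (the extra field below is hypothetical, purely for illustration):

from typing import List, Optional

from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel


class MyS3Record(S3RecordModel):
    # Hypothetical application-specific field; all inherited fields still parse.
    myCustomTag: Optional[str]


class MyS3Model(S3Model):
    # Narrow Records to the extended record type.
    Records: List[MyS3Record]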

44 changes: 44 additions & 0 deletions tests/events/s3EventGlacier.json
@@ -0,0 +1,44 @@
{
"Records": [
{
"eventVersion": "2.1",
"eventSource": "aws:s3",
"awsRegion": "us-east-2",
"eventTime": "2019-09-03T19:37:27.192Z",
"eventName": "ObjectCreated:Put",
"userIdentity": {
"principalId": "AWS:AIDAINPONIXQXHT3IKHL2"
},
"requestParameters": {
"sourceIPAddress": "205.255.255.255"
},
"responseElements": {
"x-amz-request-id": "D82B88E5F771F645",
"x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
},
"s3": {
"s3SchemaVersion": "1.0",
"configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1",
"bucket": {
"name": "lambda-artifacts-deafc19498e3f2df",
"ownerIdentity": {
"principalId": "A3I5XTEXAMAI3E"
},
"arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
},
"object": {
"key": "b21b84d653bb07b05b1e6b33684dc11b",
"size": 1305107,
"eTag": "b21b84d653bb07b05b1e6b33684dc11b",
"sequencer": "0C0F6F405D6ED209E1"
}
},
"glacierEventData": {
"restoreEventData": {
"lifecycleRestorationExpiryTime": "1970-01-01T00:01:00.000Z",
"lifecycleRestoreStorageClass": "standard"
}
}
}
]
}
89 changes: 89 additions & 0 deletions tests/functional/parser/test_s3.py
@@ -0,0 +1,89 @@
from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.parser.utils import load_event


@event_parser(model=S3Model)
def handle_s3(event: S3Model, _: LambdaContext):
records = list(event.Records)
assert len(records) == 1
record: S3RecordModel = records[0]
assert record.eventVersion == "2.1"
assert record.eventSource == "aws:s3"
assert record.awsRegion == "us-east-2"
convert_time = int(round(record.eventTime.timestamp() * 1000))
assert convert_time == 1567539447192
assert record.eventName == "ObjectCreated:Put"
user_identity = record.userIdentity
assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
request_parameters = record.requestParameters
assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32"
assert record.responseElements.x_amz_request_id == "D82B88E5F771F645"
assert (
record.responseElements.x_amz_id_2
== "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
)
s3 = record.s3
assert s3.s3SchemaVersion == "1.0"
assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
bucket = s3.bucket
assert bucket.name == "lambda-artifacts-deafc19498e3f2df"
assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b"
assert s3.object.size == 1305107
assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b"
assert s3.object.versionId is None
assert s3.object.sequencer == "0C0F6F405D6ED209E1"
assert record.glacierEventData is None


@event_parser(model=S3Model)
def handle_s3_glacier(event: S3Model, _: LambdaContext):
records = list(event.Records)
assert len(records) == 1
record: S3RecordModel = records[0]
assert record.eventVersion == "2.1"
assert record.eventSource == "aws:s3"
assert record.awsRegion == "us-east-2"
convert_time = int(round(record.eventTime.timestamp() * 1000))
assert convert_time == 1567539447192
assert record.eventName == "ObjectCreated:Put"
user_identity = record.userIdentity
assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
request_parameters = record.requestParameters
assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32"
assert record.responseElements.x_amz_request_id == "D82B88E5F771F645"
assert (
record.responseElements.x_amz_id_2
== "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
)
s3 = record.s3
assert s3.s3SchemaVersion == "1.0"
assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
bucket = s3.bucket
assert bucket.name == "lambda-artifacts-deafc19498e3f2df"
assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b"
assert s3.object.size == 1305107
assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b"
assert s3.object.versionId is None
assert s3.object.sequencer == "0C0F6F405D6ED209E1"
assert record.glacierEventData is not None
convert_time = int(
round(record.glacierEventData.restoreEventData.lifecycleRestorationExpiryTime.timestamp() * 1000)
)
assert convert_time == 60000
assert record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass == "standard"


def test_s3_trigger_event():
event_dict = load_event("s3Event.json")
handle_s3(event_dict, LambdaContext())


def test_s3_glacier_trigger_event():
event_dict = load_event("s3EventGlacier.json")
handle_s3_glacier(event_dict, LambdaContext())
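
These tests can be run directly with pytest (invocation assumed; the repository's own make targets may wrap it):

pytest tests/functional/parser/test_s3.py -v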
