Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 33 additions & 5 deletions aws_lambda_powertools/utilities/data_classes/s3_event.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,9 +215,14 @@ def bucket(self) -> S3Bucket:

@property
def get_object(self) -> S3Object:
"""Get the `object` property as an S3Object"""
# Note: this name conflicts with existing python builtins
return S3Object(self["object"])
"""Get the `object` property as an S3Object

Note: IntelligentTiering events use 'get_object' as the actual key name,
while other S3 events use 'object'. This method handles both cases.
"""
# IntelligentTiering events use 'get_object', others use 'object'
object_data = self.get("get_object") or self["object"]
return S3Object(object_data)


class S3EventRecordGlacierRestoreEventData(DictWrapper):
Expand All @@ -242,6 +247,16 @@ def restore_event_data(self) -> S3EventRecordGlacierRestoreEventData:
return S3EventRecordGlacierRestoreEventData(self["restoreEventData"])


class S3EventRecordIntelligentTieringEventData(DictWrapper):
    """Wrapper for the ``intelligentTieringEventData`` payload of an S3 record."""

    @property
    def destination_access_tier(self) -> str:
        """The new access tier for the object.

        The intelligentTieringEventData key is only visible for IntelligentTiering events.
        """
        tier: str = self["destinationAccessTier"]
        return tier


class S3EventRecord(DictWrapper):
@property
def event_version(self) -> str:
Expand Down Expand Up @@ -297,6 +312,12 @@ def glacier_event_data(self) -> S3EventRecordGlacierEventData | None:
item = self.get("glacierEventData")
return None if item is None else S3EventRecordGlacierEventData(item)

@property
def intelligent_tiering_event_data(self) -> S3EventRecordIntelligentTieringEventData | None:
    """The intelligentTieringEventData key is only visible for IntelligentTiering events."""
    raw = self.get("intelligentTieringEventData")
    if raw is None:
        return None
    return S3EventRecordIntelligentTieringEventData(raw)


class S3Event(DictWrapper):
"""S3 event notification
Expand Down Expand Up @@ -325,5 +346,12 @@ def bucket_name(self) -> str:

@property
def object_key(self) -> str:
"""Get the object key for the first s3 event record and unquote plus"""
return unquote_plus(self["Records"][0]["s3"]["object"]["key"])
"""Get the object key for the first s3 event record and unquote plus

Note: IntelligentTiering events use 'get_object' as the key name,
while other S3 events use 'object'. This method handles both cases.
"""
s3_data = self["Records"][0]["s3"]
# IntelligentTiering events use 'get_object', others use 'object'
object_data = s3_data.get("get_object") or s3_data["object"]
return unquote_plus(object_data["key"])
42 changes: 39 additions & 3 deletions aws_lambda_powertools/utilities/parser/models/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,16 @@ class S3EventRecordGlacierEventData(BaseModel):
)


class S3EventRecordIntelligentTieringEventData(BaseModel):
    """Model for the ``intelligentTieringEventData`` key of an S3 record."""

    destinationAccessTier: str = Field(
        description="The new access tier for the object. For IntelligentTiering events.",
        examples=["ARCHIVE_ACCESS", "DEEP_ARCHIVE_ACCESS"],
    )


class S3Identity(BaseModel):
principalId: str = Field(
description="Amazon identifier of the user, role, account or services who caused the event.",
Expand Down Expand Up @@ -178,8 +188,9 @@ class S3Message(BaseModel):
},
],
)
object: S3Object = Field(
description="The S3 object object.",
object: Optional[S3Object] = Field(
default=None,
description="The S3 object object. Used by most S3 event types.",
examples=[
{
"key": "b21b84d653bb07b05b1e6b33684dc11b",
Expand All @@ -189,6 +200,20 @@ class S3Message(BaseModel):
},
],
) # noqa: A003
get_object: Optional[S3Object] = Field(
default=None,
alias="get_object",
description="The S3 object object. Used by IntelligentTiering events instead of 'object'.",
examples=[
{
"key": "myobject",
"size": 252294,
"eTag": "4e9270240d7d62d5ee8dbfcb7a7a3279",
"versionId": "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn",
"sequencer": "0066A8D0E77DE42BC5",
},
],
)


class S3EventNotificationObjectModel(BaseModel):
Expand Down Expand Up @@ -449,11 +474,22 @@ class S3RecordModel(BaseModel):
},
],
)
intelligentTieringEventData: Optional[S3EventRecordIntelligentTieringEventData] = Field(
default=None,
description="The Intelligent-Tiering event data object.",
examples=[
{
"destinationAccessTier": "ARCHIVE_ACCESS",
},
],
)

@model_validator(mode="before")
def validate_s3_object(cls, values):
event_name = values.get("eventName")
s3_object = values.get("s3").get("object")
s3_data = values.get("s3")
# IntelligentTiering events use 'get_object' instead of 'object'
s3_object = s3_data.get("object") or s3_data.get("get_object")
if ":Delete" not in event_name and (s3_object.get("size") is None or s3_object.get("eTag") is None):
raise ValueError(
"Size and eTag fields are required for all events except ObjectRemoved:* and LifecycleExpiration:*.",
Expand Down
42 changes: 42 additions & 0 deletions tests/events/s3EventIntelligentTiering.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
{
"Records": [
{
"eventVersion": "2.3",
"eventSource": "aws:s3",
"awsRegion": "ap-southeast-2",
"eventTime": "2025-09-29T00:47:23.967Z",
"eventName": "IntelligentTiering",
"userIdentity": {
"principalId": "s3.amazonaws.com"
},
"requestParameters": {
"sourceIPAddress": "s3.amazonaws.com"
},
"responseElements": {
"x-amz-request-id": "4C05BE601E268FCD",
"x-amz-id-2": "Wt0dgzBP/wQ47PWP5BqoD4nAZC08NOjfdZj2prx5gVPBCI+YQtrx/UKVlnT63Dj+vse4x5koWp61HaoVyz6fUFe1yLkhNpK3"
},
"s3": {
"s3SchemaVersion": "1.0",
"configurationId": "ZDM3MGVjY2MtZTlmNC00ZTk0LThiNjItMWJiNTMwOTYwY2Rl",
"bucket": {
"name": "mybucket",
"ownerIdentity": {
"principalId": "ANY597T3BWFY2"
},
"arn": "arn:aws:s3:::mybucket"
},
"get_object": {
"key": "myobject",
"size": 252294,
"eTag": "4e9270240d7d62d5ee8dbfcb7a7a3279",
"versionId": "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn",
"sequencer": "0066A8D0E77DE42BC5"
}
},
"intelligentTieringEventData": {
"destinationAccessTier": "ARCHIVE_ACCESS"
}
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
from aws_lambda_powertools.utilities.data_classes import S3Event
from tests.functional.utils import load_event


def test_s3_intelligent_tiering_event():
    """Test S3 IntelligentTiering event with get_object field"""
    event = S3Event(load_event("s3EventIntelligentTiering.json"))

    # Envelope of the first record
    first_record = event.record
    assert first_record.event_name == "IntelligentTiering"
    assert first_record.event_version == "2.3"
    assert first_record.event_source == "aws:s3"
    assert first_record.aws_region == "ap-southeast-2"

    # User identity
    assert first_record.user_identity.principal_id == "s3.amazonaws.com"

    # S3 object via the get_object accessor (handles both 'object' and 'get_object' keys)
    obj = first_record.s3.get_object
    assert obj.key == "myobject"
    assert obj.size == 252294
    assert obj.etag == "4e9270240d7d62d5ee8dbfcb7a7a3279"
    assert obj.version_id == "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn"
    assert obj.sequencer == "0066A8D0E77DE42BC5"

    # Bucket
    bucket = first_record.s3.bucket
    assert bucket.name == "mybucket"
    assert bucket.arn == "arn:aws:s3:::mybucket"

    # Intelligent-Tiering payload
    tiering = first_record.intelligent_tiering_event_data
    assert tiering is not None
    assert tiering.destination_access_tier == "ARCHIVE_ACCESS"

    # Glacier payload must be absent on tiering events
    assert first_record.glacier_event_data is None

    # Top-level convenience accessors
    assert event.bucket_name == "mybucket"
    assert event.object_key == "myobject"


def test_s3_intelligent_tiering_event_iteration():
    """Test iterating through multiple IntelligentTiering records"""
    event = S3Event(load_event("s3EventIntelligentTiering.json"))

    all_records = list(event.records)
    assert len(all_records) == 1

    # Every record in the fixture is an IntelligentTiering notification
    for rec in all_records:
        assert rec.event_name == "IntelligentTiering"
        assert rec.s3.get_object.key == "myobject"
        assert rec.intelligent_tiering_event_data.destination_access_tier == "ARCHIVE_ACCESS"


def test_s3_intelligent_tiering_deep_archive_access():
    """Test IntelligentTiering event with DEEP_ARCHIVE_ACCESS tier"""
    raw_event = load_event("s3EventIntelligentTiering.json")
    # Swap the fixture's tier before wrapping; assumes load_event returns a fresh dict per call
    tiering_payload = raw_event["Records"][0]["intelligentTieringEventData"]
    tiering_payload["destinationAccessTier"] = "DEEP_ARCHIVE_ACCESS"

    record = S3Event(raw_event).record

    assert record.intelligent_tiering_event_data.destination_access_tier == "DEEP_ARCHIVE_ACCESS"
76 changes: 76 additions & 0 deletions tests/unit/parser/_pydantic/test_s3_intelligent_tiering.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
from tests.functional.utils import load_event


def test_s3_intelligent_tiering_event():
    """Test parsing of S3 IntelligentTiering events with get_object field"""
    raw_event = load_event("s3EventIntelligentTiering.json")
    model: S3Model = S3Model(**raw_event)

    parsed_records = list(model.Records)
    assert len(parsed_records) == 1

    rec: S3RecordModel = parsed_records[0]
    source_record = raw_event["Records"][0]

    # Basic event envelope
    assert rec.eventVersion == "2.3"
    assert rec.eventSource == "aws:s3"
    assert rec.awsRegion == "ap-southeast-2"
    assert rec.eventName == "IntelligentTiering"

    # User identity
    assert rec.userIdentity.principalId == "s3.amazonaws.com"

    # Request parameters
    # NOTE: sourceIPAddress is "s3.amazonaws.com" for IntelligentTiering events, not an IP
    assert str(rec.requestParameters.sourceIPAddress) == "s3.amazonaws.com"

    # Response elements
    assert rec.responseElements.x_amz_request_id == source_record["responseElements"]["x-amz-request-id"]
    assert rec.responseElements.x_amz_id_2 == source_record["responseElements"]["x-amz-id-2"]

    # S3 message envelope
    message = rec.s3
    assert message.s3SchemaVersion == source_record["s3"]["s3SchemaVersion"]
    assert message.configurationId == source_record["s3"]["configurationId"]

    # Bucket
    source_bucket = source_record["s3"]["bucket"]
    assert message.bucket.name == "mybucket"
    assert message.bucket.ownerIdentity.principalId == source_bucket["ownerIdentity"]["principalId"]
    assert message.bucket.arn == "arn:aws:s3:::mybucket"

    # IntelligentTiering events carry 'get_object' instead of 'object'
    assert message.get_object is not None
    assert message.get_object.key == "myobject"
    assert message.get_object.size == 252294
    assert message.get_object.eTag == "4e9270240d7d62d5ee8dbfcb7a7a3279"
    assert message.get_object.versionId == "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn"
    assert message.get_object.sequencer == "0066A8D0E77DE42BC5"

    # Intelligent-Tiering payload
    assert rec.intelligentTieringEventData is not None
    assert rec.intelligentTieringEventData.destinationAccessTier == "ARCHIVE_ACCESS"

    # glacierEventData is absent for IntelligentTiering events
    assert rec.glacierEventData is None


def test_s3_intelligent_tiering_event_access_tiers():
    """Test different access tier values for IntelligentTiering events"""
    raw_event = load_event("s3EventIntelligentTiering.json")

    # ARCHIVE_ACCESS tier, as shipped in the fixture
    first_parse: S3Model = S3Model(**raw_event)
    first_record = list(first_parse.Records)[0]
    assert first_record.intelligentTieringEventData.destinationAccessTier == "ARCHIVE_ACCESS"

    # DEEP_ARCHIVE_ACCESS tier, via a mutated copy of the fixture
    raw_event["Records"][0]["intelligentTieringEventData"]["destinationAccessTier"] = "DEEP_ARCHIVE_ACCESS"
    second_parse: S3Model = S3Model(**raw_event)
    second_record = list(second_parse.Records)[0]
    assert second_record.intelligentTieringEventData.destinationAccessTier == "DEEP_ARCHIVE_ACCESS"