|
| 1 | +from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel |
| 2 | +from tests.functional.utils import load_event |
| 3 | + |
| 4 | + |
def test_s3_intelligent_tiering_event():
    """Parse an S3 IntelligentTiering event and verify the get_object field."""
    raw_event = load_event("s3EventIntelligentTiering.json")
    model: S3Model = S3Model(**raw_event)

    all_records = list(model.Records)
    assert len(all_records) == 1

    rec: S3RecordModel = all_records[0]
    source_record = raw_event["Records"][0]

    # Basic event metadata
    assert rec.eventVersion == "2.3"
    assert rec.eventSource == "aws:s3"
    assert rec.awsRegion == "ap-southeast-2"
    assert rec.eventName == "IntelligentTiering"

    # User identity: tiering transitions are performed by the S3 service itself
    assert rec.userIdentity.principalId == "s3.amazonaws.com"

    # Request parameters
    # NOTE: sourceIPAddress is "s3.amazonaws.com" for IntelligentTiering events, not an IP
    assert str(rec.requestParameters.sourceIPAddress) == "s3.amazonaws.com"

    # Response elements (aliased hyphenated keys in the raw payload)
    raw_response = source_record["responseElements"]
    assert rec.responseElements.x_amz_request_id == raw_response["x-amz-request-id"]
    assert rec.responseElements.x_amz_id_2 == raw_response["x-amz-id-2"]

    # S3 message body
    s3_message = rec.s3
    raw_s3 = source_record["s3"]
    assert s3_message.s3SchemaVersion == raw_s3["s3SchemaVersion"]
    assert s3_message.configurationId == raw_s3["configurationId"]

    # Bucket details
    raw_bucket = raw_s3["bucket"]
    assert s3_message.bucket.name == "mybucket"
    assert s3_message.bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"]
    assert s3_message.bucket.arn == "arn:aws:s3:::mybucket"

    # IntelligentTiering payloads carry 'get_object' in place of the usual 'object'
    tiered_object = s3_message.get_object
    assert tiered_object is not None
    assert tiered_object.key == "myobject"
    assert tiered_object.size == 252294
    assert tiered_object.eTag == "4e9270240d7d62d5ee8dbfcb7a7a3279"
    assert tiered_object.versionId == "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn"
    assert tiered_object.sequencer == "0066A8D0E77DE42BC5"

    # Tiering-specific event data is present...
    assert rec.intelligentTieringEventData is not None
    assert rec.intelligentTieringEventData.destinationAccessTier == "ARCHIVE_ACCESS"

    # ...while glacierEventData stays unset for IntelligentTiering events
    assert rec.glacierEventData is None
| 61 | + |
| 62 | + |
def test_s3_intelligent_tiering_event_access_tiers():
    """Verify both destination access tier values parse for IntelligentTiering events."""
    raw_event = load_event("s3EventIntelligentTiering.json")

    # ARCHIVE_ACCESS is the tier shipped in the fixture event
    first_parse: S3Model = S3Model(**raw_event)
    first_record = list(first_parse.Records)[0]
    assert first_record.intelligentTieringEventData.destinationAccessTier == "ARCHIVE_ACCESS"

    # Swap the tier in the raw payload and re-parse to cover DEEP_ARCHIVE_ACCESS
    raw_event["Records"][0]["intelligentTieringEventData"]["destinationAccessTier"] = "DEEP_ARCHIVE_ACCESS"
    second_parse: S3Model = S3Model(**raw_event)
    second_record = list(second_parse.Records)[0]
    assert second_record.intelligentTieringEventData.destinationAccessTier == "DEEP_ARCHIVE_ACCESS"