diff --git a/.github/ISSUE_TEMPLATE/share_your_work.yml b/.github/ISSUE_TEMPLATE/share_your_work.yml
new file mode 100644
index 00000000000..974aec87b06
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/share_your_work.yml
@@ -0,0 +1,56 @@
+name: I Made This (showcase your work)
+description: Share what you did with Powertools 💞💞. Blog post, workshops, presentation, sample apps, etc.
+title: "[I Made This]: "
+labels: ["community-content"]
+body:
+ - type: markdown
+ attributes:
+ value: Thank you for helping spread the word about Powertools, truly!
+ - type: input
+ id: content
+ attributes:
+ label: Link to your material
+ description: |
+ Please share the original link to your material.
+
+ *Note: Short links will be expanded when added to Powertools documentation*
+ validations:
+ required: true
+ - type: textarea
+ id: description
+ attributes:
+ label: Description
+ description: Describe in one paragraph what's in it for them (readers)
+ validations:
+ required: true
+ - type: input
+ id: author
+ attributes:
+ label: Preferred contact
+ description: What's your preferred contact? We'll list it next to this content
+ validations:
+ required: true
+ - type: input
+ id: author-social
+ attributes:
+ label: (Optional) Social Network
+ description: If different from preferred contact, what's your preferred contact for social interactions?
+ validations:
+ required: false
+ - type: textarea
+ id: notes
+ attributes:
+ label: (Optional) Additional notes
+ description: |
+ Any notes you might want to share with us related to this material.
+
+ *Note: These notes are explicitly for Powertools maintainers. They will not be added to the community resources page.*
+ validations:
+ required: false
+ - type: checkboxes
+ id: acknowledgment
+ attributes:
+ label: Acknowledgment
+ options:
+ - label: I understand this content may be removed from Powertools documentation if it doesn't conform with the [Code of Conduct](https://aws.github.io/code-of-conduct)
+ required: true
diff --git a/.github/ISSUE_TEMPLATE/support_powertools.yml b/.github/ISSUE_TEMPLATE/support_powertools.yml
new file mode 100644
index 00000000000..e03b1627044
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/support_powertools.yml
@@ -0,0 +1,64 @@
+name: Support Lambda Powertools (become a reference)
+description: Add your organization's name or logo to the Lambda Powertools documentation
+title: "[Support Lambda Powertools]: "
+labels: ["customer-reference"]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Thank you for becoming a reference customer. Your support means a lot to us. It also helps new customers to know who's using it.
+
+ If you would like us to also display your organization's logo, please share a link in the `Company logo` field.
+ - type: input
+ id: organization
+ attributes:
+ label: Organization Name
+ description: Please share the name of your organization
+ placeholder: ACME
+ validations:
+ required: true
+ - type: input
+ id: name
+ attributes:
+ label: Your Name
+ description: Please share your name
+ validations:
+ required: true
+ - type: input
+ id: job
+ attributes:
+ label: Your current position
+ description: Please share your current position at your company
+ validations:
+ required: true
+ - type: input
+ id: logo
+ attributes:
+ label: (Optional) Company logo
+ description: Company logo you want us to display. You also allow us to resize for optimal placement in the documentation.
+ validations:
+ required: false
+ - type: textarea
+ id: use_case
+ attributes:
+ label: (Optional) Use case
+ description: How are you using Lambda Powertools today? *features, etc.*
+ validations:
+ required: false
+ - type: checkboxes
+ id: other_languages
+ attributes:
+ label: Also using other Lambda Powertools languages?
+ options:
+ - label: Java
+ required: false
+ - label: TypeScript
+ required: false
+ - label: .NET
+ required: false
+ - type: markdown
+ attributes:
+ value: |
+ *By raising a Support Lambda Powertools issue, you are granting AWS permission to use your company's name (and/or logo) for the limited purpose described here. You are also confirming that you have authority to grant such permission.*
+
+ *You can opt-out at any time by commenting or reopening this issue.*
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml
index 3007aa0241d..68f3cfb962a 100644
--- a/.github/workflows/release-drafter.yml
+++ b/.github/workflows/release-drafter.yml
@@ -10,6 +10,6 @@ jobs:
update_release_draft:
runs-on: ubuntu-latest
steps:
- - uses: release-drafter/release-drafter@df69d584deac33d8569990cb6413f82447181076 # v5.20.1
+ - uses: release-drafter/release-drafter@6df64e4ba4842c203c604c1f45246c5863410adb # v5.20.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ef23f9d6c8f..637206e98e6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,9 +4,35 @@
# Unreleased
+## Features
+
+* **data_classes:** add KinesisFirehoseEvent ([#1540](https://github.com/awslabs/aws-lambda-powertools-python/issues/1540))
+
+## Maintenance
+
+* **layer:** bump to 1.31.1 (v39)
+
+
+
+## [v1.31.1] - 2022-10-14
+## Bug Fixes
+
+* **parser:** loose validation on SNS fields to support FIFO ([#1606](https://github.com/awslabs/aws-lambda-powertools-python/issues/1606))
+
+## Documentation
+
+* **governance:** allow community to suggest feature content ([#1593](https://github.com/awslabs/aws-lambda-powertools-python/issues/1593))
+* **governance:** new form to allow customers self-nominate as public reference ([#1589](https://github.com/awslabs/aws-lambda-powertools-python/issues/1589))
+* **homepage:** include .NET powertools
+* **idempotency:** "persisntence" typo ([#1596](https://github.com/awslabs/aws-lambda-powertools-python/issues/1596))
+* **logger:** fix typo. ([#1587](https://github.com/awslabs/aws-lambda-powertools-python/issues/1587))
+
## Maintenance
+* add dummy v2 sar deploy job
* bump layer version to 38
+* **deps-dev:** bump mypy-boto3-ssm from 1.24.81 to 1.24.90 ([#1594](https://github.com/awslabs/aws-lambda-powertools-python/issues/1594))
+* **deps-dev:** bump flake8-builtins from 1.5.3 to 2.0.0 ([#1582](https://github.com/awslabs/aws-lambda-powertools-python/issues/1582))
@@ -2408,7 +2434,8 @@
* Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38
-[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.31.0...HEAD
+[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.31.1...HEAD
+[v1.31.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.31.0...v1.31.1
[v1.31.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.30.0...v1.31.0
[v1.30.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.29.2...v1.30.0
[v1.29.2]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.29.1...v1.29.2
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 260f6628aa3..c4907cdf57f 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -102,6 +102,8 @@ These are the most common labels used by maintainers to triage issues, pull requ
| github-actions | Changes in GitHub workflows | PR automation |
| github-templates | Changes in GitHub issue/PR templates | PR automation |
| internal | Changes in governance, tech debt and chores (linting setup, baseline, etc.) | PR automation |
+| customer-reference | Authorization to use company name in our documentation | Public Relations |
+| community-content | Suggested content to feature in our documentation | Public Relations |
## Maintainer Responsibilities
diff --git a/README.md b/README.md
index c0e32e6b6aa..8ecae29e20e 100644
--- a/README.md
+++ b/README.md
@@ -3,8 +3,7 @@
[](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/python_build.yml)
[](https://app.codecov.io/gh/awslabs/aws-lambda-powertools-python)
-   
-[](https://discord.gg/B8zZKbbyET)
+   [](https://discord.gg/B8zZKbbyET)
A suite of Python utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, and more. (AWS Lambda Powertools for [Java](https://github.com/awslabs/aws-lambda-powertools-java), [Typescript](https://github.com/awslabs/aws-lambda-powertools-typescript), and [.NET](https://awslabs.github.io/aws-lambda-powertools-dotnet/){target="_blank"} are also available).
diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py
index 8ed77f9f3a3..2aa2021ed1e 100644
--- a/aws_lambda_powertools/utilities/data_classes/__init__.py
+++ b/aws_lambda_powertools/utilities/data_classes/__init__.py
@@ -13,6 +13,7 @@
from .event_bridge_event import EventBridgeEvent
from .event_source import event_source
from .kafka_event import KafkaEvent
+from .kinesis_firehose_event import KinesisFirehoseEvent
from .kinesis_stream_event import KinesisStreamEvent
from .lambda_function_url_event import LambdaFunctionUrlEvent
from .s3_event import S3Event
@@ -32,6 +33,7 @@
"DynamoDBStreamEvent",
"EventBridgeEvent",
"KafkaEvent",
+ "KinesisFirehoseEvent",
"KinesisStreamEvent",
"LambdaFunctionUrlEvent",
"S3Event",
diff --git a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
index eb674c86b60..19c8231714e 100644
--- a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
@@ -1,169 +1,78 @@
from enum import Enum
-from typing import Any, Dict, Iterator, List, Optional, Union
+from typing import Any, Dict, Iterator, Optional
from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
-class AttributeValueType(Enum):
- Binary = "B"
- BinarySet = "BS"
- Boolean = "BOOL"
- List = "L"
- Map = "M"
- Number = "N"
- NumberSet = "NS"
- Null = "NULL"
- String = "S"
- StringSet = "SS"
-
-
-class AttributeValue(DictWrapper):
- """Represents the data for an attribute
-
- Documentation:
- --------------
- - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html
- - https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html
+class TypeDeserializer:
+ """
+ This class deserializes DynamoDB types to Python types.
+ It based on boto3's DynamoDB TypeDeserializer found here:
+ https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/dynamodb/types.html
+ Except that it deserializes DynamoDB numbers into strings, and does not wrap binary
+ with a Binary class.
"""
- def __init__(self, data: Dict[str, Any]):
- """AttributeValue constructor
-
- Parameters
- ----------
- data: Dict[str, Any]
- Raw lambda event dict
- """
- super().__init__(data)
- self.dynamodb_type = list(data.keys())[0]
-
- @property
- def b_value(self) -> Optional[str]:
- """An attribute of type Base64-encoded binary data object
-
- Example:
- >>> {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"}
- """
- return self.get("B")
-
- @property
- def bs_value(self) -> Optional[List[str]]:
- """An attribute of type Array of Base64-encoded binary data objects
-
- Example:
- >>> {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]}
- """
- return self.get("BS")
-
- @property
- def bool_value(self) -> Optional[bool]:
- """An attribute of type Boolean
-
- Example:
- >>> {"BOOL": True}
- """
- item = self.get("BOOL")
- return None if item is None else bool(item)
-
- @property
- def list_value(self) -> Optional[List["AttributeValue"]]:
- """An attribute of type Array of AttributeValue objects
-
- Example:
- >>> {"L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N": "3.14159"}]}
- """
- item = self.get("L")
- return None if item is None else [AttributeValue(v) for v in item]
-
- @property
- def map_value(self) -> Optional[Dict[str, "AttributeValue"]]:
- """An attribute of type String to AttributeValue object map
-
- Example:
- >>> {"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}}
- """
- return _attribute_value_dict(self._data, "M")
-
- @property
- def n_value(self) -> Optional[str]:
- """An attribute of type Number
-
- Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages
- and libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
-
- Example:
- >>> {"N": "123.45"}
- """
- return self.get("N")
-
- @property
- def ns_value(self) -> Optional[List[str]]:
- """An attribute of type Number Set
-
- Example:
- >>> {"NS": ["42.2", "-19", "7.5", "3.14"]}
+ def deserialize(self, value):
+ """The method to deserialize the DynamoDB data types.
+
+ :param value: A DynamoDB value to be deserialized to a pythonic value.
+ Here are the various conversions:
+
+ DynamoDB Python
+ -------- ------
+ {'NULL': True} None
+ {'BOOL': True/False} True/False
+ {'N': str(value)} str(value)
+ {'S': string} string
+ {'B': bytes} bytes
+ {'NS': [str(value)]} set([str(value)])
+ {'SS': [string]} set([string])
+ {'BS': [bytes]} set([bytes])
+ {'L': list} list
+ {'M': dict} dict
+
+ :returns: The pythonic value of the DynamoDB type.
"""
- return self.get("NS")
- @property
- def null_value(self) -> None:
- """An attribute of type Null.
+ if not value:
+ raise TypeError("Value must be a nonempty dictionary whose key " "is a valid dynamodb type.")
+ dynamodb_type = list(value.keys())[0]
+ try:
+ deserializer = getattr(self, f"_deserialize_{dynamodb_type}".lower())
+ except AttributeError:
+ raise TypeError(f"Dynamodb type {dynamodb_type} is not supported")
+ return deserializer(value[dynamodb_type])
- Example:
- >>> {"NULL": True}
- """
+ def _deserialize_null(self, value):
return None
- @property
- def s_value(self) -> Optional[str]:
- """An attribute of type String
+ def _deserialize_bool(self, value):
+ return value
- Example:
- >>> {"S": "Hello"}
- """
- return self.get("S")
+ def _deserialize_n(self, value):
+ return value
- @property
- def ss_value(self) -> Optional[List[str]]:
- """An attribute of type Array of strings
+ def _deserialize_s(self, value):
+ return value
- Example:
- >>> {"SS": ["Giraffe", "Hippo" ,"Zebra"]}
- """
- return self.get("SS")
+ def _deserialize_b(self, value):
+ return value
- @property
- def get_type(self) -> AttributeValueType:
- """Get the attribute value type based on the contained data"""
- return AttributeValueType(self.dynamodb_type)
+ def _deserialize_ns(self, value):
+ return set(map(self._deserialize_n, value))
- @property
- def l_value(self) -> Optional[List["AttributeValue"]]:
- """Alias of list_value"""
- return self.list_value
+ def _deserialize_ss(self, value):
+ return set(map(self._deserialize_s, value))
- @property
- def m_value(self) -> Optional[Dict[str, "AttributeValue"]]:
- """Alias of map_value"""
- return self.map_value
-
- @property
- def get_value(self) -> Union[Optional[bool], Optional[str], Optional[List], Optional[Dict]]:
- """Get the attribute value"""
- try:
- return getattr(self, f"{self.dynamodb_type.lower()}_value")
- except AttributeError:
- raise TypeError(f"Dynamodb type {self.dynamodb_type} is not supported")
+ def _deserialize_bs(self, value):
+ return set(map(self._deserialize_b, value))
+ def _deserialize_l(self, value):
+ return [self.deserialize(v) for v in value]
-def _attribute_value_dict(attr_values: Dict[str, dict], key: str) -> Optional[Dict[str, AttributeValue]]:
- """A dict of type String to AttributeValue object map
-
- Example:
- >>> {"NewImage": {"Id": {"S": "xxx-xxx"}, "Value": {"N": "35"}}}
- """
- attr_values_dict = attr_values.get(key)
- return None if attr_values_dict is None else {k: AttributeValue(v) for k, v in attr_values_dict.items()}
+ def _deserialize_m(self, value):
+ return {k: self.deserialize(v) for k, v in value.items()}
class StreamViewType(Enum):
@@ -176,28 +85,43 @@ class StreamViewType(Enum):
class StreamRecord(DictWrapper):
+ def __init__(self, data: Dict[str, Any]):
+ """StreamRecord constructor
+
+ Parameters
+ ----------
+ data: Dict[str, Any]
+ Represents the dynamodb dict inside DynamoDBStreamEvent's records
+ """
+ super().__init__(data)
+ self._deserializer = TypeDeserializer()
+
+ def _deserialize_dynamodb_dict(self, key: str) -> Optional[Dict[str, Any]]:
+ dynamodb_dict = self._data.get(key)
+ return (
+ None if dynamodb_dict is None else {k: self._deserializer.deserialize(v) for k, v in dynamodb_dict.items()}
+ )
+
@property
def approximate_creation_date_time(self) -> Optional[int]:
"""The approximate date and time when the stream record was created, in UNIX epoch time format."""
item = self.get("ApproximateCreationDateTime")
return None if item is None else int(item)
- # NOTE: This override breaks the Mapping protocol of DictWrapper, it's left here for backwards compatibility with
- # a 'type: ignore' comment. See #1516 for discussion
@property
- def keys(self) -> Optional[Dict[str, AttributeValue]]: # type: ignore[override]
+ def keys(self) -> Optional[Dict[str, Any]]: # type: ignore[override]
"""The primary key attribute(s) for the DynamoDB item that was modified."""
- return _attribute_value_dict(self._data, "Keys")
+ return self._deserialize_dynamodb_dict("Keys")
@property
- def new_image(self) -> Optional[Dict[str, AttributeValue]]:
+ def new_image(self) -> Optional[Dict[str, Any]]:
"""The item in the DynamoDB table as it appeared after it was modified."""
- return _attribute_value_dict(self._data, "NewImage")
+ return self._deserialize_dynamodb_dict("NewImage")
@property
- def old_image(self) -> Optional[Dict[str, AttributeValue]]:
+ def old_image(self) -> Optional[Dict[str, Any]]:
"""The item in the DynamoDB table as it appeared before it was modified."""
- return _attribute_value_dict(self._data, "OldImage")
+ return self._deserialize_dynamodb_dict("OldImage")
@property
def sequence_number(self) -> Optional[str]:
@@ -233,7 +157,7 @@ def aws_region(self) -> Optional[str]:
@property
def dynamodb(self) -> Optional[StreamRecord]:
- """The main body of the stream record, containing all the DynamoDB-specific fields."""
+ """The main body of the stream record, containing all the DynamoDB-specific dicts."""
stream_record = self.get("dynamodb")
return None if stream_record is None else StreamRecord(stream_record)
diff --git a/aws_lambda_powertools/utilities/data_classes/event_source.py b/aws_lambda_powertools/utilities/data_classes/event_source.py
index 3968f923573..37249d4a99f 100644
--- a/aws_lambda_powertools/utilities/data_classes/event_source.py
+++ b/aws_lambda_powertools/utilities/data_classes/event_source.py
@@ -34,6 +34,6 @@ def event_source(
@event_source(data_class=S3Event)
def handler(event: S3Event, context):
- return {"key": event.object_key}
+ return {"key": event.object_key}
"""
return handler(data_class(event), context)
diff --git a/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py b/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py
new file mode 100644
index 00000000000..5683902f9d0
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py
@@ -0,0 +1,113 @@
+import base64
+import json
+from typing import Iterator, Optional
+
+from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
+
+
+class KinesisFirehoseRecordMetadata(DictWrapper):
+ @property
+ def _metadata(self) -> dict:
+ """Optional: metadata associated with this record; present only when Kinesis Stream is source"""
+ return self["kinesisRecordMetadata"] # could raise KeyError
+
+ @property
+ def shard_id(self) -> str:
+ """Kinesis stream shard ID; present only when Kinesis Stream is source"""
+ return self._metadata["shardId"]
+
+ @property
+ def partition_key(self) -> str:
+ """Kinesis stream partition key; present only when Kinesis Stream is source"""
+ return self._metadata["partitionKey"]
+
+ @property
+ def approximate_arrival_timestamp(self) -> int:
+ """Kinesis stream approximate arrival timestamp; present only when Kinesis Stream is source"""
+ return self._metadata["approximateArrivalTimestamp"]
+
+ @property
+ def sequence_number(self) -> str:
+ """Kinesis stream sequence number; present only when Kinesis Stream is source"""
+ return self._metadata["sequenceNumber"]
+
+ @property
+ def subsequence_number(self) -> str:
+ """Kinesis stream sub-sequence number; present only when Kinesis Stream is source
+
+ Note: this will only be present for Kinesis streams using record aggregation
+ """
+ return self._metadata["subsequenceNumber"]
+
+
+class KinesisFirehoseRecord(DictWrapper):
+ @property
+ def approximate_arrival_timestamp(self) -> int:
+ """The approximate time that the record was inserted into the delivery stream"""
+ return self["approximateArrivalTimestamp"]
+
+ @property
+ def record_id(self) -> str:
+ """Record ID; uniquely identifies this record within the current batch"""
+ return self["recordId"]
+
+ @property
+ def data(self) -> str:
+ """The data blob, base64-encoded"""
+ return self["data"]
+
+ @property
+ def metadata(self) -> Optional[KinesisFirehoseRecordMetadata]:
+ """Optional: metadata associated with this record; present only when Kinesis Stream is source"""
+ return KinesisFirehoseRecordMetadata(self._data) if self.get("kinesisRecordMetadata") else None
+
+ @property
+ def data_as_bytes(self) -> bytes:
+ """Decoded base64-encoded data as bytes"""
+ return base64.b64decode(self.data)
+
+ @property
+ def data_as_text(self) -> str:
+ """Decoded base64-encoded data as text"""
+ return self.data_as_bytes.decode("utf-8")
+
+ @property
+ def data_as_json(self) -> dict:
+ """Decoded base64-encoded data loaded to json"""
+ if self._json_data is None:
+ self._json_data = json.loads(self.data_as_text)
+ return self._json_data
+
+
+class KinesisFirehoseEvent(DictWrapper):
+ """Kinesis Data Firehose event
+
+ Documentation:
+ --------------
+ - https://docs.aws.amazon.com/lambda/latest/dg/services-kinesisfirehose.html
+ """
+
+ @property
+ def invocation_id(self) -> str:
+ """Unique ID for Lambda invocation"""
+ return self["invocationId"]
+
+ @property
+ def delivery_stream_arn(self) -> str:
+ """ARN of the Amazon Kinesis Data Firehose Delivery Stream"""
+ return self["deliveryStreamArn"]
+
+ @property
+ def source_kinesis_stream_arn(self) -> Optional[str]:
+ """ARN of the Kinesis Stream; present only when Kinesis Stream is source"""
+ return self.get("sourceKinesisStreamArn")
+
+ @property
+ def region(self) -> str:
+ """AWS region where the event originated eg: us-east-1"""
+ return self["region"]
+
+ @property
+ def records(self) -> Iterator[KinesisFirehoseRecord]:
+ for record in self["records"]:
+ yield KinesisFirehoseRecord(record)
diff --git a/aws_lambda_powertools/utilities/parser/models/sns.py b/aws_lambda_powertools/utilities/parser/models/sns.py
index 1b095fde2c4..4666d1c4ff2 100644
--- a/aws_lambda_powertools/utilities/parser/models/sns.py
+++ b/aws_lambda_powertools/utilities/parser/models/sns.py
@@ -22,10 +22,10 @@ class SnsNotificationModel(BaseModel):
MessageAttributes: Optional[Dict[str, SnsMsgAttributeModel]]
Message: Union[str, TypingType[BaseModel]]
MessageId: str
- SigningCertUrl: HttpUrl
- Signature: str
+ SigningCertUrl: Optional[HttpUrl] # NOTE: FIFO opt-in removes attribute
+ Signature: Optional[str] # NOTE: FIFO opt-in removes attribute
Timestamp: datetime
- SignatureVersion: str
+ SignatureVersion: Optional[str] # NOTE: FIFO opt-in removes attribute
@root_validator(pre=True, allow_reuse=True)
def check_sqs_protocol(cls, values):
diff --git a/docs/core/logger.md b/docs/core/logger.md
index 4b16a1eeb71..f98962a0f5f 100644
--- a/docs/core/logger.md
+++ b/docs/core/logger.md
@@ -619,7 +619,7 @@ By default all registered loggers will be modified. You can change this behavior
### How can I add standard library logging attributes to a log record?
-The Python standard library log records contains a [large set of atttributes](https://docs.python.org/3/library/logging.html#logrecord-attributes){target="_blank"}, however only a few are included in Powertools Logger log record by default.
+The Python standard library log records contains a [large set of attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes){target="_blank"}, however only a few are included in Powertools Logger log record by default.
You can include any of these logging attributes as key value arguments (`kwargs`) when instantiating `Logger` or `LambdaPowertoolsFormatter`.
diff --git a/docs/index.md b/docs/index.md
index 58bf74bf223..a82030dae7e 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -119,7 +119,6 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
```
=== "Serverless framework"
-
```yaml hl_lines="5"
functions:
hello:
@@ -137,7 +136,6 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
def __init__(self, scope: core.Construct, id_: str, env: core.Environment) -> None:
super().__init__(scope, id_)
-
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
@@ -195,7 +193,6 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
source_code_hash = filebase64sha256("lambda_function_payload.zip")
}
-
```
=== "Amplify"
diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md
index 67d821fe04f..509110e0480 100644
--- a/docs/utilities/data_classes.md
+++ b/docs/utilities/data_classes.md
@@ -77,6 +77,7 @@ Event Source | Data_class
[EventBridge](#eventbridge) | `EventBridgeEvent`
[Kafka](#kafka) | `KafkaEvent`
[Kinesis Data Stream](#kinesis-streams) | `KinesisStreamEvent`
+[Kinesis Firehose Delivery Stream](#kinesis-firehose-delivery-stream) | `KinesisFirehoseEvent`
[Lambda Function URL](#lambda-function-url) | `LambdaFunctionUrlEvent`
[Rabbit MQ](#rabbit-mq) | `RabbitMQEvent`
[S3](#s3) | `S3Event`
@@ -892,6 +893,20 @@ or plain text, depending on the original payload.
do_something_with(data)
```
+### Kinesis Firehose delivery stream
+
+Kinesis Firehose Data Transformation can use a Lambda Function to modify the records
+inline, and re-emit them back to the Delivery Stream.
+
+Similar to Kinesis Data Streams, the events contain base64 encoded data. You can use the helper
+function to access the data either as json or plain text, depending on the original payload.
+
+=== "app.py"
+
+ ```python
+ --8<-- "examples/event_sources/src/kinesis_firehose_delivery_stream.py"
+ ```
+
### Lambda Function URL
=== "app.py"
diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md
index f02cd8700b8..b246e2cebc1 100644
--- a/docs/utilities/idempotency.md
+++ b/docs/utilities/idempotency.md
@@ -402,7 +402,7 @@ To prevent against extended failed retries when a [Lambda function times out](ht
This means that if an invocation expired during execution, it will be quickly executed again on the next retry.
???+ important
- If you are only using the [@idempotent_function decorator](#idempotentfunction-decorator) to guard isolated parts of your code, you must use `register_lambda_context` available in the [idempotency config object](#customizing-the-default-behavior) to benefit from this protection.
+ If you are only using the [@idempotent_function decorator](#idempotent_function-decorator) to guard isolated parts of your code, you must use `register_lambda_context` available in the [idempotency config object](#customizing-the-default-behavior) to benefit from this protection.
Here is an example on how you register the Lambda context in your handler:
@@ -842,7 +842,7 @@ This utility provides an abstract base class (ABC), so that you can implement yo
You can inherit from the `BasePersistenceLayer` class and implement the abstract methods `_get_record`, `_put_record`,
`_update_record` and `_delete_record`.
-```python hl_lines="8-13 57 65 74 96 124" title="Excerpt DynamoDB Persisntence Layer implementation for reference"
+```python hl_lines="8-13 57 65 74 96 124" title="Excerpt DynamoDB Persistence Layer implementation for reference"
import datetime
import logging
from typing import Any, Dict, Optional
diff --git a/examples/event_sources/src/kinesis_firehose_delivery_stream.py b/examples/event_sources/src/kinesis_firehose_delivery_stream.py
new file mode 100644
index 00000000000..770bfb1ee63
--- /dev/null
+++ b/examples/event_sources/src/kinesis_firehose_delivery_stream.py
@@ -0,0 +1,28 @@
+import base64
+import json
+
+from aws_lambda_powertools.utilities.data_classes import (
+ KinesisFirehoseEvent,
+ event_source,
+)
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+@event_source(data_class=KinesisFirehoseEvent)
+def lambda_handler(event: KinesisFirehoseEvent, context: LambdaContext):
+ result = []
+
+ for record in event.records:
+ # if data was delivered as json; caches loaded value
+ data = record.data_as_json
+
+ processed_record = {
+ "recordId": record.record_id,
+ "data": base64.b64encode(json.dumps(data).encode("utf-8")),
+ "result": "Ok",
+ }
+
+ result.append(processed_record)
+
+ # return transformed records
+ return {"records": result}
diff --git a/poetry.lock b/poetry.lock
index c775e1714d8..d17be5ab5d1 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -368,7 +368,7 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"]
[[package]]
name = "flake8-builtins"
-version = "1.5.3"
+version = "2.0.0"
description = "Check for python builtins being used as variables or parameters."
category = "dev"
optional = false
@@ -378,7 +378,7 @@ python-versions = "*"
flake8 = "*"
[package.extras]
-test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"]
+test = ["pytest"]
[[package]]
name = "flake8-comprehensions"
@@ -1712,8 +1712,8 @@ flake8-bugbear = [
{file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"},
]
flake8-builtins = [
- {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"},
- {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"},
+ {file = "flake8-builtins-2.0.0.tar.gz", hash = "sha256:98833fa16139a75cd4913003492a9bd9a61c6f8ac146c3db12a2ebaf420dade3"},
+ {file = "flake8_builtins-2.0.0-py3-none-any.whl", hash = "sha256:39bfa3badb5e8d22f92baf4e0ea1b816707245233846932d6b13e81fc6f673e8"},
]
flake8-comprehensions = [
{file = "flake8-comprehensions-3.10.0.tar.gz", hash = "sha256:181158f7e7aa26a63a0a38e6017cef28c6adee71278ce56ce11f6ec9c4905058"},
diff --git a/tests/events/kinesisFirehosePutEvent.json b/tests/events/kinesisFirehosePutEvent.json
index 27aeddd80eb..f3e07190710 100644
--- a/tests/events/kinesisFirehosePutEvent.json
+++ b/tests/events/kinesisFirehosePutEvent.json
@@ -2,16 +2,16 @@
"invocationId": "2b4d1ad9-2f48-94bd-a088-767c317e994a",
"deliveryStreamArn": "arn:aws:firehose:us-east-2:123456789012:deliverystream/delivery-stream-name",
"region": "us-east-2",
- "records":[
+ "records": [
{
- "recordId":"record1",
- "approximateArrivalTimestamp":1664029185290,
- "data":"SGVsbG8gV29ybGQ="
+ "recordId": "record1",
+ "approximateArrivalTimestamp": 1664029185290,
+ "data": "SGVsbG8gV29ybGQ="
},
{
- "recordId":"record2",
- "approximateArrivalTimestamp":1664029186945,
- "data":"eyJIZWxsbyI6ICJXb3JsZCJ9"
+ "recordId": "record2",
+ "approximateArrivalTimestamp": 1664029186945,
+ "data": "eyJIZWxsbyI6ICJXb3JsZCJ9"
}
]
}
diff --git a/tests/events/snsSqsFifoEvent.json b/tests/events/snsSqsFifoEvent.json
new file mode 100644
index 00000000000..6c23ef62945
--- /dev/null
+++ b/tests/events/snsSqsFifoEvent.json
@@ -0,0 +1,23 @@
+{
+ "Records": [
+ {
+ "messageId": "69bc4bbd-ed69-4325-a434-85c3b428ceab",
+ "receiptHandle": "AQEBbfAqjhrgIdW3HGWYPz57mdDatG/dT9LZhRPAsNQ1pJmw495w4esDc8ZSbOwMZuPBol7wtiNWug8U25GpSQDDLY1qv//8/lfmdzXOiprG6xRVeiXSHj0j731rJQ3xo+GPdGjOzjIxI09CrE3HtZ4lpXY9NjjHzP8hdxkCLlbttumc8hDBUR365/Tk+GfV2nNP9qvZtLGEbKCdTm/GYdTSoAr+ML9HnnGrS9T25Md71ASiZMI4DZqptN6g7CYYojFPs1LVM9o1258ferA72zbNoQ==",
+ "body": "{\n \"Type\" : \"Notification\",\n \"MessageId\" : \"a7c9d2fa-77fa-5184-9de9-89391027cc7d\",\n \"SequenceNumber\" : \"10000000000000004000\",\n \"TopicArn\" : \"arn:aws:sns:eu-west-1:231436140809:Test.fifo\",\n \"Message\" : \"{\\\"message\\\": \\\"hello world\\\", \\\"username\\\": \\\"lessa\\\"}\",\n \"Timestamp\" : \"2022-10-14T13:35:25.419Z\",\n \"UnsubscribeURL\" : \"https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:eu-west-1:231436140809:Test.fifo:bb81d3de-a0f9-46e4-b619-d3152a4d545f\"\n}",
+ "attributes": {
+ "ApproximateReceiveCount": "1",
+ "SentTimestamp": "1665754525442",
+ "SequenceNumber": "18873177232222703872",
+ "MessageGroupId": "powertools-test",
+ "SenderId": "AIDAWYJAWPFU7SUQGUJC6",
+ "MessageDeduplicationId": "4e0a0f61eed277a4b9e4c01d5722b07b0725e42fe782102abee5711adfac701f",
+ "ApproximateFirstReceiveTimestamp": "1665754525442"
+ },
+ "messageAttributes": {},
+ "md5OfBody": "f3c788e623445e3feb263e80c1bffc0b",
+ "eventSource": "aws:sqs",
+ "eventSourceARN": "arn:aws:sqs:eu-west-1:231436140809:Test.fifo",
+ "awsRegion": "eu-west-1"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/tests/functional/parser/test_sns.py b/tests/functional/parser/test_sns.py
index 6042322e88a..10674c88ef5 100644
--- a/tests/functional/parser/test_sns.py
+++ b/tests/functional/parser/test_sns.py
@@ -110,19 +110,6 @@ def test_handle_sns_sqs_trigger_event_json_body(): # noqa: F811
handle_sns_sqs_json_body(event_dict, LambdaContext())
-def test_handle_sns_sqs_trigger_event_json_body_missing_signing_cert_url():
- # GIVEN an event is tampered with a missing SigningCertURL
- event_dict = load_event("snsSqsEvent.json")
- payload = json.loads(event_dict["Records"][0]["body"])
- payload.pop("SigningCertURL")
- event_dict["Records"][0]["body"] = json.dumps(payload)
-
- # WHEN parsing the payload
- # THEN raise a ValidationError error
- with pytest.raises(ValidationError):
- handle_sns_sqs_json_body(event_dict, LambdaContext())
-
-
def test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url():
# GIVEN an event is tampered with a missing UnsubscribeURL
event_dict = load_event("snsSqsEvent.json")
@@ -134,3 +121,8 @@ def test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url():
# THEN raise a ValidationError error
with pytest.raises(ValidationError):
handle_sns_sqs_json_body(event_dict, LambdaContext())
+
+
+def test_handle_sns_sqs_fifo_trigger_event_json_body():
+ event_dict = load_event("snsSqsFifoEvent.json")
+ handle_sns_sqs_json_body(event_dict, LambdaContext())
diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py
index 1f8c0cef955..b4dbd073382 100644
--- a/tests/functional/test_data_classes.py
+++ b/tests/functional/test_data_classes.py
@@ -18,6 +18,7 @@
CodePipelineJobEvent,
EventBridgeEvent,
KafkaEvent,
+ KinesisFirehoseEvent,
KinesisStreamEvent,
S3Event,
SESEvent,
@@ -75,8 +76,6 @@
ConnectContactFlowInitiationMethod,
)
from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
- AttributeValue,
- AttributeValueType,
DynamoDBRecordEventName,
DynamoDBStreamEvent,
StreamRecord,
@@ -502,20 +501,8 @@ def test_dynamo_db_stream_trigger_event():
assert dynamodb.approximate_creation_date_time is None
keys = dynamodb.keys
assert keys is not None
- id_key = keys["Id"]
- assert id_key.b_value is None
- assert id_key.bs_value is None
- assert id_key.bool_value is None
- assert id_key.list_value is None
- assert id_key.map_value is None
- assert id_key.n_value == "101"
- assert id_key.ns_value is None
- assert id_key.null_value is None
- assert id_key.s_value is None
- assert id_key.ss_value is None
- message_key = dynamodb.new_image["Message"]
- assert message_key is not None
- assert message_key.s_value == "New item!"
+ assert keys["Id"] == "101"
+ assert dynamodb.new_image["Message"] == "New item!"
assert dynamodb.old_image is None
assert dynamodb.sequence_number == "111"
assert dynamodb.size_bytes == 26
@@ -528,120 +515,44 @@ def test_dynamo_db_stream_trigger_event():
assert record.user_identity is None
-def test_dynamo_attribute_value_b_value():
- example_attribute_value = {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.Binary
- assert attribute_value.b_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_bs_value():
- example_attribute_value = {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.BinarySet
- assert attribute_value.bs_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_bool_value():
- example_attribute_value = {"BOOL": True}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.Boolean
- assert attribute_value.bool_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_list_value():
- example_attribute_value = {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]}
- attribute_value = AttributeValue(example_attribute_value)
- list_value = attribute_value.list_value
- assert list_value is not None
- item = list_value[0]
- assert item.s_value == "Cookies"
- assert attribute_value.get_type == AttributeValueType.List
- assert attribute_value.l_value == attribute_value.list_value
- assert attribute_value.list_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_map_value():
- example_attribute_value = {"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- map_value = attribute_value.map_value
- assert map_value is not None
- item = map_value["Name"]
- assert item.s_value == "Joe"
- assert attribute_value.get_type == AttributeValueType.Map
- assert attribute_value.m_value == attribute_value.map_value
- assert attribute_value.map_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_n_value():
- example_attribute_value = {"N": "123.45"}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.Number
- assert attribute_value.n_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_ns_value():
- example_attribute_value = {"NS": ["42.2", "-19", "7.5", "3.14"]}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.NumberSet
- assert attribute_value.ns_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_null_value():
- example_attribute_value = {"NULL": True}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.Null
- assert attribute_value.null_value is None
- assert attribute_value.null_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_s_value():
- example_attribute_value = {"S": "Hello"}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.String
- assert attribute_value.s_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_ss_value():
- example_attribute_value = {"SS": ["Giraffe", "Hippo", "Zebra"]}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- assert attribute_value.get_type == AttributeValueType.StringSet
- assert attribute_value.ss_value == attribute_value.get_value
-
-
-def test_dynamo_attribute_value_type_error():
- example_attribute_value = {"UNSUPPORTED": "'value' should raise a type error"}
-
- attribute_value = AttributeValue(example_attribute_value)
-
- with pytest.raises(TypeError):
- print(attribute_value.get_value)
- with pytest.raises(ValueError):
- print(attribute_value.get_type)
-
-
-def test_stream_record_keys_with_valid_keys():
- attribute_value = {"Foo": "Bar"}
- record = StreamRecord({"Keys": {"Key1": attribute_value}})
- assert record.keys == {"Key1": AttributeValue(attribute_value)}
+def test_dynamo_stream_record():
+ byte_list = [s.encode("utf-8") for s in ["item1", "item2"]]
+ data = {
+ "Keys": {"key1": {"attr1": "value1"}},
+ "NewImage": {
+ "Name": {"S": "Joe"},
+ "Age": {"N": "35"},
+ "TypesMap": {
+ "M": {
+ "string": {"S": "value"},
+ "number": {"N": "100"},
+ "bool": {"BOOL": True},
+ "dict": {"M": {"key": {"S": "value"}}},
+ "stringSet": {"SS": ["item1", "item2"]},
+ "numberSet": {"NS": ["100", "200", "300"]},
+ "byteSet": {"BS": byte_list},
+ "list": {"L": [{"S": "item1"}, {"N": "3.14159"}, {"BOOL": False}]},
+ "null": {"NULL": True},
+ },
+ },
+ },
+ }
+ record = StreamRecord(data)
+ assert record.new_image == {
+ "Name": "Joe",
+ "Age": "35",
+ "TypesMap": {
+ "string": "value",
+ "number": "100",
+ "bool": True,
+ "dict": {"key": "value"},
+ "stringSet": {"item1", "item2"},
+ "numberSet": {"100", "200", "300"},
+ "byteSet": set(byte_list),
+ "list": ["item1", "3.14159", False],
+ "null": None,
+ },
+ }
def test_stream_record_keys_with_no_keys():
@@ -650,7 +561,7 @@ def test_stream_record_keys_with_no_keys():
def test_stream_record_keys_overrides_dict_wrapper_keys():
- data = {"Keys": {"key1": {"attr1": "value1"}}}
+ data = {"Keys": {"key1": {"N": "101"}}}
record = StreamRecord(data)
assert record.keys != data.keys()
@@ -1239,6 +1150,72 @@ def test_kafka_self_managed_event():
assert record.get_header_value("HeaderKey", case_sensitive=False) == b"headerValue"
+def test_kinesis_firehose_kinesis_event():
+ event = KinesisFirehoseEvent(load_event("kinesisFirehoseKinesisEvent.json"))
+
+ assert event.region == "us-east-2"
+ assert event.invocation_id == "2b4d1ad9-2f48-94bd-a088-767c317e994a"
+ assert event.delivery_stream_arn == "arn:aws:firehose:us-east-2:123456789012:deliverystream/delivery-stream-name"
+ assert event.source_kinesis_stream_arn == "arn:aws:kinesis:us-east-1:123456789012:stream/kinesis-source"
+
+ records = list(event.records)
+ assert len(records) == 2
+ record_01, record_02 = records[:]
+
+ assert record_01.approximate_arrival_timestamp == 1664028820148
+ assert record_01.record_id == "record1"
+ assert record_01.data == "SGVsbG8gV29ybGQ="
+ assert record_01.data_as_bytes == b"Hello World"
+ assert record_01.data_as_text == "Hello World"
+
+ assert record_01.metadata.shard_id == "shardId-000000000000"
+ assert record_01.metadata.partition_key == "4d1ad2b9-24f8-4b9d-a088-76e9947c317a"
+ assert record_01.metadata.approximate_arrival_timestamp == 1664028820148
+ assert record_01.metadata.sequence_number == "49546986683135544286507457936321625675700192471156785154"
+ assert record_01.metadata.subsequence_number == ""
+
+ assert record_02.approximate_arrival_timestamp == 1664028793294
+ assert record_02.record_id == "record2"
+ assert record_02.data == "eyJIZWxsbyI6ICJXb3JsZCJ9"
+ assert record_02.data_as_bytes == b'{"Hello": "World"}'
+ assert record_02.data_as_text == '{"Hello": "World"}'
+ assert record_02.data_as_json == {"Hello": "World"}
+
+ assert record_02.metadata.shard_id == "shardId-000000000001"
+ assert record_02.metadata.partition_key == "4d1ad2b9-24f8-4b9d-a088-76e9947c318a"
+ assert record_02.metadata.approximate_arrival_timestamp == 1664028793294
+ assert record_02.metadata.sequence_number == "49546986683135544286507457936321625675700192471156785155"
+ assert record_02.metadata.subsequence_number == ""
+
+
+def test_kinesis_firehose_put_event():
+ event = KinesisFirehoseEvent(load_event("kinesisFirehosePutEvent.json"))
+
+ assert event.region == "us-east-2"
+ assert event.invocation_id == "2b4d1ad9-2f48-94bd-a088-767c317e994a"
+ assert event.delivery_stream_arn == "arn:aws:firehose:us-east-2:123456789012:deliverystream/delivery-stream-name"
+ assert event.source_kinesis_stream_arn is None
+
+ records = list(event.records)
+ assert len(records) == 2
+ record_01, record_02 = records[:]
+
+ assert record_01.approximate_arrival_timestamp == 1664029185290
+ assert record_01.record_id == "record1"
+ assert record_01.data == "SGVsbG8gV29ybGQ="
+ assert record_01.data_as_bytes == b"Hello World"
+ assert record_01.data_as_text == "Hello World"
+ assert record_01.metadata is None
+
+ assert record_02.approximate_arrival_timestamp == 1664029186945
+ assert record_02.record_id == "record2"
+ assert record_02.data == "eyJIZWxsbyI6ICJXb3JsZCJ9"
+ assert record_02.data_as_bytes == b'{"Hello": "World"}'
+ assert record_02.data_as_text == '{"Hello": "World"}'
+ assert record_02.data_as_json == {"Hello": "World"}
+ assert record_02.metadata is None
+
+
def test_kinesis_stream_event():
event = KinesisStreamEvent(load_event("kinesisStreamEvent.json"))
diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py
index 4f46b428121..1d50de9e85e 100644
--- a/tests/functional/test_utilities_batch.py
+++ b/tests/functional/test_utilities_batch.py
@@ -129,7 +129,7 @@ def handler(record: KinesisStreamRecord):
@pytest.fixture(scope="module")
def dynamodb_record_handler() -> Callable:
def handler(record: DynamoDBRecord):
- body = record.dynamodb.new_image.get("Message").get_value
+ body = record.dynamodb.new_image.get("Message")
if "fail" in body:
raise Exception("Failed to process record.")
return body