diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js index 8b8c3f4904b..b80a3ef62be 100644 --- a/.github/scripts/label_related_issue.js +++ b/.github/scripts/label_related_issue.js @@ -17,7 +17,6 @@ module.exports = async ({github, context, core}) => { return core.notice("Only merged PRs to avoid spam; skipping") } - const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?\d+)/; const isMatch = RELATED_ISSUE_REGEX.exec(PR_BODY); if (!isMatch) { diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml new file mode 100644 index 00000000000..2e186037853 --- /dev/null +++ b/.github/workflows/run-e2e-tests.yml @@ -0,0 +1,32 @@ +name: run-e2e-tests +on: + workflow_dispatch: +env: + AWS_DEFAULT_REGION: us-east-1 + E2E_TESTS_PATH: tests/e2e/ +jobs: + run: + runs-on: ubuntu-latest + permissions: + id-token: write # needed to request JWT with GitHub's OIDC Token endpoint. docs: https://bit.ly/3MNgQO9 + contents: read + strategy: + matrix: + version: ["3.7", "3.8", "3.9"] + steps: + - name: "Checkout" + uses: actions/checkout@v3 + - name: "Use Python" + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.version }} + architecture: "x64" + - name: Install dependencies + run: make dev + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: ${{ secrets.AWS_TEST_ROLE_ARN }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Test + run: make e2e-test diff --git a/MAINTAINERS.md b/MAINTAINERS.md index fa8b3287238..d861896caf8 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -16,6 +16,7 @@ - [Changelog generation](#changelog-generation) - [Bumping the version](#bumping-the-version) - [Drafting release notes](#drafting-release-notes) + - [Run end to end tests](#run-end-to-end-tests) - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - [Manage 
Roadmap](#manage-roadmap) @@ -210,7 +211,13 @@ The best part comes now. Replace the placeholder `[Human readable summary of cha Once you're happy, hit `Publish release`. This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/publish.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be notified. > TODO: Wait for @am29d new Lambda Layers pipeline work to complete, then add how Lambda Layers are published +### Run end to end tests +In order to run end to end tests you need to install CDK CLI first and bootstrap your account with `cdk bootstrap` command. For additional details follow [documentation](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html). + +To run locally, export `AWS_PROFILE` environment variable and run `make e2e-test`. To run from GitHub Actions, use [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against. + +**NOTE**: E2E tests are run as part of each merge to `develop` branch. ### Releasing a documentation hotfix You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available.
diff --git a/Makefile b/Makefile index 6173e3e310d..fb7457d9908 100644 --- a/Makefile +++ b/Makefile @@ -23,14 +23,17 @@ lint-docs-fix: docker run -v ${PWD}:/markdown 06kellyjac/markdownlint-cli --fix "docs" test: - poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=xml + poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=xml poetry run pytest --cache-clear tests/performance unit-test: poetry run pytest tests/unit +e2e-test: + poetry run pytest -rP -n 3 --dist loadscope --durations=0 --durations-min=1 tests/e2e + coverage-html: - poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=html + poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=html pre-commit: pre-commit run --show-diff-on-failure diff --git a/poetry.lock b/poetry.lock index f672ac83440..f30eace5967 100644 --- a/poetry.lock +++ b/poetry.lock @@ -20,6 +20,19 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "aws-cdk-lib" +version = "2.23.0" +description = "Version 2 of the AWS Cloud Development Kit library" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.57.0,<2.0.0" +publication = ">=0.0.3" + [[package]] name = "aws-xray-sdk" version = "2.10.0" @@ -106,6 +119,33 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.13.8)"] +[[package]] +name = "cattrs" +version = "1.0.0" +description = "Composable complex class support for attrs." 
+category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.3" + +[package.extras] +dev = ["bumpversion", "wheel", "watchdog", "flake8", "tox", "coverage", "sphinx", "pytest", "hypothesis", "pendulum"] + +[[package]] +name = "cattrs" +version = "22.1.0" +description = "Composable complex class support for attrs and dataclasses." +category = "dev" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +attrs = ">=20" +exceptiongroup = {version = "*", markers = "python_version <= \"3.10\""} +typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and python_version < \"3.8\""} + [[package]] name = "certifi" version = "2021.10.8" @@ -145,6 +185,18 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "constructs" +version = "10.1.1" +description = "A programming model for software-defined state" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +jsii = ">=1.57.0,<2.0.0" +publication = ">=0.0.3" + [[package]] name = "coverage" version = "6.2" @@ -167,6 +219,14 @@ category = "main" optional = false python-versions = ">=3.6, <3.7" +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + [[package]] name = "dnspython" version = "2.1.0" @@ -202,6 +262,28 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "exceptiongroup" +version = "1.0.0rc8" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +testing = 
["pre-commit"] + [[package]] name = "fastjsonschema" version = "2.15.3" @@ -408,6 +490,21 @@ zipp = ">=0.5" docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +[[package]] +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + [[package]] name = "iniconfig" version = "1.1.1" @@ -452,6 +549,24 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "jsii" +version = "1.57.0" +description = "Python client for jsii runtime" +category = "dev" +optional = false +python-versions = "~=3.6" + +[package.dependencies] +attrs = ">=21.2,<22.0" +cattrs = [ + {version = ">=1.0.0,<1.1.0", markers = "python_version < \"3.7\""}, + {version = ">=1.8,<22.2", markers = "python_version >= \"3.7\""}, +] +importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +python-dateutil = "*" +typing-extensions = ">=3.7,<5.0" + [[package]] name = "mako" version = "1.1.6" @@ -624,6 +739,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-cloudwatch" +version = "1.24.0" +description = "Type annotations for boto3.CloudWatch 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] 
+typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-dynamodb" version = "1.24.27" @@ -635,6 +761,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-lambda" +version = "1.24.0" +description = "Type annotations for boto3.Lambda 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-secretsmanager" version = "1.24.11.post3" @@ -657,6 +794,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-xray" +version = "1.24.0" +description = "Type annotations for boto3.XRay 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -731,6 +879,14 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "publication" +version = "0.0.3" +description = "Publication helps you maintain public-api-friendly modules by preventing unintentional access to private implementation details via introspection." 
+category = "dev" +optional = false +python-versions = "*" + [[package]] name = "py" version = "1.11.0" @@ -877,6 +1033,18 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-forked" +version = "1.4.0" +description = "run tests in isolated forked subprocesses" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +py = "*" +pytest = ">=3.10" + [[package]] name = "pytest-mock" version = "3.6.1" @@ -891,6 +1059,24 @@ pytest = ">=5.0" [package.extras] dev = ["pre-commit", "tox", "pytest-asyncio"] +[[package]] +name = "pytest-xdist" +version = "2.5.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -952,6 +1138,18 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." 
+category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "ruamel.yaml" version = "0.17.17" @@ -1136,6 +1334,10 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +aws-cdk-lib = [ + {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"}, + {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = "sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"}, +] aws-xray-sdk = [ {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, {file = "aws_xray_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:7551e81a796e1a5471ebe84844c40e8edf7c218db33506d046fec61f7495eda4"}, @@ -1156,6 +1358,12 @@ botocore = [ {file = "botocore-1.24.44-py3-none-any.whl", hash = "sha256:ed07772c924984e5b3c1005f7ba4600cebd4169c23307cf6e92cccadf0b5d2e7"}, {file = "botocore-1.24.44.tar.gz", hash = "sha256:0030a11eac972be46859263820885ba650503622c5acfe58966f482d42cc538d"}, ] +cattrs = [ + {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, + {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, + {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, + {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, +] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8.tar.gz", hash = 
"sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, @@ -1172,6 +1380,10 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +constructs = [ + {file = "constructs-10.1.1-py3-none-any.whl", hash = "sha256:c1f3deb196f54e070ded3c92c4339f73ef2b6022d35fb34908c0ebfa7ef8a640"}, + {file = "constructs-10.1.1.tar.gz", hash = "sha256:6ce0dd1352367237b5d7c51a25740482c852735d2a5e067c536acc1657f39ea5"}, +] coverage = [ {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, @@ -1225,6 +1437,10 @@ dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, ] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] dnspython = [ {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"}, {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, @@ -1236,6 +1452,14 @@ email-validator = [ eradicate = [ {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, ] +exceptiongroup = [ + {file = 
"exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, + {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, +] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] fastjsonschema = [ {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, @@ -1302,6 +1526,10 @@ importlib-metadata = [ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, ] +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1318,6 +1546,10 @@ jmespath = [ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, ] +jsii = [ + {file = 
"jsii-1.57.0-py3-none-any.whl", hash = "sha256:4888091986a9ed8d50b042cc9c35a9564dd54c19e78adb890bf06d9ffac1b325"}, + {file = "jsii-1.57.0.tar.gz", hash = "sha256:ff7a3c51c1a653dd8a4342043b5f8e40b928bc617e3141e0d5d66175d22a754b"}, +] mako = [ {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, @@ -1457,10 +1689,18 @@ mypy-boto3-appconfig = [ {file = "mypy-boto3-appconfig-1.24.29.tar.gz", hash = "sha256:10583d309a9db99babfbe85d3b6467b49b3509a57e4f8771da239f6d5cb3731b"}, {file = "mypy_boto3_appconfig-1.24.29-py3-none-any.whl", hash = "sha256:e9d9e2e25fdd82bffc6262dc184edf5d0d3d9fbb0ab35e597a1ea57ba13d4d80"}, ] +mypy-boto3-cloudwatch = [ + {file = "mypy-boto3-cloudwatch-1.24.0.tar.gz", hash = "sha256:d19cd71aa07ecc69c1e2f9691af6a81bf1d65267ad4be1f9486bf683370727a5"}, + {file = "mypy_boto3_cloudwatch-1.24.0-py3-none-any.whl", hash = "sha256:82dac27b1dd0ad8969fedf874ea4713b36d37fe04229f7fdaaecf4addb59d4bd"}, +] mypy-boto3-dynamodb = [ {file = "mypy-boto3-dynamodb-1.24.27.tar.gz", hash = "sha256:c982d24f9b2525a70f408ad40eff69660d56928217597d88860b60436b25efbf"}, {file = "mypy_boto3_dynamodb-1.24.27-py3-none-any.whl", hash = "sha256:63f7d9755fc5cf2e637edf8d33024050152a53013d1a102716ae0d534563ef07"}, ] +mypy-boto3-lambda = [ + {file = "mypy-boto3-lambda-1.24.0.tar.gz", hash = "sha256:ab425f941d0d50a2b8a20cc13cebe03c3097b122259bf00e7b295d284814bd6f"}, + {file = "mypy_boto3_lambda-1.24.0-py3-none-any.whl", hash = "sha256:a286a464513adf50847bda8573f2dc7adc348234827d1ac0200e610ee9a09b80"}, +] mypy-boto3-secretsmanager = [ {file = "mypy-boto3-secretsmanager-1.24.11.post3.tar.gz", hash = "sha256:f153b3f5ff2c65664a906fb2c97a6598a57da9f1da77679dbaf541051dcff36e"}, {file = "mypy_boto3_secretsmanager-1.24.11.post3-py3-none-any.whl", hash = 
"sha256:d9655d568f7fd8fe05265613b85fba55ab6e4dcd078989af1ef9f0ffe4b45019"}, @@ -1469,6 +1709,10 @@ mypy-boto3-ssm = [ {file = "mypy-boto3-ssm-1.24.0.tar.gz", hash = "sha256:bab58398947c3627a4e7610cd0f57b525c12fd1d0a6bb862400b6af0a4e684fc"}, {file = "mypy_boto3_ssm-1.24.0-py3-none-any.whl", hash = "sha256:1f17055abb8d70f25e6ece2ef4c0dc74d585744c25a3a833c2985d74165ac0c6"}, ] +mypy-boto3-xray = [ + {file = "mypy-boto3-xray-1.24.0.tar.gz", hash = "sha256:fbe211b7601684a2d4defa2f959286f1441027c15044c0c0013257e22307778a"}, + {file = "mypy_boto3_xray-1.24.0-py3-none-any.whl", hash = "sha256:6b9bc96e7924215fe833fe0d732d5e3ce98f7739b373432b9735a9905f867171"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -1497,6 +1741,10 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] +publication = [ + {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, + {file = "publication-0.0.3.tar.gz", hash = "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -1577,10 +1825,18 @@ pytest-cov = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = 
"sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] +pytest-forked = [ + {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, + {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, +] pytest-mock = [ {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, ] +pytest-xdist = [ + {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, + {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, +] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1628,6 +1884,10 @@ requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] +retry = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] "ruamel.yaml" = [ {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, diff 
--git a/pyproject.toml b/pyproject.toml index e54d3e50e5f..f10a2a45234 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,9 @@ fastjsonschema = "^2.14.5" boto3 = "^1.18" pydantic = {version = "^1.8.2", optional = true } email-validator = {version = "*", optional = true } +mypy-boto3-cloudwatch = "^1.24.0" +mypy-boto3-lambda = "^1.24.0" +mypy-boto3-xray = "^1.24.0" [tool.poetry.dev-dependencies] # 2022-04-21: jmespath was removed, to be re-added once we drop python 3.6. @@ -59,6 +62,9 @@ mypy-boto3-secretsmanager = "^1.24.11" mypy-boto3-ssm = "^1.24.0" mypy-boto3-appconfig = "^1.24.29" mypy-boto3-dynamodb = "^1.24.27" +retry = "^0.9.2" +pytest-xdist = "^2.5.0" +aws-cdk-lib = "^2.23.0" pytest-benchmark = "^3.4.1" @@ -132,7 +138,9 @@ exclude = ''' minversion = "6.0" addopts = "-ra -vv" testpaths = "./tests" -markers = "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')" +markers = [ + "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", +] [build-system] requires = ["poetry>=0.12"] diff --git a/tests/e2e/__init__.py b/tests/e2e/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py new file mode 100644 index 00000000000..4be6a26c6a6 --- /dev/null +++ b/tests/e2e/conftest.py @@ -0,0 +1,65 @@ +import datetime +import sys +import uuid +from dataclasses import dataclass + +import boto3 + +# We only need typing_extensions for python versions <3.8 +if sys.version_info >= (3, 8): + from typing import TypedDict +else: + from typing_extensions import TypedDict + +from typing import Dict, Generator, Optional + +import pytest +from e2e.utils import helpers, infrastructure + + +class LambdaConfig(TypedDict): + parameters: dict + environment_variables: Dict[str, str] + + +@dataclass +class InfrastructureOutput: + arns: Dict[str, str] + execution_time: datetime.datetime + + def get_lambda_arns(self) -> Dict[str, str]: + return self.arns + + def 
get_lambda_function_arn(self, cf_output_name: str) -> Optional[str]: + return self.arns.get(cf_output_name) + + def get_lambda_function_name(self, cf_output_name: str) -> Optional[str]: + lambda_arn = self.get_lambda_function_arn(cf_output_name=cf_output_name) + return lambda_arn.split(":")[-1] if lambda_arn else None + + def get_lambda_execution_time(self) -> datetime.datetime: + return self.execution_time + + def get_lambda_execution_time_timestamp(self) -> int: + return int(self.execution_time.timestamp() * 1000) + + +@pytest.fixture(scope="module") +def create_infrastructure(config, request) -> Generator[Dict[str, str], None, None]: + stack_name = f"test-lambda-{uuid.uuid4()}" + test_dir = request.fspath.dirname + handlers_dir = f"{test_dir}/handlers/" + + infra = infrastructure.Infrastructure(stack_name=stack_name, handlers_dir=handlers_dir, config=config) + yield infra.deploy(Stack=infrastructure.InfrastructureStack) + infra.delete() + + +@pytest.fixture(scope="module") +def execute_lambda(create_infrastructure) -> InfrastructureOutput: + execution_time = datetime.datetime.utcnow() + session = boto3.Session() + client = session.client("lambda") + for _, arn in create_infrastructure.items(): + helpers.trigger_lambda(lambda_arn=arn, client=client) + return InfrastructureOutput(arns=create_infrastructure, execution_time=execution_time) diff --git a/tests/e2e/logger/__init__.py b/tests/e2e/logger/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/logger/handlers/basic_handler.py b/tests/e2e/logger/handlers/basic_handler.py new file mode 100644 index 00000000000..34d7fb4678a --- /dev/null +++ b/tests/e2e/logger/handlers/basic_handler.py @@ -0,0 +1,17 @@ +import os + +from aws_lambda_powertools import Logger + +logger = Logger() + +MESSAGE = os.environ["MESSAGE"] +ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] + + +@logger.inject_lambda_context(log_event=True) +def lambda_handler(event, context): + logger.debug(MESSAGE) + 
logger.info(MESSAGE) + logger.append_keys(**{ADDITIONAL_KEY: "test"}) + logger.info(MESSAGE) + return "success" diff --git a/tests/e2e/logger/handlers/no_context_handler.py b/tests/e2e/logger/handlers/no_context_handler.py new file mode 100644 index 00000000000..1347ba98d81 --- /dev/null +++ b/tests/e2e/logger/handlers/no_context_handler.py @@ -0,0 +1,14 @@ +import os + +from aws_lambda_powertools import Logger + +logger = Logger() + +MESSAGE = os.environ["MESSAGE"] +ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] + + +def lambda_handler(event, context): + logger.info(MESSAGE) + logger.append_keys(**{ADDITIONAL_KEY: "test"}) + return "success" diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py new file mode 100644 index 00000000000..ea27b93740b --- /dev/null +++ b/tests/e2e/logger/test_logger.py @@ -0,0 +1,142 @@ +import boto3 +import pytest +from e2e import conftest +from e2e.utils import helpers + + +@pytest.fixture(scope="module") +def config() -> conftest.LambdaConfig: + return { + "parameters": {}, + "environment_variables": { + "MESSAGE": "logger message test", + "LOG_LEVEL": "INFO", + "ADDITIONAL_KEY": "extra_info", + }, + } + + +def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): + # GIVEN + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + + # THEN + assert any( + log.message == config["environment_variables"]["MESSAGE"] + and log.level == config["environment_variables"]["LOG_LEVEL"] + for log in filtered_logs + ) + + +def test_basic_lambda_no_debug_logs_visible( + execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig +): + # GIVEN + lambda_name = 
execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + + # THEN + assert not any( + log.message == config["environment_variables"]["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs + ) + + +def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): + # GIVEN + required_keys = ( + "xray_trace_id", + "function_request_id", + "function_arn", + "function_memory_size", + "function_name", + "cold_start", + ) + + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + + # THEN + assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) + + +def test_basic_lambda_additional_key_persistence_basic_lambda( + execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig +): + # GIVEN + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + + # THEN + assert any( + log.extra_info + and log.message == config["environment_variables"]["MESSAGE"] + and log.level == config["environment_variables"]["LOG_LEVEL"] + for log in filtered_logs + ) + + +def test_basic_lambda_empty_event_logged(execute_lambda: conftest.InfrastructureOutput): + + # GIVEN + lambda_name = 
execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + + # THEN + assert any(log.message == {} for log in filtered_logs) + + +def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): + + # GIVEN + required_missing_keys = ( + "function_request_id", + "function_arn", + "function_memory_size", + "function_name", + "cold_start", + ) + + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + + # THEN + assert not any(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_missing_keys) + + +def test_no_context_lambda_event_not_logged(execute_lambda: conftest.InfrastructureOutput): + + # GIVEN + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + + # THEN + assert not any(log.message == {} for log in filtered_logs) diff --git a/tests/e2e/metrics/__init__.py b/tests/e2e/metrics/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/metrics/handlers/basic_handler.py b/tests/e2e/metrics/handlers/basic_handler.py new file mode 100644 index 00000000000..dd2f486d980 --- /dev/null +++ b/tests/e2e/metrics/handlers/basic_handler.py @@ -0,0 +1,14 @@ +import os + +from aws_lambda_powertools import Metrics +from 
@pytest.fixture(scope="module")
def config() -> conftest.LambdaConfig:
    """Lambda configuration for the metrics suite.

    The metric name embeds a UUID so each module run queries its own metric
    and cannot accidentally match data points emitted by earlier runs.
    """
    unique_suffix = str(uuid.uuid4()).replace("-", "_")
    return {
        "parameters": {},
        "environment_variables": {
            "POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric",
            "POWERTOOLS_SERVICE_NAME": "test-powertools-service",
            "METRIC_NAME": f"business-metric-{unique_suffix}",
        },
    }


def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):
    """Exactly one data point with value 1 must be queryable for the emitted metric."""
    # GIVEN a five-minute query window starting at the Lambda execution time
    start_date = execute_lambda.get_lambda_execution_time()
    end_date = start_date + datetime.timedelta(minutes=5)

    # WHEN the metric is queried by namespace, metric name and service dimension
    metrics = helpers.get_metrics(
        start_date=start_date,
        end_date=end_date,
        namespace=config["environment_variables"]["POWERTOOLS_METRICS_NAMESPACE"],
        metric_name=config["environment_variables"]["METRIC_NAME"],
        service_name=config["environment_variables"]["POWERTOOLS_SERVICE_NAME"],
        cw_client=boto3.client(service_name="cloudwatch"),
    )

    # THEN a single timestamp/value pair exists and the value is 1
    timestamps = metrics.get("Timestamps")
    values = metrics.get("Values")
    assert timestamps and len(timestamps) == 1
    assert values and len(values) == 1
    assert values and values[0] == 1
# --- tests/e2e/tracer/handlers/basic_handler.py ---
import asyncio
import os

from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext

tracer = Tracer(service="e2e-tests-app")

# Annotation key/values come from environment variables so the deployed handler
# and the test module agree on the (unique-per-run) values being asserted.
ANNOTATION_KEY = os.environ["ANNOTATION_KEY"]
ANNOTATION_VALUE = os.environ["ANNOTATION_VALUE"]
ANNOTATION_ASYNC_VALUE = os.environ["ANNOTATION_ASYNC_VALUE"]


@tracer.capture_lambda_handler
def lambda_handler(event: dict, context: LambdaContext):
    """Annotate the handler segment, then execute a traced async subsegment."""
    tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE)
    tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_VALUE)
    return asyncio.run(collect_payment())


@tracer.capture_method
async def collect_payment() -> str:
    """Async method traced as its own subsegment, with a distinct annotation value."""
    tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE)
    tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE)
    return "success"


# --- tests/e2e/tracer/test_tracer.py ---
import datetime
import uuid

import boto3
import pytest
from e2e import conftest
from e2e.utils import helpers


@pytest.fixture(scope="module")
def config() -> conftest.LambdaConfig:
    """Tracing-enabled Lambda config; annotation key is unique per module run."""
    return {
        "parameters": {"tracing": "ACTIVE"},
        "environment_variables": {
            "ANNOTATION_KEY": f"e2e-tracer-{str(uuid.uuid4()).replace('-','_')}",
            "ANNOTATION_VALUE": "stored",
            "ANNOTATION_ASYNC_VALUE": "payments",
        },
    }


def test_basic_lambda_async_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):
    """Handler and async subsegments must be visible in X-Ray with their metadata."""
    # GIVEN
    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn")
    start_date = execute_lambda.get_lambda_execution_time()
    end_date = start_date + datetime.timedelta(minutes=5)
    # fix: local variable was misspelled "trace_filter_exporession"
    trace_filter_expression = f'service("{lambda_name}")'

    # WHEN
    trace = helpers.get_traces(
        start_date=start_date,
        end_date=end_date,
        filter_expression=trace_filter_expression,
        xray_client=boto3.client("xray"),
    )

    # THEN
    # fix: removed leftover debug print(info)
    info = helpers.find_trace_additional_info(trace=trace)
    handler_trace_segment = [trace_segment for trace_segment in info if trace_segment.name == "## lambda_handler"][0]
    collect_payment_trace_segment = [
        trace_segment for trace_segment in info if trace_segment.name == "## collect_payment"
    ][0]

    annotation_key = config["environment_variables"]["ANNOTATION_KEY"]
    expected_value = config["environment_variables"]["ANNOTATION_VALUE"]
    expected_async_value = config["environment_variables"]["ANNOTATION_ASYNC_VALUE"]

    assert handler_trace_segment.annotations["Service"] == "e2e-tests-app"
    assert handler_trace_segment.metadata["e2e-tests-app"][annotation_key] == expected_value
    assert collect_payment_trace_segment.metadata["e2e-tests-app"][annotation_key] == expected_async_value
import json
from datetime import datetime
from functools import lru_cache
from typing import Dict, List, Optional, Union

from mypy_boto3_cloudwatch import type_defs
from mypy_boto3_cloudwatch.client import CloudWatchClient
from mypy_boto3_lambda.client import LambdaClient
from mypy_boto3_xray.client import XRayClient
from pydantic import BaseModel
from retry import retry


# Helper methods && Class
class Log(BaseModel):
    """One Powertools structured log record; contextual fields are optional."""

    level: str
    location: str
    message: Union[dict, str]
    timestamp: str
    service: str
    cold_start: Optional[bool]
    function_name: Optional[str]
    function_memory_size: Optional[str]
    function_arn: Optional[str]
    function_request_id: Optional[str]
    xray_trace_id: Optional[str]
    extra_info: Optional[str]


class TraceSegment(BaseModel):
    """Name plus annotations/metadata extracted from one X-Ray subsegment."""

    name: str
    metadata: Dict = {}
    annotations: Dict = {}


def trigger_lambda(lambda_arn: str, client: LambdaClient):
    """Invoke the function synchronously and return the raw Invoke response."""
    return client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse")


@lru_cache(maxsize=10, typed=False)
@retry(ValueError, delay=1, jitter=1, tries=20)
def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: int, **kwargs: dict) -> List[Log]:
    """Poll the function's log group until events appear, parsing JSON lines into Log models.

    Records that are not JSON (e.g. the Lambda platform START/END/REPORT lines)
    are skipped. lru_cache memoizes the final result so repeated calls with the
    same arguments do not re-poll CloudWatch.
    """
    response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time)
    events = response["events"]
    if not events:
        # retry() re-invokes on ValueError until the log events are ingested
        raise ValueError("Empty response from Cloudwatch Logs. Repeating...")

    def _parse(raw_message: str) -> Optional[Log]:
        try:
            return Log(**json.loads(raw_message))
        except json.decoder.JSONDecodeError:
            return None

    candidates = (_parse(event["message"]) for event in events)
    return [log for log in candidates if log is not None]


@lru_cache(maxsize=10, typed=False)
@retry(ValueError, delay=1, jitter=1, tries=20)
def get_metrics(
    namespace: str,
    cw_client: CloudWatchClient,
    start_date: datetime,
    metric_name: str,
    service_name: str,
    end_date: Optional[datetime] = None,
) -> type_defs.MetricDataResultTypeDef:
    """Poll CloudWatch Metrics Insights for the metric until data points exist."""
    query_window_end = end_date if end_date else datetime.utcnow()
    response = cw_client.get_metric_data(
        MetricDataQueries=[
            {
                "Id": "m1",
                "Expression": f'SELECT MAX("{metric_name}") from SCHEMA("{namespace}",service) \
                    where service=\'{service_name}\'',
                "ReturnData": True,
                "Period": 600,
            },
        ],
        StartTime=start_date,
        EndTime=query_window_end,
    )
    result = response["MetricDataResults"][0]
    if not result["Values"]:
        raise ValueError("Empty response from Cloudwatch. Repeating...")
    return result


@retry(ValueError, delay=1, jitter=1, tries=10)
def get_traces(filter_expression: str, xray_client: XRayClient, start_date: datetime, end_date: datetime) -> Dict:
    """Poll X-Ray for trace summaries matching the filter, then fetch the full traces."""
    paginator = xray_client.get_paginator("get_trace_summaries")
    pages = paginator.paginate(
        StartTime=start_date,
        EndTime=end_date,
        TimeRangeType="Event",
        Sampling=False,
        FilterExpression=filter_expression,
    )

    trace_ids = [page["TraceSummaries"][0]["Id"] for page in pages if page["TraceSummaries"]]
    if not trace_ids:
        raise ValueError("Empty response from X-RAY. Repeating...")

    return xray_client.batch_get_traces(
        TraceIds=trace_ids,
    )


def find_trace_additional_info(trace: Dict) -> List[TraceSegment]:
    """Find all trace annotations and metadata and return them to the caller"""
    info: List[TraceSegment] = []
    for segment in trace["Traces"][0]["Segments"]:
        document = json.loads(segment["Document"])
        if document["origin"] != "AWS::Lambda::Function":
            continue
        # Annotations/metadata live under the "Invocation" subsegment's children
        for subsegment in document["subsegments"]:
            if subsegment["name"] == "Invocation":
                find_meta(segment=subsegment, result=info)
    return info


def find_meta(segment: dict, result: List):
    """Recursively append a TraceSegment for every nested subsegment of *segment*."""
    for child in segment["subsegments"]:
        result.append(
            TraceSegment(
                name=child["name"],
                metadata=child.get("metadata", {}),
                annotations=child.get("annotations", {}),
            )
        )
        if child.get("subsegments"):
            find_meta(segment=child, result=result)
class InfrastructureStack(BaseInfrastructureStack):
    """Synthesizes a CDK app with one Lambda function + log group per handler file.

    Each .py file in handlers_dir becomes a function named after the file's stem,
    sharing a Powertools Lambda layer built inside the SAM build image.
    """

    def __init__(self, handlers_dir: str, stack_name: str, config: dict) -> None:
        self.stack_name = stack_name
        self.handlers_dir = handlers_dir
        self.config = config

    def _create_layer(self, stack: Stack):
        """Build the Powertools layer via Docker bundling (poetry export + pip install)."""
        output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python")
        input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools")
        powertools_layer = LayerVersion(
            stack,
            "aws-lambda-powertools",
            layer_version_name="aws-lambda-powertools",
            compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]],
            code=Code.from_asset(
                path=".",
                bundling=BundlingOptions(
                    image=DockerImage.from_build(
                        str(Path(__file__).parent),
                        build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]},
                    ),
                    command=[
                        "bash",
                        "-c",
                        rf"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\
                        pip install -r /tmp/requirements.txt -t {output_dir} &&\
                        cp -R {input_dir} {output_dir}",
                    ],
                ),
            ),
        )
        return powertools_layer

    def _find_handlers(self, directory: str) -> List:
        """Return .py files in the top level of *directory* (first os.walk entry only)."""
        for root, _, files in os.walk(directory):
            return [os.path.join(root, filename) for filename in files if filename.endswith(".py")]
        # fix: return an empty list instead of None when the directory yields nothing
        return []

    def synthesize(self, handlers: List[str]) -> Tuple[dict, str, str]:
        """Synthesize the app; return (template dict, assembly dir, asset manifest path)."""
        integration_test_app = App()
        stack = Stack(integration_test_app, self.stack_name)
        powertools_layer = self._create_layer(stack)
        code = Code.from_asset(self.handlers_dir)

        for filename_path in handlers:
            filename = Path(filename_path).stem
            # fix: construct ids and the handler path must embed the handler's filename;
            # the previous literal placeholder left `filename` unused and produced
            # duplicate construct ids (and a broken handler reference) across iterations.
            function_python = Function(
                stack,
                f"{filename}-lambda",
                runtime=PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"],
                code=code,
                handler=f"{filename}.lambda_handler",
                layers=[powertools_layer],
                environment=self.config.get("environment_variables"),
                tracing=Tracing.ACTIVE
                if self.config.get("parameters", {}).get("tracing") == "ACTIVE"
                else Tracing.DISABLED,
            )

            # Pre-create the log group with short retention so test logs are cleaned up
            aws_logs.LogGroup(
                stack,
                f"{filename}-lg",
                log_group_name=f"/aws/lambda/{function_python.function_name}",
                retention=aws_logs.RetentionDays.ONE_DAY,
                removal_policy=RemovalPolicy.DESTROY,
            )
            CfnOutput(stack, f"{filename}_arn", value=function_python.function_arn)
        cloud_assembly = integration_test_app.synth()
        cf_template = cloud_assembly.get_stack_by_name(self.stack_name).template
        cloud_assembly_directory = cloud_assembly.directory
        cloud_assembly_assets_manifest_path = cloud_assembly.get_stack_by_name(self.stack_name).dependencies[0].file

        return (cf_template, cloud_assembly_directory, cloud_assembly_assets_manifest_path)

    def __call__(self) -> Tuple[dict, str]:
        handlers = self._find_handlers(directory=self.handlers_dir)
        return self.synthesize(handlers=handlers)
This method is drop-in replacement for cdk-assets package s3 upload part. + https://www.npmjs.com/package/cdk-assets. + We use custom solution to avoid dependencies from nodejs ecosystem. + We follow the same design cdk-assets: + https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md. + """ + + assets = self._find_assets(asset_manifest_file, self.account_id, self.region) + + for s3_key, config in assets.items(): + print(config) + s3_bucket = self.s3_resource.Bucket(config["bucket_name"]) + + if config["asset_packaging"] != "zip": + print("Asset is not a zip file. Skipping upload") + continue + + if bool(list(s3_bucket.objects.filter(Prefix=s3_key))): + print("object exists, skipping") + continue + + buf = io.BytesIO() + asset_dir = f"{asset_root_dir}/{config['asset_path']}" + os.chdir(asset_dir) + asset_files = self._find_files(directory=".") + with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf: + for asset_file in asset_files: + zf.write(os.path.join(asset_file)) + buf.seek(0) + self.s3_client.upload_fileobj(Fileobj=buf, Bucket=config["bucket_name"], Key=s3_key) + + def _find_files(self, directory: str) -> List: + file_paths = [] + for root, _, files in os.walk(directory): + for filename in files: + file_paths.append(os.path.join(root, filename)) + return file_paths + + def _deploy_stack(self, stack_name: str, template: dict): + response = self.cf_client.create_stack( + StackName=stack_name, + TemplateBody=yaml.dump(template), + TimeoutInMinutes=10, + OnFailure="ROLLBACK", + Capabilities=["CAPABILITY_IAM"], + ) + waiter = self.cf_client.get_waiter("stack_create_complete") + waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 10, "MaxAttempts": 50}) + response = self.cf_client.describe_stacks(StackName=stack_name) + return response + + def _find_assets(self, asset_template: str, account_id: str, region: str): + assets = {} + with open(asset_template, mode="r") as template: + for _, config in 
json.loads(template.read())["files"].items(): + asset_path = config["source"]["path"] + asset_packaging = config["source"]["packaging"] + bucket_name = config["destinations"]["current_account-current_region"]["bucketName"] + object_key = config["destinations"]["current_account-current_region"]["objectKey"] + + assets[object_key] = { + "bucket_name": bucket_name.replace("${AWS::AccountId}", account_id).replace( + "${AWS::Region}", region + ), + "asset_path": asset_path, + "asset_packaging": asset_packaging, + } + + return assets + + def _transform_output(self, outputs: dict): + return {output["OutputKey"]: output["OutputValue"] for output in outputs if output["OutputKey"]} diff --git a/tests/e2e/utils/py.typed b/tests/e2e/utils/py.typed new file mode 100644 index 00000000000..e69de29bb2d