diff --git a/.github/workflows/on_release_notes.yml b/.github/workflows/on_release_notes.yml index 2b431defff0..71c9d07c930 100644 --- a/.github/workflows/on_release_notes.yml +++ b/.github/workflows/on_release_notes.yml @@ -25,12 +25,14 @@ env: on: release: + # We can't filter by tag here, so we filter later on the first job types: [published] + workflow_dispatch: inputs: version_to_publish: description: "Version to be released in PyPi, Docs, and Lambda Layer, e.g. v1.26.4" - default: v1.26.4 + default: v2.0.0 required: true skip_pypi: description: "Skip publishing to PyPi as it can't publish more than once. Useful for semi-failed releases" @@ -42,9 +44,15 @@ on: default: false type: boolean required: false + pre_release: + description: "Publishes documentation using a pre-release tag. You are still responsible for passing a pre-release version tag to the workflow." + default: false + type: boolean + required: false jobs: release: + if: ${{ startsWith(github.ref, 'refs/tags/v2') }} environment: release runs-on: ubuntu-latest permissions: @@ -110,17 +118,38 @@ jobs: contents: write uses: ./.github/workflows/reusable_publish_changelog.yml + # When doing a pre-release, we want to publish the docs as "alpha" instead of replacing the latest docs + prepare_docs_alias: + runs-on: ubuntu-latest + outputs: + DOCS_ALIAS: ${{ steps.set-alias.outputs.DOCS_ALIAS }} + steps: + - name: Set docs alias + id: set-alias + run: | + DOCS_ALIAS=latest + if [[ "${{ github.event.release.prerelease || inputs.pre_release }}" == true ]] ; then + DOCS_ALIAS=alpha + fi + echo DOCS_ALIAS="$DOCS_ALIAS" >> "$GITHUB_OUTPUT" + docs: - needs: [release, changelog] + needs: [release, changelog, prepare_docs_alias] permissions: contents: write pages: write uses: ./.github/workflows/reusable_publish_docs.yml with: version: ${{ needs.release.outputs.RELEASE_VERSION }} - alias: latest + alias: ${{ needs.prepare_docs_alias.outputs.DOCS_ALIAS }} detached_mode: true + publish_layer: + needs: release + uses: ./.github/workflows/publish_v2_layer.yml + with: + latest_published_version: ${{ needs.release.outputs.RELEASE_VERSION }} + post_release: needs: release permissions: diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml deleted file mode 100644 index 564cbfad9de..00000000000 --- a/.github/workflows/publish_layer.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: Deploy layer to all regions - -permissions: - id-token: write - contents: read - -on: - workflow_dispatch: - inputs: - latest_published_version: - description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v1.22.0" - default: "v1.22.0" - required: true - workflow_run: - workflows: ["Publish to PyPi"] - types: - - completed - -jobs: - build-layer: - runs-on: ubuntu-latest - if: ${{ (github.event.workflow_run.conclusion == 'success') || (github.event_name == 'workflow_dispatch') }} - defaults: - run: - working-directory: ./layer - steps: - - name: checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Install poetry - run: pipx install poetry - - name: Setup Node.js - uses: actions/setup-node@v3 - with: - node-version: "16.12" - - name: Setup python - uses: actions/setup-python@v4 - with: - python-version: "3.9" - cache: "pip" - - name: Resolve and install project dependencies - # CDK spawns system python when compiling stack - # therefore it ignores both activated virtual env and cached interpreter by GH - run: | - poetry export --format requirements.txt --output requirements.txt - pip install -r requirements.txt - - name: Set release notes tag - run: | - RELEASE_INPUT=${{ inputs.latest_published_version }} - LATEST_TAG=$(git describe --tag --abbrev=0) - RELEASE_TAG_VERSION=${RELEASE_INPUT:-$LATEST_TAG} - echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV - - name: install cdk and deps - run: | - npm install -g aws-cdk@2.29.0 - cdk --version - - name: CDK build - run: cdk synth --context version=$RELEASE_TAG_VERSION -o cdk.out - - name: zip output - run: zip -r cdk.out.zip cdk.out - - name: Archive CDK artifacts - uses: actions/upload-artifact@v3 - with: - name: cdk-layer-artefact - path: layer/cdk.out.zip - - deploy-beta: - needs: - - build-layer - uses: ./.github/workflows/reusable_deploy_layer_stack.yml - secrets: inherit - with: - stage: "BETA" - artefact-name: "cdk-layer-artefact" - environment: "layer-beta" - - deploy-prod: - needs: - - deploy-beta - uses: ./.github/workflows/reusable_deploy_layer_stack.yml - secrets: inherit - with: - stage: "PROD" - artefact-name: "cdk-layer-artefact" - environment: "layer-prod" diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml index 850063098cd..738dd0bead1 100644 --- a/.github/workflows/publish_v2_layer.yml +++ b/.github/workflows/publish_v2_layer.yml @@ -8,12 +8,108 @@ on: workflow_dispatch: inputs: latest_published_version: - description: "Latest PyPi published version to rebuild latest docs for, e.g. v1.22.0" + description: "Latest PyPi published version to rebuild latest docs for, e.g. v2.0.0" + required: true + workflow_call: + inputs: + latest_published_version: + type: string + description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v2.0.0" required: true jobs: - dummy: + build-layer: runs-on: ubuntu-latest + defaults: + run: + working-directory: ./layer + outputs: + release-tag-version: ${{ steps.release-notes-tag.outputs.RELEASE_TAG_VERSION }} steps: - - name: Hello world - run: echo "hello world" + - name: checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Install poetry + run: pipx install poetry + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: "16.12" + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + cache: "pip" + - name: Resolve and install project dependencies + # CDK spawns system python when compiling stack + # therefore it ignores both activated virtual env and cached interpreter by GH + run: | + poetry export --format requirements.txt --output requirements.txt + pip install -r requirements.txt + - name: Set release notes tag + id: release-notes-tag + run: | + RELEASE_INPUT=${{ inputs.latest_published_version }} + LATEST_TAG=$(git describe --tag --abbrev=0) + RELEASE_TAG_VERSION=${RELEASE_INPUT:-$LATEST_TAG} + echo RELEASE_TAG_VERSION="${RELEASE_TAG_VERSION:1}" >> "$GITHUB_OUTPUT" + - name: Set up QEMU + uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # v2.0.0 + # NOTE: we need QEMU to build Layer against a different architecture (e.g., ARM) + - name: Set up Docker Buildx + id: builder + uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # v2.0.0 + - name: install cdk and deps + run: | + npm install -g aws-cdk@2.44.0 + cdk --version + - name: CDK build + run: cdk synth --context version="${{ steps.release-notes-tag.outputs.RELEASE_TAG_VERSION }}" -o cdk.out + - name: zip output + run: zip -r cdk.out.zip cdk.out + - name: Archive CDK artifacts + uses: actions/upload-artifact@v3 + with: + name: cdk-layer-artefact + path: layer/cdk.out.zip + + deploy-beta: + needs: build-layer + uses: ./.github/workflows/reusable_deploy_v2_layer_stack.yml + secrets: inherit + with: + stage: "BETA" + artefact-name: "cdk-layer-artefact" + environment: "layer-beta" + latest_published_version: ${{ inputs.latest_published_version }} + + deploy-prod: + needs: deploy-beta + uses: ./.github/workflows/reusable_deploy_v2_layer_stack.yml + secrets: inherit + with: + stage: "PROD" + artefact-name: "cdk-layer-artefact" + environment: "layer-prod" + latest_published_version: ${{ inputs.latest_published_version }} + + deploy-sar-beta: + needs: build-layer + uses: ./.github/workflows/reusable_deploy_v2_sar.yml + secrets: inherit + with: + stage: "BETA" + artefact-name: "cdk-layer-artefact" + environment: "layer-beta" + package-version: ${{ needs.build-layer.outputs.release-tag-version }} + + deploy-sar-prod: + needs: [build-layer, deploy-sar-beta] + uses: ./.github/workflows/reusable_deploy_v2_sar.yml + secrets: inherit + with: + stage: "PROD" + artefact-name: "cdk-layer-artefact" + environment: "layer-prod" + package-version: ${{ needs.build-layer.outputs.release-tag-version }} diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index eb995d95a12..1e8333d4540 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -10,8 +10,8 @@ on: workflow_dispatch: inputs: latest_published_version: - description: "Latest PyPi published version to rebuild latest docs for, e.g. v1.26.7" - default: "v1.28.0" + description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v2.0.0" + default: "v2.0.0" required: true jobs: diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml similarity index 71% rename from .github/workflows/reusable_deploy_layer_stack.yml rename to .github/workflows/reusable_deploy_v2_layer_stack.yml index 20d69b9c814..69b99fc3f9a 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml @@ -1,9 +1,12 @@ -name: Deploy cdk stack +name: Deploy CDK Layer v2 stack permissions: id-token: write contents: read +env: + CDK_VERSION: 2.44.0 + on: workflow_call: inputs: @@ -19,6 +22,10 @@ on: description: "GitHub Environment to use for encrypted secrets" required: true type: string + latest_published_version: + description: "Latest version that is published" + required: true + type: string jobs: deploy-cdk-stack: @@ -82,7 +89,7 @@ jobs: pip install -r requirements.txt - name: install cdk and deps run: | - npm install -g aws-cdk@2.29.0 + npm install -g "aws-cdk@$CDK_VERSION" cdk --version - name: install deps run: poetry install @@ -94,6 +101,23 @@ jobs: - name: unzip artefact run: unzip cdk.out.zip - name: CDK Deploy Layer - run: cdk deploy --app cdk.out --context region=${{ matrix.region }} 'LayerStack' --require-approval never --verbose + run: cdk deploy --app cdk.out --context region=${{ matrix.region }} 'LayerV2Stack' --require-approval never --verbose --outputs-file cdk-outputs.json + - name: Store latest Layer ARN + if: ${{ inputs.stage == 'PROD' }} + run: | + jq -c '.LayerV2Stack.VersionArn' cdk-outputs.json > cdk-layer-stack-${{ matrix.region }}-layer-version.txt + jq -c '.LayerV2Stack.Arm64VersionArn' cdk-outputs.json >> cdk-layer-stack-${{ matrix.region }}-layer-version.txt - name: CDK Deploy Canary - run: cdk deploy --app cdk.out --context region=${{ matrix.region}} --parameters DeployStage="${{ inputs.stage }}" 'CanaryStack' --require-approval never --verbose + run: cdk deploy --app cdk.out --context region=${{ matrix.region}} --parameters DeployStage="${{ inputs.stage }}" 'CanaryV2Stack' --require-approval never --verbose + - name: Save Layer ARN artifact + uses: actions/upload-artifacts@v3 + with: + name: cdk-layer-stack + path: cdk-layer-stack* + + update_v2_layer_arn_docs: + permissions: + contents: write + uses: ./.github/workflows/reusable_update_v2_layer_arn_docs.yml + with: + latest_published_version: ${{ inputs.latest_published_version }} diff --git a/.github/workflows/reusable_deploy_v2_sar.yml b/.github/workflows/reusable_deploy_v2_sar.yml index 905fd20d5d1..acb28179efe 100644 --- a/.github/workflows/reusable_deploy_v2_sar.yml +++ b/.github/workflows/reusable_deploy_v2_sar.yml @@ -1,5 +1,24 @@ name: Deploy V2 SAR +# SAR deployment process +# +# 1. This workflow starts after the layer artifact is produced on `publish_v2_layer` +# 2. We use the same layer artifact to ensure the SAR app is consistent with the published Lambda Layer +# 3. We publish the SAR for both x86_64 and arm64 (see `matrix` section) +# 4. We use `sam package` and `sam publish` to publish the SAR app +# 5. We remove the previous Canary stack (if present) and deploy a new one to test the SAR App. We retain the Canary in the account for debugging purposes +# 6. 
Finally the published SAR app is made public on the PROD environment + +permissions: + id-token: write + contents: read + +env: + NODE_VERSION: 16.12 + AWS_REGION: eu-west-1 + SAR_NAME: aws-lambda-powertools-python-layer-v2 + TEST_STACK_NAME: serverlessrepo-v2-powertools-layer-test-stack + on: workflow_call: inputs: @@ -21,8 +40,100 @@ on: type: string jobs: - dummy: + deploy-sar-app: runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + strategy: + matrix: + architecture: ["x86_64", "arm64"] steps: - - name: Hello world - run: echo "hello world" + - name: Checkout + uses: actions/checkout@v3 + - name: AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-region: ${{ env.AWS_REGION }} + role-to-assume: ${{ secrets.AWS_LAYERS_ROLE_ARN }} + - name: AWS credentials SAR role + uses: aws-actions/configure-aws-credentials@v1 + id: aws-credentials-sar-role + with: + aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} + aws-session-token: ${{ env.AWS_SESSION_TOKEN }} + role-duration-seconds: 1200 + aws-region: ${{ env.AWS_REGION }} + role-to-assume: ${{ secrets.AWS_SAR_V2_ROLE_ARN }} + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Download artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.artefact-name }} + - name: Unzip artefact + run: unzip cdk.out.zip + - name: Configure SAR name + run: | + if [[ "${{ inputs.stage }}" == "BETA" ]]; then + SAR_NAME="test-${SAR_NAME}" + fi + echo SAR_NAME="${SAR_NAME}" >> "$GITHUB_ENV" + - name: Adds arm64 suffix to SAR name + if: ${{ matrix.architecture == 'arm64' }} + run: echo SAR_NAME="${SAR_NAME}-arm64" >> "$GITHUB_ENV" + - name: Deploy SAR + run: | + # From the generated LayerStack cdk.out artifact, find the layer asset path for the correct architecture. + # We'll use this as the source directory of our SAR. This way we are re-using the same layer asset for our SAR. + asset=$(jq -jc '.Resources[] | select(.Properties.CompatibleArchitectures == ["${{ matrix.architecture }}"]) | .Metadata."aws:asset:path"' cdk.out/LayerV2Stack.template.json) + + # fill in the SAR SAM template + sed -e "s||${{ inputs.package-version }}|g" -e "s//${{ env.SAR_NAME }}/g" -e "s||./cdk.out/$asset|g" layer/sar/template.txt > template.yml + + # SAR needs a README and a LICENSE, so just copy the ones from the repo + cp README.md LICENSE "./cdk.out/$asset/" + + # Package the SAR to our SAR S3 bucket, and publish it + sam package --template-file template.yml --output-template-file packaged.yml --s3-bucket ${{ secrets.AWS_SAR_S3_BUCKET }} + sam publish --template packaged.yml --region "$AWS_REGION" + - name: Deploy BETA canary + if: ${{ inputs.stage == 'BETA' }} + run: | + if [[ "${{ matrix.architecture }}" == "arm64" ]]; then + TEST_STACK_NAME="${TEST_STACK_NAME}-arm64" + fi + + echo "Check if stack does not exist" + stack_exists=$(aws cloudformation list-stacks --query "StackSummaries[?(StackName == '$TEST_STACK_NAME' && StackStatus == 'CREATE_COMPLETE')].{StackId:StackId, StackName:StackName, CreationTime:CreationTime, StackStatus:StackStatus}" --output text) + + if [[ -n "$stack_exists" ]] ; then + echo "Found test deployment stack, removing..." 
+ aws cloudformation delete-stack --stack-name "$TEST_STACK_NAME" + aws cloudformation wait stack-delete-complete --stack-name "$TEST_STACK_NAME" + fi + + echo "Creating canary stack" + echo "Stack name: $TEST_STACK_NAME" + aws serverlessrepo create-cloud-formation-change-set --application-id arn:aws:serverlessrepo:${{ env.AWS_REGION }}:${{ steps.aws-credentials-sar-role.outputs.aws-account-id }}:applications/${{ env.SAR_NAME }} --stack-name "${TEST_STACK_NAME/serverlessrepo-/}" --capabilities CAPABILITY_NAMED_IAM + CHANGE_SET_ID=$(aws cloudformation list-change-sets --stack-name "$TEST_STACK_NAME" --query 'Summaries[*].ChangeSetId' --output text) + aws cloudformation wait change-set-create-complete --change-set-name "$CHANGE_SET_ID" + aws cloudformation execute-change-set --change-set-name "$CHANGE_SET_ID" + aws cloudformation wait stack-create-complete --stack-name "$TEST_STACK_NAME" + echo "Waiting until stack deployment completes..." + + echo "Exit with error if stack is not in CREATE_COMPLETE" + stack_exists=$(aws cloudformation list-stacks --query "StackSummaries[?(StackName == '$TEST_STACK_NAME' && StackStatus == 'CREATE_COMPLETE')].{StackId:StackId, StackName:StackName, CreationTime:CreationTime, StackStatus:StackStatus}") + if [[ -z "$stack_exists" ]] ; then + echo "Could find successful deployment, exit error..." + exit 1 + fi + echo "Deployment successful" + - name: Publish SAR + if: ${{ inputs.stage == 'PROD' }} + run: | + # wait until SAR registers the app, otherwise it fails to make it public + sleep 15 + echo "Make SAR app public" + aws serverlessrepo put-application-policy --application-id arn:aws:serverlessrepo:${{ env.AWS_REGION }}:${{ steps.aws-credentials-sar-role.outputs.aws-account-id }}:applications/${{ env.SAR_NAME }} --statements Principals='*',Actions=Deploy diff --git a/.github/workflows/reusable_update_v2_layer_arn_docs.yml b/.github/workflows/reusable_update_v2_layer_arn_docs.yml new file mode 100644 index 00000000000..ea13a63f64a --- /dev/null +++ b/.github/workflows/reusable_update_v2_layer_arn_docs.yml @@ -0,0 +1,60 @@ +name: Update V2 Layer ARN Docs + +on: + workflow_call: + inputs: + latest_published_version: + description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v2.0.0" + type: string + required: true + +permissions: + contents: write + +env: + BRANCH: develop + +jobs: + publish_v2_layer_arn: + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent race-condition and inconsistencies with changelog push + concurrency: + group: changelog-build + runs-on: ubuntu-latest + steps: + - name: Checkout repository # reusable workflows start clean, so we need to checkout again + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Git client setup and refresh tip + run: | + git config user.name "Release bot" + git config user.email "aws-devax-open-source@amazon.com" + git config pull.rebase true + git config remote.origin.url >&- || git remote add origin https://github.com/"${origin}" # Git Detached mode (release notes) doesn't have origin + git pull origin "${BRANCH}" + - name: Download CDK layer artifact + uses: actions/download-artifact@v3 + with: + name: cdk-layer-stack + path: cdk-layer-stack + - name: Replace layer versions in documentation + run: ./layer/scripts/update_layer_arn.sh cdk-layer-stack + - name: Update documentation in trunk + run: | + HAS_CHANGE=$(git status --porcelain) + test -z "${HAS_CHANGE}" && echo "Nothing to update" && exit 0 + git add docs/index.md + git commit -m "chore: update v2 layer ARN on documentation" + git pull origin "${BRANCH}" # prevents concurrent branch update failing push + git push origin HEAD:refs/heads/"${BRANCH}" + + release-docs: + needs: publish_v2_layer_arn + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: ${{ inputs.latest_published_version }} + alias: latest diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 86176968839..e60aaf391ec 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -27,9 +27,7 @@ jobs: contents: read strategy: matrix: - # Maintenance: disabled until we discover concurrency lock issue with multiple versions and tmp - # version: ["3.7", "3.8", "3.9"] - version: ["3.7"] + version: ["3.7", "3.8", "3.9"] steps: - name: "Checkout" uses: actions/checkout@v3 @@ -41,6 +39,14 @@ jobs: python-version: ${{ matrix.version }} architecture: "x64" cache: "poetry" + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: "16.12" + - name: Install CDK CLI + run: | + npm install + npx cdk --version - name: Install dependencies run: make dev - name: Configure AWS credentials diff --git a/.github/workflows/v2_on_push_docs.yml b/.github/workflows/v2_on_push_docs.yml deleted file mode 100644 index d70fedbc6c5..00000000000 --- a/.github/workflows/v2_on_push_docs.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: Docs v2 - -on: - workflow_dispatch: -# push: -# branches: -# - v2 -# paths: -# - "docs/**" -# - "mkdocs.yml" -# - "examples/**" - -jobs: - changelog: - permissions: - contents: write - uses: ./.github/workflows/reusable_publish_changelog.yml - - release-docs: - needs: changelog - permissions: - contents: write - pages: write - uses: ./.github/workflows/reusable_publish_docs.yml - with: - version: v2 - alias: alpha -# Maintenance: Only necessary in repo migration -# - name: Create redirect from old docs -# run: | -# git checkout gh-pages -# test -f 404.html && echo "Redirect already set" && exit 0 -# git checkout develop -- 404.html -# git add 404.html -# git commit -m "chore: set docs redirect" --no-verify -# git push origin gh-pages -f diff --git 
a/.github/workflows/v2_rebuild_latest_docs.yml b/.github/workflows/v2_rebuild_latest_docs.yml deleted file mode 100644 index 6d833cc3fef..00000000000 --- a/.github/workflows/v2_rebuild_latest_docs.yml +++ /dev/null @@ -1,14 +0,0 @@ -name: V2 Rebuild latest docs - -on: - workflow_dispatch: - -jobs: - release-docs: - permissions: - contents: write - pages: write - uses: ./.github/workflows/reusable_publish_docs.yml - with: - version: v2 - alias: alpha diff --git a/.gitignore b/.gitignore index b776e1999c2..a69b4eaf618 100644 --- a/.gitignore +++ b/.gitignore @@ -305,5 +305,12 @@ site/ !404.html !docs/overrides/*.html +# CDK +.cdk + !.github/workflows/lib examples/**/sam/.aws-sam + +cdk.out +# NOTE: different accounts will be used for E2E thus creating unnecessary git clutter +cdk.context.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 71b1125cf54..a9f34e20593 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,13 +34,16 @@ repos: hooks: - id: markdownlint args: ["--fix"] - - repo: https://github.com/aws-cloudformation/cfn-python-lint - rev: v0.61.1 + - repo: local hooks: - - id: cfn-python-lint - files: examples/.*\.(yaml|yml)$ + - id: cloudformation + name: linting::cloudformation + entry: poetry run cfn-lint + language: system + types: [yaml] + files: examples/.* - repo: https://github.com/rhysd/actionlint - rev: v1.6.16 + rev: v1.6.21 hooks: - id: actionlint-docker args: [-pyflakes=] diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 4e78aac2eb4..c4907cdf57f 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -15,8 +15,6 @@ - [Releasing a new version](#releasing-a-new-version) - [Drafting release notes](#drafting-release-notes) - [Run end to end tests](#run-end-to-end-tests) - - [Structure](#structure) - - [Workflow](#workflow) - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - [Manage Roadmap](#manage-roadmap) @@ -30,6 +28,16 @@ - [Is that a bug?](#is-that-a-bug) - [Mentoring contributions](#mentoring-contributions) - [Long running issues or PRs](#long-running-issues-or-prs) +- [E2E framework](#e2e-framework) + - [Structure](#structure) + - [Mechanics](#mechanics) + - [Authoring a new feature E2E test](#authoring-a-new-feature-e2e-test) + - [1. Define infrastructure](#1-define-infrastructure) + - [2. Deploy/Delete infrastructure when tests run](#2-deploydelete-infrastructure-when-tests-run) + - [3. Access stack outputs for E2E tests](#3-access-stack-outputs-for-e2e-tests) + - [Internals](#internals) + - [Test runner parallelization](#test-runner-parallelization) + - [CDK CLI parallelization](#cdk-cli-parallelization) ## Overview @@ -220,18 +228,88 @@ E2E tests are run on every push to `develop` or manually via [run-e2e-tests work To run locally, you need [AWS CDK CLI](https://docs.aws.amazon.com/cdk/v2/guide/getting_started.html#getting_started_prerequisites) and an [account bootstrapped](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html) (`cdk bootstrap`). With a default AWS CLI profile configured, or `AWS_PROFILE` environment variable set, run `make e2e tests`. -#### Structure +### Releasing a documentation hotfix + +You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). 
Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available. + +This workflow will update both user guide and API documentation. + +### Maintain Overall Health of the Repo + +> TODO: Coordinate renaming `develop` to `main` + +Keep the `develop` branch at production quality at all times. Backport features as needed. Cut release branches and tags to enable future patches. + +### Manage Roadmap + +See [Roadmap section](https://awslabs.github.io/aws-lambda-powertools-python/latest/roadmap/) + +Ensure the repo highlights features that should be elevated to the project roadmap. Be clear about the feature’s status, priority, target version, and whether or not it should be elevated to the roadmap. + +### Add Continuous Integration Checks + +Add integration checks that validate pull requests and pushes to ease the burden on Pull Request reviewers. Continuously revisit areas of improvement to reduce operational burden in all parties involved. + +### Negative Impact on the Project + +Actions that negatively impact the project will be handled by the admins, in coordination with other maintainers, in balance with the urgency of the issue. Examples would be [Code of Conduct](CODE_OF_CONDUCT.md) violations, deliberate harmful or malicious actions, spam, monopolization, and security risks. + +### Becoming a maintainer + +In 2023, we will revisit this. We need to improve our understanding of how other projects are doing, their mechanisms to promote key contributors, and how they interact daily. + +We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer). + +## Common scenarios + +These are recurring ambiguous situations that new and existing maintainers may encounter. They serve as guidance. It is up to each maintainer to follow, adjust, or handle in a different manner as long as [our conduct is consistent](#uphold-code-of-conduct) + +### Contribution is stuck + +A contribution can get stuck often due to lack of bandwidth and language barrier. For bandwidth issues, check whether the author needs help. Make sure you get their permission before pushing code into their existing PR - do not create a new PR unless strictly necessary. + +For language barrier and others, offer a 1:1 chat to get them unblocked. Often times, English might not be their primary language, and writing in public might put them off, or come across not the way they intended to be. + +In other cases, you may have constrained capacity. Use `help wanted` label when you want to signal other maintainers and external contributors that you could use a hand to move it forward. -Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization (see [Workflow](#workflow)). You'll notice multiple `conftest.py`, `infrastructure.py`, and `handlers`. +### Insufficient feedback or information + +When in doubt, use `need-more-information` or `need-customer-feedback` labels to signal more context and feedback are necessary before proceeding. You can also use `revisit-in-3-months` label when you expect it might take a while to gather enough information before you can decide. + +### Crediting contributions + +We credit all contributions as part of each [release note](https://github.com/awslabs/aws-lambda-powertools-python/releases) as an automated process. If you find contributors are missing from the release note you're producing, please add them manually. 
+ +### Is that a bug? + +A bug produces incorrect or unexpected results at runtime that differ from its intended behavior. Bugs must be reproducible. They directly affect customers experience at runtime despite following its recommended usage. -- **`infrastructure`**. Uses CDK to define what a Stack for a given feature should look like. It inherits from `BaseInfrastructure` to handle all boilerplate and deployment logic necessary. -- **`conftest.py`**. Imports and deploys a given feature Infrastructure. Hierarchy matters. Top-level `conftest` deploys stacks only once and blocks I/O across all CPUs. Feature-level `conftest` deploys stacks in parallel, and once complete run all tests in parallel. -- **`handlers`**. Lambda function handlers that will be automatically deployed and exported as PascalCase for later use. +Documentation snippets, use of internal components, or unadvertised functionalities are not considered bugs. + +### Mentoring contributions + +Always favor mentoring issue authors to contribute, unless they're not interested or the implementation is sensitive (_e.g., complexity, time to release, etc._). + +Make use of `help wanted` and `good first issue` to signal additional contributions the community can help. + +### Long running issues or PRs + +Try offering a 1:1 call in the attempt to get to a mutual understanding and clarify areas that maintainers could help. + +In the rare cases where both parties don't have the bandwidth or expertise to continue, it's best to use the `revisit-in-3-months` label. By then, see if it's possible to break the PR or issue in smaller chunks, and eventually close if there is no progress. + +## E2E framework + +### Structure + +Our E2E framework relies on [Pytest fixtures](https://docs.pytest.org/en/6.2.x/fixture.html) to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization) and [CDK CLI Parallelization](#cdk-cli-parallelization). + +**tests/e2e structure** ```shell . ├── __init__.py -├── conftest.py # deploys Lambda Layer stack +├── conftest.py # builds Lambda Layer once ├── logger │ ├── __init__.py │ ├── conftest.py # deploys LoggerStack @@ -256,112 +334,293 @@ Our E2E framework relies on pytest fixtures to coordinate infrastructure and tes │ ├── infrastructure.py # TracerStack definition │ └── test_tracer.py └── utils - ├── Dockerfile ├── __init__.py ├── data_builder # build_service_name(), build_add_dimensions_input, etc. ├── data_fetcher # get_traces(), get_logs(), get_lambda_response(), etc. - ├── infrastructure.py # base infrastructure like deploy logic, Layer Stack, etc. + ├── infrastructure.py # base infrastructure like deploy logic, etc. ``` -#### Workflow +Where: -We parallelize our end-to-end tests to benefit from speed and isolate Lambda functions to ease assessing side effects (e.g., traces, logs, etc.). The following diagram demonstrates the process we take every time you use `make e2e`: +- **`/infrastructure.py`**. Uses CDK to define the infrastructure a given feature needs. +- **`/handlers/`**. Lambda function handlers to build, deploy, and exposed as stack output in PascalCase (e.g., `BasicHandler`). +- **`utils/`**. Test utilities to build data and fetch AWS data to ease assertion +- **`conftest.py`**. Deploys and deletes a given feature infrastructure. Hierarchy matters: + - **Top-level (`e2e/conftest`)**. Builds Lambda Layer only once and blocks I/O across all CPU workers. + - **Feature-level (`e2e//conftest`)**. 
Deploys stacks in parallel and make them independent of each other. + +### Mechanics + +Under [`BaseInfrastructure`](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/tests/e2e/utils/infrastructure.py), we hide the complexity of deployment and delete coordination under `deploy`, `delete`, and `create_lambda_functions` methods. + +This allows us to benefit from test and deployment parallelization, use IDE step-through debugging for a single test, run one, subset, or all tests and only deploy their related infrastructure, without any custom configuration. + +> Class diagram to understand abstraction built when defining a new stack (`LoggerStack`) ```mermaid -graph TD - A[make e2e test] -->Spawn{"Split and group tests
by feature and CPU"} +classDiagram + class InfrastructureProvider { + <> + +deploy() Dict + +delete() + +create_resources() + +create_lambda_functions() Dict~Functions~ + } + + class BaseInfrastructure { + +deploy() Dict + +delete() + +create_lambda_functions() Dict~Functions~ + +add_cfn_output() + } + + class TracerStack { + +create_resources() + } + + class LoggerStack { + +create_resources() + } + + class MetricsStack { + +create_resources() + } + + class EventHandlerStack { + +create_resources() + } + + InfrastructureProvider <|-- BaseInfrastructure : implement + BaseInfrastructure <|-- TracerStack : inherit + BaseInfrastructure <|-- LoggerStack : inherit + BaseInfrastructure <|-- MetricsStack : inherit + BaseInfrastructure <|-- EventHandlerStack : inherit +``` - Spawn -->|Worker0| Worker0_Start["Load tests"] - Spawn -->|Worker1| Worker1_Start["Load tests"] - Spawn -->|WorkerN| WorkerN_Start["Load tests"] +### Authoring a new feature E2E test - Worker0_Start -->|Wait| LambdaLayerStack["Lambda Layer Stack Deployment"] - Worker1_Start -->|Wait| LambdaLayerStack["Lambda Layer Stack Deployment"] - WorkerN_Start -->|Wait| LambdaLayerStack["Lambda Layer Stack Deployment"] +Imagine you're going to create E2E for Event Handler feature for the first time. Keep the following mental model when reading: - LambdaLayerStack -->|Worker0| Worker0_Deploy["Launch feature stack"] - LambdaLayerStack -->|Worker1| Worker1_Deploy["Launch feature stack"] - LambdaLayerStack -->|WorkerN| WorkerN_Deploy["Launch feature stack"] +```mermaid +graph LR + A["1. Define infrastructure"]-->B["2. Deploy/Delete infrastructure"]-->C["3.Access Stack outputs" ] +``` - Worker0_Deploy -->|Worker0| Worker0_Tests["Run tests"] - Worker1_Deploy -->|Worker1| Worker1_Tests["Run tests"] - WorkerN_Deploy -->|WorkerN| WorkerN_Tests["Run tests"] +#### 1. Define infrastructure - Worker0_Tests --> ResultCollection - Worker1_Tests --> ResultCollection - WorkerN_Tests --> ResultCollection +We use CDK as our Infrastructure as Code tool of choice. Before you start using CDK, you'd take the following steps: - ResultCollection{"Wait for workers
Collect test results"} - ResultCollection --> TestEnd["Report results"] - ResultCollection --> DeployEnd["Delete Stacks"] +1. Create `tests/e2e/event_handler/infrastructure.py` file +2. Create a new class `EventHandlerStack` and inherit from `BaseInfrastructure` +3. Override `create_resources` method and define your infrastructure using CDK +4. (Optional) Create a Lambda function under `handlers/alb_handler.py` + +> Excerpt `tests/e2e/event_handler/infrastructure.py` + +```python +class EventHandlerStack(BaseInfrastructure): + def create_resources(self): + functions = self.create_lambda_functions() + + self._create_alb(function=functions["AlbHandler"]) + ... + + def _create_alb(self, function: Function): + vpc = ec2.Vpc.from_lookup( + self.stack, + "VPC", + is_default=True, + region=self.region, + ) + + alb = elbv2.ApplicationLoadBalancer(self.stack, "ALB", vpc=vpc, internet_facing=True) + CfnOutput(self.stack, "ALBDnsName", value=alb.load_balancer_dns_name) + ... ``` -### Releasing a documentation hotfix +> Excerpt `tests/e2e/event_handler/handlers/alb_handler.py` -You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available. +```python +from aws_lambda_powertools.event_handler import ALBResolver, Response, content_types -This workflow will update both user guide and API documentation. +app = ALBResolver() -### Maintain Overall Health of the Repo -> TODO: Coordinate renaming `develop` to `main` +@app.get("/todos") +def hello(): + return Response( + status_code=200, + content_type=content_types.TEXT_PLAIN, + body="Hello world", + cookies=["CookieMonster", "MonsterCookie"], + headers={"Foo": ["bar", "zbr"]}, + ) -Keep the `develop` branch at production quality at all times. Backport features as needed. Cut release branches and tags to enable future patches. -### Manage Roadmap +def lambda_handler(event, context): + return app.resolve(event, context) +``` -See [Roadmap section](https://awslabs.github.io/aws-lambda-powertools-python/latest/roadmap/) +#### 2. Deploy/Delete infrastructure when tests run -Ensure the repo highlights features that should be elevated to the project roadmap. Be clear about the feature’s status, priority, target version, and whether or not it should be elevated to the roadmap. +We need to create a Pytest fixture for our new feature under `tests/e2e/event_handler/conftest.py`. -### Add Continuous Integration Checks +This will instruct Pytest to deploy our infrastructure when our tests start, and delete it when they complete whether tests are successful or not. Note that this file will not need any modification in the future. -Add integration checks that validate pull requests and pushes to ease the burden on Pull Request reviewers. Continuously revisit areas of improvement to reduce operational burden in all parties involved. +> Excerpt `conftest.py` for Event Handler -### Negative Impact on the Project +```python +import pytest -Actions that negatively impact the project will be handled by the admins, in coordination with other maintainers, in balance with the urgency of the issue. Examples would be [Code of Conduct](CODE_OF_CONDUCT.md) violations, deliberate harmful or malicious actions, spam, monopolization, and security risks. 
+from tests.e2e.event_handler.infrastructure import EventHandlerStack -### Becoming a maintainer -In 2023, we will revisit this. We need to improve our understanding of how other projects are doing, their mechanisms to promote key contributors, and how they interact daily. +@pytest.fixture(autouse=True, scope="module") +def infrastructure(): + """Setup and teardown logic for E2E test infrastructure -We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer). + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = EventHandlerStack() + try: + yield stack.deploy() + finally: + stack.delete() -## Common scenarios +``` -These are recurring ambiguous situations that new and existing maintainers may encounter. They serve as guidance. It is up to each maintainer to follow, adjust, or handle in a different manner as long as [our conduct is consistent](#uphold-code-of-conduct) +#### 3. Access stack outputs for E2E tests -### Contribution is stuck +Within our tests, we should now have access to the `infrastructure` fixture we defined earlier in `tests/e2e/event_handler/conftest.py`. -A contribution can get stuck often due to lack of bandwidth and language barrier. For bandwidth issues, check whether the author needs help. Make sure you get their permission before pushing code into their existing PR - do not create a new PR unless strictly necessary. +We can access any Stack Output using pytest dependency injection. -For language barrier and others, offer a 1:1 chat to get them unblocked. Often times, English might not be their primary language, and writing in public might put them off, or come across not the way they intended to be. +> Excerpt `tests/e2e/event_handler/test_header_serializer.py` -In other cases, you may have constrained capacity. Use `help wanted` label when you want to signal other maintainers and external contributors that you could use a hand to move it forward. +```python +@pytest.fixture +def alb_basic_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBBasicListenerPort", "") + return f"http://{dns_name}:{port}" -### Insufficient feedback or information -When in doubt, use `need-more-information` or `need-customer-feedback` labels to signal more context and feedback are necessary before proceeding. You can also use `revisit-in-3-months` label when you expect it might take a while to gather enough information before you can decide. +def test_alb_headers_serializer(alb_basic_listener_endpoint): + # GIVEN + url = f"{alb_basic_listener_endpoint}/todos" + ... +``` -### Crediting contributions +### Internals -We credit all contributions as part of each [release note](https://github.com/awslabs/aws-lambda-powertools-python/releases) as an automated process. If you find contributors are missing from the release note you're producing, please add them manually. +#### Test runner parallelization -### Is that a bug? +Besides speed, we parallelize our end-to-end tests to ease asserting async side-effects may take a while per test too, _e.g., traces to become available_. -A bug produces incorrect or unexpected results at runtime that differ from its intended behavior. Bugs must be reproducible. They directly affect customers experience at runtime despite following its recommended usage. 
+The following diagram demonstrates the process we take every time you use `make e2e` locally or at CI: -Documentation snippets, use of internal components, or unadvertised functionalities are not considered bugs. +```mermaid +graph TD + A[make e2e test] -->Spawn{"Split and group tests
by feature and CPU"} -### Mentoring contributions + Spawn -->|Worker0| Worker0_Start["Load tests"] + Spawn -->|Worker1| Worker1_Start["Load tests"] + Spawn -->|WorkerN| WorkerN_Start["Load tests"] -Always favor mentoring issue authors to contribute, unless they're not interested or the implementation is sensitive (_e.g., complexity, time to release, etc._). + Worker0_Start -->|Wait| LambdaLayer["Lambda Layer build"] + Worker1_Start -->|Wait| LambdaLayer["Lambda Layer build"] + WorkerN_Start -->|Wait| LambdaLayer["Lambda Layer build"] -Make use of `help wanted` and `good first issue` to signal additional contributions the community can help. + LambdaLayer -->|Worker0| Worker0_Deploy["Launch feature stack"] + LambdaLayer -->|Worker1| Worker1_Deploy["Launch feature stack"] + LambdaLayer -->|WorkerN| WorkerN_Deploy["Launch feature stack"] -### Long running issues or PRs + Worker0_Deploy -->|Worker0| Worker0_Tests["Run tests"] + Worker1_Deploy -->|Worker1| Worker1_Tests["Run tests"] + WorkerN_Deploy -->|WorkerN| WorkerN_Tests["Run tests"] -Try offering a 1:1 call in the attempt to get to a mutual understanding and clarify areas that maintainers could help. + Worker0_Tests --> ResultCollection + Worker1_Tests --> ResultCollection + WorkerN_Tests --> ResultCollection -In the rare cases where both parties don't have the bandwidth or expertise to continue, it's best to use the `revisit-in-3-months` label. By then, see if it's possible to break the PR or issue in smaller chunks, and eventually close if there is no progress. + ResultCollection{"Wait for workers
Collect test results"} + ResultCollection --> TestEnd["Report results"] + ResultCollection --> DeployEnd["Delete Stacks"] +``` + +#### CDK CLI parallelization + +For CDK CLI to work with [independent CDK Apps](https://docs.aws.amazon.com/cdk/v2/guide/apps.html), we specify an output directory when synthesizing our stack and deploy from said output directory. + +```mermaid +flowchart TD + subgraph "Deploying distinct CDK Apps" + EventHandlerInfra["Event Handler CDK App"] --> EventHandlerSynth + TracerInfra["Tracer CDK App"] --> TracerSynth + EventHandlerSynth["cdk synth --out cdk.out/event_handler"] --> EventHandlerDeploy["cdk deploy --app cdk.out/event_handler"] + + TracerSynth["cdk synth --out cdk.out/tracer"] --> TracerDeploy["cdk deploy --app cdk.out/tracer"] + end +``` + +We create the typical CDK `app.py` at runtime when tests run, since we know which feature and Python version we're dealing with (locally or at CI). + +> Excerpt `cdk_app_V39.py` for Event Handler created at deploy phase + +```python +from tests.e2e.event_handler.infrastructure import EventHandlerStack +stack = EventHandlerStack() +stack.create_resources() +stack.app.synth() +``` + +When we run E2E tests for a single feature or all of them, our `cdk.out` looks like this: + +```shell +total 8 +drwxr-xr-x 18 lessa staff 576B Sep 6 15:38 event-handler +drwxr-xr-x 3 lessa staff 96B Sep 6 15:08 layer_build +-rw-r--r-- 1 lessa staff 32B Sep 6 15:08 layer_build.diff +drwxr-xr-x 18 lessa staff 576B Sep 6 15:38 logger +drwxr-xr-x 18 lessa staff 576B Sep 6 15:38 metrics +drwxr-xr-x 22 lessa staff 704B Sep 9 10:52 tracer +``` + +```mermaid +classDiagram + class CdkOutDirectory { + feature_name/ + layer_build/ + layer_build.diff + } + + class EventHandler { + manifest.json + stack_outputs.json + cdk_app_V39.py + asset.uuid/ + ... + } + + class StackOutputsJson { + BasicHandlerArn: str + ALBDnsName: str + ... + } + + CdkOutDirectory <|-- EventHandler : feature_name/ + StackOutputsJson <|-- EventHandler +``` + +Where: + +- **``**. Contains CDK Assets, CDK `manifest.json`, our `cdk_app_.py` and `stack_outputs.json` +- **`layer_build`**. Contains our Lambda Layer source code built once, used by all stacks independently +- **`layer_build.diff`**. Contains a hash on whether our source code has changed to speed up further deployments and E2E tests + +Together, all of this allows us to use Pytest like we would for any project, use CDK CLI and its [context methods](https://docs.aws.amazon.com/cdk/v2/guide/context.html#context_methods) (`from_lookup`), and use step-through debugging for a single E2E test without any extra configuration. + +> NOTE: VSCode doesn't support debugging processes spawning sub-processes (like CDK CLI does w/ shell and CDK App). Maybe [this works](https://stackoverflow.com/a/65339352). PyCharm works just fine. 
diff --git a/Makefile b/Makefile index 7a212738c53..ba4c2943f84 100644 --- a/Makefile +++ b/Makefile @@ -6,12 +6,12 @@ target: dev: pip install --upgrade pip pre-commit poetry - poetry install --extras "pydantic" + poetry install --extras "all" pre-commit install dev-gitpod: pip install --upgrade pip poetry - poetry install --extras "pydantic" + poetry install --extras "all" pre-commit install format: diff --git a/README.md b/README.md index fb5fc480f37..f34c0104c67 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ - + # AWS Lambda Powertools for Python [![Build](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/python_build.yml/badge.svg)](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/python_build.yml) diff --git a/aws_lambda_powertools/__init__.py b/aws_lambda_powertools/__init__.py index 65b5eb86730..574c9b257f1 100644 --- a/aws_lambda_powertools/__init__.py +++ b/aws_lambda_powertools/__init__.py @@ -2,12 +2,21 @@ """Top-level package for Lambda Python Powertools.""" +from pathlib import Path -from .logging import Logger # noqa: F401 -from .metrics import Metrics, single_metric # noqa: F401 +from .logging import Logger +from .metrics import Metrics, single_metric from .package_logger import set_package_logger_handler -from .tracing import Tracer # noqa: F401 +from .tracing import Tracer __author__ = """Amazon Web Services""" +__all__ = [ + "Logger", + "Metrics", + "single_metric", + "Tracer", +] + +PACKAGE_PATH = Path(__file__).parent set_package_logger_handler() diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 0e7b5a87838..112bcd92dfe 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -1,7 +1,6 @@ import base64 import json import logging -import os import re import traceback import warnings @@ -26,8 +25,8 @@ from aws_lambda_powertools.event_handler import content_types from aws_lambda_powertools.event_handler.exceptions import NotFoundError, ServiceError -from aws_lambda_powertools.shared import constants -from aws_lambda_powertools.shared.functions import powertools_dev_is_set, strtobool +from aws_lambda_powertools.shared.cookies import Cookie +from aws_lambda_powertools.shared.functions import powertools_dev_is_set from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.data_classes import ( ALBEvent, @@ -45,6 +44,7 @@ # API GW/ALB decode non-safe URI chars; we must support them too _UNSAFE_URI = "%<> \[\]{}|^" # noqa: W605 _NAMED_GROUP_BOUNDARY_PATTERN = rf"(?P\1[{_SAFE_URI}{_UNSAFE_URI}\\w]+)" +_ROUTE_REGEX = "^{}$" class ProxyEventType(Enum): @@ -136,10 +136,11 @@ def __init__( def to_dict(self) -> Dict[str, str]: """Builds the configured Access-Control http headers""" - headers = { + headers: Dict[str, str] = { "Access-Control-Allow-Origin": self.allow_origin, "Access-Control-Allow-Headers": ",".join(sorted(self.allow_headers)), } + if self.expose_headers: headers["Access-Control-Expose-Headers"] = ",".join(self.expose_headers) if self.max_age is not None: @@ -157,7 +158,8 @@ def __init__( status_code: int, content_type: Optional[str] = None, body: Union[str, bytes, None] = None, - headers: Optional[Dict] = None, + headers: Optional[Dict[str, Union[str, List[str]]]] = None, + cookies: Optional[List[Cookie]] = None, ): """ @@ -170,13 +172,16 @@ def __init__( provided http headers body: Union[str, bytes, None] Optionally set 
the response body. Note: bytes body will be automatically base64 encoded - headers: dict - Optionally set specific http headers. Setting "Content-Type" hear would override the `content_type` value. + headers: dict[str, Union[str, List[str]]] + Optionally set specific http headers. Setting "Content-Type" here would override the `content_type` value. + cookies: list[Cookie] + Optionally set cookies. """ self.status_code = status_code self.body = body self.base64_encoded = False - self.headers: Dict = headers or {} + self.headers: Dict[str, Union[str, List[str]]] = headers if headers else {} + self.cookies = cookies or [] if content_type: self.headers.setdefault("Content-Type", content_type) @@ -208,7 +213,8 @@ def _add_cors(self, cors: CORSConfig): def _add_cache_control(self, cache_control: str): """Set the specified cache control headers for 200 http responses. For non-200 `no-cache` is used.""" - self.response.headers["Cache-Control"] = cache_control if self.response.status_code == 200 else "no-cache" + cache_control = cache_control if self.response.status_code == 200 else "no-cache" + self.response.headers["Cache-Control"] = cache_control def _compress(self): """Compress the response body, but only if `Accept-Encoding` headers includes gzip.""" @@ -238,11 +244,12 @@ def build(self, event: BaseProxyEvent, cors: Optional[CORSConfig] = None) -> Dic logger.debug("Encoding bytes response with base64") self.response.base64_encoded = True self.response.body = base64.b64encode(self.response.body).decode() + return { "statusCode": self.response.status_code, - "headers": self.response.headers, "body": self.response.body, "isBase64Encoded": self.response.base64_encoded, + **event.header_serializer().serialize(headers=self.response.headers, cookies=self.response.cookies), } @@ -450,7 +457,7 @@ def __init__( cors: CORSConfig Optionally configure and enabled CORS. Not each route will need to have to cors=True debug: Optional[bool] - Enables debug mode, by default False. Can be also be enabled by "POWERTOOLS_EVENT_HANDLER_DEBUG" + Enables debug mode, by default False. Can be also be enabled by "POWERTOOLS_DEV" environment variable serializer : Callable, optional function to serialize `obj` to a JSON formatted `str`, by default json.dumps @@ -543,18 +550,10 @@ def _has_debug(debug: Optional[bool] = None) -> bool: if debug is not None: return debug - # Maintenance: deprecate EVENT_HANDLER_DEBUG later in V2. - env_debug = os.getenv(constants.EVENT_HANDLER_DEBUG_ENV) - if env_debug is not None: - warnings.warn( - "POWERTOOLS_EVENT_HANDLER_DEBUG is set and will be deprecated in V2. Please use POWERTOOLS_DEV instead." 
- ) - return strtobool(env_debug) or powertools_dev_is_set() - return powertools_dev_is_set() @staticmethod - def _compile_regex(rule: str): + def _compile_regex(rule: str, base_regex: str = _ROUTE_REGEX): """Precompile regex pattern Logic @@ -584,7 +583,7 @@ def _compile_regex(rule: str): NOTE: See #520 for context """ rule_regex: str = re.sub(_DYNAMIC_ROUTE_PATTERN, _NAMED_GROUP_BOUNDARY_PATTERN, rule) - return re.compile("^{}$".format(rule_regex)) + return re.compile(base_regex.format(rule_regex)) def _to_proxy_event(self, event: Dict) -> BaseProxyEvent: """Convert the event dict to the corresponding data class""" @@ -638,7 +637,7 @@ def _path_starts_with(path: str, prefix: str): def _not_found(self, method: str) -> ResponseBuilder: """Called when no matching route was found and includes support for the cors preflight response""" - headers = {} + headers: Dict[str, Union[str, List[str]]] = {} if self._cors: logger.debug("CORS is enabled, updating headers.") headers.update(self._cors.to_dict()) @@ -811,6 +810,24 @@ def __init__( """Amazon API Gateway REST and HTTP API v1 payload resolver""" super().__init__(ProxyEventType.APIGatewayProxyEvent, cors, debug, serializer, strip_prefixes) + # override route to ignore trailing "/" in routes for REST API + def route( + self, + rule: str, + method: Union[str, Union[List[str], Tuple[str]]], + cors: Optional[bool] = None, + compress: bool = False, + cache_control: Optional[str] = None, + ): + # NOTE: see #1552 for more context. + return super().route(rule.rstrip("/"), method, cors, compress, cache_control) + + # Override _compile_regex to exclude trailing slashes for route resolution + @staticmethod + def _compile_regex(rule: str, base_regex: str = _ROUTE_REGEX): + + return super(APIGatewayRestResolver, APIGatewayRestResolver)._compile_regex(rule, "^{}/*$") + class APIGatewayHttpResolver(ApiGatewayResolver): current_event: APIGatewayProxyEventV2 diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index 86a6c2ac41b..2ec120e4d4a 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -10,8 +10,6 @@ METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE" -EVENT_HANDLER_DEBUG_ENV: str = "POWERTOOLS_EVENT_HANDLER_DEBUG" - SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME" XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID" LAMBDA_TASK_ROOT_ENV: str = "LAMBDA_TASK_ROOT" diff --git a/aws_lambda_powertools/shared/cookies.py b/aws_lambda_powertools/shared/cookies.py new file mode 100644 index 00000000000..944bcb5dc9f --- /dev/null +++ b/aws_lambda_powertools/shared/cookies.py @@ -0,0 +1,118 @@ +from datetime import datetime +from enum import Enum +from io import StringIO +from typing import List, Optional + + +class SameSite(Enum): + """ + SameSite allows a server to define a cookie attribute making it impossible for + the browser to send this cookie along with cross-site requests. The main + goal is to mitigate the risk of cross-origin information leakage, and provide + some protection against cross-site request forgery attacks. + + See https://tools.ietf.org/html/draft-ietf-httpbis-cookie-same-site-00 for details. 
+ """ + + DEFAULT_MODE = "" + LAX_MODE = "Lax" + STRICT_MODE = "Strict" + NONE_MODE = "None" + + +def _format_date(timestamp: datetime) -> str: + # Specification example: Wed, 21 Oct 2015 07:28:00 GMT + return timestamp.strftime("%a, %d %b %Y %H:%M:%S GMT") + + +class Cookie: + """ + A Cookie represents an HTTP cookie as sent in the Set-Cookie header of an + HTTP response or the Cookie header of an HTTP request. + + See https://tools.ietf.org/html/rfc6265 for details. + """ + + def __init__( + self, + name: str, + value: str, + path: str = "", + domain: str = "", + secure: bool = True, + http_only: bool = False, + max_age: Optional[int] = None, + expires: Optional[datetime] = None, + same_site: Optional[SameSite] = None, + custom_attributes: Optional[List[str]] = None, + ): + """ + + Parameters + ---------- + name: str + The name of this cookie, for example session_id + value: str + The cookie value, for instance an uuid + path: str + The path for which this cookie is valid. Optional + domain: str + The domain for which this cookie is valid. Optional + secure: bool + Marks the cookie as secure, only sendable to the server with an encrypted request over the HTTPS protocol + http_only: bool + Enabling this attribute makes the cookie inaccessible to the JavaScript `Document.cookie` API + max_age: Optional[int] + Defines the period of time after which the cookie is invalid. Use negative values to force cookie deletion. + expires: Optional[datetime] + Defines a date where the permanent cookie expires. + same_site: Optional[SameSite] + Determines if the cookie should be sent to third party websites + custom_attributes: Optional[List[str]] + List of additional custom attributes to set on the cookie + """ + self.name = name + self.value = value + self.path = path + self.domain = domain + self.secure = secure + self.expires = expires + self.max_age = max_age + self.http_only = http_only + self.same_site = same_site + self.custom_attributes = custom_attributes + + def __str__(self) -> str: + payload = StringIO() + payload.write(f"{self.name}={self.value}") + + if self.path: + payload.write(f"; Path={self.path}") + + if self.domain: + payload.write(f"; Domain={self.domain}") + + if self.expires: + payload.write(f"; Expires={_format_date(self.expires)}") + + if self.max_age: + if self.max_age > 0: + payload.write(f"; MaxAge={self.max_age}") + else: + # negative or zero max-age should be set to 0 + payload.write("; MaxAge=0") + + if self.http_only: + payload.write("; HttpOnly") + + if self.secure: + payload.write("; Secure") + + if self.same_site: + payload.write(f"; SameSite={self.same_site.value}") + + if self.custom_attributes: + for attr in self.custom_attributes: + payload.write(f"; {attr}") + + return payload.getvalue() diff --git a/aws_lambda_powertools/shared/headers_serializer.py b/aws_lambda_powertools/shared/headers_serializer.py new file mode 100644 index 00000000000..796fd9aeae3 --- /dev/null +++ b/aws_lambda_powertools/shared/headers_serializer.py @@ -0,0 +1,113 @@ +import warnings +from collections import defaultdict +from typing import Any, Dict, List, Union + +from aws_lambda_powertools.shared.cookies import Cookie + + +class BaseHeadersSerializer: + """ + Helper class to correctly serialize headers and cookies for Amazon API Gateway, + ALB and Lambda Function URL response payload. + """ + + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: + """ + Serializes headers and cookies according to the request type. 
+ Returns a dict that can be merged with the response payload. + + Parameters + ---------- + headers: Dict[str, List[str]] + A dictionary of headers to set in the response + cookies: List[str] + A list of cookies to set in the response + """ + raise NotImplementedError() + + +class HttpApiHeadersSerializer(BaseHeadersSerializer): + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: + """ + When using HTTP APIs or LambdaFunctionURLs, everything is taken care automatically for us. + We can directly assign a list of cookies and a dict of headers to the response payload, and the + runtime will automatically serialize them correctly on the output. + + https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.proxy-format + https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.response + """ + + # Format 2.0 doesn't have multiValueHeaders or multiValueQueryStringParameters fields. + # Duplicate headers are combined with commas and included in the headers field. + combined_headers: Dict[str, str] = {} + for key, values in headers.items(): + if isinstance(values, str): + combined_headers[key] = values + else: + combined_headers[key] = ", ".join(values) + + return {"headers": combined_headers, "cookies": list(map(str, cookies))} + + +class MultiValueHeadersSerializer(BaseHeadersSerializer): + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: + """ + When using REST APIs, headers can be encoded using the `multiValueHeaders` key on the response. + This is also the case when using an ALB integration with the `multiValueHeaders` option enabled. + The solution covers headers with just one key or multiple keys. + + https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-output-format + https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#multi-value-headers-response + """ + payload: Dict[str, List[str]] = defaultdict(list) + + for key, values in headers.items(): + if isinstance(values, str): + payload[key].append(values) + else: + for value in values: + payload[key].append(value) + + if cookies: + payload.setdefault("Set-Cookie", []) + for cookie in cookies: + payload["Set-Cookie"].append(str(cookie)) + + return {"multiValueHeaders": payload} + + +class SingleValueHeadersSerializer(BaseHeadersSerializer): + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: + """ + The ALB integration has `multiValueHeaders` disabled by default. + If we try to set multiple headers with the same key, or more than one cookie, print a warning. + + https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#respond-to-load-balancer + """ + payload: Dict[str, Dict[str, str]] = {} + payload.setdefault("headers", {}) + + if cookies: + if len(cookies) > 1: + warnings.warn( + "Can't encode more than one cookie in the response. Sending the last cookie only. " + "Did you enable multiValueHeaders on the ALB Target Group?" 
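To make the three serializers concrete, here is a sketch of how the same headers and a single cookie are shaped for each payload format, assuming the classes exactly as defined in this file (outputs shown as comments are illustrative):

```python
from aws_lambda_powertools.shared.cookies import Cookie
from aws_lambda_powertools.shared.headers_serializer import (
    HttpApiHeadersSerializer,
    MultiValueHeadersSerializer,
    SingleValueHeadersSerializer,
)

headers = {"Content-Type": "text/html", "X-Request-Id": ["id-1", "id-2"]}
cookies = [Cookie(name="session_id", value="a3f1c2")]

HttpApiHeadersSerializer().serialize(headers, cookies)
# {"headers": {"Content-Type": "text/html", "X-Request-Id": "id-1, id-2"},
#  "cookies": ["session_id=a3f1c2; Secure"]}

MultiValueHeadersSerializer().serialize(headers, cookies)
# {"multiValueHeaders": {"Content-Type": ["text/html"],
#                        "X-Request-Id": ["id-1", "id-2"],
#                        "Set-Cookie": ["session_id=a3f1c2; Secure"]}}

SingleValueHeadersSerializer().serialize(headers, cookies)  # warns about the duplicate X-Request-Id
# {"headers": {"Set-Cookie": "session_id=a3f1c2; Secure",
#              "Content-Type": "text/html", "X-Request-Id": "id-2"}}
```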
+ ) + + # We can only send one cookie, send the last one + payload["headers"]["Set-Cookie"] = str(cookies[-1]) + + for key, values in headers.items(): + if isinstance(values, str): + payload["headers"][key] = values + else: + if len(values) > 1: + warnings.warn( + f"Can't encode more than one header value for the same key ('{key}') in the response. " + "Did you enable multiValueHeaders on the ALB Target Group?" + ) + + # We can only set one header per key, send the last one + payload["headers"][key] = values[-1] + + return payload diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 8d9ad16a3d0..0523d53c41d 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -300,16 +300,6 @@ def handler(event, context): @functools.wraps(lambda_handler) def decorate(event, context, **kwargs): with self.provider.in_subsegment(name=f"## {lambda_handler_name}") as subsegment: - global is_cold_start - logger.debug("Annotating cold start") - subsegment.put_annotation(key="ColdStart", value=is_cold_start) - - if is_cold_start: - is_cold_start = False - - if self.service: - subsegment.put_annotation(key="Service", value=self.service) - try: logger.debug("Calling lambda handler") response = lambda_handler(event, context, **kwargs) @@ -325,7 +315,18 @@ def decorate(event, context, **kwargs): self._add_full_exception_as_metadata( method_name=lambda_handler_name, error=err, subsegment=subsegment, capture_error=capture_error ) + raise + finally: + global is_cold_start + logger.debug("Annotating cold start") + subsegment.put_annotation(key="ColdStart", value=is_cold_start) + + if is_cold_start: + is_cold_start = False + + if self.service: + subsegment.put_annotation(key="Service", value=self.service) return response @@ -354,7 +355,8 @@ def capture_method( """Decorator to create subsegment for arbitrary functions It also captures both response and exceptions as metadata - and creates a subsegment named `## ` + and creates a subsegment named `## ` + # see here: [Qualified name for classes and functions](https://peps.python.org/pep-3155/) When running [async functions concurrently](https://docs.python.org/3/library/asyncio-task.html#id6), methods may impact each others subsegment, and can trigger @@ -508,7 +510,8 @@ async def async_tasks(): functools.partial(self.capture_method, capture_response=capture_response, capture_error=capture_error), ) - method_name = f"{method.__name__}" + # Example: app.ClassA.get_all # noqa E800 + method_name = f"{method.__module__}.{method.__qualname__}" capture_response = resolve_truthy_env_var_choice( env=os.getenv(constants.TRACER_CAPTURE_RESPONSE_ENV, "true"), choice=capture_response @@ -670,7 +673,7 @@ def _add_response_as_metadata( if data is None or not capture_response or subsegment is None: return - subsegment.put_metadata(key=f"{method_name} response", value=data, namespace=self._config["service"]) + subsegment.put_metadata(key=f"{method_name} response", value=data, namespace=self.service) def _add_full_exception_as_metadata( self, @@ -695,7 +698,7 @@ def _add_full_exception_as_metadata( if not capture_error: return - subsegment.put_metadata(key=f"{method_name} error", value=error, namespace=self._config["service"]) + subsegment.put_metadata(key=f"{method_name} error", value=error, namespace=self.service) @staticmethod def _disable_tracer_provider(): diff --git a/aws_lambda_powertools/utilities/batch/__init__.py b/aws_lambda_powertools/utilities/batch/__init__.py index 
7db0781232c..08c35560b3f 100644 --- a/aws_lambda_powertools/utilities/batch/__init__.py +++ b/aws_lambda_powertools/utilities/batch/__init__.py @@ -13,10 +13,6 @@ batch_processor, ) from aws_lambda_powertools.utilities.batch.exceptions import ExceptionInfo -from aws_lambda_powertools.utilities.batch.sqs import ( - PartialSQSProcessor, - sqs_batch_processor, -) __all__ = ( "BatchProcessor", @@ -24,8 +20,6 @@ "ExceptionInfo", "EventType", "FailureResponse", - "PartialSQSProcessor", "SuccessResponse", "batch_processor", - "sqs_batch_processor", ) diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py index f2d7cd2ed74..4f9c4ca8780 100644 --- a/aws_lambda_powertools/utilities/batch/base.py +++ b/aws_lambda_powertools/utilities/batch/base.py @@ -208,19 +208,19 @@ def batch_processor( Lambda's Context record_handler: Callable Callable to process each record from the batch - processor: PartialSQSProcessor + processor: BasePartialProcessor Batch Processor to handle partial failure cases Examples -------- - **Processes Lambda's event with PartialSQSProcessor** + **Processes Lambda's event with a BasePartialProcessor** - >>> from aws_lambda_powertools.utilities.batch import batch_processor, PartialSQSProcessor + >>> from aws_lambda_powertools.utilities.batch import batch_processor, BatchProcessor >>> >>> def record_handler(record): >>> return record["body"] >>> - >>> @batch_processor(record_handler=record_handler, processor=PartialSQSProcessor()) + >>> @batch_processor(record_handler=record_handler, processor=BatchProcessor()) >>> def handler(event, context): >>> return {"StatusCode": 200} @@ -323,10 +323,10 @@ def lambda_handler(event, context: LambdaContext): @tracer.capture_method def record_handler(record: DynamoDBRecord): logger.info(record.dynamodb.new_image) - payload: dict = json.loads(record.dynamodb.new_image.get("item").s_value) + payload: dict = json.loads(record.dynamodb.new_image.get("item")) # alternatively: - # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image # noqa: E800 - # payload = change.get("Message").raw_event -> {"S": ""} + # changes: Dict[str, Any] = record.dynamodb.new_image # noqa: E800 + # payload = change.get("Message") -> "" ... @logger.inject_lambda_context diff --git a/aws_lambda_powertools/utilities/batch/exceptions.py b/aws_lambda_powertools/utilities/batch/exceptions.py index d90c25f12bc..d541d18d18f 100644 --- a/aws_lambda_powertools/utilities/batch/exceptions.py +++ b/aws_lambda_powertools/utilities/batch/exceptions.py @@ -24,19 +24,6 @@ def format_exceptions(self, parent_exception_str): return "\n".join(exception_list) -class SQSBatchProcessingError(BaseBatchProcessingError): - """When at least one message within a batch could not be processed""" - - def __init__(self, msg="", child_exceptions: Optional[List[ExceptionInfo]] = None): - super().__init__(msg, child_exceptions) - - # Overriding this method so we can output all child exception tracebacks when we raise this exception to prevent - # errors being lost. 
See https://github.com/awslabs/aws-lambda-powertools-python/issues/275 - def __str__(self): - parent_exception_str = super(SQSBatchProcessingError, self).__str__() - return self.format_exceptions(parent_exception_str) - - class BatchProcessingError(BaseBatchProcessingError): """When all batch records failed to be processed""" diff --git a/aws_lambda_powertools/utilities/batch/sqs.py b/aws_lambda_powertools/utilities/batch/sqs.py deleted file mode 100644 index 7b234c1372e..00000000000 --- a/aws_lambda_powertools/utilities/batch/sqs.py +++ /dev/null @@ -1,250 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -Batch SQS utilities -""" -import logging -import math -import sys -import warnings -from concurrent.futures import ThreadPoolExecutor, as_completed -from typing import Any, Callable, Dict, List, Optional, Tuple, cast - -import boto3 -from botocore.config import Config - -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - -from ...middleware_factory import lambda_handler_decorator -from .base import BasePartialProcessor -from .exceptions import SQSBatchProcessingError - -logger = logging.getLogger(__name__) - - -class PartialSQSProcessor(BasePartialProcessor): - """ - Amazon SQS batch processor to delete successes from the Queue. - - The whole batch will be processed, even if failures occur. After all records are processed, - SQSBatchProcessingError will be raised if there were any failures, causing messages to - be returned to the SQS queue. This behaviour can be disabled by passing suppress_exception. - - Parameters - ---------- - config: Config - botocore config object - suppress_exception: bool, optional - Supress exception raised if any messages fail processing, by default False - boto3_session : boto3.session.Session, optional - Boto3 session to use for AWS API communication - - - Example - ------- - **Process batch triggered by SQS** - - >>> from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - >>> - >>> def record_handler(record): - >>> return record["body"] - >>> - >>> def handler(event, context): - >>> records = event["Records"] - >>> processor = PartialSQSProcessor() - >>> - >>> with processor(records=records, handler=record_handler): - >>> result = processor.process() - >>> - >>> # Case a partial failure occurred, all successful executions - >>> # have been deleted from the queue after context's exit. - >>> - >>> return result - - """ - - def __init__( - self, - config: Optional[Config] = None, - suppress_exception: bool = False, - boto3_session: Optional[boto3.session.Session] = None, - ): - """ - Initializes sqs client. - """ - config = config or Config() - session = boto3_session or boto3.session.Session() - self.client = session.client("sqs", config=config) - self.suppress_exception = suppress_exception - self.max_message_batch = 10 - - warnings.warn( - "The sqs_batch_processor decorator and PartialSQSProcessor class are now deprecated, " - "and will be removed in the next major version. " - "Please follow the upgrade guide at " - "https://awslabs.github.io/aws-lambda-powertools-python/latest/utilities/batch/#legacy " - "to use the native batch_processor decorator or BatchProcessor class." 
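For reference, a minimal sketch of the native replacement this deprecation message points to. It assumes the public `BatchProcessor`/`EventType` API exported by `aws_lambda_powertools.utilities.batch` and that `ReportBatchItemFailures` is enabled on the SQS event source mapping:

```python
from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor

processor = BatchProcessor(event_type=EventType.SQS)


def record_handler(record):
    # Raise here to report this record back to SQS as a partial failure
    return record.body


@batch_processor(record_handler=record_handler, processor=processor)
def handler(event, context):
    # Returns {"batchItemFailures": [...]} so only failed records are retried
    return processor.response()
```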
- ) - - super().__init__() - - def _get_queue_url(self) -> Optional[str]: - """ - Format QueueUrl from first records entry - """ - if not getattr(self, "records", None): - return None - - *_, account_id, queue_name = self.records[0]["eventSourceARN"].split(":") - return f"{self.client._endpoint.host}/{account_id}/{queue_name}" - - def _get_entries_to_clean(self) -> List[Dict[str, str]]: - """ - Format messages to use in batch deletion - """ - entries = [] - # success_messages has generic type of union of SQS, Dynamodb and Kinesis Streams records or Pydantic models. - # Here we get SQS Record only - messages = cast(List[SQSRecord], self.success_messages) - for msg in messages: - entries.append({"Id": msg["messageId"], "ReceiptHandle": msg["receiptHandle"]}) - return entries - - def _process_record(self, record) -> Tuple: - """ - Process a record with instance's handler - - Parameters - ---------- - record: Any - An object to be processed. - """ - try: - result = self.handler(record=record) - return self.success_handler(record=record, result=result) - except Exception: - return self.failure_handler(record=record, exception=sys.exc_info()) - - def _prepare(self): - """ - Remove results from previous execution. - """ - self.success_messages.clear() - self.fail_messages.clear() - - def _clean(self) -> Optional[List]: - """ - Delete messages from Queue in case of partial failure. - """ - - # If all messages were successful, fall back to the default SQS - - # Lambda behavior which deletes messages if Lambda responds successfully - if not self.fail_messages: - logger.debug(f"All {len(self.success_messages)} records successfully processed") - return None - - queue_url = self._get_queue_url() - if queue_url is None: - logger.debug("No queue url found") - return None - - entries_to_remove = self._get_entries_to_clean() - # Batch delete up to 10 messages at a time (SQS limit) - max_workers = math.ceil(len(entries_to_remove) / self.max_message_batch) - - if entries_to_remove: - with ThreadPoolExecutor(max_workers=max_workers) as executor: - futures, results = [], [] - while entries_to_remove: - futures.append( - executor.submit( - self._delete_messages, queue_url, entries_to_remove[: self.max_message_batch], self.client - ) - ) - entries_to_remove = entries_to_remove[self.max_message_batch :] - for future in as_completed(futures): - try: - logger.debug("Deleted batch of processed messages from SQS") - results.append(future.result()) - except Exception: - logger.exception("Couldn't remove batch of processed messages from SQS") - raise - if self.suppress_exception: - logger.debug(f"{len(self.fail_messages)} records failed processing, but exceptions are suppressed") - else: - logger.debug(f"{len(self.fail_messages)} records failed processing, raising exception") - raise SQSBatchProcessingError( - msg=f"Not all records processed successfully. 
{len(self.exceptions)} individual errors logged " - f"separately below.", - child_exceptions=self.exceptions, - ) - - return results - - def _delete_messages(self, queue_url: str, entries_to_remove: List, sqs_client: Any): - delete_message_response = sqs_client.delete_message_batch( - QueueUrl=queue_url, - Entries=entries_to_remove, - ) - return delete_message_response - - -@lambda_handler_decorator -def sqs_batch_processor( - handler: Callable, - event: Dict, - context: Dict, - record_handler: Callable, - config: Optional[Config] = None, - suppress_exception: bool = False, - boto3_session: Optional[boto3.session.Session] = None, -): - """ - Middleware to handle SQS batch event processing - - Parameters - ---------- - handler: Callable - Lambda's handler - event: Dict - Lambda's Event - context: Dict - Lambda's Context - record_handler: Callable - Callable to process each record from the batch - config: Config - botocore config object - suppress_exception: bool, optional - Supress exception raised if any messages fail processing, by default False - boto3_session : boto3.session.Session, optional - Boto3 session to use for AWS API communication - - Examples - -------- - **Processes Lambda's event with PartialSQSProcessor** - - >>> from aws_lambda_powertools.utilities.batch import sqs_batch_processor - >>> - >>> def record_handler(record): - >>> return record["body"] - >>> - >>> @sqs_batch_processor(record_handler=record_handler) - >>> def handler(event, context): - >>> return {"StatusCode": 200} - - Limitations - ----------- - * Async batch processors - - """ - config = config or Config() - session = boto3_session or boto3.session.Session() - - processor = PartialSQSProcessor(config=config, suppress_exception=suppress_exception, boto3_session=session) - - records = event["Records"] - - with processor(records, record_handler): - processor.process() - - return handler(event, context) diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py index c28ec0d72e2..51a6f61f368 100644 --- a/aws_lambda_powertools/utilities/data_classes/alb_event.py +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -1,5 +1,10 @@ from typing import Dict, List, Optional +from aws_lambda_powertools.shared.headers_serializer import ( + BaseHeadersSerializer, + MultiValueHeadersSerializer, + SingleValueHeadersSerializer, +) from aws_lambda_powertools.utilities.data_classes.common import ( BaseProxyEvent, DictWrapper, @@ -33,3 +38,11 @@ def multi_value_query_string_parameters(self) -> Optional[Dict[str, List[str]]]: @property def multi_value_headers(self) -> Optional[Dict[str, List[str]]]: return self.get("multiValueHeaders") + + def header_serializer(self) -> BaseHeadersSerializer: + # When using the ALB integration, the `multiValueHeaders` feature can be disabled (default) or enabled. + # We can determine if the feature is enabled by looking if the event has a `multiValueHeaders` key. 
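A sketch of the selection logic the comment above describes, assuming the `ALBEvent` data class defined in this module — the serializer is chosen purely from the presence of `multiValueHeaders` in the incoming event:

```python
from aws_lambda_powertools.utilities.data_classes.alb_event import ALBEvent

single = ALBEvent({"headers": {"accept": "*/*"}})
multi = ALBEvent({"multiValueHeaders": {"accept": ["*/*"]}})

print(type(single.header_serializer()).__name__)  # SingleValueHeadersSerializer
print(type(multi.header_serializer()).__name__)   # MultiValueHeadersSerializer
```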
+ if self.multi_value_headers: + return MultiValueHeadersSerializer() + + return SingleValueHeadersSerializer() diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py index be374aba398..030d9739fa4 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -1,5 +1,10 @@ from typing import Any, Dict, List, Optional +from aws_lambda_powertools.shared.headers_serializer import ( + BaseHeadersSerializer, + HttpApiHeadersSerializer, + MultiValueHeadersSerializer, +) from aws_lambda_powertools.utilities.data_classes.common import ( BaseProxyEvent, BaseRequestContext, @@ -106,6 +111,9 @@ def path_parameters(self) -> Optional[Dict[str, str]]: def stage_variables(self) -> Optional[Dict[str, str]]: return self.get("stageVariables") + def header_serializer(self) -> BaseHeadersSerializer: + return MultiValueHeadersSerializer() + class RequestContextV2AuthorizerIam(DictWrapper): @property @@ -250,3 +258,6 @@ def path(self) -> str: def http_method(self) -> str: """The HTTP method used. Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT.""" return self.request_context.http.method + + def header_serializer(self): + return HttpApiHeadersSerializer() diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index 1b671489cdd..fa0d479af8a 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -3,6 +3,8 @@ from collections.abc import Mapping from typing import Any, Dict, Iterator, Optional +from aws_lambda_powertools.shared.headers_serializer import BaseHeadersSerializer + class DictWrapper(Mapping): """Provides a single read only access to a wrapper dict""" @@ -134,6 +136,9 @@ def get_header_value( """ return get_header_value(self.headers, name, default_value, case_sensitive) + def header_serializer(self) -> BaseHeadersSerializer: + raise NotImplementedError() + class RequestContextClientCert(DictWrapper): @property diff --git a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py index eb674c86b60..e62e307d67a 100644 --- a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py +++ b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py @@ -1,169 +1,100 @@ +from decimal import Clamped, Context, Decimal, Inexact, Overflow, Rounded, Underflow from enum import Enum -from typing import Any, Dict, Iterator, List, Optional, Union +from typing import Any, Callable, Dict, Iterator, Optional, Sequence, Set from aws_lambda_powertools.utilities.data_classes.common import DictWrapper +# NOTE: DynamoDB supports up to 38 digits precision +# Therefore, this ensures our Decimal follows what's stored in the table +DYNAMODB_CONTEXT = Context( + Emin=-128, + Emax=126, + prec=38, + traps=[Clamped, Overflow, Inexact, Rounded, Underflow], +) -class AttributeValueType(Enum): - Binary = "B" - BinarySet = "BS" - Boolean = "BOOL" - List = "L" - Map = "M" - Number = "N" - NumberSet = "NS" - Null = "NULL" - String = "S" - StringSet = "SS" +class TypeDeserializer: + """ + Deserializes DynamoDB types to Python types. 
-class AttributeValue(DictWrapper): - """Represents the data for an attribute + It's based on boto3's [DynamoDB TypeDeserializer](https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/dynamodb/types.html). # noqa: E501 - Documentation: - -------------- - - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html - - https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html + The only notable difference is that for Binary (`B`, `BS`) values we return Python Bytes directly, + since we don't support Python 2. """ - def __init__(self, data: Dict[str, Any]): - """AttributeValue constructor + def deserialize(self, value: Dict) -> Any: + """Deserialize DynamoDB data types into Python types. Parameters ---------- - data: Dict[str, Any] - Raw lambda event dict - """ - super().__init__(data) - self.dynamodb_type = list(data.keys())[0] + value: Any + DynamoDB value to be deserialized to a python type - @property - def b_value(self) -> Optional[str]: - """An attribute of type Base64-encoded binary data object - Example: - >>> {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"} - """ - return self.get("B") + Here are the various conversions: - @property - def bs_value(self) -> Optional[List[str]]: - """An attribute of type Array of Base64-encoded binary data objects - - Example: - >>> {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]} - """ - return self.get("BS") - - @property - def bool_value(self) -> Optional[bool]: - """An attribute of type Boolean - - Example: - >>> {"BOOL": True} - """ - item = self.get("BOOL") - return None if item is None else bool(item) + DynamoDB Python + -------- ------ + {'NULL': True} None + {'BOOL': True/False} True/False + {'N': str(value)} str(value) + {'S': string} string + {'B': bytes} bytes + {'NS': [str(value)]} set([str(value)]) + {'SS': [string]} set([string]) + {'BS': [bytes]} set([bytes]) + {'L': list} list + {'M': dict} dict - @property - def list_value(self) -> Optional[List["AttributeValue"]]: - """An attribute of type Array of AttributeValue objects - - Example: - >>> {"L": [ {"S": "Cookies"} , {"S": "Coffee"}, {"N": "3.14159"}]} - """ - item = self.get("L") - return None if item is None else [AttributeValue(v) for v in item] - - @property - def map_value(self) -> Optional[Dict[str, "AttributeValue"]]: - """An attribute of type String to AttributeValue object map - - Example: - >>> {"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}} - """ - return _attribute_value_dict(self._data, "M") - - @property - def n_value(self) -> Optional[str]: - """An attribute of type Number - - Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages - and libraries. However, DynamoDB treats them as number type attributes for mathematical operations. 
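A usage sketch of the `TypeDeserializer` introduced above, following the conversion table in its docstring (values are illustrative):

```python
from decimal import Decimal

from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import TypeDeserializer

deserializer = TypeDeserializer()

item = {
    "id": {"S": "xxx-xxx"},
    "quantity": {"N": "35"},
    "tags": {"SS": ["a", "b"]},
    "metadata": {"M": {"enabled": {"BOOL": True}, "note": {"NULL": True}}},
}

result = {key: deserializer.deserialize(value) for key, value in item.items()}
# {"id": "xxx-xxx", "quantity": Decimal("35"), "tags": {"a", "b"},
#  "metadata": {"enabled": True, "note": None}}
assert result["quantity"] == Decimal("35")
```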
+ Parameters + ---------- + value: Any + DynamoDB value to be deserialized to a python type - Example: - >>> {"N": "123.45"} + Returns + -------- + any + Python native type converted from DynamoDB type """ - return self.get("N") - - @property - def ns_value(self) -> Optional[List[str]]: - """An attribute of type Number Set - Example: - >>> {"NS": ["42.2", "-19", "7.5", "3.14"]} - """ - return self.get("NS") + dynamodb_type = list(value.keys())[0] + deserializer: Optional[Callable] = getattr(self, f"_deserialize_{dynamodb_type}".lower(), None) + if deserializer is None: + raise TypeError(f"Dynamodb type {dynamodb_type} is not supported") - @property - def null_value(self) -> None: - """An attribute of type Null. + return deserializer(value[dynamodb_type]) - Example: - >>> {"NULL": True} - """ + def _deserialize_null(self, value: bool) -> None: return None - @property - def s_value(self) -> Optional[str]: - """An attribute of type String + def _deserialize_bool(self, value: bool) -> bool: + return value - Example: - >>> {"S": "Hello"} - """ - return self.get("S") + def _deserialize_n(self, value: str) -> Decimal: + return DYNAMODB_CONTEXT.create_decimal(value) - @property - def ss_value(self) -> Optional[List[str]]: - """An attribute of type Array of strings + def _deserialize_s(self, value: str) -> str: + return value - Example: - >>> {"SS": ["Giraffe", "Hippo" ,"Zebra"]} - """ - return self.get("SS") + def _deserialize_b(self, value: bytes) -> bytes: + return value - @property - def get_type(self) -> AttributeValueType: - """Get the attribute value type based on the contained data""" - return AttributeValueType(self.dynamodb_type) + def _deserialize_ns(self, value: Sequence[str]) -> Set[Decimal]: + return set(map(self._deserialize_n, value)) - @property - def l_value(self) -> Optional[List["AttributeValue"]]: - """Alias of list_value""" - return self.list_value + def _deserialize_ss(self, value: Sequence[str]) -> Set[str]: + return set(map(self._deserialize_s, value)) - @property - def m_value(self) -> Optional[Dict[str, "AttributeValue"]]: - """Alias of map_value""" - return self.map_value + def _deserialize_bs(self, value: Sequence[bytes]) -> Set[bytes]: + return set(map(self._deserialize_b, value)) - @property - def get_value(self) -> Union[Optional[bool], Optional[str], Optional[List], Optional[Dict]]: - """Get the attribute value""" - try: - return getattr(self, f"{self.dynamodb_type.lower()}_value") - except AttributeError: - raise TypeError(f"Dynamodb type {self.dynamodb_type} is not supported") + def _deserialize_l(self, value: Sequence[Dict]) -> Sequence[Any]: + return [self.deserialize(v) for v in value] - -def _attribute_value_dict(attr_values: Dict[str, dict], key: str) -> Optional[Dict[str, AttributeValue]]: - """A dict of type String to AttributeValue object map - - Example: - >>> {"NewImage": {"Id": {"S": "xxx-xxx"}, "Value": {"N": "35"}}} - """ - attr_values_dict = attr_values.get(key) - return None if attr_values_dict is None else {k: AttributeValue(v) for k, v in attr_values_dict.items()} + def _deserialize_m(self, value: Dict) -> Dict: + return {k: self.deserialize(v) for k, v in value.items()} class StreamViewType(Enum): @@ -176,28 +107,57 @@ class StreamViewType(Enum): class StreamRecord(DictWrapper): + _deserializer = TypeDeserializer() + + def __init__(self, data: Dict[str, Any]): + """StreamRecord constructor + Parameters + ---------- + data: Dict[str, Any] + Represents the dynamodb dict inside DynamoDBStreamEvent's records + """ + super().__init__(data) + 
self._deserializer = TypeDeserializer() + + def _deserialize_dynamodb_dict(self, key: str) -> Optional[Dict[str, Any]]: + """Deserialize DynamoDB records available in `Keys`, `NewImage`, and `OldImage` + + Parameters + ---------- + key : str + DynamoDB key (e.g., Keys, NewImage, or OldImage) + + Returns + ------- + Optional[Dict[str, Any]] + Deserialized records in Python native types + """ + dynamodb_dict = self._data.get(key) + if dynamodb_dict is None: + return None + + return {k: self._deserializer.deserialize(v) for k, v in dynamodb_dict.items()} + @property def approximate_creation_date_time(self) -> Optional[int]: """The approximate date and time when the stream record was created, in UNIX epoch time format.""" item = self.get("ApproximateCreationDateTime") return None if item is None else int(item) - # NOTE: This override breaks the Mapping protocol of DictWrapper, it's left here for backwards compatibility with - # a 'type: ignore' comment. See #1516 for discussion @property - def keys(self) -> Optional[Dict[str, AttributeValue]]: # type: ignore[override] + def keys(self) -> Optional[Dict[str, Any]]: # type: ignore[override] """The primary key attribute(s) for the DynamoDB item that was modified.""" - return _attribute_value_dict(self._data, "Keys") + return self._deserialize_dynamodb_dict("Keys") @property - def new_image(self) -> Optional[Dict[str, AttributeValue]]: + def new_image(self) -> Optional[Dict[str, Any]]: """The item in the DynamoDB table as it appeared after it was modified.""" - return _attribute_value_dict(self._data, "NewImage") + return self._deserialize_dynamodb_dict("NewImage") @property - def old_image(self) -> Optional[Dict[str, AttributeValue]]: + def old_image(self) -> Optional[Dict[str, Any]]: """The item in the DynamoDB table as it appeared before it was modified.""" - return _attribute_value_dict(self._data, "OldImage") + return self._deserialize_dynamodb_dict("OldImage") @property def sequence_number(self) -> Optional[str]: @@ -233,7 +193,7 @@ def aws_region(self) -> Optional[str]: @property def dynamodb(self) -> Optional[StreamRecord]: - """The main body of the stream record, containing all the DynamoDB-specific fields.""" + """The main body of the stream record, containing all the DynamoDB-specific dicts.""" stream_record = self.get("dynamodb") return None if stream_record is None else StreamRecord(stream_record) @@ -278,26 +238,18 @@ class DynamoDBStreamEvent(DictWrapper): Example ------- - **Process dynamodb stream events and use get_type and get_value for handling conversions** + **Process dynamodb stream events. 
DynamoDB types are automatically converted to their equivalent Python values.** from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( - AttributeValueType, - AttributeValue, - ) from aws_lambda_powertools.utilities.typing import LambdaContext @event_source(data_class=DynamoDBStreamEvent) def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): for record in event.records: - key: AttributeValue = record.dynamodb.keys["id"] - if key == AttributeValueType.Number: - assert key.get_value == key.n_value - print(key.get_value) - elif key == AttributeValueType.Map: - assert key.get_value == key.map_value - print(key.get_value) + # {"N": "123.45"} => Decimal("123.45") + key: str = record.dynamodb.keys["id"] + print(key) """ @property diff --git a/aws_lambda_powertools/utilities/idempotency/base.py b/aws_lambda_powertools/utilities/idempotency/base.py index ddd054daa14..9281c77109a 100644 --- a/aws_lambda_powertools/utilities/idempotency/base.py +++ b/aws_lambda_powertools/utilities/idempotency/base.py @@ -76,7 +76,7 @@ def __init__( self.fn_kwargs = function_kwargs self.config = config - persistence_store.configure(config, self.function.__name__) + persistence_store.configure(config, f"{self.function.__module__}.{self.function.__qualname__}") self.persistence_store = persistence_store def handle(self) -> Any: diff --git a/aws_lambda_powertools/utilities/parameters/appconfig.py b/aws_lambda_powertools/utilities/parameters/appconfig.py index 380e355d673..a3a340a62be 100644 --- a/aws_lambda_powertools/utilities/parameters/appconfig.py +++ b/aws_lambda_powertools/utilities/parameters/appconfig.py @@ -5,20 +5,17 @@ import os from typing import TYPE_CHECKING, Any, Dict, Optional, Union -from uuid import uuid4 import boto3 from botocore.config import Config if TYPE_CHECKING: - from mypy_boto3_appconfig import AppConfigClient + from mypy_boto3_appconfigdata import AppConfigDataClient from ...shared import constants from ...shared.functions import resolve_env_var_choice from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider -CLIENT_ID = str(uuid4()) - class AppConfigProvider(BaseProvider): """ @@ -34,8 +31,8 @@ class AppConfigProvider(BaseProvider): Botocore configuration to pass during client initialization boto3_session : boto3.session.Session, optional Boto3 session to create a boto3_client from - boto3_client: AppConfigClient, optional - Boto3 AppConfig Client to use, boto3_session will be ignored if both are provided + boto3_client: AppConfigDataClient, optional + Boto3 AppConfigData Client to use, boto3_session will be ignored if both are provided Example ------- @@ -73,7 +70,7 @@ def __init__( application: Optional[str] = None, config: Optional[Config] = None, boto3_session: Optional[boto3.session.Session] = None, - boto3_client: Optional["AppConfigClient"] = None, + boto3_client: Optional["AppConfigDataClient"] = None, ): """ Initialize the App Config client @@ -81,8 +78,8 @@ def __init__( super().__init__() - self.client: "AppConfigClient" = self._build_boto3_client( - service_name="appconfig", client=boto3_client, session=boto3_session, config=config + self.client: "AppConfigDataClient" = self._build_boto3_client( + service_name="appconfigdata", client=boto3_client, session=boto3_session, config=config ) self.application = resolve_env_var_choice( @@ -91,6 +88,9 @@ def __init__( self.environment = environment self.current_version = "" + 
self._next_token = "" # nosec - token for get_latest_configuration executions + self.last_returned_value = "" + def _get(self, name: str, **sdk_options) -> str: """ Retrieve a parameter value from AWS App config. @@ -100,16 +100,26 @@ def _get(self, name: str, **sdk_options) -> str: name: str Name of the configuration sdk_options: dict, optional - Dictionary of options that will be passed to the client's get_configuration API call + SDK options to propagate to `start_configuration_session` API call """ + if not self._next_token: + sdk_options["ConfigurationProfileIdentifier"] = name + sdk_options["ApplicationIdentifier"] = self.application + sdk_options["EnvironmentIdentifier"] = self.environment + response_configuration = self.client.start_configuration_session(**sdk_options) + self._next_token = response_configuration["InitialConfigurationToken"] - sdk_options["Configuration"] = name - sdk_options["Application"] = self.application - sdk_options["Environment"] = self.environment - sdk_options["ClientId"] = CLIENT_ID + # The new AppConfig APIs require two API calls to return the configuration + # First we start the session and after that we retrieve the configuration + # We need to store the token to use in the next execution + response = self.client.get_latest_configuration(ConfigurationToken=self._next_token) + return_value = response["Configuration"].read() + self._next_token = response["NextPollConfigurationToken"] - response = self.client.get_configuration(**sdk_options) - return response["Content"].read() # read() of botocore.response.StreamingBody + if return_value: + self.last_returned_value = return_value + + return self.last_returned_value def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]: """ @@ -145,7 +155,7 @@ def get_app_config( max_age: int Maximum age of the cached value sdk_options: dict, optional - Dictionary of options that will be passed to the boto client get_configuration API call + SDK options to propagate to `start_configuration_session` API call Raises ------ @@ -180,8 +190,6 @@ def get_app_config( if "appconfig" not in DEFAULT_PROVIDERS: DEFAULT_PROVIDERS["appconfig"] = AppConfigProvider(environment=environment, application=application) - sdk_options["ClientId"] = CLIENT_ID - return DEFAULT_PROVIDERS["appconfig"].get( name, max_age=max_age, transform=transform, force_fetch=force_fetch, **sdk_options ) diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index ce03b757618..b76b16e1dd8 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -15,7 +15,7 @@ from .exceptions import GetParameterError, TransformParameterError if TYPE_CHECKING: - from mypy_boto3_appconfig import AppConfigClient + from mypy_boto3_appconfigdata import AppConfigDataClient from mypy_boto3_dynamodb import DynamoDBServiceResource from mypy_boto3_secretsmanager import SecretsManagerClient from mypy_boto3_ssm import SSMClient @@ -28,7 +28,7 @@ TRANSFORM_METHOD_JSON = "json" TRANSFORM_METHOD_BINARY = "binary" SUPPORTED_TRANSFORM_METHODS = [TRANSFORM_METHOD_JSON, TRANSFORM_METHOD_BINARY] -ParameterClients = Union["AppConfigClient", "SecretsManagerClient", "SSMClient"] +ParameterClients = Union["AppConfigDataClient", "SecretsManagerClient", "SSMClient"] class BaseProvider(ABC): diff --git a/aws_lambda_powertools/utilities/parser/models/ses.py b/aws_lambda_powertools/utilities/parser/models/ses.py index 70fd2e83978..77b23431099 100644 --- 
a/aws_lambda_powertools/utilities/parser/models/ses.py +++ b/aws_lambda_powertools/utilities/parser/models/ses.py @@ -2,7 +2,6 @@ from typing import List, Optional from pydantic import BaseModel, Field -from pydantic.networks import EmailStr from pydantic.types import PositiveInt from ..types import Literal @@ -21,7 +20,7 @@ class SesReceiptAction(BaseModel): class SesReceipt(BaseModel): timestamp: datetime processingTimeMillis: PositiveInt - recipients: List[EmailStr] + recipients: List[str] spamVerdict: SesReceiptVerdict virusVerdict: SesReceiptVerdict spfVerdict: SesReceiptVerdict @@ -41,7 +40,7 @@ class SesMailCommonHeaders(BaseModel): bcc: Optional[List[str]] sender: Optional[List[str]] reply_to: Optional[List[str]] = Field(None, alias="reply-to") - returnPath: EmailStr + returnPath: str messageId: str date: str subject: str @@ -49,9 +48,9 @@ class SesMailCommonHeaders(BaseModel): class SesMail(BaseModel): timestamp: datetime - source: EmailStr + source: str messageId: str - destination: List[EmailStr] + destination: List[str] headersTruncated: bool headers: List[SesMailHeaders] commonHeaders: SesMailCommonHeaders diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 10aaa9faeb9..ec6116403e3 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -42,10 +42,10 @@ Before you decorate your functions to handle a given path and HTTP method(s), yo A resolver will handle request resolution, including [one or more routers](#split-routes-with-router), and give you access to the current event via typed properties. -For resolvers, we provide: `APIGatewayRestResolver`, `APIGatewayHttpResolver`, `ALBResolver`, and `LambdaFunctionUrlResolver` . +For resolvers, we provide: `APIGatewayRestResolver`, `APIGatewayHttpResolver`, `ALBResolver`, and `LambdaFunctionUrlResolver`. From here on, we will default to `APIGatewayRestResolver` across examples. -???+ info - We will use `APIGatewayRestResolver` as the default across examples. +???+ info "Auto-serialization" + We serialize `Dict` responses as JSON, trim whitespace for compact responses, and set content-type to `application/json`. #### API Gateway REST API @@ -53,8 +53,8 @@ When using Amazon API Gateway REST API to front your Lambda functions, you can u Here's an example on how we can handle the `/todos` path. -???+ info - We automatically serialize `Dict` responses as JSON, trim whitespace for compact responses, and set content-type to `application/json`. +???+ info "Trailing slash in routes" + For `APIGatewayRestResolver`, we seamless handle routes with a trailing slash (`/todos/`). === "getting_started_rest_api_resolver.py" @@ -312,7 +312,14 @@ For convenience, these are the default values when using `CORSConfig` to enable ### Fine grained responses -You can use the `Response` class to have full control over the response, for example you might want to add additional headers or set a custom Content-type. +You can use the `Response` class to have full control over the response. For example, you might want to add additional headers, cookies, or set a custom Content-type. + +???+ info + Powertools serializes headers and cookies according to the type of input event. + Some event sources require headers and cookies to be encoded as `multiValueHeaders`. + +???+ warning "Using multiple values for HTTP headers in ALB?" 
+ Make sure you [enable the multi value headers feature](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#multi-value-headers) to serialize response headers correctly. === "fine_grained_responses.py" diff --git a/docs/core/tracer.md b/docs/core/tracer.md index 8fbfc0e29f7..018af91797b 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -19,6 +19,14 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. ???+ tip All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. +### Install + +!!! info "This is not necessary if you're installing Powertools via [Lambda Layer](../index.md#lambda-layer){target="_blank"}" + +Add `aws-lambda-powertools[tracer]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. + +This will ensure you have the required dependencies before using Tracer. + ### Permissions Before your use this utility, your AWS Lambda function [must have permissions](https://docs.aws.amazon.com/lambda/latest/dg/services-xray.html#services-xray-permissions) to send traces to AWS X-Ray. diff --git a/docs/index.md b/docs/index.md index b0fd3f40ce5..a8337c193de 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,10 +5,13 @@ description: AWS Lambda Powertools for Python +???+ danger + This documentation is for v2 that is not yet released. + A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, idempotency, batching, and more. ???+ note - Lambda Powertools is also available for [Java](https://awslabs.github.io/aws-lambda-powertools-java/){target="_blank"}, [TypeScript](https://awslabs.github.io/aws-lambda-powertools-typescript/latest/){target="_blank"}, and [.NET](https://awslabs.github.io/aws-lambda-powertools-dotnet/){target="_blank"}. + Powertools is also available for [Java](https://awslabs.github.io/aws-lambda-powertools-java/){target="_blank"}, [TypeScript](https://awslabs.github.io/aws-lambda-powertools-typescript/latest/){target="_blank"}, and [.NET](https://awslabs.github.io/aws-lambda-powertools-dotnet/){target="_blank"} ## Install @@ -17,11 +20,29 @@ Powertools is available in the following formats: * **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:39**](#){: .copyMe}:clipboard: * **PyPi**: **`pip install aws-lambda-powertools`** +???+ info "Some utilities require additional dependencies" + You can stop reading if you're using Lambda Layer. + + [Tracer](./core/tracer.md){target="_blank"}, [Validation](./utilities/validation.md){target="_blank"} and [Parser](./utilities/parser.md){target="_blank"} require additional dependencies. If you prefer to install all of them, use `pip install aws-lambda-powertools[all]`. + ???+ hint "Support this project by using Lambda Layers :heart:" Lambda Layers allow us to understand who uses this library in a non-intrusive way. This helps us justify and gain future investments for other Lambda Powertools languages. When using Layers, you can add Lambda Powertools as a dev dependency (or as part of your virtual env) to not impact the development process. +### Local development + +Powertools relies on the AWS SDK bundled in the Lambda runtime. This helps us achieve an optimal package size and initialization. 
+ +This means you need to add AWS SDK as a development dependency (not as a production dependency). + +* **Pip**: `pip install aws-lambda-powertools[aws-sdk]` +* **Poetry**: `poetry add aws-lambda-powertools[aws-sdk] --dev` +* **Pipenv**: `pipenv install --dev "aws-lambda-powertools[aws-sdk]"` + +???+ note "Local emulation" + If you're running your code locally with [AWS SAM CLI](https://github.com/aws/aws-sam-cli){target="_blank"}, and not with your Python/IDE interpreter directly, this is not necessary. SAM CLI already brings the AWS SDK in its emulation image. + ### Lambda Layer [Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html){target="_blank"} is a .zip file archive that can contain additional code, pre-packaged dependencies, data, or configuration files. Layers promote code sharing and separation of responsibilities so that you can iterate faster on writing business logic. @@ -30,160 +51,316 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: ??? note "Note: Expand to copy any regional Lambda Layer ARN" - | Region | Layer ARN | - | ---------------- | -------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `us-east-1` | 
[arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:39](#){: .copyMe}:clipboard: | - -??? question "Can't find our Lambda Layer for your preferred AWS region?" - You can use [Serverless Application Repository (SAR)](#sar) method, our [CDK Layer Construct](https://github.com/aws-samples/cdk-lambda-powertools-python-layer){target="_blank"}, or PyPi like you normally would for any other library. - - Please do file a feature request with the region you'd want us to prioritize making our Lambda Layer available. - -=== "SAM" - - ```yaml hl_lines="5" - MyLambdaFunction: - Type: AWS::Serverless::Function - Properties: - Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:39 - ``` + === "x86_64" + + | Region | Layer ARN | + | ---------------- | --------------------------------------------------------------------------------------------------------- | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `us-east-1` 
| [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:1](#){: .copyMe}:clipboard: | + + === "arm64" + + | Region | Layer ARN | + | ---------------- | --------------------------------------------------------------------------------------------------------------- | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1](#){: .copyMe}:clipboard: | + +=== "x86_64" + + === "SAM" 
+ + ```yaml hl_lines="5" + MyLambdaFunction: + Type: AWS::Serverless::Function + Properties: + Layers: + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:1 + ``` -=== "Serverless framework" + === "Serverless framework" - ```yaml hl_lines="5" - functions: - hello: - handler: lambda_function.lambda_handler - layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:39 - ``` + ```yaml hl_lines="5" + functions: + hello: + handler: lambda_function.lambda_handler + layers: + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:1 + ``` -=== "CDK" + === "CDK" - ```python hl_lines="11 16" - from aws_cdk import core, aws_lambda + ```python hl_lines="11 16" + from aws_cdk import core, aws_lambda - class SampleApp(core.Construct): + class SampleApp(core.Construct): - def __init__(self, scope: core.Construct, id_: str, env: core.Environment) -> None: - super().__init__(scope, id_) + def __init__(self, scope: core.Construct, id_: str, env: core.Environment) -> None: + super().__init__(scope, id_) - powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( - self, - id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:39" - ) - aws_lambda.Function(self, - 'sample-app-lambda', - runtime=aws_lambda.Runtime.PYTHON_3_9, - layers=[powertools_layer] - # other props... - ) - ``` + powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( + self, + id="lambda-powertools", + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:1" + ) + aws_lambda.Function(self, + 'sample-app-lambda', + runtime=aws_lambda.Runtime.PYTHON_3_9, + layers=[powertools_layer] + # other props... + ) + ``` -=== "Terraform" + === "Terraform" - ```terraform hl_lines="9 38" - terraform { - required_version = "~> 1.0.5" - required_providers { - aws = "~> 3.50.0" - } - } + ```terraform hl_lines="9 38" + terraform { + required_version = "~> 1.0.5" + required_providers { + aws = "~> 3.50.0" + } + } - provider "aws" { - region = "{region}" - } + provider "aws" { + region = "{region}" + } - resource "aws_iam_role" "iam_for_lambda" { - name = "iam_for_lambda" + resource "aws_iam_role" "iam_for_lambda" { + name = "iam_for_lambda" - assume_role_policy = < + ? Choose the runtime that you want to use: Python + ? Do you want to configure advanced settings? Yes + ... + ? Do you want to enable Lambda layers for this function? Yes + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1 + ❯ amplify push -y + + + # Updating an existing function and add the layer + ❯ amplify update function + ? Select the Lambda function you want to update test2 + General information + - Name: + ? Which setting do you want to update? Lambda layers configuration + ? Do you want to enable Lambda layers for this function? Yes + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:1 + ? Do you want to edit the local lambda function now? No + ``` - source_code_hash = filebase64sha256("lambda_function_payload.zip") - } + === "Get the Layer .zip contents" + + Change {region} to your AWS region, e.g. 
`eu-west-1` + ```bash title="AWS CLI" + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:1 --region {region} + ``` - ``` + The pre-signed URL to download this Lambda Layer will be within `Location` key. -=== "Amplify" - - ```zsh - # Create a new one with the layer - ❯ amplify add function - ? Select which capability you want to add: Lambda function (serverless function) - ? Provide an AWS Lambda function name: - ? Choose the runtime that you want to use: Python - ? Do you want to configure advanced settings? Yes - ... - ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:39 - ❯ amplify push -y - - - # Updating an existing function and add the layer - ❯ amplify update function - ? Select the Lambda function you want to update test2 - General information - - Name: - ? Which setting do you want to update? Lambda layers configuration - ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:39 - ? Do you want to edit the local lambda function now? No - ``` +=== "arm64" -=== "Get the Layer .zip contents" - Change {region} to your AWS region, e.g. `eu-west-1` + === "SAM" - ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:39 --region {region} - ``` + ```yaml hl_lines="6" + MyLambdaFunction: + Type: AWS::Serverless::Function + Properties: + Architectures: [arm64] + Layers: + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1 + ``` + + === "Serverless framework" + + ```yaml hl_lines="6" + functions: + hello: + handler: lambda_function.lambda_handler + architecture: arm64 + layers: + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1 + ``` + + === "CDK" + + ```python hl_lines="11 17" + from aws_cdk import core, aws_lambda + + class SampleApp(core.Construct): + + def __init__(self, scope: core.Construct, id_: str, env: core.Environment) -> None: + super().__init__(scope, id_) + + powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( + self, + id="lambda-powertools", + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1" + ) + aws_lambda.Function(self, + 'sample-app-lambda', + runtime=aws_lambda.Runtime.PYTHON_3_9, + architecture=aws_lambda.Architecture.ARM_64, + layers=[powertools_layer] + # other props... + ) + ``` - The pre-signed URL to download this Lambda Layer will be within `Location` key. + === "Terraform" + + ```terraform hl_lines="9 37" + terraform { + required_version = "~> 1.0.5" + required_providers { + aws = "~> 3.50.0" + } + } + + provider "aws" { + region = "{region}" + } + + resource "aws_iam_role" "iam_for_lambda" { + name = "iam_for_lambda" + + assume_role_policy = < + ? Choose the runtime that you want to use: Python + ? Do you want to configure advanced settings? Yes + ... + ? Do you want to enable Lambda layers for this function? Yes + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1 + ❯ amplify push -y + + + # Updating an existing function and add the layer + ❯ amplify update function + ? 
Select the Lambda function you want to update test2 + General information + - Name: + ? Which setting do you want to update? Lambda layers configuration + ? Do you want to enable Lambda layers for this function? Yes + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1 + ? Do you want to edit the local lambda function now? No + ``` + + === "Get the Layer .zip contents" + Change {region} to your AWS region, e.g. `eu-west-1` + + ```bash title="AWS CLI" + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:1 --region {region} + ``` + + The pre-signed URL to download this Lambda Layer will be within `Location` key. ???+ warning "Warning: Limitations" @@ -197,13 +374,10 @@ Serverless Application Repository (SAR) App deploys a CloudFormation stack with Despite having more steps compared to the [public Layer ARN](#lambda-layer) option, the benefit is that you can specify a semantic version you want to use. -| App | ARN | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | -| [aws-lambda-powertools-python-layer](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer) | [arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer](#){: .copyMe}:clipboard: | Core dependencies only; sufficient for nearly all utilities. | -| [aws-lambda-powertools-python-layer-extras](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer-extras) | [arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-extras](#){: .copyMe}:clipboard: | Core plus extra dependencies such as `pydantic` that is required by `parser` utility. | - -???+ warning - **Layer-extras** does not support Python 3.6 runtime. This layer also includes all extra dependencies: `22.4MB zipped`, `~155MB unzipped`. +| App | ARN | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------- | +| [aws-lambda-powertools-python-layer-v2](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer-v2) | [arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2](#){: .copyMe}:clipboard: | Contains all extra dependencies (e.g: pydantic). | +| [aws-lambda-powertools-python-layer-v2-arm64](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer-v2-arm64) | [arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2-arm64](#){: .copyMe}:clipboard: | Contains all extra dependencies (e.g: pydantic). For arm64 functions. 
| ???+ tip You can create a shared Lambda Layers stack and make this along with other account level layers stack. @@ -217,8 +391,8 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Type: AWS::Serverless::Application Properties: Location: - ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - SemanticVersion: 1.30.0 # change to latest semantic version available in SAR + ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2 + SemanticVersion: 2.0.0 # change to latest semantic version available in SAR MyLambdaFunction: Type: AWS::Serverless::Function @@ -244,9 +418,9 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Type: AWS::Serverless::Application Properties: Location: - ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer + ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2 # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - SemanticVersion: 1.30.0 + SemanticVersion: 2.0.0 ``` === "CDK" @@ -256,8 +430,8 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, POWERTOOLS_BASE_NAME = 'AWSLambdaPowertools' # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - POWERTOOLS_VER = '1.30.0' - POWERTOOLS_ARN = 'arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer' + POWERTOOLS_VER = '2.0.0' + POWERTOOLS_ARN = 'arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2' class SampleApp(core.Construct): @@ -314,13 +488,13 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, } data "aws_serverlessapplicationrepository_application" "sar_app" { - application_id = "arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer" + application_id = "arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2" semantic_version = var.aws_powertools_version } variable "aws_powertools_version" { type = string - default = "1.30.0" + default = "2.0.0" description = "The AWS Powertools release version" } @@ -377,7 +551,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, - serverlessrepo:GetCloudFormationTemplate Resource: # this is arn of the powertools SAR app - - arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer + - arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2 - Sid: S3AccessLayer Effect: Allow Action: @@ -394,7 +568,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, - lambda:PublishLayerVersion - lambda:GetLayerVersion Resource: - - !Sub arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:layer:aws-lambda-powertools-python-layer* + - !Sub arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:layer:aws-lambda-powertools-python-layer-v2* Roles: - Ref: "PowertoolsLayerIamRole" ``` @@ -403,7 +577,7 @@ You can fetch available versions via SAR ListApplicationVersions API: ```bash title="AWS CLI example" aws serverlessrepo list-application-versions \ - --application-id arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer + --application-id 
arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-v2 ``` ## Quick getting started @@ -449,7 +623,6 @@ Core utilities such as Tracing, Logging, Metrics, and Event Handler will be avai | **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger) | `false` | | **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger) | `0` | | **POWERTOOLS_LOG_DEDUPLICATION_DISABLED** | Disables log deduplication filter protection to use Pytest Live Log feature | [Logging](./core/logger) | `false` | -| **POWERTOOLS_EVENT_HANDLER_DEBUG** | Enables debugging mode for event handler | [Event Handler](./core/event_handler/api_gateway.md#debug-mode) | `false` | | **POWERTOOLS_DEV** | Increases verbosity across utilities | Multiple; see [POWERTOOLS_DEV effect below](#increasing-verbosity-across-utilities) | `0` | | **LOG_LEVEL** | Sets logging level | [Logging](./core/logger) | `INFO` | @@ -462,11 +635,11 @@ Whether you're prototyping locally or against a non-production environment, you When `POWERTOOLS_DEV` is set to a truthy value (`1`, `true`), it'll have the following effects: -| Utility | Effect | -| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Logger** | Increase JSON indentation to 4. This will ease local debugging when running functions locally under emulators or direct calls while not affecting unit tests | -| **Event Handler** | Enable full traceback errors in the response, indent request/responses, and CORS in dev mode (`*`). This will deprecate [`POWERTOOLS_EVENT_HANDLER_DEBUG`](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/event_handler/api_gateway/#debug-mode) in the future. | -| **Tracer** | Future-proof safety to disables tracing operations in non-Lambda environments. This already happens automatically in the Tracer utility. | +| Utility | Effect | +| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| **Logger** | Increase JSON indentation to 4. This will ease local debugging when running functions locally under emulators or direct calls while not affecting unit tests | +| **Event Handler** | Enable full traceback errors in the response, indent request/responses, and CORS in dev mode (`*`). | +| **Tracer** | Future-proof safety to disables tracing operations in non-Lambda environments. This already happens automatically in the Tracer utility. 
| ## Debug mode diff --git a/docs/media/tracer_utility_showcase.png b/docs/media/tracer_utility_showcase.png index 55d7d5d0bf8..b3d3568a01c 100644 Binary files a/docs/media/tracer_utility_showcase.png and b/docs/media/tracer_utility_showcase.png differ diff --git a/docs/media/upgrade_idempotency_after.png b/docs/media/upgrade_idempotency_after.png new file mode 100644 index 00000000000..8faa8bed124 Binary files /dev/null and b/docs/media/upgrade_idempotency_after.png differ diff --git a/docs/media/upgrade_idempotency_before.png b/docs/media/upgrade_idempotency_before.png new file mode 100644 index 00000000000..6f1f7f998c1 Binary files /dev/null and b/docs/media/upgrade_idempotency_before.png differ diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css index 194e4e2ba08..ca6ab06903d 100644 --- a/docs/stylesheets/extra.css +++ b/docs/stylesheets/extra.css @@ -1,5 +1,5 @@ .md-grid { - max-width: 81vw + max-width: 90vw } .highlight .hll { diff --git a/docs/upgrade.md b/docs/upgrade.md new file mode 100644 index 00000000000..391bc084d7f --- /dev/null +++ b/docs/upgrade.md @@ -0,0 +1,218 @@ +--- +title: Upgrade guide +description: Guide to update between major Powertools versions +--- + + + +## Migrate to v2 from v1 + +We've made minimal breaking changes to make your transition to v2 as smooth as possible. + +### Quick summary + +| Area | Change | Code change required | IAM Permissions change required | +| ---------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------- | ------------------------------- | +| **Batch** | Removed legacy [SQS batch processor](#legacy-sqs-batch-processor) in favour of **`BatchProcessor`**. | Yes | - | +| **Environment variables** | Removed legacy **`POWERTOOLS_EVENT_HANDLER_DEBUG`** in favour of [`POWERTOOLS_DEV`](index.md#optimizing-for-non-production-environments){target="_blank"}. | - | - | +| **Event Handler** | Updated [headers response format](#event-handler-headers-response-format) due to [multi-value headers and cookie support](./core/event_handler/api_gateway.md#fine-grained-responses){target="_blank"}. | Tests only | - | +| **Event Source Data Classes** | Replaced [DynamoDBStreamEvent](#dynamodbstreamevent-in-event-source-data-classes) `AttributeValue` with native Python types. | Yes | - | +| **Feature Flags** / **Parameters** | Updated [AppConfig API calls](#feature-flags-and-appconfig-parameter-utility) due to **`GetConfiguration`** API deprecation. | - | Yes | +| **Idempotency** | Updated [partition key](#idempotency-key-format) to include fully qualified function/method names. | - | - | + +### First Steps + +Before you start, we suggest making a copy of your current working project or create a new branch with git. + +1. **Upgrade** Python to at least v3.7 +2. **Ensure** you have the latest version via [Lambda Layer or PyPi](index.md#install){target="_blank"} +3. **Review** the following sections to confirm whether they affect your code + +## Legacy SQS Batch Processor + +We removed the deprecated `PartialSQSProcessor` class and `sqs_batch_processor` decorator. + +You can migrate to `BatchProcessor` with the following changes: + +1. If you use **`sqs_batch_decorator`**, change to **`batch_processor`** decorator +2. If you use **`PartialSQSProcessor`**, change to **`BatchProcessor`** +3. 
[Enable **`ReportBatchItemFailures`** in your Lambda Event Source](../utilities/batch#required-resources){target="_blank"} +4. Change your Lambda Handler to return the new response format + +=== "[Before] Decorator" + + ```python hl_lines="1 6" + from aws_lambda_powertools.utilities.batch import sqs_batch_processor + + def record_handler(record): + return do_something_with(record["body"]) + + @sqs_batch_processor(record_handler=record_handler) + def lambda_handler(event, context): + return {"statusCode": 200} + ``` + +=== "[After] Decorator" + + ```python hl_lines="3 5 11 13" + import json + + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + + processor = BatchProcessor(event_type=EventType.SQS) + + + def record_handler(record): + return do_something_with(record["body"]) + + @batch_processor(record_handler=record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + ``` + +=== "[Before] Context manager" + + ```python hl_lines="1-2 4 14 19" + from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + from botocore.config import Config + + config = Config(region_name="us-east-1") + + def record_handler(record): + return_value = do_something_with(record["body"]) + return return_value + + + def lambda_handler(event, context): + records = event["Records"] + + processor = PartialSQSProcessor(config=config) + + with processor(records, record_handler): + result = processor.process() + + return result + ``` + +=== "[After] Context manager" + + ```python hl_lines="1 11 16" + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + + + def record_handler(record): + return_value = do_something_with(record["body"]) + return return_value + + def lambda_handler(event, context): + records = event["Records"] + + processor = BatchProcessor(event_type=EventType.SQS) + + with processor(records, record_handler): + result = processor.process() + + return processor.response() + ``` + +## Event Handler headers response format + +!!! note "No code changes required" + +This only applies if you're using `APIGatewayRestResolver` and asserting custom header values in your tests. + +Previously, custom headers were available under `headers` key in the Event Handler response. + +```python title="V1 response headers" hl_lines="2" +{ + "headers": { + "Content-Type": "application/json" + } +} +``` + +In V2, we add all headers under `multiValueHeaders` key. This enables seamless support for multi-value headers and cookies in [fine grained responses](./core/event_handler/api_gateway.md#fine-grained-responses){target="_blank"}. + +```python title="V2 response headers" hl_lines="2" +{ + "multiValueHeaders": { + "Content-Type": "application/json" + } +} +``` + +## DynamoDBStreamEvent in Event Source Data Classes + +!!! info "This also applies if you're using [**DynamoDB BatchProcessor**](https://awslabs.github.io/aws-lambda-powertools-python/latest/utilities/batch/#processing-messages-from-dynamodb){target="_blank"}." + +You will now receive native Python types when accessing DynamoDB records via `keys`, `new_image`, and `old_image` attributes in `DynamoDBStreamEvent`. + +Previously, you'd receive a `AttributeValue` instance and need to deserialize each item to the type you'd want for convenience, or to the type DynamoDB stored via `get_value` method. 
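In its smallest form, the v1 pattern looked roughly like this (a short sketch only; the full before/after comparison follows below):

```python
# v1 (before): each attribute is an AttributeValue wrapper you resolve yourself
new_image = record.dynamodb.new_image  # Dict[str, AttributeValue]
data = {k: v.get_value for k, v in new_image.items()}  # nested Maps still needed extra handling
```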
+ +With this change, you can access data deserialized as stored in DynamoDB, and no longer need to recursively deserialize nested objects (Maps) if you had them. + +???+ note + For a lossless conversion of DynamoDB `Number` type, we follow AWS Python SDK (boto3) approach and convert to `Decimal`. + +```python hl_lines="15-20 24-25" +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBStreamEvent, + DynamoDBRecordEventName +) + +def send_to_sqs(data: Dict): + body = json.dumps(data) + ... + +@event_source(data_class=DynamoDBStreamEvent) +def lambda_handler(event: DynamoDBStreamEvent, context): + for record in event.records: + + # BEFORE + new_image: Dict[str, AttributeValue] = record.dynamodb.new_image + event_type: AttributeValue = new_image["eventType"].get_value + if event_type == "PENDING": + # deserialize attribute value into Python native type + # NOTE: nested objects would need additional logic + data = {k: v.get_value for k, v in image.items()} + send_to_sqs(data) + + # AFTER + new_image: Dict[str, Any] = record.dynamodb.new_image + if new_image.get("eventType") == "PENDING": + send_to_sqs(new_image) # Here new_image is just a Python Dict type + +``` + +## Feature Flags and AppConfig Parameter utility + +!!! note "No code changes required" + +We replaced `GetConfiguration` API ([now deprecated](https://github.com/awslabs/aws-lambda-powertools-python/issues/1506#issuecomment-1266645884){target="_blank"}) with `GetLatestConfiguration` and `StartConfigurationSession`. + +As such, you must update your IAM Role permissions to allow the following IAM actions: + +* `appconfig:GetLatestConfiguration` +* `appconfig:StartConfigurationSession` + +## Idempotency partition key format + +!!! note "No code changes required" + +We replaced the DynamoDB partition key format to include fully qualified function/method names. This means that recent non-expired idempotent transactions will be ignored. + +Previously, we used the function/method name to generate the partition key value. + +> e.g. `HelloWorldFunction.lambda_handler#99914b932bd37a50b983c5e7c90ae93b` + +![Idempotency Before](./media/upgrade_idempotency_before.png) + +In V2, we now distinguish between distinct classes or modules that may have the same function/method name. + +[For example](https://github.com/awslabs/aws-lambda-powertools-python/issues/1330){target="_blank"}, an ABC or Protocol class may have multiple implementations of `process_payment` method and may have different results. + + + +> e.g. `HelloWorldFunction.app.lambda_handler#99914b932bd37a50b983c5e7c90ae93b` + +![Idempotency Before](./media/upgrade_idempotency_after.png) diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md index c429ac24693..7fcf1ff46d8 100644 --- a/docs/utilities/batch.md +++ b/docs/utilities/batch.md @@ -5,11 +5,6 @@ description: Utility The batch processing utility handles partial failures when processing batches from Amazon SQS, Amazon Kinesis Data Streams, and Amazon DynamoDB Streams. -???+ warning - The legacy `sqs_batch_processor` decorator and `PartialSQSProcessor` class are deprecated and are going to be removed soon. - - Please check the [migration guide](#migration-guide) for more information. 
- ## Key Features * Reports batch item failures to reduce number of retries for a record upon errors @@ -511,9 +506,9 @@ Processing batches from Kinesis works in four stages: @tracer.capture_method def record_handler(record: DynamoDBRecord): logger.info(record.dynamodb.new_image) - payload: dict = json.loads(record.dynamodb.new_image.get("Message").get_value) + payload: dict = json.loads(record.dynamodb.new_image.get("Message")) # alternatively: - # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image + # changes: Dict[str, Any] = record.dynamodb.new_image # payload = change.get("Message").raw_event -> {"S": ""} ... @@ -543,10 +538,10 @@ Processing batches from Kinesis works in four stages: @tracer.capture_method def record_handler(record: DynamoDBRecord): logger.info(record.dynamodb.new_image) - payload: dict = json.loads(record.dynamodb.new_image.get("item").s_value) + payload: dict = json.loads(record.dynamodb.new_image.get("item")) # alternatively: - # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image - # payload = change.get("Message").raw_event -> {"S": ""} + # changes: Dict[str, Any] = record.dynamodb.new_image + # payload = change.get("Message") -> "" ... @logger.inject_lambda_context @@ -1213,215 +1208,16 @@ class MyProcessor(BatchProcessor): return super().failure_handler(record, exception) ``` -## Legacy - -???+ tip - This is kept for historical purposes. Use the new [BatchProcessor](#processing-messages-from-sqs) instead. - -### Migration guide - -???+ info - Keep reading if you are using `sqs_batch_processor` or `PartialSQSProcessor`. - -[As of Nov 2021](https://aws.amazon.com/about-aws/whats-new/2021/11/aws-lambda-partial-batch-response-sqs-event-source/){target="_blank"}, this is no longer needed as both SQS, Kinesis, and DynamoDB Streams offer this capability natively with one caveat - it's an [opt-in feature](#required-resources). - -Being a native feature, we no longer need to instantiate boto3 nor other customizations like exception suppressing – this lowers the cost of your Lambda function as you can delegate deleting partial failures to Lambda. - -???+ tip - It's also easier to test since it's mostly a [contract based response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#sqs-batchfailurereporting-syntax){target="_blank"}. - -You can migrate in three steps: - -1. If you are using **`sqs_batch_decorator`** you can now use **`batch_processor`** decorator -2. If you were using **`PartialSQSProcessor`** you can now use **`BatchProcessor`** -3. 
Change your Lambda Handler to return the new response format - -=== "Decorator: Before" - - ```python hl_lines="1 6" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - - def record_handler(record): - return do_something_with(record["body"]) - - @sqs_batch_processor(record_handler=record_handler) - def lambda_handler(event, context): - return {"statusCode": 200} - ``` - -=== "Decorator: After" - - ```python hl_lines="3 5 11" - import json - - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - - processor = BatchProcessor(event_type=EventType.SQS) - - - def record_handler(record): - return do_something_with(record["body"]) - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - return processor.response() - ``` - -=== "Context manager: Before" - - ```python hl_lines="1-2 4 14 19" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(config=config) - - with processor(records, record_handler): - result = processor.process() - - return result - ``` - -=== "Context manager: After" - - ```python hl_lines="1 11" - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - - - def record_handler(record): - return_value = do_something_with(record["body"]) - return return_value - - def lambda_handler(event, context): - records = event["Records"] - - processor = BatchProcessor(event_type=EventType.SQS) - - with processor(records, record_handler): - result = processor.process() - - return processor.response() - ``` - -### Customizing boto configuration - -The **`config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) -or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) when using the `sqs_batch_processor` -decorator or `PartialSQSProcessor` class. 
- -> Custom config example - -=== "Decorator" - - ```python hl_lines="4 12" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - @sqs_batch_processor(record_handler=record_handler, config=config) - def lambda_handler(event, context): - return {"statusCode": 200} - ``` - -=== "Context manager" - - ```python hl_lines="4 16" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(config=config) - - with processor(records, record_handler): - result = processor.process() - - return result - ``` - -> Custom boto3 session example - -=== "Decorator" - - ```python hl_lines="4 12" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - from botocore.config import Config - - session = boto3.session.Session() - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - @sqs_batch_processor(record_handler=record_handler, boto3_session=session) - def lambda_handler(event, context): - return {"statusCode": 200} - ``` - -=== "Context manager" - - ```python hl_lines="4 16" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - import boto3 - - session = boto3.session.Session() - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(boto3_session=session) - - with processor(records, record_handler): - result = processor.process() - - return result - ``` - ### Suppressing exceptions -If you want to disable the default behavior where `SQSBatchProcessingError` is raised if there are any errors, you can pass the `suppress_exception` boolean argument. +If you want to disable the default behavior where `BatchProcessingError` is raised if there are any errors, you can pass the `suppress_exception` boolean argument. 
=== "Decorator" ```python hl_lines="3" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor + from aws_lambda_powertools.utilities.batch import batch_processor - @sqs_batch_processor(record_handler=record_handler, config=config, suppress_exception=True) + @batch_processor(record_handler=record_handler, suppress_exception=True) def lambda_handler(event, context): return {"statusCode": 200} ``` @@ -1429,9 +1225,9 @@ If you want to disable the default behavior where `SQSBatchProcessingError` is r === "Context manager" ```python hl_lines="3" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType - processor = PartialSQSProcessor(config=config, suppress_exception=True) + processor = BatchProcessor(event_type=EventType.SQS, suppress_exception=True) with processor(records, record_handler): result = processor.process() diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 509110e0480..9981978ebc9 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -798,9 +798,9 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons ### DynamoDB Streams -The DynamoDB data class utility provides the base class for `DynamoDBStreamEvent`, a typed class for -attributes values (`AttributeValue`), as well as enums for stream view type (`StreamViewType`) and event type +The DynamoDB data class utility provides the base class for `DynamoDBStreamEvent`, as well as enums for stream view type (`StreamViewType`) and event type. (`DynamoDBRecordEventName`). +The class automatically deserializes DynamoDB types into their equivalent Python types. === "app.py" @@ -824,21 +824,15 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St ```python from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import AttributeValueType, AttributeValue from aws_lambda_powertools.utilities.typing import LambdaContext @event_source(data_class=DynamoDBStreamEvent) def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): for record in event.records: - key: AttributeValue = record.dynamodb.keys["id"] - if key == AttributeValueType.Number: - # {"N": "123.45"} => "123.45" - assert key.get_value == key.n_value - print(key.get_value) - elif key == AttributeValueType.Map: - assert key.get_value == key.map_value - print(key.get_value) + # {"N": "123.45"} => Decimal("123.45") + key: str = record.dynamodb.keys["id"] + print(key) ``` ### EventBridge diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index 1d586d9377d..f35dd88106b 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -3,11 +3,11 @@ title: Feature flags description: Utility --- -???+ note - This is currently in Beta, as we might change Store parameters in the next release. - The feature flags utility provides a simple rule engine to define when one or multiple features should be enabled depending on the input. +???+ info + We currently only support AppConfig using [freeform configuration profile](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-configuration-and-profile.html#appconfig-creating-configuration-and-profile-free-form-configurations). + ## Terminology Feature flags are used to modify behaviour without changing the application's code. 
These flags can be **static** or **dynamic**. @@ -28,6 +28,9 @@ If you want to learn more about feature flags, their variations and trade-offs, * [AWS Lambda Feature Toggles Made Simple - Ran Isenberg](https://isenberg-ran.medium.com/aws-lambda-feature-toggles-made-simple-580b0c444233) * [Feature Flags Getting Started - CloudBees](https://www.cloudbees.com/blog/ultimate-feature-flag-guide) +???+ note + AWS AppConfig requires two API calls to fetch configuration for the first time. You can improve latency by consolidating your feature settings in a single [Configuration](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-configuration-and-profile.html). + ## Key features * Define simple feature flags to dynamically decide when to enable a feature @@ -38,7 +41,7 @@ If you want to learn more about feature flags, their variations and trade-offs, ### IAM Permissions -Your Lambda function must have `appconfig:GetConfiguration` IAM permission in order to fetch configuration from AWS AppConfig. +Your Lambda function IAM Role must have `appconfig:GetLatestConfiguration` and `appconfig:StartConfigurationSession` IAM permissions before using this feature. ### Required resources diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 4893bb76ab0..b246e2cebc1 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -42,7 +42,7 @@ If you're not [changing the default configuration for the DynamoDB persistence l | TTL attribute name | `expiration` | This can only be configured after your table is created if you're using AWS Console | ???+ tip "Tip: You can share a single state table for all functions" - You can reuse the same DynamoDB table to store idempotency state. We add your `function_name` in addition to the idempotency key as a hash key. + You can reuse the same DynamoDB table to store idempotency state. We add `module_name` and [qualified name for classes and functions](https://peps.python.org/pep-3155/) in addition to the idempotency key as a hash key. ```yaml hl_lines="5-13 21-23" title="AWS Serverless Application Model (SAM) example" Resources: diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md index 2559044b632..6b7d64b66b9 100644 --- a/docs/utilities/parameters.md +++ b/docs/utilities/parameters.md @@ -24,14 +24,15 @@ This utility requires additional permissions to work as expected. ???+ note Different parameter providers require different permissions. 
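For a quick illustration of how these permissions map to everyday calls, here is a minimal sketch (the parameter and AppConfig names are made up for the example); each call is annotated with the permissions it needs, matching the table below:

```python
from aws_lambda_powertools.utilities import parameters


def lambda_handler(event, context):
    # Needs ssm:GetParameter on the parameter, plus kms:Decrypt because decrypt=True
    api_key = parameters.get_parameter("/my-app/api-key", decrypt=True)  # hypothetical parameter name

    # Needs appconfig:GetLatestConfiguration and appconfig:StartConfigurationSession
    config = parameters.get_app_config(name="my-profile", environment="dev", application="my-app")  # hypothetical names

    return {"statusCode": 200}
```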
-| Provider | Function/Method | IAM Permission | -| ------------------- | ---------------------------------------------------- | ------------------------------- | -| SSM Parameter Store | `get_parameter`, `SSMProvider.get` | `ssm:GetParameter` | -| SSM Parameter Store | `get_parameters`, `SSMProvider.get_multiple` | `ssm:GetParametersByPath` | -| Secrets Manager | `get_secret`, `SecretsManager.get` | `secretsmanager:GetSecretValue` | -| DynamoDB | `DynamoDBProvider.get` | `dynamodb:GetItem` | -| DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb:Query` | -| App Config | `AppConfigProvider.get_app_config`, `get_app_config` | `appconfig:GetConfiguration` | +| Provider | Function/Method | IAM Permission | +| ------------------- | -----------------------------------------------------------------| -----------------------------------------------------------------------------| +| SSM Parameter Store | `get_parameter`, `SSMProvider.get` | `ssm:GetParameter` | +| SSM Parameter Store | `get_parameters`, `SSMProvider.get_multiple` | `ssm:GetParametersByPath` | +| SSM Parameter Store | If using `decrypt=True` | You must add an additional permission `kms:Decrypt` | +| Secrets Manager | `get_secret`, `SecretsManager.get` | `secretsmanager:GetSecretValue` | +| DynamoDB | `DynamoDBProvider.get` | `dynamodb:GetItem` | +| DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb:Query` | +| App Config | `get_app_config`, `AppConfigProvider.get_app_config` | `appconfig:GetLatestConfiguration` and `appconfig:StartConfigurationSession` | ### Fetching parameters diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 48c244c8df2..fd157482c2c 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -1,5 +1,5 @@ --- -title: Parser +title: Parser (Pydantic) description: Utility --- @@ -12,20 +12,25 @@ This utility provides data parsing and deep validation using [Pydantic](https:// * Built-in envelopes to unwrap, extend, and validate popular event sources payloads * Enforces type hints at runtime with user-friendly errors -**Extra dependency** +## Getting started -???+ warning +### Install + +!!! info "This is not necessary if you're installing Powertools via [Lambda Layer](../index.md#lambda-layer){target="_blank"}" + +Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. +This will ensure you have the required dependencies before using Parser. + +???+ warning This will increase the compressed package size by >10MB due to the Pydantic dependency. To reduce the impact on the package size at the expense of 30%-50% of its performance [Pydantic can also be installed without binary files](https://pydantic-docs.helpmanual.io/install/#performance-vs-package-size-trade-off): - `SKIP_CYTHON=1 pip install --no-binary pydantic aws-lambda-powertools[pydantic]` + Pip example: `SKIP_CYTHON=1 pip install --no-binary pydantic aws-lambda-powertools[parser]` -Install parser's extra dependencies using **`pip install aws-lambda-powertools[pydantic]`**. - -## Defining models +### Defining models You can define models to parse incoming events by inheriting from `BaseModel`. @@ -47,11 +52,11 @@ class Order(BaseModel): These are simply Python classes that inherit from BaseModel. **Parser** enforces type hints declared in your model at runtime. -## Parsing events +### Parsing events You can parse inbound events using **event_parser** decorator, or the standalone `parse` function. 
Both are also able to parse either dictionary or JSON string as an input. -### event_parser decorator +#### event_parser decorator Use the decorator for fail fast scenarios where you want your Lambda function to raise an exception in the event of a malformed payload. @@ -104,7 +109,7 @@ handler(event=payload, context=LambdaContext()) handler(event=json.dumps(payload), context=LambdaContext()) # also works if event is a JSON string ``` -### parse function +#### parse function Use this standalone function when you want more control over the data validation process, for example returning a 400 error for malformed payloads. @@ -149,7 +154,7 @@ def my_function(): } ``` -## Built-in models +### Built-in models Parser comes with the following built-in models: @@ -172,7 +177,7 @@ Parser comes with the following built-in models: | **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload | | **KafkaMskEventModel** | Lambda Event Source payload for AWS MSK payload | -### Extending built-in models +#### Extending built-in models You can extend them to include your own models, and yet have all other known fields parsed along the way. @@ -251,7 +256,7 @@ for order_item in ret.detail.items: --8<-- "examples/parser/src/extending_built_in_models_with_json_validator.py" ``` -## Envelopes +### Envelopes When trying to parse your payloads wrapped in a known structure, you might encounter the following situations: @@ -309,7 +314,7 @@ def handler(event: UserModel, context: LambdaContext): 3. Parser parsed the original event against the EventBridge model 4. Parser then parsed the `detail` key using `UserModel` -### Built-in envelopes +#### Built-in envelopes Parser comes with the following built-in envelopes, where `Model` in the return section is your given model. @@ -328,7 +333,7 @@ Parser comes with the following built-in envelopes, where `Model` in the return | **LambdaFunctionUrlEnvelope** | 1. Parses data using `LambdaFunctionUrlModel`.
2. Parses `body` key using your model and returns it. | `Model` | | **KafkaEnvelope** | 1. Parses data using `KafkaRecordModel`.
2. Parses `value` key using your model and returns it. | `Model` | -### Bringing your own envelope +#### Bringing your own envelope You can create your own Envelope model and logic by inheriting from `BaseEnvelope`, and implementing the `parse` method. @@ -393,7 +398,7 @@ Here's a snippet of how the EventBridge envelope we demonstrated previously is i 3. Then, we parsed the incoming data with our envelope to confirm it matches EventBridge's structure defined in `EventBridgeModel` 4. Lastly, we call `_parse` from `BaseEnvelope` to parse the data in our envelope (.detail) using the customer model -## Data model validation +### Data model validation ???+ warning This is radically different from the **Validator utility** which validates events against JSON Schema. @@ -410,7 +415,7 @@ Keep the following in mind regardless of which decorator you end up using it: * You must raise either `ValueError`, `TypeError`, or `AssertionError` when value is not compliant * You must return the value(s) itself if compliant -### validating fields +#### validating fields Quick validation to verify whether the field `message` has the value of `hello world`. @@ -455,7 +460,7 @@ class HelloWorldModel(BaseModel): parse(model=HelloWorldModel, event={"message": "hello universe", "sender": "universe"}) ``` -### validating entire model +#### validating entire model `root_validator` can help when you have a complex validation mechanism. For example finding whether data has been omitted, comparing field values, etc. @@ -486,7 +491,7 @@ parse(model=UserModel, event=payload) ???+ info You can read more about validating list items, reusing validators, validating raw inputs, and a lot more in Pydantic's documentation. -## Advanced use cases +### Advanced use cases ???+ tip "Tip: Looking to auto-generate models from JSON, YAML, JSON Schemas, OpenApi, etc?" Use Koudai Aono's [data model code generation tool for Pydantic](https://github.com/koxudaxi/datamodel-code-generator) @@ -551,55 +556,3 @@ If what you're trying to use isn't available as part of the high level import sy ```python title="Pydantic import escape hatch" from aws_lambda_powertools.utilities.parser.pydantic import ``` - -**What is the cold start impact in bringing this additional dependency?** - -No significant cold start impact. It does increase the final uncompressed package by **71M**, when you bring the additional dependency that parser requires. - -Artillery load test sample against a [hello world sample](https://github.com/aws-samples/cookiecutter-aws-sam-python) using Tracer, Metrics, and Logger with and without parser. 
- -**No parser** - -???+ info - **Uncompressed package size**: 55M, **p99**: 180.3ms - -```javascript -Summary report @ 14:36:07(+0200) 2020-10-23 -Scenarios launched: 10 -Scenarios completed: 10 -Requests completed: 2000 -Mean response/sec: 114.81 -Response time (msec): - min: 54.9 - max: 1684.9 - median: 68 - p95: 109.1 - p99: 180.3 -Scenario counts: - 0: 10 (100%) -Codes: - 200: 2000 -``` - -**With parser** - -???+ info - **Uncompressed package size**: 128M, **p99**: 193.1ms - -```javascript -Summary report @ 14:29:23(+0200) 2020-10-23 -Scenarios launched: 10 -Scenarios completed: 10 -Requests completed: 2000 -Mean response/sec: 111.67 -Response time (msec): - min: 54.3 - max: 1887.2 - median: 66.1 - p95: 113.3 - p99: 193.1 -Scenario counts: - 0: 10 (100%) -Codes: - 200: 2000 -``` diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md index 3b61fececd4..43086c3d2d5 100644 --- a/docs/utilities/validation.md +++ b/docs/utilities/validation.md @@ -18,6 +18,14 @@ This utility provides JSON Schema validation for events and responses, including ???+ tip All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. +### Install + +!!! info "This is not necessary if you're installing Powertools via [Lambda Layer](../index.md#lambda-layer){target="_blank"}" + +Add `aws-lambda-powertools[validation]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. + +This will ensure you have the required dependencies before using Validation. + You can validate inbound and outbound events using [`validator` decorator](#validator-decorator). You can also use the standalone `validate` function, if you want more control over the validation process such as handling a validation error. 
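To make the two options above concrete, here is a minimal, illustrative sketch (the schema and payload shape are invented for the example): the decorator fails the invocation on a malformed event, while the standalone `validate` call lets you turn the error into a response of your choosing.

```python title="Validator decorator vs standalone validate (illustrative)"
from aws_lambda_powertools.utilities.validation import SchemaValidationError, validate, validator

# Hypothetical schema used only for this example
INPUT_SCHEMA = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {"user_id": {"type": "string"}},
    "required": ["user_id"],
}


@validator(inbound_schema=INPUT_SCHEMA)
def lambda_handler(event, context):
    # event is guaranteed to match INPUT_SCHEMA by the time we get here
    return {"statusCode": 200}


def lambda_handler_with_manual_validation(event, context):
    # Separate handler shown only to contrast the standalone approach
    try:
        validate(event=event, schema=INPUT_SCHEMA)
    except SchemaValidationError:
        # More control: translate the validation error into a 400 instead of failing the invocation
        return {"statusCode": 400, "body": "Invalid request"}
    return {"statusCode": 200}
```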
diff --git a/examples/event_handler_rest/src/binary_responses_output.json b/examples/event_handler_rest/src/binary_responses_output.json index 0938dee6811..ec59d251732 100644 --- a/examples/event_handler_rest/src/binary_responses_output.json +++ b/examples/event_handler_rest/src/binary_responses_output.json @@ -1,7 +1,7 @@ { "body": "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMjU2cHgiIGhlaWdodD0iMjU2cHgiIHZpZXdCb3g9IjAgMCAyNTYgMjU2IiB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiIHByZXNlcnZlQXNwZWN0UmF0aW89InhNaWRZTWlkIj4KICAgIDx0aXRsZT5BV1MgTGFtYmRhPC90aXRsZT4KICAgIDxkZWZzPgogICAgICAgIDxsaW5lYXJHcmFkaWVudCB4MT0iMCUiIHkxPSIxMDAlIiB4Mj0iMTAwJSIgeTI9IjAlIiBpZD0ibGluZWFyR3JhZGllbnQtMSI+CiAgICAgICAgICAgIDxzdG9wIHN0b3AtY29sb3I9IiNDODUxMUIiIG9mZnNldD0iMCUiPjwvc3RvcD4KICAgICAgICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI0ZGOTkwMCIgb2Zmc2V0PSIxMDAlIj48L3N0b3A+CiAgICAgICAgPC9saW5lYXJHcmFkaWVudD4KICAgIDwvZGVmcz4KICAgIDxnPgogICAgICAgIDxyZWN0IGZpbGw9InVybCgjbGluZWFyR3JhZGllbnQtMSkiIHg9IjAiIHk9IjAiIHdpZHRoPSIyNTYiIGhlaWdodD0iMjU2Ij48L3JlY3Q+CiAgICAgICAgPHBhdGggZD0iTTg5LjYyNDExMjYsMjExLjIgTDQ5Ljg5MDMyNzcsMjExLjIgTDkzLjgzNTQ4MzIsMTE5LjM0NzIgTDExMy43NDcyOCwxNjAuMzM5MiBMODkuNjI0MTEyNiwyMTEuMiBaIE05Ni43MDI5MzU3LDExMC41Njk2IEM5Ni4xNjQwODU4LDEwOS40NjU2IDk1LjA0MTQ4MTMsMTA4Ljc2NDggOTMuODE2MjM4NCwxMDguNzY0OCBMOTMuODA2NjE2MywxMDguNzY0OCBDOTIuNTcxNzUxNCwxMDguNzY4IDkxLjQ0OTE0NjYsMTA5LjQ3NTIgOTAuOTE5OTE4NywxMTAuNTg1NiBMNDEuOTEzNDIwOCwyMTMuMDIwOCBDNDEuNDM4NzE5NywyMTQuMDEyOCA0MS41MDYwNzU4LDIxNS4xNzc2IDQyLjA5NjI0NTEsMjE2LjEwODggQzQyLjY3OTk5OTQsMjE3LjAzNjggNDMuNzA2MzgwNSwyMTcuNiA0NC44MDY1MzMxLDIxNy42IEw5MS42NTQ0MjMsMjE3LjYgQzkyLjg5NTcwMjcsMjE3LjYgOTQuMDIxNTE0OSwyMTYuODg2NCA5NC41NTM5NTAxLDIxNS43Njk2IEwxMjAuMjAzODU5LDE2MS42ODk2IEMxMjAuNjE3NjE5LDE2MC44MTI4IDEyMC42MTQ0MTIsMTU5Ljc5ODQgMTIwLjE4NzgyMiwxNTguOTI4IEw5Ni43MDI5MzU3LDExMC41Njk2IFogTTIwNy45ODUxMTcsMjExLjIgTDE2OC41MDc5MjgsMjExLjIgTDEwNS4xNzM3ODksNzguNjI0IEMxMDQuNjQ0NTYxLDc3LjUxMDQgMTAzLjUxNTU0MSw3Ni44IDEwMi4yNzc0NjksNzYuOCBMNzYuNDQ3OTQzLDc2LjggTDc2LjQ3NjgwOTksNDQuOCBMMTI3LjEwMzA2Niw0NC44IEwxOTAuMTQ1MzI4LDE3Ny4zNzI4IEMxOTAuNjc0NTU2LDE3OC40ODY0IDE5MS44MDM1NzUsMTc5LjIgMTkzLjA0MTY0NywxNzkuMiBMMjA3Ljk4NTExNywxNzkuMiBMMjA3Ljk4NTExNywyMTEuMiBaIE0yMTEuMTkyNTU4LDE3Mi44IEwxOTUuMDcxOTU4LDE3Mi44IEwxMzIuMDI5Njk2LDQwLjIyNzIgQzEzMS41MDA0NjgsMzkuMTEzNiAxMzAuMzcxNDQ5LDM4LjQgMTI5LjEzMDE2OSwzOC40IEw3My4yNzI1NzYsMzguNCBDNzEuNTA1Mjc1OCwzOC40IDcwLjA2ODM0MjEsMzkuODMwNCA3MC4wNjUxMzQ0LDQxLjU5NjggTDcwLjAyOTg1MjgsNzkuOTk2OCBDNzAuMDI5ODUyOCw4MC44NDggNzAuMzYzNDI2Niw4MS42NjA4IDcwLjk2OTYzMyw4Mi4yNjI0IEM3MS41Njk0MjQ2LDgyLjg2NCA3Mi4zODQxMTQ2LDgzLjIgNzMuMjM3Mjk0MSw4My4yIEwxMDAuMjUzNTczLDgzLjIgTDE2My41OTA5MiwyMTUuNzc2IEMxNjQuMTIzMzU1LDIxNi44ODk2IDE2NS4yNDU5NiwyMTcuNiAxNjYuNDg0MDMyLDIxNy42IEwyMTEuMTkyNTU4LDIxNy42IEMyMTIuOTY2Mjc0LDIxNy42IDIxNC40LDIxNi4xNjY0IDIxNC40LDIxNC40IEwyMTQuNCwxNzYgQzIxNC40LDE3NC4yMzM2IDIxMi45NjYyNzQsMTcyLjggMjExLjE5MjU1OCwxNzIuOCBMMjExLjE5MjU1OCwxNzIuOCBaIiBmaWxsPSIjRkZGRkZGIj48L3BhdGg+CiAgICA8L2c+Cjwvc3ZnPg==", - "headers": { - "Content-Type": "image/svg+xml" + "multiValueHeaders": { + "Content-Type": ["image/svg+xml"] }, "isBase64Encoded": true, "statusCode": 200 diff --git a/examples/event_handler_rest/src/compressing_responses_output.json b/examples/event_handler_rest/src/compressing_responses_output.json index 0836b3aa726..60a63966494 100644 --- a/examples/event_handler_rest/src/compressing_responses_output.json +++ b/examples/event_handler_rest/src/compressing_responses_output.json @@ 
-1,8 +1,8 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "Content-Encoding": "gzip" + "multiValueHeaders": { + "Content-Type": ["application/json"], + "Content-Encoding": ["gzip"] }, "body": "H4sIAAAAAAACE42STU4DMQyFrxJl3QXln96AMyAW7sSDLCVxiJ0Kqerd8TCCUOgii1EmP/783pOPXjmw+N3L0TfB+hz8brvxtC5KGtHvfMCIkzZx0HT5MPmNnziViIr2dIYoeNr8Q1x3xHsjcVadIbkZJoq2RXU8zzQROLseQ9505NzeCNQdMJNBE+UmY4zbzjAJhWtlZ57sB84BWtul+rteH2HPlVgWARwjqXkxpklK5gmEHAQqJBMtFsGVygcKmNVRjG0wxvuzGF2L0dpVUOKMC3bfJNjJgWMrCuZk7cUp02AiD72D6WKHHwUDKbiJs6AZ0VZXKOUx4uNvzdxT+E4mLcMA+6G8nzrLQkaxkNEVrFKW2VGbJCoCY7q2V3+tiv5kGThyxfTecDWbgGz/NfYXhL6ePgF9PnFdPgMAAA==", "isBase64Encoded": true diff --git a/examples/event_handler_rest/src/fine_grained_responses.py b/examples/event_handler_rest/src/fine_grained_responses.py index 15c70cd282b..7d45d74621f 100644 --- a/examples/event_handler_rest/src/fine_grained_responses.py +++ b/examples/event_handler_rest/src/fine_grained_responses.py @@ -10,6 +10,7 @@ content_types, ) from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.shared.cookies import Cookie from aws_lambda_powertools.utilities.typing import LambdaContext tracer = Tracer() @@ -23,13 +24,14 @@ def get_todos(): todos: requests.Response = requests.get("https://jsonplaceholder.typicode.com/todos") todos.raise_for_status() - custom_headers = {"X-Transaction-Id": f"{uuid4()}"} + custom_headers = {"X-Transaction-Id": [f"{uuid4()}"]} return Response( status_code=HTTPStatus.OK.value, # 200 content_type=content_types.APPLICATION_JSON, body=todos.json()[:10], headers=custom_headers, + cookies=[Cookie(name="session_id", value="12345")], ) diff --git a/examples/event_handler_rest/src/fine_grained_responses_output.json b/examples/event_handler_rest/src/fine_grained_responses_output.json index c3d58098e80..0b33bd91542 100644 --- a/examples/event_handler_rest/src/fine_grained_responses_output.json +++ b/examples/event_handler_rest/src/fine_grained_responses_output.json @@ -1,8 +1,9 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "X-Transaction-Id": "3490eea9-791b-47a0-91a4-326317db61a9" + "multiValueHeaders": { + "Content-Type": ["application/json"], + "X-Transaction-Id": ["3490eea9-791b-47a0-91a4-326317db61a9"], + "Set-Cookie": ["session_id=12345; Secure"] }, "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", "isBase64Encoded": false diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json index 2ef3714531f..24d2b5c6dbc 100644 --- 
a/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json @@ -1,7 +1,7 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json" + "multiValueHeaders": { + "Content-Type": ["application/json"] }, "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", "isBase64Encoded": false diff --git a/examples/event_handler_rest/src/setting_cors_output.json b/examples/event_handler_rest/src/setting_cors_output.json index ca86e892d38..19660941e91 100644 --- a/examples/event_handler_rest/src/setting_cors_output.json +++ b/examples/event_handler_rest/src/setting_cors_output.json @@ -1,9 +1,9 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "Access-Control-Allow-Origin": "https://www.example.com", - "Access-Control-Allow-Headers": "Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key" + "multiValueHeaders": { + "Content-Type": ["application/json"], + "Access-Control-Allow-Origin": ["https://www.example.com"], + "Access-Control-Allow-Headers": ["Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key"] }, "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", "isBase64Encoded": false diff --git a/examples/parser/src/extending_built_in_models_with_json_mypy.py b/examples/parser/src/extending_built_in_models_with_json_mypy.py index 80314a814ce..813f757ad79 100644 --- a/examples/parser/src/extending_built_in_models_with_json_mypy.py +++ b/examples/parser/src/extending_built_in_models_with_json_mypy.py @@ -11,11 +11,11 @@ class CancelOrder(BaseModel): class CancelOrderModel(APIGatewayProxyEventV2Model): - body: Json[CancelOrder] # type: ignore[type-arg] + body: Json[CancelOrder] # type: 
ignore[assignment] @event_parser(model=CancelOrderModel) def handler(event: CancelOrderModel, context: LambdaContext): - cancel_order: CancelOrder = event.body # type: ignore[assignment] + cancel_order: CancelOrder = event.body assert cancel_order.order_id is not None diff --git a/layer/app.py b/layer/app.py index 50f8090482e..59a35dfd300 100644 --- a/layer/app.py +++ b/layer/app.py @@ -8,7 +8,8 @@ app = cdk.App() POWERTOOLS_VERSION: str = app.node.try_get_context("version") -SSM_PARAM_LAYER_ARN: str = "/layers/powertools-layer-arn" +SSM_PARAM_LAYER_ARN: str = "/layers/powertools-layer-v2-arn" +SSM_PARAM_LAYER_ARM64_ARN: str = "/layers/powertools-layer-v2-arm64-arn" if not POWERTOOLS_VERSION: raise ValueError( @@ -18,16 +19,18 @@ LayerStack( app, - "LayerStack", + "LayerV2Stack", powertools_version=POWERTOOLS_VERSION, ssm_paramter_layer_arn=SSM_PARAM_LAYER_ARN, + ssm_parameter_layer_arm64_arn=SSM_PARAM_LAYER_ARM64_ARN, ) CanaryStack( app, - "CanaryStack", + "CanaryV2Stack", powertools_version=POWERTOOLS_VERSION, ssm_paramter_layer_arn=SSM_PARAM_LAYER_ARN, + ssm_parameter_layer_arm64_arn=SSM_PARAM_LAYER_ARM64_ARN, ) app.synth() diff --git a/layer/layer/canary/app.py b/layer/layer/canary/app.py index 1011fc654c2..e9d8d5d7679 100644 --- a/layer/layer/canary/app.py +++ b/layer/layer/canary/app.py @@ -1,14 +1,19 @@ import datetime import json import os +import platform from importlib.metadata import version import boto3 +from pydantic import HttpUrl from aws_lambda_powertools import Logger, Metrics, Tracer +from aws_lambda_powertools.utilities.parser import BaseModel, envelopes, event_parser +from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools.utilities.validation import validator logger = Logger(service="version-track") -tracer = Tracer() +tracer = Tracer() # this checks for aws-xray-sdk presence metrics = Metrics(namespace="powertools-layer-canary", service="PowertoolsLayerCanary") layer_arn = os.getenv("POWERTOOLS_LAYER_ARN") @@ -17,6 +22,26 @@ event_bus_arn = os.getenv("VERSION_TRACKING_EVENT_BUS_ARN") +# Model to check parser imports correctly, tests for pydantic +class OrderItem(BaseModel): + order_id: int + quantity: int + description: str + url: HttpUrl + + +# Tests for jmespath presence +@event_parser(model=OrderItem, envelope=envelopes.EventBridgeEnvelope) +def envelope_handler(event: OrderItem, context: LambdaContext): + assert event.order_id != 1 + + +# Tests for fastjsonschema presence +@validator(inbound_schema={}, envelope="detail") +def validator_handler(event, context: LambdaContext): + pass + + def handler(event): logger.info("Running checks") check_envs() @@ -42,9 +67,7 @@ def on_create(event): def check_envs(): - logger.info( - 'Checking required envs ["POWERTOOLS_LAYER_ARN", "AWS_REGION", "STAGE"]' - ) + logger.info('Checking required envs ["POWERTOOLS_LAYER_ARN", "AWS_REGION", "STAGE"]') if not layer_arn: raise ValueError("POWERTOOLS_LAYER_ARN is not set. 
Aborting...") if not powertools_version: @@ -66,9 +89,9 @@ def verify_powertools_version() -> None: current_version = version("aws_lambda_powertools") if powertools_version != current_version: raise ValueError( - f'Expected powertoosl version is "{powertools_version}", but layer contains version "{current_version}"' + f'Expected Powertools version is "{powertools_version}", but layer contains version "{current_version}"' ) - logger.info(f"Current Powertools version is: {current_version}") + logger.info(f"Current Powertools version is: {current_version} [{_get_architecture()}]") def send_notification(): @@ -76,10 +99,9 @@ def send_notification(): sends an event to version tracking event bridge """ if stage != "PROD": - logger.info( - "Not sending notification to event bus, because this is not the PROD stage" - ) + logger.info("Not sending notification to event bus, because this is not the PROD stage") return + event = { "Time": datetime.datetime.now(), "Source": "powertools.layer.canary", @@ -90,6 +112,7 @@ def send_notification(): "version": powertools_version, "region": os.environ["AWS_REGION"], "layerArn": layer_arn, + "architecture": _get_architecture(), } ), } @@ -102,3 +125,8 @@ def send_notification(): if resp["FailedEntryCount"] != 0: logger.error(resp) raise ValueError("Failed to send deployment notification to version tracking") + + +def _get_architecture() -> str: + """Returns aarch64, x86_64""" + return platform.uname()[4] diff --git a/layer/layer/canary_stack.py b/layer/layer/canary_stack.py index 1f903f91c74..fda9ebff3ad 100644 --- a/layer/layer/canary_stack.py +++ b/layer/layer/canary_stack.py @@ -2,12 +2,16 @@ from aws_cdk import CfnParameter, CustomResource, Duration, Stack from aws_cdk.aws_iam import Effect, ManagedPolicy, PolicyStatement, Role, ServicePrincipal -from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime +from aws_cdk.aws_lambda import Architecture, Code, Function, LayerVersion, Runtime from aws_cdk.aws_logs import RetentionDays from aws_cdk.aws_ssm import StringParameter from aws_cdk.custom_resources import Provider from constructs import Construct +VERSION_TRACKING_EVENT_BUS_ARN: str = ( + "arn:aws:events:eu-central-1:027876851704:event-bus/VersionTrackingEventBus" +) + class CanaryStack(Stack): def __init__( @@ -16,25 +20,74 @@ def __init__( construct_id: str, powertools_version: str, ssm_paramter_layer_arn: str, + ssm_parameter_layer_arm64_arn: str, **kwargs, ) -> None: super().__init__(scope, construct_id, **kwargs) - VERSION_TRACKING_EVENT_BUS_ARN: str = ( - "arn:aws:events:eu-central-1:027876851704:event-bus/VersionTrackingEventBus" - ) + deploy_stage = CfnParameter( + self, "DeployStage", description="Deployment stage for canary" + ).value_as_string layer_arn = StringParameter.from_string_parameter_attributes( self, "LayerVersionArnParam", parameter_name=ssm_paramter_layer_arn ).string_value + Canary( + self, + "Canary-x86-64", + layer_arn=layer_arn, + powertools_version=powertools_version, + architecture=Architecture.X86_64, + stage=deploy_stage, + ) + + layer_arm64_arn = StringParameter.from_string_parameter_attributes( + self, + "LayerArm64VersionArnParam", + parameter_name=ssm_parameter_layer_arm64_arn, + ).string_value + Canary( + self, + "Canary-arm64", + layer_arn=layer_arm64_arn, + powertools_version=powertools_version, + architecture=Architecture.ARM_64, + stage=deploy_stage, + ) + + +class Canary(Construct): + def __init__( + self, + scope: Construct, + construct_id: str, + layer_arn: str, + powertools_version: str, + 
architecture: Architecture, + stage: str, + ): + super().__init__(scope, construct_id) - layer = LayerVersion.from_layer_version_arn(self, "PowertoolsLayer", layer_version_arn=layer_arn) - deploy_stage = CfnParameter(self, "DeployStage", description="Deployment stage for canary").value_as_string + layer = LayerVersion.from_layer_version_arn( + self, "PowertoolsLayer", layer_version_arn=layer_arn + ) - execution_role = Role(self, "LambdaExecutionRole", assumed_by=ServicePrincipal("lambda.amazonaws.com")) + execution_role = Role( + self, + "LambdaExecutionRole", + assumed_by=ServicePrincipal("lambda.amazonaws.com"), + ) execution_role.add_managed_policy( - ManagedPolicy.from_aws_managed_policy_name("service-role/AWSLambdaBasicExecutionRole") + ManagedPolicy.from_aws_managed_policy_name( + "service-role/AWSLambdaBasicExecutionRole" + ) + ) + + execution_role.add_to_policy( + PolicyStatement( + effect=Effect.ALLOW, actions=["lambda:GetFunction"], resources=["*"] + ) ) canary_lambda = Function( @@ -46,25 +99,35 @@ def __init__( memory_size=512, timeout=Duration.seconds(10), runtime=Runtime.PYTHON_3_9, + architecture=architecture, log_retention=RetentionDays.ONE_MONTH, role=execution_role, environment={ "POWERTOOLS_VERSION": powertools_version, "POWERTOOLS_LAYER_ARN": layer_arn, "VERSION_TRACKING_EVENT_BUS_ARN": VERSION_TRACKING_EVENT_BUS_ARN, - "LAYER_PIPELINE_STAGE": deploy_stage, + "LAYER_PIPELINE_STAGE": stage, }, ) canary_lambda.add_to_role_policy( PolicyStatement( - effect=Effect.ALLOW, actions=["events:PutEvents"], resources=[VERSION_TRACKING_EVENT_BUS_ARN] + effect=Effect.ALLOW, + actions=["events:PutEvents"], + resources=[VERSION_TRACKING_EVENT_BUS_ARN], ) ) # custom resource provider configuration provider = Provider( - self, "CanaryCustomResource", on_event_handler=canary_lambda, log_retention=RetentionDays.ONE_MONTH + self, + "CanaryCustomResource", + on_event_handler=canary_lambda, + log_retention=RetentionDays.ONE_MONTH, ) # force to recreate resource on each deployment with randomized name - CustomResource(self, f"CanaryTrigger-{str(uuid.uuid4())[0:7]}", service_token=provider.service_token) + CustomResource( + self, + f"CanaryTrigger-{str(uuid.uuid4())[0:7]}", + service_token=provider.service_token, + ) diff --git a/layer/layer/layer_stack.py b/layer/layer/layer_stack.py index f15232eb560..6a92e1fa408 100644 --- a/layer/layer/layer_stack.py +++ b/layer/layer/layer_stack.py @@ -1,18 +1,38 @@ from aws_cdk import CfnOutput, RemovalPolicy, Stack -from aws_cdk.aws_lambda import CfnLayerVersionPermission +from aws_cdk.aws_lambda import Architecture, CfnLayerVersionPermission from aws_cdk.aws_ssm import StringParameter -from cdk_lambda_powertools_python_layer import LambdaPowertoolsLayer +from cdk_aws_lambda_powertools_layer import LambdaPowertoolsLayer from constructs import Construct class LayerStack(Stack): def __init__( - self, scope: Construct, construct_id: str, powertools_version: str, ssm_paramter_layer_arn: str, **kwargs + self, + scope: Construct, + construct_id: str, + powertools_version: str, + ssm_paramter_layer_arn: str, + ssm_parameter_layer_arm64_arn: str, + **kwargs ) -> None: super().__init__(scope, construct_id, **kwargs) layer = LambdaPowertoolsLayer( - self, "Layer", layer_version_name="AWSLambdaPowertoolsPython", version=powertools_version + self, + "Layer", + layer_version_name="AWSLambdaPowertoolsPythonV2", + version=powertools_version, + include_extras=True, + compatible_architectures=[Architecture.X86_64], + ) + + layer_arm64 = LambdaPowertoolsLayer( + self, + 
"Layer-ARM64", + layer_version_name="AWSLambdaPowertoolsPythonV2-Arm64", + version=powertools_version, + include_extras=True, + compatible_architectures=[Architecture.ARM_64], ) layer_permission = CfnLayerVersionPermission( @@ -23,9 +43,32 @@ def __init__( principal="*", ) + layer_permission_arm64 = CfnLayerVersionPermission( + self, + "PublicLayerAccessArm64", + action="lambda:GetLayerVersion", + layer_version_arn=layer_arm64.layer_version_arn, + principal="*", + ) + layer_permission.apply_removal_policy(RemovalPolicy.RETAIN) + layer_permission_arm64.apply_removal_policy(RemovalPolicy.RETAIN) + layer.apply_removal_policy(RemovalPolicy.RETAIN) + layer_arm64.apply_removal_policy(RemovalPolicy.RETAIN) - StringParameter(self, "VersionArn", parameter_name=ssm_paramter_layer_arn, string_value=layer.layer_version_arn) + StringParameter( + self, + "VersionArn", + parameter_name=ssm_paramter_layer_arn, + string_value=layer.layer_version_arn, + ) + StringParameter( + self, + "Arm64VersionArn", + parameter_name=ssm_parameter_layer_arm64_arn, + string_value=layer_arm64.layer_version_arn, + ) CfnOutput(self, "LatestLayerArn", value=layer.layer_version_arn) + CfnOutput(self, "LatestLayerArm64Arn", value=layer_arm64.layer_version_arn) diff --git a/layer/poetry.lock b/layer/poetry.lock index 182094a8b9d..d0fd4d0a37b 100644 --- a/layer/poetry.lock +++ b/layer/poetry.lock @@ -1,28 +1,20 @@ -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "aws-cdk-lib" -version = "2.35.0" +version = "2.45.0" description = "Version 2 of the AWS Cloud Development Kit library" category = "main" optional = false @@ -30,20 +22,20 @@ python-versions = "~=3.7" [package.dependencies] constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.63.2,<2.0.0" +jsii = ">=1.68.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "boto3" -version = "1.24.46" +version = "1.24.89" description = "The AWS SDK for Python" category = "dev" optional = 
false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.27.46,<1.28.0" +botocore = ">=1.27.89,<1.28.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -52,7 +44,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.27.46" +version = "1.27.89" description = "Low-level, data-driven core of boto 3." category = "dev" optional = false @@ -64,7 +56,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.8)"] +crt = ["awscrt (==0.14.0)"] [[package]] name = "cattrs" @@ -79,18 +71,19 @@ attrs = ">=20" exceptiongroup = {version = "*", markers = "python_version <= \"3.10\""} [[package]] -name = "cdk-lambda-powertools-python-layer" -version = "2.0.49" -description = "A lambda layer for AWS Powertools for python" +name = "cdk-aws-lambda-powertools-layer" +version = "3.2.0" +description = "A lambda layer for AWS Powertools for python and typescript" category = "main" optional = false python-versions = "~=3.7" [package.dependencies] -aws-cdk-lib = ">=2.2.0,<3.0.0" +aws-cdk-lib = ">=2.44.0,<3.0.0" constructs = ">=10.0.5,<11.0.0" -jsii = ">=1.61.0,<2.0.0" +jsii = ">=1.69.0,<2.0.0" publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" [[package]] name = "colorama" @@ -102,20 +95,20 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "constructs" -version = "10.1.67" +version = "10.1.128" description = "A programming model for software-defined state" category = "main" optional = false python-versions = "~=3.7" [package.dependencies] -jsii = ">=1.63.2,<2.0.0" +jsii = ">=1.69.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "exceptiongroup" -version = "1.0.0rc8" +version = "1.0.0rc9" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false @@ -142,14 +135,14 @@ python-versions = ">=3.7" [[package]] name = "jsii" -version = "1.63.2" +version = "1.69.0" description = "Python client for jsii runtime" category = "main" optional = false python-versions = "~=3.7" [package.dependencies] -attrs = ">=21.2,<22.0" +attrs = ">=21.2,<23.0" cattrs = ">=1.8,<22.2" publication = ">=0.0.3" python-dateutil = "*" @@ -204,18 +197,17 @@ optional = false python-versions = ">=3.6.8" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.1.2" +version = "7.1.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" @@ -277,12 +269,12 @@ optional = false python-versions = ">=3.5.3" [package.extras] -doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["pytest", "typing-extensions", "mypy"] +doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] [[package]] name = "typing-extensions" -version = "4.3.0" +version = "4.4.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false @@ -290,61 +282,58 @@ python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.11" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "a68a9649808efb49529ace7d990559e6569be096bf2d86234f3bd056bae0fdc3" +content-hash = "10a4e60fe1abbe982f077699767b8a7949b2be5ca82f909647f34d1e30ffb9a9" [metadata.files] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] aws-cdk-lib = [ - {file = "aws-cdk-lib-2.35.0.tar.gz", hash = "sha256:fc9cba4df0b60a9ab7f17ceb3b1c447d27e96cec9eb9e8c5b7ecfd1275878930"}, - {file = "aws_cdk_lib-2.35.0-py3-none-any.whl", hash = "sha256:ee481dca9335c32b5871e58ba697e27e2f1e92d9b81cf9341cfc6cc36127a2b0"}, + {file = "aws-cdk-lib-2.45.0.tar.gz", hash = "sha256:ed4166498205a6507666a9fdb69f5dbeffa11cd69bf7e98b279ec305e4970374"}, + {file = "aws_cdk_lib-2.45.0-py3-none-any.whl", hash = "sha256:9463fe6d84563c4c23ae96810be0ea0ff0a260eebb85a4a7afe0c3747eca18a8"}, ] boto3 = [ - {file = "boto3-1.24.46-py3-none-any.whl", hash = "sha256:44026e44549148dbc5b261ead5f6b339e785680c350ef621bf85f7e2fca05b49"}, - {file = "boto3-1.24.46.tar.gz", hash = "sha256:b2d9d55f123a9a91eea2fd8e379d90abf37634420fbb45c22d67e10b324ec71b"}, + {file = "boto3-1.24.89-py3-none-any.whl", hash = "sha256:346f8f0d101a4261dac146a959df18d024feda6431e1d9d84f94efd24d086cae"}, + {file = "boto3-1.24.89.tar.gz", hash = "sha256:d0d8ffcdc10821c4562bc7f935cdd840033bbc342ac0e14b6bdd348b3adf4c04"}, ] botocore = [ - {file = "botocore-1.27.46-py3-none-any.whl", hash = "sha256:747b7e94aef41498f063fc0be79c5af102d940beea713965179e1ead89c7e9ec"}, - {file = "botocore-1.27.46.tar.gz", hash = "sha256:f66d8305d1f59d83334df9b11b6512bb1e14698ec4d5d6d42f833f39f3304ca7"}, + {file = "botocore-1.27.89-py3-none-any.whl", hash = "sha256:238f1dfdb8d8d017c2aea082609a3764f3161d32745900f41bcdcf290d95a048"}, + {file = "botocore-1.27.89.tar.gz", hash = "sha256:621f5413be8f97712b7e36c1b075a8791d1d1b9971a7ee060cdcdf5e2debf6c1"}, ] cattrs = [ {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, ] -cdk-lambda-powertools-python-layer = [ - {file = "cdk-lambda-powertools-python-layer-2.0.49.tar.gz", hash = "sha256:8055fc691539f16e22a40e3d3df9c3f59fb28012437b08c47c639aefb001f1b2"}, - {file = "cdk_lambda_powertools_python_layer-2.0.49-py3-none-any.whl", hash = 
"sha256:9b0a7b7344f9ccb486564af728cefeac743687bfb131631e6d9171a55800dbac"}, +cdk-aws-lambda-powertools-layer = [ + {file = "cdk-aws-lambda-powertools-layer-3.2.0.tar.gz", hash = "sha256:75b86a6c8714c82293d754f1d799134c4159953711312e261f8b3aaf77492fa6"}, + {file = "cdk_aws_lambda_powertools_layer-3.2.0-py3-none-any.whl", hash = "sha256:a293a2f42b459de70ccd9d2a16b0b0789f7c682aa31ab80d6696e93ff07caa92"}, ] colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] constructs = [ - {file = "constructs-10.1.67-py3-none-any.whl", hash = "sha256:d597d8d5387328c1e95fa674d5d64969b1c1a479e63544e53a067a5d95b5c46b"}, - {file = "constructs-10.1.67.tar.gz", hash = "sha256:8b9fdf5040dde63545c08b8cc86fcd019512e0d16ee599c82b1201a5806f0066"}, + {file = "constructs-10.1.128-py3-none-any.whl", hash = "sha256:d6fbc88de4c2517b59e28a9d0bc3663e75decbe3464030b5bc53809868b52c9e"}, + {file = "constructs-10.1.128.tar.gz", hash = "sha256:6789412823ae27b39f659537337f688a9d555cad5845d4b821c7be02a061be1e"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, - {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, + {file = "exceptiongroup-1.0.0rc9-py3-none-any.whl", hash = "sha256:2e3c3fc1538a094aab74fad52d6c33fc94de3dfee3ee01f187c0e0c72aec5337"}, + {file = "exceptiongroup-1.0.0rc9.tar.gz", hash = "sha256:9086a4a21ef9b31c72181c77c040a074ba0889ee56a7b289ff0afb0d97655f96"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -355,8 +344,8 @@ jmespath = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] jsii = [ - {file = "jsii-1.63.2-py3-none-any.whl", hash = "sha256:ae8cbc84c633382c317dc367e1441bb2afd8b74ed82b3557b8df15e05316b14d"}, - {file = "jsii-1.63.2.tar.gz", hash = "sha256:6f68dcd82395ccd12606b31383f611adfefd246082750350891a2a277562f34b"}, + {file = "jsii-1.69.0-py3-none-any.whl", hash = "sha256:f3ae5cdf5e854b4d59256dc1f8818cd3fabb8eb43fbd3134a8e8aef962643005"}, + {file = "jsii-1.69.0.tar.gz", hash = "sha256:7c7ed2a913372add17d63322a640c6435324770eb78c6b89e4c701e07d9c84db"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -379,8 +368,8 @@ pyparsing = [ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, + {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, ] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, @@ -403,7 +392,10 @@ typeguard = [ {file = "typeguard-2.13.3.tar.gz", hash = 
"sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, ] typing-extensions = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, + {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, + {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, +] +urllib3 = [ + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] -urllib3 = [] diff --git a/layer/pyproject.toml b/layer/pyproject.toml index 7f219453a72..e4caa660565 100644 --- a/layer/pyproject.toml +++ b/layer/pyproject.toml @@ -1,14 +1,13 @@ [tool.poetry] name = "aws-lambda-powertools-python-layer" -version = "0.1.0" +version = "1.1.0" description = "AWS Lambda Powertools for Python Lambda Layers" authors = ["DevAx "] license = "MIT" [tool.poetry.dependencies] python = "^3.9" -cdk-lambda-powertools-python-layer = "^2.0.49" -aws-cdk-lib = "^2.35.0" +cdk-aws-lambda-powertools-layer = "^3.2.0" [tool.poetry.dev-dependencies] pytest = "^7.1.2" diff --git a/layer/sar/template.txt b/layer/sar/template.txt new file mode 100644 index 00000000000..808e9d7a36a --- /dev/null +++ b/layer/sar/template.txt @@ -0,0 +1,38 @@ +AWSTemplateFormatVersion: '2010-09-09' + +Metadata: + AWS::ServerlessRepo::Application: + Name: + Description: "AWS Lambda Layer for aws-lambda-powertools " + Author: AWS + SpdxLicenseId: Apache-2.0 + LicenseUrl: /LICENSE + ReadmeUrl: /README.md + Labels: ['layer','lambda','powertools','python', 'aws'] + HomePageUrl: https://github.com/awslabs/aws-lambda-powertools-python + SemanticVersion: + SourceCodeUrl: https://github.com/awslabs/aws-lambda-powertools-python + +Transform: AWS::Serverless-2016-10-31 +Description: AWS Lambda Layer for aws-lambda-powertools with python 3.9, 3.8 or 3.7 + +Resources: + LambdaLayer: + Type: AWS::Serverless::LayerVersion + Properties: + Description: "AWS Lambda Layer for aws-lambda-powertools version " + LayerName: + ContentUri: + CompatibleRuntimes: + - python3.9 + - python3.8 + - python3.7 + LicenseInfo: 'Available under the Apache-2.0 license.' + RetentionPolicy: Retain + +Outputs: + LayerVersionArn: + Description: ARN for the published Layer version + Value: !Ref LambdaLayer + Export: + Name: !Sub 'LayerVersionArn-${AWS::StackName}' diff --git a/layer/scripts/update_layer_arn.sh b/layer/scripts/update_layer_arn.sh new file mode 100755 index 00000000000..b007b2d35cc --- /dev/null +++ b/layer/scripts/update_layer_arn.sh @@ -0,0 +1,75 @@ +#!/bin/bash + +# This script is run during the reusable_update_v2_layer_arn_docs CI job, +# and it is responsible for replacing the layer ARN in our documentation, +# based on the output files generated by CDK when deploying to each pseudo_region. 
+# +# see .github/workflows/reusable_deploy_v2_layer_stack.yml + +set -eo pipefail + +if [[ $# -ne 1 ]]; then + cat < line + # sed doesn't support \d+ in a portable way, so we cheat with (:digit: :digit: *) + sed -i '' -e "s/$prefix:[[:digit:]][[:digit:]]*/$line/g" docs/index.md + + # We use the eu-central-1 layer as the version for all the frameworks (SAM, CDK, SLS, etc) + # We could have used any other region. What's important is the version at the end. + + # Examples of strings found in the documentation with pseudo regions: + # arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:39 + # arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:39 + # arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:39 + # arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:39 + if [[ "$line" == *"eu-central-1"* ]]; then + # These are all the framework pseudo parameters currently found in the docs + for pseudo_region in '{region}' '${AWS::Region}' '${aws:region}' '{env.region}' + do + prefix_pseudo_region=$(echo "$prefix" | sed "s/eu-central-1/${pseudo_region}/") + # prefix_pseudo_region = arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython + + line_pseudo_region=$(echo "$line" | sed "s/eu-central-1/${pseudo_region}/") + # line_pseudo_region = arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:49 + + # Replace all the "prefix_pseudo_region"'s in the file + # prefix_pseudo_region:\d+ ==> line_pseudo_region + sed -i '' -e "s/$prefix_pseudo_region:[[:digit:]][[:digit:]]*/$line_pseudo_region/g" docs/index.md + done + fi + done +done diff --git a/mkdocs.yml b/mkdocs.yml index 171cf36eb13..59fcdfa6a08 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -10,6 +10,7 @@ nav: - Tutorial: tutorial/index.md - Roadmap: roadmap.md - API reference: api/" target="_blank + - Upgrade guide: upgrade.md - Core utilities: - core/tracer.md - core/logger.md diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000000..1764eda669e --- /dev/null +++ b/package-lock.json @@ -0,0 +1,62 @@ +{ + "name": "aws-lambda-powertools-python-e2e", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "aws-lambda-powertools-python-e2e", + "version": "1.0.0", + "devDependencies": { + "aws-cdk": "2.44.0" + } + }, + "node_modules/aws-cdk": { + "version": "2.44.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.44.0.tgz", + "integrity": "sha512-9hbK4Yc1GQ28zSjZE2ajidt7sRrTLYpijkI7HT7JcDhXLe2ZGP9EOZrqKy5EEsOv0wDQ7cdXB3/oMiMGSmSQ5A==", + "dev": true, + "bin": { + "cdk": "bin/cdk" + }, + "engines": { + "node": ">= 14.15.0" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + } + }, + "dependencies": { + "aws-cdk": { + "version": "2.44.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.44.0.tgz", + "integrity": "sha512-9hbK4Yc1GQ28zSjZE2ajidt7sRrTLYpijkI7HT7JcDhXLe2ZGP9EOZrqKy5EEsOv0wDQ7cdXB3/oMiMGSmSQ5A==", + "dev": true, + "requires": { + "fsevents": "2.3.2" + } + }, + "fsevents": { + "version": "2.3.2", + "resolved": 
"https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000000..6d5eb3f5bee --- /dev/null +++ b/package.json @@ -0,0 +1,7 @@ +{ + "name": "aws-lambda-powertools-python-e2e", + "version": "1.0.0", + "devDependencies": { + "aws-cdk": "2.44.0" + } +} diff --git a/parallel_run_e2e.py b/parallel_run_e2e.py index b9603701e5e..745f1392f67 100755 --- a/parallel_run_e2e.py +++ b/parallel_run_e2e.py @@ -8,7 +8,6 @@ def main(): workers = len(list(features)) - 1 command = f"poetry run pytest -n {workers} --dist loadfile -o log_cli=true tests/e2e" - print(f"Running E2E tests with: {command}") subprocess.run(command.split(), shell=False) diff --git a/poetry.lock b/poetry.lock index 09e252b5a09..6fa066bb224 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,44 +1,83 @@ -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] + +[[package]] +name = "aws-cdk-aws-apigatewayv2-alpha" +version = "2.46.0a0" +description = "The CDK Construct Library for AWS::APIGatewayv2" +category = "dev" +optional = false +python-versions = "~=3.7" + +[package.dependencies] +aws-cdk-lib = ">=2.46.0,<3.0.0" +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.69.0,<2.0.0" +publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" + +[[package]] +name = "aws-cdk-aws-apigatewayv2-integrations-alpha" +version = "2.46.0a0" +description = "Integrations for AWS APIGateway V2" +category = "dev" +optional = false +python-versions = "~=3.7" + +[package.dependencies] +"aws-cdk.aws-apigatewayv2-alpha" = "2.46.0.a0" +aws-cdk-lib = ">=2.46.0,<3.0.0" +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.69.0,<2.0.0" +publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.23.0" +version = "2.46.0" description = "Version 2 of the AWS Cloud Development Kit library" category = "dev" optional = false 
-python-versions = ">=3.6" +python-versions = "~=3.7" [package.dependencies] constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.57.0,<2.0.0" +jsii = ">=1.69.0,<2.0.0" publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" + +[[package]] +name = "aws-sam-translator" +version = "1.53.0" +description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" +category = "dev" +optional = false +python-versions = ">=3.7, <=4.0, !=4.0" + +[package.dependencies] +boto3 = ">=1.19.5,<2.0.0" +jsonschema = ">=3.2,<4.0" + +[package.extras] +dev = ["black (==20.8b1)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "click (>=7.1,<8.0)", "coverage (>=5.3,<6.0)", "dateparser (>=0.7,<1.0)", "docopt (>=0.6.2,<0.7.0)", "flake8 (>=3.8.4,<3.9.0)", "mypy (==0.971)", "parameterized (>=0.7.4,<0.8.0)", "pylint (>=2.9.0,<2.10.0)", "pytest (>=6.2.5,<6.3.0)", "pytest-cov (>=2.10.1,<2.11.0)", "pytest-env (>=0.6.2,<0.7.0)", "pytest-xdist (>=2.5,<3.0)", "pyyaml (>=5.4,<6.0)", "requests (>=2.24.0,<2.25.0)", "tenacity (>=7.0.0,<7.1.0)", "tox (>=3.24,<4.0)", "types-PyYAML (>=5.4,<6.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" version = "2.10.0" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." category = "main" -optional = false +optional = true python-versions = "*" [package.dependencies] @@ -47,11 +86,11 @@ wrapt = "*" [[package]] name = "bandit" -version = "1.7.1" +version = "1.7.4" description = "Security oriented static analyser for python code." category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} @@ -59,17 +98,21 @@ GitPython = ">=1.0.1" PyYAML = ">=5.3.1" stevedore = ">=1.20.0" +[package.extras] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] +toml = ["toml"] +yaml = ["PyYAML"] + [[package]] name = "black" -version = "22.8.0" +version = "22.10.0" description = "The uncompromising code formatter." category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" [package.dependencies] click = ">=8.0.0" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.9.0" platformdirs = ">=2" @@ -85,27 +128,27 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.23.10" +version = "1.24.94" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.26.10,<1.27.0" +botocore = ">=1.27.94,<1.28.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.5.0,<0.6.0" +s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.26.10" +version = "1.27.94" description = "Low-level, data-driven core of boto 3." 
category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -113,61 +156,73 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.8)"] - -[[package]] -name = "cattrs" -version = "1.0.0" -description = "Composable complex class support for attrs." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -attrs = ">=17.3" - -[package.extras] -dev = ["Sphinx", "bumpversion", "coverage", "flake8", "hypothesis", "pendulum", "pytest", "tox", "watchdog", "wheel"] +crt = ["awscrt (==0.14.0)"] [[package]] name = "cattrs" -version = "22.1.0" +version = "22.2.0" description = "Composable complex class support for attrs and dataclasses." category = "dev" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.7" [package.dependencies] attrs = ">=20" -exceptiongroup = {version = "*", markers = "python_version <= \"3.10\""} -typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and python_version < \"3.8\""} +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +typing_extensions = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "certifi" -version = "2022.6.15.1" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "cfn-lint" +version = "0.67.0" +description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" +category = "dev" +optional = false +python-versions = ">=3.7, <=4.0, !=4.0" + +[package.dependencies] +aws-sam-translator = ">=1.52.0" +jschema-to-python = ">=1.2.3,<1.3.0" +jsonpatch = "*" +jsonschema = ">=3.0,<5" +junit-xml = ">=1.9,<2.0" +networkx = ">=2.4,<3.0" +pyyaml = ">5.4" +sarif-om = ">=1.0.4,<1.1.0" + [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "dev" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" [package.extras] unicode-backport = ["unicodedata2"] +[[package]] +name = "checksumdir" +version = "1.2.0" +description = "Compute a single hash of the file contents of a directory." 
+category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + [[package]] name = "click" -version = "8.0.4" +version = "8.1.3" description = "Composable command line interface toolkit" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -183,38 +238,31 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "constructs" -version = "10.1.1" +version = "10.1.134" description = "A programming model for software-defined state" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = "~=3.7" [package.dependencies] -jsii = ">=1.57.0,<2.0.0" +jsii = ">=1.70.0,<2.0.0" publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" [[package]] name = "coverage" -version = "6.2" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -tomli = {version = "*", optional = true, markers = "extra == \"toml\""} +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] -[[package]] -name = "dataclasses" -version = "0.8" -description = "A backport of the dataclasses module for Python 3.6" -category = "main" -optional = false -python-versions = ">=3.6, <3.7" - [[package]] name = "decorator" version = "5.1.1" @@ -223,34 +271,6 @@ category = "dev" optional = false python-versions = ">=3.5" -[[package]] -name = "dnspython" -version = "2.2.1" -description = "DNS toolkit" -category = "main" -optional = true -python-versions = ">=3.6,<4.0" - -[package.extras] -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<37.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.20)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] - -[[package]] -name = "email-validator" -version = "1.3.0" -description = "A robust email address syntax and deliverability validation library." -category = "main" -optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.dependencies] -dnspython = ">=1.15.0" -idna = ">=2.0.0" - [[package]] name = "eradicate" version = "2.1.0" @@ -286,7 +306,7 @@ name = "fastjsonschema" version = "2.16.2" description = "Fastest Python implementation of JSON schema" category = "main" -optional = false +optional = true python-versions = "*" [package.extras] @@ -318,6 +338,19 @@ mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.7.0,<2.8.0" pyflakes = ">=2.3.0,<2.4.0" +[[package]] +name = "flake8-black" +version = "0.3.3" +description = "flake8 plugin to call black as a code style validator" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +black = ">=22.1.0" +flake8 = ">=3.0.0" +tomli = "*" + [[package]] name = "flake8-bugbear" version = "22.9.23" @@ -349,42 +382,41 @@ test = ["pytest"] [[package]] name = "flake8-comprehensions" -version = "3.7.0" +version = "3.10.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0,<5" +flake8 = ">=3.0,<3.2.0 || >3.2.0" importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "flake8-debugger" -version = "4.0.0" +version = "4.1.2" description = "ipdb/pdb statement checker plugin for flake8" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3.0" pycodestyle = "*" -six = "*" [[package]] name = "flake8-eradicate" -version = "1.3.0" +version = "1.4.0" description = "Flake8 plugin to find commented out code" category = "dev" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.7,<4.0" [package.dependencies] attrs = "*" eradicate = ">=2.0,<3.0" flake8 = ">=3.5,<6" -setuptools = "*" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "flake8-fixme" @@ -455,55 +487,40 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.20" -description = "Python Git Library" +version = "3.1.29" +description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" +category = "dev" optional = false python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.8.3" +version = "4.13.0" description = "Read metadata from Python packages" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-perf (>=0.9.2)"] - -[[package]] -name = "importlib-resources" -version = "5.4.0" -description = "Read resources from Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -529,11 +546,11 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" -version = "3.0.3" +version = "3.1.2" description = "A very fast and 
expressive template engine." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] MarkupSafe = ">=2.0" @@ -543,32 +560,108 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jmespath" -version = "0.10.0" +version = "1.0.1" description = "JSON Matching Expressions" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.7" + +[[package]] +name = "jschema-to-python" +version = "1.2.3" +description = "Generate source code for Python classes from a JSON schema." +category = "dev" +optional = false +python-versions = ">= 2.7" + +[package.dependencies] +attrs = "*" +jsonpickle = "*" +pbr = "*" [[package]] name = "jsii" -version = "1.57.0" +version = "1.70.0" description = "Python client for jsii runtime" category = "dev" optional = false -python-versions = "~=3.6" +python-versions = "~=3.7" [package.dependencies] -attrs = ">=21.2,<22.0" -cattrs = [ - {version = ">=1.0.0,<1.1.0", markers = "python_version < \"3.7\""}, - {version = ">=1.8,<22.2", markers = "python_version >= \"3.7\""}, -] -importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +attrs = ">=21.2,<23.0" +cattrs = ">=1.8,<22.3" +publication = ">=0.0.3" python-dateutil = "*" +typeguard = ">=2.13.3,<2.14.0" typing-extensions = ">=3.7,<5.0" [[package]] -name = "Mako" +name = "jsonpatch" +version = "1.32" +description = "Apply JSON-Patches (RFC 6902)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpickle" +version = "2.2.0" +description = "Python library for serializing any arbitrary object graph into JSON" +category = "dev" +optional = false +python-versions = ">=2.7" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["ecdsa", "enum34", "feedparser", "jsonlib", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (<1.1.0)", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] +testing-libs = ["simplejson", "ujson", "yajl"] + +[[package]] +name = "jsonpointer" +version = "2.3" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "junit-xml" +version = "1.9" +description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[[package]] +name = "mako" version = "1.2.3" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
category = "dev" @@ -614,11 +707,11 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "mccabe" @@ -638,7 +731,7 @@ python-versions = ">=3.6" [[package]] name = "mike" -version = "0.6.0" +version = "1.1.2" description = "Manage multiple versions of your MkDocs-powered documentation" category = "dev" optional = false @@ -647,35 +740,38 @@ python-versions = "*" [package.dependencies] jinja2 = "*" mkdocs = ">=1.0" -packaging = "*" -"ruamel.yaml" = "*" +pyyaml = ">=5.1" +verspec = "*" [package.extras] -dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] -test = ["coverage", "flake8 (>=3.0)"] +dev = ["coverage", "flake8 (>=3.0)", "shtab"] +test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" -version = "1.3.1" +version = "1.4.1" description = "Project documentation with Markdown." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -click = ">=3.3" +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" -importlib-metadata = ">=4.3" -Jinja2 = ">=2.10.2" -Markdown = ">=3.2.1,<3.4" +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +jinja2 = ">=2.11.1" +markdown = ">=3.2.1,<3.4" mergedeep = ">=1.3.4" packaging = ">=20.5" -PyYAML = ">=3.10" +pyyaml = ">=5.1" pyyaml-env-tag = ">=0.1" +typing-extensions = {version = ">=3.10", markers = "python_version < \"3.8\""} watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] [[package]] name = "mkdocs-git-revision-date-plugin" @@ -692,7 +788,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "8.5.4" +version = "8.5.6" description = "Documentation that simply works" category = "dev" optional = false @@ -701,7 +797,7 @@ python-versions = ">=3.7" [package.dependencies] jinja2 = ">=3.0.2" markdown = ">=3.2" -mkdocs = ">=1.3.0" +mkdocs = ">=1.4.0" mkdocs-material-extensions = ">=1.0.3" pygments = ">=2.12" pymdown-extensions = ">=9.4" @@ -745,6 +841,17 @@ python-versions = ">=3.7" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-appconfigdata" +version = "1.24.36.post1" +description = "Type annotations for boto3.AppConfigData 1.24.36 service generated with mypy-boto3-builder 7.10.0" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-cloudformation" version = "1.24.36.post1" @@ -852,6 +959,21 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "networkx" +version = "2.6.3" +description = "Python package for creating and manipulating graphs and networks" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +default = ["matplotlib (>=3.3)", "numpy (>=1.19)", "pandas (>=1.1)", "scipy (>=1.5,!=1.6.1)"] +developer = ["black (==21.5b1)", "pre-commit (>=2.12)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.1)", "pillow (>=8.2)", 
"pydata-sphinx-theme (>=0.6,<1.0)", "sphinx (>=4.0,<5.0)", "sphinx-gallery (>=0.9,<1.0)", "texext (>=0.6.6)"] +extra = ["lxml (>=4.5)", "pydot (>=1.4.1)", "pygraphviz (>=1.7)"] +test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] + [[package]] name = "packaging" version = "21.3" @@ -865,15 +987,15 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" -version = "0.9.0" +version = "0.10.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [[package]] name = "pbr" -version = "5.10.0" +version = "5.11.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -893,14 +1015,14 @@ markdown = ">=3.0" [[package]] name = "platformdirs" -version = "2.4.0" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] @@ -952,15 +1074,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.9.2" +version = "1.10.2" description = "Data validation and settings management using python type hints" category = "main" optional = true -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.1.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -987,7 +1108,7 @@ plugins = ["importlib-metadata"] [[package]] name = "pymdown-extensions" -version = "9.5" +version = "9.6" description = "Extension pack for Python Markdown." 
category = "dev" optional = false @@ -998,25 +1119,32 @@ markdown = ">=3.2" [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pyrsistent" +version = "0.18.1" +description = "Persistent/Functional/Immutable data structures" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "pytest" -version = "7.0.1" +version = "7.1.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -1089,11 +1217,11 @@ pytest = ">=3.10" [[package]] name = "pytest-mock" -version = "3.6.1" +version = "3.10.0" description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] pytest = ">=5.0" @@ -1172,21 +1300,21 @@ mando = ">=0.6,<0.7" [[package]] name = "requests" -version = "2.27.1" +version = "2.28.1" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "retry" @@ -1201,53 +1329,43 @@ decorator = ">=3.4.2" py = ">=1.4.26,<2.0.0" [[package]] -name = "ruamel.yaml" -version = "0.17.21" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "dev" +name = "s3transfer" +version = "0.6.0" +description = "An Amazon S3 Transfer Manager" +category = "main" optional = false -python-versions = ">=3" +python-versions = ">= 3.7" [package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""} +botocore = ">=1.12.36,<2.0a.0" [package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] -name = "ruamel.yaml.clib" -version = "0.2.6" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +name = "sarif-om" +version = "1.0.4" +description = "Classes implementing the SARIF 2.1.0 object model." 
category = "dev" optional = false -python-versions = ">=3.5" - -[[package]] -name = "s3transfer" -version = "0.5.2" -description = "An Amazon S3 Transfer Manager" -category = "main" -optional = false -python-versions = ">= 3.6" +python-versions = ">= 2.7" [package.dependencies] -botocore = ">=1.12.36,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +attrs = "*" +pbr = "*" [[package]] name = "setuptools" -version = "59.6.0" +version = "65.5.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-inline-tabs", "sphinxcontrib-towncrier"] -testing = ["flake8-2020", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "paver", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-virtualenv (>=1.2.7)", "pytest-xdist", "sphinx", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1267,7 +1385,7 @@ python-versions = ">=3.6" [[package]] name = "stevedore" -version = "3.5.0" +version = "3.5.2" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -1279,11 +1397,11 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "tomli" -version = "1.2.3" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "typed-ast" @@ -1293,6 +1411,18 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "typeguard" +version = "2.13.3" +description = "Run-time type checker for Python" +category = "dev" +optional = false +python-versions = ">=3.5.3" + +[package.extras] +doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] + [[package]] name = "types-requests" version = "2.28.11.2" @@ -1306,7 +1436,7 @@ types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.24" +version = "1.26.25.1" description = "Typing stubs for urllib3" category = "dev" optional = false @@ -1333,6 +1463,17 @@ brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks 
(>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "verspec" +version = "0.1.0" +description = "Flexible version handling" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"] + [[package]] name = "watchdog" version = "2.1.9" @@ -1349,7 +1490,7 @@ name = "wrapt" version = "1.14.1" description = "Module for decorators, wrappers and monkey patching." category = "main" -optional = false +optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] @@ -1367,168 +1508,177 @@ requests = ">=2.0,<3.0" [[package]] name = "zipp" -version = "3.6.0" +version = "3.9.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -pydantic = ["pydantic", "email-validator"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] +aws-sdk = ["boto3"] +parser = ["pydantic"] +tracer = ["aws-xray-sdk"] +validation = ["fastjsonschema"] [metadata] lock-version = "1.1" -python-versions = "^3.6.2" -content-hash = "d8c42b40264cf13d31d7b165eaf69c5ee743f94feb78e365e1490fa34be47478" +python-versions = "^3.7.4" +content-hash = "3efc3b4c3dd7b7a6fe37f519330fb3e450c84ca57b190e7a05fabd444eef8aaa" [metadata.files] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +aws-cdk-aws-apigatewayv2-alpha = [ + {file = "aws-cdk.aws-apigatewayv2-alpha-2.46.0a0.tar.gz", hash = "sha256:10d9324da26db7aeee3a45853a2e249b6b85866fcc8f8f43fa1a0544ce582482"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.46.0a0-py3-none-any.whl", hash = "sha256:2cdeac84fb1fe219e5686ee95d9528a1810e9d426b2bb7f305ea07cb43e328a8"}, +] +aws-cdk-aws-apigatewayv2-integrations-alpha = [ + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.46.0a0.tar.gz", hash = "sha256:91a792c94500987b69fd97cb00afec5ace00f2039ffebebd99f91ee6b47c3c8b"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.46.0a0-py3-none-any.whl", hash = "sha256:c7bbe1c08019cee41c14b6c1513f673d60b337422ef338c67f9a0cb3e17cc963"}, ] aws-cdk-lib = [ - {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"}, - {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = 
"sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"}, + {file = "aws-cdk-lib-2.46.0.tar.gz", hash = "sha256:ec2c6055d64a0574533fcbcdc2006ee32a23d38a5755bc4b99fd1796124b1de5"}, + {file = "aws_cdk_lib-2.46.0-py3-none-any.whl", hash = "sha256:28d76161acf834d97ab5f9a6b2003bb81345e14197474d706de7ee30847b87bd"}, +] +aws-sam-translator = [ + {file = "aws-sam-translator-1.53.0.tar.gz", hash = "sha256:392ed4f5fb08f72cb68a8800f0bc278d2a3b6609bd1ac66bfcdeaaa94cdc18e5"}, + {file = "aws_sam_translator-1.53.0-py2-none-any.whl", hash = "sha256:85252646cf123642d08442137b60445e69e30bfd2f8b663b1202b20ab3782b10"}, + {file = "aws_sam_translator-1.53.0-py3-none-any.whl", hash = "sha256:84d780ad82f1a176e2f5d4c397749d1e71214cc97ee7cccd50f823fd7c7e7cdf"}, ] aws-xray-sdk = [ {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, {file = "aws_xray_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:7551e81a796e1a5471ebe84844c40e8edf7c218db33506d046fec61f7495eda4"}, ] bandit = [ - {file = "bandit-1.7.1-py3-none-any.whl", hash = "sha256:f5acd838e59c038a159b5c621cf0f8270b279e884eadd7b782d7491c02add0d4"}, - {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"}, + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, ] black = [ - {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, - {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, - {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, - {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, - {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, - {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, - {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, - {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, - {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, - {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, - {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, - {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, - {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, - {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, - {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, - {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, - {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, - {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, - {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, + {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, + {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, + {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, + {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, + {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, + {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, + {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, + {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, + {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, + {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, + {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, + {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, + {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, + {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, + {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, + {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, + {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, + {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, + {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, + {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, + {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, ] boto3 = [ - {file = "boto3-1.23.10-py3-none-any.whl", hash = "sha256:40d08614f17a69075e175c02c5d5aab69a6153fd50e40fa7057b913ac7bf40e7"}, - {file = "boto3-1.23.10.tar.gz", hash = "sha256:2a4395e3241c20eef441d7443a5e6eaa0ee3f7114653fb9d9cef41587526f7bd"}, + {file = "boto3-1.24.94-py3-none-any.whl", hash = "sha256:f13db0beb3c9fe2cc1ed0f031189f144610d2909b5874a616e77b0bd1ae3b686"}, + {file = "boto3-1.24.94.tar.gz", hash = "sha256:f4842b395d1580454756622069f4ca0408993885ecede967001d2c101201cdfa"}, ] botocore = [ - {file = "botocore-1.26.10-py3-none-any.whl", hash = "sha256:8a4a984bf901ccefe40037da11ba2abd1ddbcb3b490a492b7f218509c99fc12f"}, - {file = "botocore-1.26.10.tar.gz", hash = "sha256:5df2cf7ebe34377470172bd0bbc582cf98c5cbd02da0909a14e9e2885ab3ae9c"}, + {file = "botocore-1.27.94-py3-none-any.whl", hash = "sha256:8237c070d2ab29fac4fbcfe9dd2e84e0ee147402e0fed3ac1629f92459c7f1d2"}, + {file = "botocore-1.27.94.tar.gz", hash = "sha256:572224608a0b7662966fc303b768e2eba61bf53bdbf314481cd9e63a0d8e1a66"}, ] cattrs = [ - {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, - {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, - {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, - {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, + {file = "cattrs-22.2.0-py3-none-any.whl", hash = "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21"}, + {file = "cattrs-22.2.0.tar.gz", hash = "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d"}, ] certifi = [ - {file = "certifi-2022.6.15.1-py3-none-any.whl", hash = "sha256:43dadad18a7f168740e66944e4fa82c6611848ff9056ad910f8f7a3e46ab89e0"}, - {file = "certifi-2022.6.15.1.tar.gz", hash = "sha256:cffdcd380919da6137f76633531a5817e3a9f268575c128249fb637e4f9e73fb"}, + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, +] +cfn-lint = [ + {file = "cfn-lint-0.67.0.tar.gz", hash = "sha256:dfa707e06f4a530ffc9cf66c0af7a4f28b11190b7a6a22536a6c4aa6afc5ff06"}, + {file = "cfn_lint-0.67.0-py3-none-any.whl", hash = "sha256:3526213b91f1740231cac894652046daa77409a0c0ca755589ab21d5faab8fd1"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = 
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] +checksumdir = [ + {file = "checksumdir-1.2.0-py3-none-any.whl", hash = "sha256:77687e16da95970c94061c74ef2e13666c4b6e0e8c90a5eaf0c8f7591332cf01"}, + {file = "checksumdir-1.2.0.tar.gz", hash = "sha256:10bfd7518da5a14b0e9ac03e9ad105f0e70f58bba52b6e9aa2f21a3f73c7b5a8"}, ] click = [ - {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, - {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] constructs = [ - {file = "constructs-10.1.1-py3-none-any.whl", hash = "sha256:c1f3deb196f54e070ded3c92c4339f73ef2b6022d35fb34908c0ebfa7ef8a640"}, - {file = "constructs-10.1.1.tar.gz", hash = "sha256:6ce0dd1352367237b5d7c51a25740482c852735d2a5e067c536acc1657f39ea5"}, + {file = "constructs-10.1.134-py3-none-any.whl", hash = "sha256:b3f05ad138af83473cc9bd5f8949558bd31d38fb32c09fcc56d0de9057c2e61d"}, + {file = "constructs-10.1.134.tar.gz", hash = "sha256:4ab253a74e62a2c918456d20dff42ec0abb2e4393a6bab0218c81c09e19c1a41"}, ] coverage = [ - {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, - {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, - {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, - {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = 
"sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, - {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, - {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, - {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, - {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, - {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, - {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, - {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, - {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, - {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, - {file = 
"coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, - {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, - {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, - {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, - {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, - {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, - {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, - {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, -] -dataclasses = [ - {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, - {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = 
"coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = 
"decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] -dnspython = [ - {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, - {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, -] -email-validator = [ - {file = "email_validator-1.3.0-py2.py3-none-any.whl", hash = "sha256:816073f2a7cffef786b29928f58ec16cdac42710a53bb18aa94317e3e145ec5c"}, - {file = "email_validator-1.3.0.tar.gz", hash = "sha256:553a66f8be2ec2dea641ae1d3f29017ab89e9d603d4a25cdaac39eefa283d769"}, -] eradicate = [ {file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"}, {file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"}, @@ -1553,6 +1703,10 @@ flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] +flake8-black = [ + {file = "flake8-black-0.3.3.tar.gz", hash = "sha256:8211f5e20e954cb57c709acccf2f3281ce27016d4c4b989c3e51f878bb7ce12a"}, + {file = "flake8_black-0.3.3-py3-none-any.whl", hash = "sha256:7d667d0059fd1aa468de1669d77cc934b7f1feeac258d57bdae69a8e73c4cd90"}, +] flake8-bugbear = [ {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"}, {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"}, @@ -1562,16 +1716,16 @@ flake8-builtins = [ {file = "flake8_builtins-2.0.0-py3-none-any.whl", hash = "sha256:39bfa3badb5e8d22f92baf4e0ea1b816707245233846932d6b13e81fc6f673e8"}, ] flake8-comprehensions = [ - {file = "flake8-comprehensions-3.7.0.tar.gz", hash = "sha256:6b3218b2dde8ac5959c6476cde8f41a79e823c22feb656be2710cd2a3232cef9"}, - {file = "flake8_comprehensions-3.7.0-py3-none-any.whl", hash = "sha256:a5d7aea6315bbbd6fbcb2b4e80bff6a54d1600155e26236e555d0c6fe1d62522"}, + {file = "flake8-comprehensions-3.10.0.tar.gz", hash = "sha256:181158f7e7aa26a63a0a38e6017cef28c6adee71278ce56ce11f6ec9c4905058"}, + {file = "flake8_comprehensions-3.10.0-py3-none-any.whl", hash = "sha256:dad454fd3d525039121e98fa1dd90c46bc138708196a4ebbc949ad3c859adedb"}, ] flake8-debugger = [ - {file = "flake8-debugger-4.0.0.tar.gz", hash = "sha256:e43dc777f7db1481db473210101ec2df2bd39a45b149d7218a618e954177eda6"}, - {file = "flake8_debugger-4.0.0-py3-none-any.whl", hash = "sha256:82e64faa72e18d1bdd0000407502ebb8ecffa7bc027c62b9d4110ce27c091032"}, + {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, + {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, ] flake8-eradicate = [ - {file = "flake8-eradicate-1.3.0.tar.gz", hash = "sha256:e4c98f00d17dc8653e3388cac2624cd81e9735de2fd4a8dcf99029633ebd7a63"}, - {file = "flake8_eradicate-1.3.0-py3-none-any.whl", hash = "sha256:85a71e0c5f4e07f7c6c5fec520483561fd6bd295417d622855bdeade99242e3d"}, + {file = "flake8-eradicate-1.4.0.tar.gz", hash = "sha256:3088cfd6717d1c9c6c3ac45ef2e5f5b6c7267f7504d5a74b781500e95cb9c7e1"}, + {file = "flake8_eradicate-1.4.0-py3-none-any.whl", hash = 
"sha256:e3bbd0871be358e908053c1ab728903c114f062ba596b4d40c852fd18f473d56"}, ] flake8-fixme = [ {file = "flake8-fixme-1.1.1.tar.gz", hash = "sha256:50cade07d27a4c30d4f12351478df87339e67640c83041b664724bda6d16f33a"}, @@ -1596,20 +1750,16 @@ gitdb = [ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, - {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, + {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, + {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, - {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, -] -importlib-resources = [ - {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, - {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1620,18 +1770,41 @@ isort = [ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] jinja2 = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] jmespath = [ - {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, - {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] +jschema-to-python = [ + {file = 
"jschema_to_python-1.2.3-py3-none-any.whl", hash = "sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05"}, + {file = "jschema_to_python-1.2.3.tar.gz", hash = "sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91"}, ] jsii = [ - {file = "jsii-1.57.0-py3-none-any.whl", hash = "sha256:4888091986a9ed8d50b042cc9c35a9564dd54c19e78adb890bf06d9ffac1b325"}, - {file = "jsii-1.57.0.tar.gz", hash = "sha256:ff7a3c51c1a653dd8a4342043b5f8e40b928bc617e3141e0d5d66175d22a754b"}, + {file = "jsii-1.70.0-py3-none-any.whl", hash = "sha256:d0867c0d2f60ceda1664c026033ae34ea36178c7027315e577ded13043827664"}, + {file = "jsii-1.70.0.tar.gz", hash = "sha256:9fc57ff37868364ba3417b26dc97189cd0cc71282196a3f4765768c067354be0"}, +] +jsonpatch = [ + {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, + {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, +] +jsonpickle = [ + {file = "jsonpickle-2.2.0-py2.py3-none-any.whl", hash = "sha256:de7f2613818aa4f234138ca11243d6359ff83ae528b2185efdd474f62bcf9ae1"}, + {file = "jsonpickle-2.2.0.tar.gz", hash = "sha256:7b272918b0554182e53dc340ddd62d9b7f902fec7e7b05620c04f3ccef479a0e"}, ] -Mako = [ +jsonpointer = [ + {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, + {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, +] +jsonschema = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] +junit-xml = [ + {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, +] +mako = [ {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, ] @@ -1644,75 +1817,46 @@ markdown = [ {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = 
"sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = 
"MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, @@ -1723,19 +1867,19 @@ mergedeep = [ {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, ] mike = [ - {file = "mike-0.6.0-py3-none-any.whl", hash = "sha256:cef9b9c803ff5c3fbb410f51f5ceb00902a9fe16d9fabd93b69c65cf481ab5a1"}, - {file = "mike-0.6.0.tar.gz", hash = "sha256:6d6239de2a60d733da2f34617e9b9a14c4b5437423b47e524f14dc96d6ce5f2f"}, + {file = "mike-1.1.2-py3-none-any.whl", hash = "sha256:4c307c28769834d78df10f834f57f810f04ca27d248f80a75f49c6fa2d1527ca"}, + {file = "mike-1.1.2.tar.gz", hash = "sha256:56c3f1794c2d0b5fdccfa9b9487beb013ca813de2e3ad0744724e9d34d40b77b"}, ] mkdocs = [ - {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"}, - {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"}, + {file = "mkdocs-1.4.1-py3-none-any.whl", hash = "sha256:2b7845c2775396214cd408753e4cfb01af3cfed36acc141a84bce2ceec9d705d"}, + {file = "mkdocs-1.4.1.tar.gz", hash = "sha256:07ed90be4062e4ef732bbac2623097b9dca35c67b562c38cfd0bfbc7151758c1"}, ] mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, ] mkdocs-material = [ - {file = "mkdocs_material-8.5.4-py3-none-any.whl", hash = "sha256:aec2f0f2143109f8388aadf76e6fff749a2b74ebe730d0f674c65b53da89d19d"}, - {file = "mkdocs_material-8.5.4.tar.gz", hash = "sha256:70dc47820d4765b77968b9119f2957d09b4d8d328d950bee4544ff224d5c7b36"}, + 
{file = "mkdocs_material-8.5.6-py3-none-any.whl", hash = "sha256:b473162c800321b9760453f301a91f7cb40a120a85a9d0464e1e484e74b76bb2"}, + {file = "mkdocs_material-8.5.6.tar.gz", hash = "sha256:38a21d817265d0c203ab3dad64996e45859c983f72180f6937bd5540a4eb84e4"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, @@ -1770,6 +1914,10 @@ mypy-boto3-appconfig = [ {file = "mypy-boto3-appconfig-1.24.36.post1.tar.gz", hash = "sha256:e1916b3754915cb411ef977083500e1f30f81f7b3aea6ff5eed1cec91944dea6"}, {file = "mypy_boto3_appconfig-1.24.36.post1-py3-none-any.whl", hash = "sha256:a5dbe549dbebf4bc7a6cfcbfa9dff89ceb4983c042b785763ee656504bdb49f6"}, ] +mypy-boto3-appconfigdata = [ + {file = "mypy-boto3-appconfigdata-1.24.36.post1.tar.gz", hash = "sha256:48c0b29a99f5e5a54a4585a4b3661bc00c7db40e481c5d014a4bfd86d1ae645e"}, + {file = "mypy_boto3_appconfigdata-1.24.36.post1-py3-none-any.whl", hash = "sha256:2bc495e6b6bd358d78d30f84b750d17ac326b2b4356a7786d0d1334812416edd"}, +] mypy-boto3-cloudformation = [ {file = "mypy-boto3-cloudformation-1.24.36.post1.tar.gz", hash = "sha256:ed7df9ae3a8390a145229122a1489d0a58bbf9986cb54f0d7a65ed54f12c8e63"}, {file = "mypy_boto3_cloudformation-1.24.36.post1-py3-none-any.whl", hash = "sha256:b39020c13a876bb18908aad22326478d0ac3faec0bdac0d2c11dc318c9dcf149"}, @@ -1810,25 +1958,29 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] +networkx = [ + {file = "networkx-2.6.3-py3-none-any.whl", hash = "sha256:80b6b89c77d1dfb64a4c7854981b60aeea6360ac02c6d4e4913319e0a313abef"}, + {file = "networkx-2.6.3.tar.gz", hash = "sha256:c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, + {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, + {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, ] pbr = [ - {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, - {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, + {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, + {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, ] pdoc3 = [ {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, ] platformdirs = [ - {file = 
"platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -1850,41 +2002,42 @@ pycodestyle = [ {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pydantic = [ - {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"}, - {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567"}, - {file = "pydantic-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044"}, - {file = "pydantic-1.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b"}, - {file = "pydantic-1.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001"}, - {file = "pydantic-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979"}, - {file = 
"pydantic-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d"}, - {file = "pydantic-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8"}, - {file = "pydantic-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15"}, - {file = "pydantic-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55"}, - {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"}, - {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, + {file = 
"pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, + {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, + {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, + {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, + {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, + {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, + {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, + {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, + {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, @@ -1895,16 +2048,39 @@ pygments = [ {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] pymdown-extensions = [ - {file = "pymdown_extensions-9.5-py3-none-any.whl", hash = "sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"}, - {file = "pymdown_extensions-9.5.tar.gz", hash = "sha256:3ef2d998c0d5fa7eb09291926d90d69391283561cf6306f85cd588a5eb5befa0"}, + {file = "pymdown_extensions-9.6-py3-none-any.whl", hash = "sha256:1e36490adc7bfcef1fdb21bb0306e93af99cff8ec2db199bd17e3bf009768c11"}, + {file = "pymdown_extensions-9.6.tar.gz", hash = "sha256:b956b806439bbff10f726103a941266beb03fbe99f897c7d5e774d7170339ad9"}, ] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pyrsistent = [ + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, ] pytest = [ - {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"}, - {file = "pytest-7.0.1.tar.gz", hash = "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"}, + {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, ] pytest-asyncio = [ {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, @@ -1923,8 +2099,8 @@ pytest-forked = [ {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, ] pytest-mock = [ - {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, - {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, + {file = 
"pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, + {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, ] pytest-xdist = [ {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, @@ -2035,56 +2211,24 @@ radon = [ {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, ] requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] retry = [ {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, ] -"ruamel.yaml" = [ - {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, - {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, -] -"ruamel.yaml.clib" = [ - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = 
"sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, - {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, -] s3transfer = [ - {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, - {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, +] +sarif-om = [ + {file = "sarif_om-1.0.4-py3-none-any.whl", hash = "sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911"}, + {file = "sarif_om-1.0.4.tar.gz", hash = "sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98"}, ] setuptools = [ - {file = "setuptools-59.6.0-py3-none-any.whl", hash = "sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e"}, - {file = "setuptools-59.6.0.tar.gz", hash = 
"sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373"}, + {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, + {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -2095,12 +2239,12 @@ smmap = [ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, + {file = "stevedore-3.5.2-py3-none-any.whl", hash = "sha256:fa2630e3d0ad3e22d4914aff2501445815b9a4467a6edc49387c667a38faf5bf"}, + {file = "stevedore-3.5.2.tar.gz", hash = "sha256:cf99f41fc0d5a4f185ca4d3d42b03be9011b0a1ec1a4ea1a282be1b4b306dcc2"}, ] tomli = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] typed-ast = [ {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, @@ -2128,13 +2272,17 @@ typed-ast = [ {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] +typeguard = [ + {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, + {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, +] types-requests = [ {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"}, {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"}, ] types-urllib3 = [ - {file = "types-urllib3-1.26.24.tar.gz", hash = "sha256:a1b3aaea7dda3eb1b51699ee723aadd235488e4dc4648e030f09bc429ecff42f"}, - {file = "types_urllib3-1.26.24-py3-none-any.whl", hash = "sha256:cf7918503d02d3576e503bbfb419b0e047c4617653bba09624756ab7175e15c9"}, + {file = "types-urllib3-1.26.25.1.tar.gz", hash = "sha256:a948584944b2412c9a74b9cf64f6c48caf8652cb88b38361316f6d15d8a184cd"}, + {file = "types_urllib3-1.26.25.1-py3-none-any.whl", hash = "sha256:f6422596cc9ee5fdf68f9d547f541096a20c2dcfd587e37c804c9ea720bf5cb2"}, ] typing-extensions = [ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, @@ -2144,6 +2292,10 @@ urllib3 = [ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, {file = "urllib3-1.26.12.tar.gz", hash = 
"sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] +verspec = [ + {file = "verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31"}, + {file = "verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e"}, +] watchdog = [ {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a735a990a1095f75ca4f36ea2ef2752c99e6ee997c46b0de507ba40a09bf7330"}, {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b17d302850c8d412784d9246cfe8d7e3af6bcd45f958abb2d08a6f8bedf695d"}, @@ -2242,6 +2394,6 @@ xenon = [ {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, ] zipp = [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, + {file = "zipp-3.9.0-py3-none-any.whl", hash = "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980"}, + {file = "zipp-3.9.0.tar.gz", hash = "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb"}, ] diff --git a/pyproject.toml b/pyproject.toml index e4cf27ff5e6..aad7c8fd533 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.31.1" +version = "2.0.0" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] @@ -9,7 +9,6 @@ classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -20,25 +19,24 @@ keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "pow license = "MIT-0" [tool.poetry.dependencies] -python = "^3.6.2" -aws-xray-sdk = "^2.8.0" -fastjsonschema = "^2.14.5" -boto3 = "^1.18" -pydantic = {version = "^1.8.2", optional = true } -email-validator = {version = "*", optional = true } +python = "^3.7.4" +aws-xray-sdk = { version = "^2.8.0", optional = true } +fastjsonschema = { version = "^2.14.5", optional = true } +pydantic = { version = "^1.8.2", optional = true } +boto3 = { version = "^1.20.32", optional = true } [tool.poetry.dev-dependencies] -# Maintenance: 2022-04-21 jmespath was removed, to be re-added once we drop python 3.6. 
-# issue #1148 coverage = {extras = ["toml"], version = "^6.2"} pytest = "^7.0.1" black = "^22.8" +boto3 = "^1.18" flake8-builtins = "^2.0.0" flake8-comprehensions = "^3.7.0" flake8-debugger = "^4.0.0" flake8-fixme = "^1.1.1" flake8-isort = "^4.1.2" flake8-variables-names = "^0.0.4" +flake8-black = "^0.3.3" isort = "^5.10.1" pytest-cov = "^4.0.0" pytest-mock = "^3.5.1" @@ -50,32 +48,43 @@ xenon = "^0.9.0" flake8-eradicate = "^1.2.1" flake8-bugbear = "^22.9.23" mkdocs-git-revision-date-plugin = "^0.3.2" -mike = "^0.6.0" +mike = "^1.1.2" mypy = "^0.971" retry = "^0.9.2" pytest-xdist = "^2.5.0" -aws-cdk-lib = "^2.23.0" +aws-cdk-lib = "^2.38.1" +"aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" +"aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" pytest-benchmark = "^3.4.1" -mypy-boto3-appconfig = { version = "^1.24.29", python = ">=3.7" } -mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-cloudwatch = { version = "^1.24.35", python = ">=3.7" } -mypy-boto3-dynamodb = { version = "^1.24.74", python = ">=3.7" } -mypy-boto3-lambda = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-logs = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-secretsmanager = { version = "^1.24.83", python = ">=3.7" } -mypy-boto3-ssm = { version = "^1.24.90", python = ">=3.7" } -mypy-boto3-s3 = { version = "^1.24.94", python = ">=3.7" } -mypy-boto3-xray = { version = "^1.24.0", python = ">=3.7" } -types-requests = "^2.28.11" -typing-extensions = { version = "^4.4.0", python = ">=3.7" } python-snappy = "^0.6.1" -mkdocs-material = { version = "^8.5.4", python = ">=3.7" } -filelock = { version = "^3.8.0", python = ">=3.7" } -# Maintenance: 2022-09-19 pinned mako to fix vulnerability as a pdoc3 dependency. Remove once we drop python 3.6. -Mako = {version = "1.2.3", python = ">=3.7"} +mypy-boto3-appconfig = "^1.24.29" +mypy-boto3-cloudformation = "^1.24.0" +mypy-boto3-cloudwatch = "^1.24.35" +mypy-boto3-dynamodb = "^1.24.60" +mypy-boto3-lambda = "^1.24.0" +mypy-boto3-logs = "^1.24.0" +mypy-boto3-secretsmanager = "^1.24.11" +mypy-boto3-ssm = "^1.24.0" +mypy-boto3-s3 = "^1.24.0" +mypy-boto3-xray = "^1.24.0" +types-requests = "^2.28.11" +typing-extensions = "^4.4.0" +mkdocs-material = "^8.5.4" +filelock = "^3.8.0" +checksumdir = "^1.2.0" +mypy-boto3-appconfigdata = "^1.24.36" +importlib-metadata = "^4.13" [tool.poetry.extras] -pydantic = ["pydantic", "email-validator"] +parser = ["pydantic"] +validation = ["fastjsonschema"] +tracer = ["aws-xray-sdk"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] +# allow customers to run code locally without emulators (SAM CLI, etc.) 
+aws-sdk = ["boto3"] + +[tool.poetry.group.dev.dependencies] +cfn-lint = "0.67.0" [tool.coverage.run] source = ["aws_lambda_powertools"] diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index ac55d373e63..f59eea9a33b 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,21 +1,15 @@ import pytest -from tests.e2e.utils.infrastructure import LambdaLayerStack, deploy_once +from tests.e2e.utils.infrastructure import call_once +from tests.e2e.utils.lambda_layer.powertools_layer import LocalLambdaPowertoolsLayer -@pytest.fixture(scope="session") -def lambda_layer_arn(lambda_layer_deployment): - yield lambda_layer_deployment.get("LayerArn") - - -@pytest.fixture(scope="session") -def lambda_layer_deployment(request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str): - """Setup and teardown logic for E2E test infrastructure +@pytest.fixture(scope="session", autouse=True) +def lambda_layer_build(tmp_path_factory: pytest.TempPathFactory, worker_id: str) -> str: + """Build Lambda Layer once before stacks are created Parameters ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed tmp_path_factory : pytest.TempPathFactory pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up worker_id : str @@ -23,13 +17,13 @@ def lambda_layer_deployment(request: pytest.FixtureRequest, tmp_path_factory: py Yields ------ - Dict[str, str] - CloudFormation Outputs from deployed infrastructure + str + Lambda Layer artefact location """ - yield from deploy_once( - stack=LambdaLayerStack, - request=request, + + layer = LocalLambdaPowertoolsLayer() + yield from call_once( + task=layer.build, tmp_path_factory=tmp_path_factory, worker_id=worker_id, - layer_arn="", ) diff --git a/tests/e2e/event_handler/__init__.py b/tests/e2e/event_handler/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/event_handler/conftest.py b/tests/e2e/event_handler/conftest.py new file mode 100644 index 00000000000..43941946ac7 --- /dev/null +++ b/tests/e2e/event_handler/conftest.py @@ -0,0 +1,19 @@ +import pytest + +from tests.e2e.event_handler.infrastructure import EventHandlerStack + + +@pytest.fixture(autouse=True, scope="module") +def infrastructure(): + """Setup and teardown logic for E2E test infrastructure + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = EventHandlerStack() + try: + yield stack.deploy() + finally: + stack.delete() diff --git a/tests/e2e/event_handler/handlers/alb_handler.py b/tests/e2e/event_handler/handlers/alb_handler.py new file mode 100644 index 00000000000..26746284aee --- /dev/null +++ b/tests/e2e/event_handler/handlers/alb_handler.py @@ -0,0 +1,29 @@ +from aws_lambda_powertools.event_handler import ALBResolver, Response, content_types + +app = ALBResolver() + +# The reason we use post is that whoever is writing tests can easily assert on the +# content being sent (body, headers, cookies, content-type) to reduce cognitive load. 
+ + +@app.post("/todos") +def todos(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + + return Response( + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/handlers/api_gateway_http_handler.py b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py new file mode 100644 index 00000000000..1012af7b3fb --- /dev/null +++ b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py @@ -0,0 +1,33 @@ +from aws_lambda_powertools.event_handler import ( + APIGatewayHttpResolver, + Response, + content_types, +) + +app = APIGatewayHttpResolver() + +# The reason we use post is that whoever is writing tests can easily assert on the +# content being sent (body, headers, cookies, content-type) to reduce cognitive load. + + +@app.post("/todos") +def todos(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + + return Response( + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py new file mode 100644 index 00000000000..d52e2728cab --- /dev/null +++ b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py @@ -0,0 +1,33 @@ +from aws_lambda_powertools.event_handler import ( + APIGatewayRestResolver, + Response, + content_types, +) + +app = APIGatewayRestResolver() + +# The reason we use post is that whoever is writing tests can easily assert on the +# content being sent (body, headers, cookies, content-type) to reduce cognitive load. + + +@app.post("/todos") +def todos(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + + return Response( + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/handlers/lambda_function_url_handler.py b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py new file mode 100644 index 00000000000..f90037afc75 --- /dev/null +++ b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py @@ -0,0 +1,33 @@ +from aws_lambda_powertools.event_handler import ( + LambdaFunctionUrlResolver, + Response, + content_types, +) + +app = LambdaFunctionUrlResolver() + +# The reason we use post is that whoever is writing tests can easily assert on the +# content being sent (body, headers, cookies, content-type) to reduce cognitive load. 
+ + +@app.post("/todos") +def todos(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + + return Response( + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/infrastructure.py b/tests/e2e/event_handler/infrastructure.py new file mode 100644 index 00000000000..da456038a25 --- /dev/null +++ b/tests/e2e/event_handler/infrastructure.py @@ -0,0 +1,80 @@ +from typing import Dict, Optional + +from aws_cdk import CfnOutput +from aws_cdk import aws_apigateway as apigwv1 +from aws_cdk import aws_apigatewayv2_alpha as apigwv2 +from aws_cdk import aws_apigatewayv2_integrations_alpha as apigwv2integrations +from aws_cdk import aws_ec2 as ec2 +from aws_cdk import aws_elasticloadbalancingv2 as elbv2 +from aws_cdk import aws_elasticloadbalancingv2_targets as targets +from aws_cdk.aws_lambda import Function, FunctionUrlAuthType + +from tests.e2e.utils.infrastructure import BaseInfrastructure + + +class EventHandlerStack(BaseInfrastructure): + def create_resources(self): + functions = self.create_lambda_functions() + + self._create_alb(function=functions["AlbHandler"]) + self._create_api_gateway_rest(function=functions["ApiGatewayRestHandler"]) + self._create_api_gateway_http(function=functions["ApiGatewayHttpHandler"]) + self._create_lambda_function_url(function=functions["LambdaFunctionUrlHandler"]) + + def _create_alb(self, function: Function): + vpc = ec2.Vpc.from_lookup( + self.stack, + "VPC", + is_default=True, + region=self.region, + ) + + alb = elbv2.ApplicationLoadBalancer(self.stack, "ALB", vpc=vpc, internet_facing=True) + CfnOutput(self.stack, "ALBDnsName", value=alb.load_balancer_dns_name) + + self._create_alb_listener(alb=alb, name="Basic", port=80, function=function) + self._create_alb_listener( + alb=alb, + name="MultiValueHeader", + port=8080, + function=function, + attributes={"lambda.multi_value_headers.enabled": "true"}, + ) + + def _create_alb_listener( + self, + alb: elbv2.ApplicationLoadBalancer, + name: str, + port: int, + function: Function, + attributes: Optional[Dict[str, str]] = None, + ): + listener = alb.add_listener(name, port=port, protocol=elbv2.ApplicationProtocol.HTTP) + target = listener.add_targets(f"ALB{name}Target", targets=[targets.LambdaTarget(function)]) + if attributes is not None: + for key, value in attributes.items(): + target.set_attribute(key, value) + CfnOutput(self.stack, f"ALB{name}ListenerPort", value=str(port)) + + def _create_api_gateway_http(self, function: Function): + apigw = apigwv2.HttpApi(self.stack, "APIGatewayHTTP", create_default_stage=True) + apigw.add_routes( + path="/todos", + methods=[apigwv2.HttpMethod.POST], + integration=apigwv2integrations.HttpLambdaIntegration("TodosIntegration", function), + ) + + CfnOutput(self.stack, "APIGatewayHTTPUrl", value=(apigw.url or "")) + + def _create_api_gateway_rest(self, function: Function): + apigw = apigwv1.RestApi(self.stack, "APIGatewayRest", deploy_options=apigwv1.StageOptions(stage_name="dev")) + + todos = apigw.root.add_resource("todos") + todos.add_method("POST", apigwv1.LambdaIntegration(function, proxy=True)) + + CfnOutput(self.stack, "APIGatewayRestUrl", value=apigw.url) + + def 
_create_lambda_function_url(self, function: Function): + # Maintenance: move auth to IAM when we create sigv4 builders + function_url = function.add_function_url(auth_type=FunctionUrlAuthType.NONE) + CfnOutput(self.stack, "LambdaFunctionUrl", value=function_url.url) diff --git a/tests/e2e/event_handler/test_header_serializer.py b/tests/e2e/event_handler/test_header_serializer.py new file mode 100644 index 00000000000..eedb69ccaad --- /dev/null +++ b/tests/e2e/event_handler/test_header_serializer.py @@ -0,0 +1,217 @@ +from uuid import uuid4 + +import pytest +from requests import Request + +from aws_lambda_powertools.shared.cookies import Cookie +from tests.e2e.utils import data_fetcher + + +@pytest.fixture +def alb_basic_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBBasicListenerPort", "") + return f"http://{dns_name}:{port}" + + +@pytest.fixture +def alb_multi_value_header_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBMultiValueHeaderListenerPort", "") + return f"http://{dns_name}:{port}" + + +@pytest.fixture +def apigw_rest_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayRestUrl", "") + + +@pytest.fixture +def apigw_http_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayHTTPUrl", "") + + +@pytest.fixture +def lambda_function_url_endpoint(infrastructure: dict) -> str: + return infrastructure.get("LambdaFunctionUrl", "") + + +def test_alb_headers_serializer(alb_basic_listener_endpoint): + # GIVEN + url = f"{alb_basic_listener_endpoint}/todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] + last_cookie = cookies[-1] + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) + + # THEN + assert response.status_code == status_code + # response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body + + # Only the last header should be set + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else sorted(value)[-1] + assert response.headers[key] == value + + # Only the last cookie should be set + assert len(response.cookies.items()) == 1 + assert last_cookie.name in response.cookies + assert response.cookies.get(last_cookie.name) == last_cookie.value + + +def test_alb_multi_value_headers_serializer(alb_multi_value_header_listener_endpoint): + # GIVEN + url = f"{alb_multi_value_header_listener_endpoint}/todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) + + # THEN + assert response.status_code == status_code + # response.content is a binary string, needs to be 
decoded to compare with the real string + assert response.content.decode("ascii") == body + + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) + + # ALB sorts the header values randomly, so we have to re-order them for comparison here + returned_value = ", ".join(sorted(response.headers[key].split(", "))) + assert returned_value == value + + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value + + +def test_api_gateway_rest_headers_serializer(apigw_rest_endpoint): + # GIVEN + url = f"{apigw_rest_endpoint}todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) + + # THEN + assert response.status_code == status_code + # response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body + + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) + assert response.headers[key] == value + + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value + + +def test_api_gateway_http_headers_serializer(apigw_http_endpoint): + # GIVEN + url = f"{apigw_http_endpoint}todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) + + # THEN + assert response.status_code == status_code + # response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body + + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) + assert response.headers[key] == value + + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value + + +def test_lambda_function_url_headers_serializer(lambda_function_url_endpoint): + # GIVEN + url = f"{lambda_function_url_endpoint}todos" # the function url endpoint already has the trailing / + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) + + # THEN + assert response.status_code == status_code + # response.content is a binary string, needs to 
be decoded to compare with the real string + assert response.content.decode("ascii") == body + + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) + assert response.headers[key] == value + + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value diff --git a/tests/e2e/event_handler/test_paths_ending_with_slash.py b/tests/e2e/event_handler/test_paths_ending_with_slash.py new file mode 100644 index 00000000000..4c1461d6fc5 --- /dev/null +++ b/tests/e2e/event_handler/test_paths_ending_with_slash.py @@ -0,0 +1,99 @@ +import pytest +from requests import HTTPError, Request + +from tests.e2e.utils import data_fetcher + + +@pytest.fixture +def alb_basic_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBBasicListenerPort", "") + return f"http://{dns_name}:{port}" + + +@pytest.fixture +def alb_multi_value_header_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBMultiValueHeaderListenerPort", "") + return f"http://{dns_name}:{port}" + + +@pytest.fixture +def apigw_rest_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayRestUrl", "") + + +@pytest.fixture +def apigw_http_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayHTTPUrl", "") + + +@pytest.fixture +def lambda_function_url_endpoint(infrastructure: dict) -> str: + return infrastructure.get("LambdaFunctionUrl", "") + + +def test_api_gateway_rest_trailing_slash(apigw_rest_endpoint): + # GIVEN API URL ends in a trailing slash + url = f"{apigw_rest_endpoint}todos/" + body = "Hello World" + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body}, + ) + ) + + # THEN expect a HTTP 200 response + assert response.status_code == 200 + + +def test_api_gateway_http_trailing_slash(apigw_http_endpoint): + # GIVEN the URL for the API ends in a trailing slash API gateway should return a 404 + url = f"{apigw_http_endpoint}todos/" + body = "Hello World" + + # WHEN calling an invalid URL (with trailing slash) expect HTTPError exception from data_fetcher + with pytest.raises(HTTPError): + data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body}, + ) + ) + + +def test_lambda_function_url_trailing_slash(lambda_function_url_endpoint): + # GIVEN the URL for the API ends in a trailing slash it should behave as if there was not one + url = f"{lambda_function_url_endpoint}todos/" # the function url endpoint already has the trailing / + body = "Hello World" + + # WHEN calling an invalid URL (with trailing slash) expect HTTPError exception from data_fetcher + with pytest.raises(HTTPError): + data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body}, + ) + ) + + +def test_alb_url_trailing_slash(alb_multi_value_header_listener_endpoint): + # GIVEN url has a trailing slash - it should behave as if there was not one + url = f"{alb_multi_value_header_listener_endpoint}/todos/" + body = "Hello World" + + # WHEN calling an invalid URL (with trailing slash) expect HTTPError exception from data_fetcher + with pytest.raises(HTTPError): + data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body}, + ) + ) diff --git a/tests/e2e/idempotency/__init__.py 
b/tests/e2e/idempotency/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/idempotency/conftest.py b/tests/e2e/idempotency/conftest.py new file mode 100644 index 00000000000..24a7c71c1f2 --- /dev/null +++ b/tests/e2e/idempotency/conftest.py @@ -0,0 +1,19 @@ +import pytest + +from tests.e2e.idempotency.infrastructure import IdempotencyDynamoDBStack + + +@pytest.fixture(autouse=True, scope="module") +def infrastructure(tmp_path_factory, worker_id): + """Setup and teardown logic for E2E test infrastructure + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = IdempotencyDynamoDBStack() + try: + yield stack.deploy() + finally: + stack.delete() diff --git a/tests/e2e/idempotency/handlers/parallel_execution_handler.py b/tests/e2e/idempotency/handlers/parallel_execution_handler.py new file mode 100644 index 00000000000..6dcb012d858 --- /dev/null +++ b/tests/e2e/idempotency/handlers/parallel_execution_handler.py @@ -0,0 +1,18 @@ +import os +import time + +from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, + idempotent, +) + +TABLE_NAME = os.getenv("IdempotencyTable", "") +persistence_layer = DynamoDBPersistenceLayer(table_name=TABLE_NAME) + + +@idempotent(persistence_store=persistence_layer) +def lambda_handler(event, context): + + time.sleep(5) + + return event diff --git a/tests/e2e/idempotency/handlers/ttl_cache_expiration_handler.py b/tests/e2e/idempotency/handlers/ttl_cache_expiration_handler.py new file mode 100644 index 00000000000..4cd71045dc0 --- /dev/null +++ b/tests/e2e/idempotency/handlers/ttl_cache_expiration_handler.py @@ -0,0 +1,20 @@ +import os +import time + +from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, + IdempotencyConfig, + idempotent, +) + +TABLE_NAME = os.getenv("IdempotencyTable", "") +persistence_layer = DynamoDBPersistenceLayer(table_name=TABLE_NAME) +config = IdempotencyConfig(expires_after_seconds=5) + + +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + + time_now = time.time() + + return {"time": str(time_now)} diff --git a/tests/e2e/idempotency/handlers/ttl_cache_timeout_handler.py b/tests/e2e/idempotency/handlers/ttl_cache_timeout_handler.py new file mode 100644 index 00000000000..99be7b63391 --- /dev/null +++ b/tests/e2e/idempotency/handlers/ttl_cache_timeout_handler.py @@ -0,0 +1,21 @@ +import os +import time + +from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, + IdempotencyConfig, + idempotent, +) + +TABLE_NAME = os.getenv("IdempotencyTable", "") +persistence_layer = DynamoDBPersistenceLayer(table_name=TABLE_NAME) +config = IdempotencyConfig(expires_after_seconds=1) + + +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + + sleep_time: int = event.get("sleep") or 0 + time.sleep(sleep_time) + + return event diff --git a/tests/e2e/idempotency/infrastructure.py b/tests/e2e/idempotency/infrastructure.py new file mode 100644 index 00000000000..abe69f6a5e6 --- /dev/null +++ b/tests/e2e/idempotency/infrastructure.py @@ -0,0 +1,31 @@ +from aws_cdk import CfnOutput, RemovalPolicy +from aws_cdk import aws_dynamodb as dynamodb +from aws_cdk.aws_dynamodb import Table + +from tests.e2e.utils.infrastructure import BaseInfrastructure + + +class IdempotencyDynamoDBStack(BaseInfrastructure): + def create_resources(self): + table = self._create_dynamodb_table() + + env_vars = 
{"IdempotencyTable": table.table_name} + functions = self.create_lambda_functions(function_props={"environment": env_vars}) + + table.grant_read_write_data(functions["TtlCacheExpirationHandler"]) + table.grant_read_write_data(functions["TtlCacheTimeoutHandler"]) + table.grant_read_write_data(functions["ParallelExecutionHandler"]) + + def _create_dynamodb_table(self) -> Table: + table = dynamodb.Table( + self.stack, + "Idempotency", + removal_policy=RemovalPolicy.DESTROY, + partition_key=dynamodb.Attribute(name="id", type=dynamodb.AttributeType.STRING), + time_to_live_attribute="expiration", + billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST, + ) + + CfnOutput(self.stack, "DynamoDBTable", value=table.table_name) + + return table diff --git a/tests/e2e/idempotency/test_idempotency_dynamodb.py b/tests/e2e/idempotency/test_idempotency_dynamodb.py new file mode 100644 index 00000000000..87b61d285ec --- /dev/null +++ b/tests/e2e/idempotency/test_idempotency_dynamodb.py @@ -0,0 +1,96 @@ +import json +from time import sleep + +import pytest + +from tests.e2e.utils import data_fetcher +from tests.e2e.utils.functions import execute_lambdas_in_parallel + + +@pytest.fixture +def ttl_cache_expiration_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("TtlCacheExpirationHandlerArn", "") + + +@pytest.fixture +def ttl_cache_timeout_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("TtlCacheTimeoutHandlerArn", "") + + +@pytest.fixture +def parallel_execution_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("ParallelExecutionHandlerArn", "") + + +@pytest.fixture +def idempotency_table_name(infrastructure: dict) -> str: + return infrastructure.get("DynamoDBTable", "") + + +def test_ttl_caching_expiration_idempotency(ttl_cache_expiration_handler_fn_arn: str): + # GIVEN + payload = json.dumps({"message": "Lambda Powertools - TTL 5s"}) + + # WHEN + # first execution + first_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_expiration_handler_fn_arn, payload=payload + ) + first_execution_response = first_execution["Payload"].read().decode("utf-8") + + # the second execution should return the same response as the first execution + second_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_expiration_handler_fn_arn, payload=payload + ) + second_execution_response = second_execution["Payload"].read().decode("utf-8") + + # wait 8s to expire ttl and execute again, this should return a new response value + sleep(8) + third_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_expiration_handler_fn_arn, payload=payload + ) + third_execution_response = third_execution["Payload"].read().decode("utf-8") + + # THEN + assert first_execution_response == second_execution_response + assert third_execution_response != second_execution_response + + +def test_ttl_caching_timeout_idempotency(ttl_cache_timeout_handler_fn_arn: str): + # GIVEN + payload_timeout_execution = json.dumps({"sleep": 5, "message": "Lambda Powertools - TTL 1s"}) + payload_working_execution = json.dumps({"sleep": 0, "message": "Lambda Powertools - TTL 1s"}) + + # WHEN + # first call should fail due to timeout + execution_with_timeout, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_timeout_handler_fn_arn, payload=payload_timeout_execution + ) + execution_with_timeout_response = execution_with_timeout["Payload"].read().decode("utf-8") + + # the second call should work and return the payload + execution_working, _ = 
data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_timeout_handler_fn_arn, payload=payload_working_execution + ) + execution_working_response = execution_working["Payload"].read().decode("utf-8") + + # THEN + assert "Task timed out after" in execution_with_timeout_response + assert payload_working_execution == execution_working_response + + +def test_parallel_execution_idempotency(parallel_execution_handler_fn_arn: str): + # GIVEN + arguments = json.dumps({"message": "Lambda Powertools - Parallel execution"}) + + # WHEN + # executing Lambdas in parallel + lambdas_arn = [parallel_execution_handler_fn_arn, parallel_execution_handler_fn_arn] + execution_result_list = execute_lambdas_in_parallel("data_fetcher.get_lambda_response", lambdas_arn, arguments) + + timeout_execution_response = execution_result_list[0][0]["Payload"].read().decode("utf-8") + error_idempotency_execution_response = execution_result_list[1][0]["Payload"].read().decode("utf-8") + + # THEN + assert "Execution already in progress with idempotency key" in error_idempotency_execution_response + assert "Task timed out after" in timeout_execution_response diff --git a/tests/e2e/logger/conftest.py b/tests/e2e/logger/conftest.py index 82a89314258..a31be77031b 100644 --- a/tests/e2e/logger/conftest.py +++ b/tests/e2e/logger/conftest.py @@ -1,27 +1,18 @@ -from pathlib import Path - import pytest from tests.e2e.logger.infrastructure import LoggerStack @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(tmp_path_factory, worker_id): """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed - lambda_layer_arn : str - Lambda Layer ARN - Yields ------ Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = LoggerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = LoggerStack() try: yield stack.deploy() finally: diff --git a/tests/e2e/logger/infrastructure.py b/tests/e2e/logger/infrastructure.py index 68aaa8eb38a..242b3c10892 100644 --- a/tests/e2e/logger/infrastructure.py +++ b/tests/e2e/logger/infrastructure.py @@ -1,13 +1,6 @@ -from pathlib import Path - from tests.e2e.utils.infrastructure import BaseInfrastructure class LoggerStack(BaseInfrastructure): - FEATURE_NAME = "logger" - - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) - def create_resources(self): self.create_lambda_functions() diff --git a/tests/e2e/metrics/conftest.py b/tests/e2e/metrics/conftest.py index 663c8845be4..2f72e7950be 100644 --- a/tests/e2e/metrics/conftest.py +++ b/tests/e2e/metrics/conftest.py @@ -1,27 +1,18 @@ -from pathlib import Path - import pytest from tests.e2e.metrics.infrastructure import MetricsStack @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(tmp_path_factory, worker_id): """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed - lambda_layer_arn : str - Lambda Layer ARN - Yields ------ Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = 
MetricsStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = MetricsStack() try: yield stack.deploy() finally: diff --git a/tests/e2e/metrics/infrastructure.py b/tests/e2e/metrics/infrastructure.py index 9afa59bb5cd..7cc1eb8c498 100644 --- a/tests/e2e/metrics/infrastructure.py +++ b/tests/e2e/metrics/infrastructure.py @@ -1,13 +1,6 @@ -from pathlib import Path - from tests.e2e.utils.infrastructure import BaseInfrastructure class MetricsStack(BaseInfrastructure): - FEATURE_NAME = "metrics" - - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) - def create_resources(self): self.create_lambda_functions() diff --git a/tests/e2e/parameters/__init__.py b/tests/e2e/parameters/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/parameters/conftest.py b/tests/e2e/parameters/conftest.py new file mode 100644 index 00000000000..f4c9d7396dd --- /dev/null +++ b/tests/e2e/parameters/conftest.py @@ -0,0 +1,19 @@ +import pytest + +from tests.e2e.parameters.infrastructure import ParametersStack + + +@pytest.fixture(autouse=True, scope="module") +def infrastructure(tmp_path_factory, worker_id): + """Setup and teardown logic for E2E test infrastructure + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = ParametersStack() + try: + yield stack.deploy() + finally: + stack.delete() diff --git a/tests/e2e/parameters/handlers/parameter_appconfig_freeform_handler.py b/tests/e2e/parameters/handlers/parameter_appconfig_freeform_handler.py new file mode 100644 index 00000000000..51b56eba95a --- /dev/null +++ b/tests/e2e/parameters/handlers/parameter_appconfig_freeform_handler.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.utilities import parameters +from aws_lambda_powertools.utilities.typing import LambdaContext + + +def lambda_handler(event: dict, context: LambdaContext): + # Retrieve a single configuration, latest version + value: bytes = parameters.get_app_config( + name=event.get("name"), environment=event.get("environment"), application=event.get("application") + ) + + return value diff --git a/tests/e2e/parameters/infrastructure.py b/tests/e2e/parameters/infrastructure.py new file mode 100644 index 00000000000..d0fb1b6c60c --- /dev/null +++ b/tests/e2e/parameters/infrastructure.py @@ -0,0 +1,108 @@ +from pyclbr import Function + +from aws_cdk import CfnOutput +from aws_cdk import aws_appconfig as appconfig +from aws_cdk import aws_iam as iam + +from tests.e2e.utils.data_builder import build_service_name +from tests.e2e.utils.infrastructure import BaseInfrastructure + + +class ParametersStack(BaseInfrastructure): + def create_resources(self): + functions = self.create_lambda_functions() + self._create_app_config(function=functions["ParameterAppconfigFreeformHandler"]) + + def _create_app_config(self, function: Function): + + service_name = build_service_name() + + cfn_application = appconfig.CfnApplication( + self.stack, + id="appconfig-app", + name=f"powertools-e2e-{service_name}", + description="Lambda Powertools End-to-End testing for AppConfig", + ) + CfnOutput(self.stack, "AppConfigApplication", value=cfn_application.name) + + cfn_environment = appconfig.CfnEnvironment( + self.stack, + "appconfig-env", + application_id=cfn_application.ref, + name=f"powertools-e2e{service_name}", + description="Lambda Powertools End-to-End testing environment", + ) + 
CfnOutput(self.stack, "AppConfigEnvironment", value=cfn_environment.name) + + cfn_deployment_strategy = appconfig.CfnDeploymentStrategy( + self.stack, + "appconfig-deployment-strategy", + deployment_duration_in_minutes=0, + final_bake_time_in_minutes=0, + growth_factor=100, + name=f"deploymente2e{service_name}", + description="deploymente2e", + replicate_to="NONE", + growth_type="LINEAR", + ) + + self._create_app_config_freeform( + app=cfn_application, + environment=cfn_environment, + strategy=cfn_deployment_strategy, + function=function, + service_name=service_name, + ) + + def _create_app_config_freeform( + self, + app: appconfig.CfnApplication, + environment: appconfig.CfnEnvironment, + strategy: appconfig.CfnDeploymentStrategy, + function: Function, + service_name: str, + ): + + cfn_configuration_profile = appconfig.CfnConfigurationProfile( + self.stack, + "appconfig-profile", + application_id=app.ref, + location_uri="hosted", + type="AWS.Freeform", + name=f"profilee2e{service_name}", + description="profilee2e", + ) + CfnOutput(self.stack, "AppConfigProfile", value=cfn_configuration_profile.name) + + cfn_hosted_configuration_version = appconfig.CfnHostedConfigurationVersion( + self.stack, + "appconfig-hosted-deploy", + application_id=app.ref, + configuration_profile_id=cfn_configuration_profile.ref, + content='{"save_history": {"default": true}}', + content_type="application/json", + description="hostedconfiguratione2e", + ) + CfnOutput(self.stack, "AppConfigConfigurationValue", value=cfn_hosted_configuration_version.content) + + appconfig.CfnDeployment( + self.stack, + "appconfig-deployment", + application_id=app.ref, + configuration_profile_id=cfn_configuration_profile.ref, + configuration_version=cfn_hosted_configuration_version.ref, + deployment_strategy_id=strategy.ref, + environment_id=environment.ref, + description="deployment", + ) + + function.add_to_role_policy( + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "appconfig:GetLatestConfiguration", + "appconfig:StartConfigurationSession", + ], + resources=["*"], + ) + ) diff --git a/tests/e2e/parameters/test_appconfig.py b/tests/e2e/parameters/test_appconfig.py new file mode 100644 index 00000000000..0129adb1515 --- /dev/null +++ b/tests/e2e/parameters/test_appconfig.py @@ -0,0 +1,61 @@ +import json + +import pytest + +from tests.e2e.utils import data_fetcher + + +@pytest.fixture +def parameter_appconfig_freeform_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("ParameterAppconfigFreeformHandlerArn", "") + + +@pytest.fixture +def parameter_appconfig_freeform_handler_fn(infrastructure: dict) -> str: + return infrastructure.get("ParameterAppconfigFreeformHandler", "") + + +@pytest.fixture +def parameter_appconfig_freeform_value(infrastructure: dict) -> str: + return infrastructure.get("AppConfigConfigurationValue", "") + + +@pytest.fixture +def parameter_appconfig_freeform_application(infrastructure: dict) -> str: + return infrastructure.get("AppConfigApplication", "") + + +@pytest.fixture +def parameter_appconfig_freeform_environment(infrastructure: dict) -> str: + return infrastructure.get("AppConfigEnvironment", "") + + +@pytest.fixture +def parameter_appconfig_freeform_profile(infrastructure: dict) -> str: + return infrastructure.get("AppConfigProfile", "") + + +def test_get_parameter_appconfig_freeform( + parameter_appconfig_freeform_handler_fn_arn: str, + parameter_appconfig_freeform_value: str, + parameter_appconfig_freeform_application: str, + parameter_appconfig_freeform_environment: 
str, + parameter_appconfig_freeform_profile: str, +): + # GIVEN + payload = json.dumps( + { + "name": parameter_appconfig_freeform_profile, + "environment": parameter_appconfig_freeform_environment, + "application": parameter_appconfig_freeform_application, + } + ) + expected_return = parameter_appconfig_freeform_value + + # WHEN + parameter_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=parameter_appconfig_freeform_handler_fn_arn, payload=payload + ) + parameter_value = parameter_execution["Payload"].read().decode("utf-8") + + assert parameter_value == expected_return diff --git a/tests/e2e/tracer/conftest.py b/tests/e2e/tracer/conftest.py index 3b724bf1247..afb34ffee2b 100644 --- a/tests/e2e/tracer/conftest.py +++ b/tests/e2e/tracer/conftest.py @@ -1,27 +1,19 @@ -from pathlib import Path - import pytest from tests.e2e.tracer.infrastructure import TracerStack @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(): """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed - lambda_layer_arn : str - Lambda Layer ARN Yields ------ Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = TracerStack() try: yield stack.deploy() finally: diff --git a/tests/e2e/tracer/handlers/async_capture.py b/tests/e2e/tracer/handlers/async_capture.py index b19840a6f69..814e0b92e02 100644 --- a/tests/e2e/tracer/handlers/async_capture.py +++ b/tests/e2e/tracer/handlers/async_capture.py @@ -13,4 +13,5 @@ async def async_get_users(): def lambda_handler(event: dict, context: LambdaContext): + tracer.service = event.get("service") return asyncio.run(async_get_users()) diff --git a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py index ba94c845ace..89a6b062423 100644 --- a/tests/e2e/tracer/handlers/basic_handler.py +++ b/tests/e2e/tracer/handlers/basic_handler.py @@ -13,4 +13,5 @@ def get_todos(): @tracer.capture_lambda_handler def lambda_handler(event: dict, context: LambdaContext): + tracer.service = event.get("service") return get_todos() diff --git a/tests/e2e/tracer/handlers/same_function_name.py b/tests/e2e/tracer/handlers/same_function_name.py new file mode 100644 index 00000000000..240e3329bc8 --- /dev/null +++ b/tests/e2e/tracer/handlers/same_function_name.py @@ -0,0 +1,35 @@ +from abc import ABC, abstractmethod +from uuid import uuid4 + +from aws_lambda_powertools import Tracer +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() + + +class MainAbstractClass(ABC): + @abstractmethod + def get_all(self): + raise NotImplementedError + + +class Comments(MainAbstractClass): + @tracer.capture_method + def get_all(self): + return [{"id": f"{uuid4()}", "completed": False} for _ in range(5)] + + +class Todos(MainAbstractClass): + @tracer.capture_method + def get_all(self): + return [{"id": f"{uuid4()}", "completed": False} for _ in range(5)] + + +def lambda_handler(event: dict, context: LambdaContext): + # Maintenance: create a public method to set these explicitly + tracer.service = event["service"] + + todos = Todos() + comments = Comments() + + return {"todos": todos.get_all(), "comments": comments.get_all()} diff --git a/tests/e2e/tracer/infrastructure.py 
b/tests/e2e/tracer/infrastructure.py index 9b388558c0b..8562359acf0 100644 --- a/tests/e2e/tracer/infrastructure.py +++ b/tests/e2e/tracer/infrastructure.py @@ -1,18 +1,6 @@ -from pathlib import Path - -from tests.e2e.utils.data_builder import build_service_name from tests.e2e.utils.infrastructure import BaseInfrastructure class TracerStack(BaseInfrastructure): - # Maintenance: Tracer doesn't support dynamic service injection (tracer.py L310) - # we could move after handler response or adopt env vars usage in e2e tests - SERVICE_NAME: str = build_service_name() - FEATURE_NAME = "tracer" - - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) - def create_resources(self) -> None: - env_vars = {"POWERTOOLS_SERVICE_NAME": self.SERVICE_NAME} - self.create_lambda_functions(function_props={"environment": env_vars}) + self.create_lambda_functions() diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index 06dde811ef1..e2abc5af6bc 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -1,7 +1,8 @@ +import json + import pytest from tests.e2e.tracer.handlers import async_capture, basic_handler -from tests.e2e.tracer.infrastructure import TracerStack from tests.e2e.utils import data_builder, data_fetcher @@ -15,6 +16,16 @@ def basic_handler_fn(infrastructure: dict) -> str: return infrastructure.get("BasicHandler", "") +@pytest.fixture +def same_function_name_fn(infrastructure: dict) -> str: + return infrastructure.get("SameFunctionName", "") + + +@pytest.fixture +def same_function_name_arn(infrastructure: dict) -> str: + return infrastructure.get("SameFunctionNameArn", "") + + @pytest.fixture def async_fn_arn(infrastructure: dict) -> str: return infrastructure.get("AsyncCaptureArn", "") @@ -27,43 +38,73 @@ def async_fn(infrastructure: dict) -> str: def test_lambda_handler_trace_is_visible(basic_handler_fn_arn: str, basic_handler_fn: str): # GIVEN + service = data_builder.build_service_name() handler_name = basic_handler.lambda_handler.__name__ handler_subsegment = f"## {handler_name}" handler_metadata_key = f"{handler_name} response" - method_name = basic_handler.get_todos.__name__ + method_name = f"basic_handler.{basic_handler.get_todos.__name__}" method_subsegment = f"## {method_name}" - handler_metadata_key = f"{method_name} response" + method_metadata_key = f"{method_name} response" trace_query = data_builder.build_trace_default_query(function_name=basic_handler_fn) # WHEN - _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) - data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) + event = json.dumps({"service": service}) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=event) + data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=event) # THEN trace = data_fetcher.get_traces(start_date=execution_time, filter_expression=trace_query, minimum_traces=2) assert len(trace.get_annotation(key="ColdStart", value=True)) == 1 - assert len(trace.get_metadata(key=handler_metadata_key, namespace=TracerStack.SERVICE_NAME)) == 2 - assert len(trace.get_metadata(key=handler_metadata_key, namespace=TracerStack.SERVICE_NAME)) == 2 + assert len(trace.get_metadata(key=handler_metadata_key, namespace=service)) == 2 + assert len(trace.get_metadata(key=method_metadata_key, namespace=service)) == 2 assert 
len(trace.get_subsegment(name=handler_subsegment)) == 2 assert len(trace.get_subsegment(name=method_subsegment)) == 2 +def test_lambda_handler_trace_multiple_functions_same_name(same_function_name_arn: str, same_function_name_fn: str): + # GIVEN + service = data_builder.build_service_name() + method_name_todos = "same_function_name.Todos.get_all" + method_subsegment_todos = f"## {method_name_todos}" + method_metadata_key_todos = f"{method_name_todos} response" + + method_name_comments = "same_function_name.Comments.get_all" + method_subsegment_comments = f"## {method_name_comments}" + method_metadata_key_comments = f"{method_name_comments} response" + + trace_query = data_builder.build_trace_default_query(function_name=same_function_name_fn) + + # WHEN + event = json.dumps({"service": service}) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=same_function_name_arn, payload=event) + + # THEN + trace = data_fetcher.get_traces(start_date=execution_time, filter_expression=trace_query) + + assert len(trace.get_metadata(key=method_metadata_key_todos, namespace=service)) == 1 + assert len(trace.get_metadata(key=method_metadata_key_comments, namespace=service)) == 1 + assert len(trace.get_subsegment(name=method_subsegment_todos)) == 1 + assert len(trace.get_subsegment(name=method_subsegment_comments)) == 1 + + def test_async_trace_is_visible(async_fn_arn: str, async_fn: str): # GIVEN - async_fn_name = async_capture.async_get_users.__name__ + service = data_builder.build_service_name() + async_fn_name = f"async_capture.{async_capture.async_get_users.__name__}" async_fn_name_subsegment = f"## {async_fn_name}" async_fn_name_metadata_key = f"{async_fn_name} response" trace_query = data_builder.build_trace_default_query(function_name=async_fn) # WHEN - _, execution_time = data_fetcher.get_lambda_response(lambda_arn=async_fn_arn) + event = json.dumps({"service": service}) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=async_fn_arn, payload=event) # THEN trace = data_fetcher.get_traces(start_date=execution_time, filter_expression=trace_query) assert len(trace.get_subsegment(name=async_fn_name_subsegment)) == 1 - assert len(trace.get_metadata(key=async_fn_name_metadata_key, namespace=TracerStack.SERVICE_NAME)) == 1 + assert len(trace.get_metadata(key=async_fn_name_metadata_key, namespace=service)) == 1 diff --git a/tests/e2e/utils/Dockerfile b/tests/e2e/utils/Dockerfile deleted file mode 100644 index 586847bb3fa..00000000000 --- a/tests/e2e/utils/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -# Image used by CDK's LayerVersion construct to create Lambda Layer with Powertools -# library code. -# The correct AWS SAM build image based on the runtime of the function will be -# passed as build arg. The default allows to do `docker build .` when testing. 
-ARG IMAGE=public.ecr.aws/sam/build-python3.7 -FROM $IMAGE - -ARG PIP_INDEX_URL -ARG PIP_EXTRA_INDEX_URL -ARG HTTPS_PROXY - -RUN pip install --upgrade pip - -CMD [ "python" ] diff --git a/tests/e2e/utils/asset.py b/tests/e2e/utils/asset.py deleted file mode 100644 index db9e7299d1a..00000000000 --- a/tests/e2e/utils/asset.py +++ /dev/null @@ -1,147 +0,0 @@ -import io -import json -import logging -import zipfile -from pathlib import Path -from typing import Dict, List, Optional - -import boto3 -import botocore.exceptions -from mypy_boto3_s3 import S3Client -from pydantic import BaseModel, Field - -logger = logging.getLogger(__name__) - - -class AssetManifest(BaseModel): - path: str - packaging: str - - -class AssetTemplateConfigDestinationsAccount(BaseModel): - bucket_name: str = Field(str, alias="bucketName") - object_key: str = Field(str, alias="objectKey") - assume_role_arn: str = Field(str, alias="assumeRoleArn") - - -class AssetTemplateConfigDestinations(BaseModel): - current_account_current_region: AssetTemplateConfigDestinationsAccount = Field( - AssetTemplateConfigDestinationsAccount, alias="current_account-current_region" - ) - - -class AssetTemplateConfig(BaseModel): - source: AssetManifest - destinations: AssetTemplateConfigDestinations - - -class TemplateAssembly(BaseModel): - version: str - files: Dict[str, AssetTemplateConfig] - - -class Asset: - def __init__( - self, config: AssetTemplateConfig, account_id: str, region: str, boto3_client: Optional[S3Client] = None - ) -> None: - """CDK Asset logic to verify existence and resolve deeply nested configuration - - Parameters - ---------- - config : AssetTemplateConfig - CDK Asset configuration found in synthesized template - account_id : str - AWS Account ID - region : str - AWS Region - boto3_client : Optional["S3Client"], optional - S3 client instance for asset operations, by default None - """ - self.config = config - self.s3 = boto3_client or boto3.client("s3") - self.account_id = account_id - self.region = region - self.asset_path = config.source.path - self.asset_packaging = config.source.packaging - self.object_key = config.destinations.current_account_current_region.object_key - self._bucket = config.destinations.current_account_current_region.bucket_name - self.bucket_name = self._resolve_bucket_name() - - @property - def is_zip(self): - return self.asset_packaging == "zip" - - def exists_in_s3(self, key: str) -> bool: - try: - return self.s3.head_object(Bucket=self.bucket_name, Key=key) is not None - except botocore.exceptions.ClientError: - return False - - def _resolve_bucket_name(self) -> str: - return self._bucket.replace("${AWS::AccountId}", self.account_id).replace("${AWS::Region}", self.region) - - -class Assets: - def __init__( - self, asset_manifest: Path, account_id: str, region: str, boto3_client: Optional[S3Client] = None - ) -> None: - """CDK Assets logic to find each asset, compress, and upload - - Parameters - ---------- - asset_manifest : Path - Asset manifest JSON file (self.__synthesize) - account_id : str - AWS Account ID - region : str - AWS Region - boto3_client : Optional[S3Client], optional - S3 client instance for asset operations, by default None - """ - self.asset_manifest = asset_manifest - self.account_id = account_id - self.region = region - self.s3 = boto3_client or boto3.client("s3") - self.assets = self._find_assets_from_template() - self.assets_location = str(self.asset_manifest.parent) - - def upload(self): - """Drop-in replacement for cdk-assets package s3 upload part. 
- https://www.npmjs.com/package/cdk-assets. - We use custom solution to avoid dependencies from nodejs ecosystem. - We follow the same design cdk-assets: - https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md. - """ - logger.debug(f"Upload {len(self.assets)} assets") - for asset in self.assets: - if not asset.is_zip: - logger.debug(f"Asset '{asset.object_key}' is not zip. Skipping upload.") - continue - - if asset.exists_in_s3(key=asset.object_key): - logger.debug(f"Asset '{asset.object_key}' already exists in S3. Skipping upload.") - continue - - archive = self._compress_assets(asset) - logger.debug("Uploading archive to S3") - self.s3.upload_fileobj(Fileobj=archive, Bucket=asset.bucket_name, Key=asset.object_key) - logger.debug("Successfully uploaded") - - def _find_assets_from_template(self) -> List[Asset]: - data = json.loads(self.asset_manifest.read_text()) - template = TemplateAssembly(**data) - return [ - Asset(config=asset_config, account_id=self.account_id, region=self.region) - for asset_config in template.files.values() - ] - - def _compress_assets(self, asset: Asset) -> io.BytesIO: - buf = io.BytesIO() - asset_dir = f"{self.assets_location}/{asset.asset_path}" - asset_files = list(Path(asset_dir).rglob("*")) - with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as archive: - for asset_file in asset_files: - logger.debug(f"Adding file '{asset_file}' to the archive.") - archive.write(asset_file, arcname=asset_file.relative_to(asset_dir)) - buf.seek(0) - return buf diff --git a/tests/e2e/utils/base.py b/tests/e2e/utils/base.py new file mode 100644 index 00000000000..2a6e6032e52 --- /dev/null +++ b/tests/e2e/utils/base.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod +from typing import Dict, Optional + + +class InfrastructureProvider(ABC): + @abstractmethod + def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict: + pass + + @abstractmethod + def deploy(self) -> Dict[str, str]: + pass + + @abstractmethod + def delete(self): + pass + + @abstractmethod + def create_resources(self): + pass diff --git a/tests/e2e/utils/constants.py b/tests/e2e/utils/constants.py new file mode 100644 index 00000000000..445c9f00113 --- /dev/null +++ b/tests/e2e/utils/constants.py @@ -0,0 +1,8 @@ +import sys + +from aws_lambda_powertools import PACKAGE_PATH + +PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" +SOURCE_CODE_ROOT_PATH = PACKAGE_PATH.parent +CDK_OUT_PATH = SOURCE_CODE_ROOT_PATH / "cdk.out" +LAYER_BUILD_PATH = CDK_OUT_PATH / "layer_build" diff --git a/tests/e2e/utils/data_fetcher/__init__.py b/tests/e2e/utils/data_fetcher/__init__.py index 43024f9946f..fdd1de5c515 100644 --- a/tests/e2e/utils/data_fetcher/__init__.py +++ b/tests/e2e/utils/data_fetcher/__init__.py @@ -1,4 +1,5 @@ -from tests.e2e.utils.data_fetcher.common import get_lambda_response +from tests.e2e.utils.data_fetcher.common import get_http_response, get_lambda_response +from tests.e2e.utils.data_fetcher.idempotency import get_ddb_idempotency_record from tests.e2e.utils.data_fetcher.logs import get_logs from tests.e2e.utils.data_fetcher.metrics import get_metrics from tests.e2e.utils.data_fetcher.traces import get_traces diff --git a/tests/e2e/utils/data_fetcher/common.py b/tests/e2e/utils/data_fetcher/common.py index 2de8838dc74..29f97eab2de 100644 --- a/tests/e2e/utils/data_fetcher/common.py +++ b/tests/e2e/utils/data_fetcher/common.py @@ -2,8 +2,12 @@ from typing import Optional, Tuple import boto3 +import requests as requests 
from mypy_boto3_lambda import LambdaClient from mypy_boto3_lambda.type_defs import InvocationResponseTypeDef +from requests import Request, Response +from requests.exceptions import RequestException +from retry import retry def get_lambda_response( @@ -13,3 +17,11 @@ def get_lambda_response( payload = payload or "" execution_time = datetime.utcnow() return client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse", Payload=payload), execution_time + + +@retry(RequestException, delay=2, jitter=1.5, tries=5) +def get_http_response(request: Request) -> Response: + session = requests.Session() + result = session.send(request.prepare()) + result.raise_for_status() + return result diff --git a/tests/e2e/utils/data_fetcher/idempotency.py b/tests/e2e/utils/data_fetcher/idempotency.py new file mode 100644 index 00000000000..109e6735d3b --- /dev/null +++ b/tests/e2e/utils/data_fetcher/idempotency.py @@ -0,0 +1,39 @@ +import boto3 +from retry import retry + + +@retry(ValueError, delay=2, jitter=1.5, tries=10) +def get_ddb_idempotency_record( + function_name: str, + table_name: str, +) -> int: + """Fetch the number of idempotency records stored in DynamoDB for a given function + + Parameters + ---------- + function_name : str + Name of Lambda function to fetch DynamoDB records for + table_name : str + Name of DynamoDB table + + Returns + ------- + int + Count of records found + + Raises + ------ + ValueError + When no record is found within retry window + """ + ddb_client = boto3.resource("dynamodb") + table = ddb_client.Table(table_name) + ret = table.scan( + FilterExpression="contains (id, :functionName)", + ExpressionAttributeValues={":functionName": f"{function_name}#"}, + ) + + if not ret["Items"]: + raise ValueError("Empty response from DynamoDB, repeating...") + + return ret["Count"] diff --git a/tests/e2e/utils/functions.py b/tests/e2e/utils/functions.py new file mode 100644 index 00000000000..7b64c439298 --- /dev/null +++ b/tests/e2e/utils/functions.py @@ -0,0 +1,14 @@ +from concurrent.futures import ThreadPoolExecutor + +from tests.e2e.utils import data_fetcher # noqa F401 + + +def execute_lambdas_in_parallel(function_name: str, lambdas_arn: list, arguments: str): + result_list = [] + with ThreadPoolExecutor() as executor: + running_tasks = executor.map(lambda exec: eval(function_name)(*exec), [(arn, arguments) for arn in lambdas_arn]) + executor.shutdown(wait=True) + for running_task in running_tasks: + result_list.append(running_task) + + return result_list diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 87be83d2f96..29e45b83abf 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -1,72 +1,72 @@ import json import logging +import os +import subprocess import sys -from abc import ABC, abstractmethod -from enum import Enum +import textwrap from pathlib import Path -from typing import Dict, Generator, Optional, Tuple, Type +from typing import Callable, Dict, Generator, Optional from uuid import uuid4 import boto3 import pytest -import yaml -from aws_cdk import ( - App, - AssetStaging, - BundlingOptions, - CfnOutput, - DockerImage, - RemovalPolicy, - Stack, - aws_logs, +from aws_cdk import App, CfnOutput, Environment, RemovalPolicy, Stack, aws_logs +from aws_cdk.aws_lambda import ( + Architecture, + Code, + Function, + LayerVersion, + Runtime, + Tracing, ) -from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing from filelock import FileLock from mypy_boto3_cloudformation import CloudFormationClient -from tests.e2e.utils.asset import Assets -
-PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" +from tests.e2e.utils.base import InfrastructureProvider +from tests.e2e.utils.constants import ( + CDK_OUT_PATH, + PYTHON_RUNTIME_VERSION, + SOURCE_CODE_ROOT_PATH, +) +from tests.e2e.utils.lambda_layer.powertools_layer import LocalLambdaPowertoolsLayer logger = logging.getLogger(__name__) -class BaseInfrastructureStack(ABC): - @abstractmethod - def synthesize(self) -> Tuple[dict, str]: - ... - - @abstractmethod - def __call__(self) -> Tuple[dict, str]: - ... - - -class PythonVersion(Enum): - V37 = {"runtime": Runtime.PYTHON_3_7, "image": Runtime.PYTHON_3_7.bundling_image.image} - V38 = {"runtime": Runtime.PYTHON_3_8, "image": Runtime.PYTHON_3_8.bundling_image.image} - V39 = {"runtime": Runtime.PYTHON_3_9, "image": Runtime.PYTHON_3_9.bundling_image.image} +class BaseInfrastructure(InfrastructureProvider): + RANDOM_STACK_VALUE: str = f"{uuid4()}" - -class BaseInfrastructure(ABC): - def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") -> None: - self.feature_name = feature_name - self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{feature_name}-{uuid4()}" - self.handlers_dir = handlers_dir - self.layer_arn = layer_arn + def __init__(self) -> None: + self.feature_path = Path(sys.modules[self.__class__.__module__].__file__).parent # absolute path to feature + self.feature_name = self.feature_path.parts[-1].replace("_", "-") # logger, tracer, event-handler, etc. + self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{self.feature_name}-{self.RANDOM_STACK_VALUE}" self.stack_outputs: Dict[str, str] = {} - # NOTE: Investigate why cdk.Environment in Stack - # changes synthesized asset (no object_key in asset manifest) - self.app = App() - self.stack = Stack(self.app, self.stack_name) + # NOTE: CDK stack account and region are tokens, we need to resolve earlier self.session = boto3.Session() self.cfn: CloudFormationClient = self.session.client("cloudformation") - - # NOTE: CDK stack account and region are tokens, we need to resolve earlier self.account_id = self.session.client("sts").get_caller_identity()["Account"] self.region = self.session.region_name - def create_lambda_functions(self, function_props: Optional[Dict] = None): + self.app = App() + self.stack = Stack(self.app, self.stack_name, env=Environment(account=self.account_id, region=self.region)) + + # NOTE: Introspect feature details to generate CDK App (_create_temp_cdk_app method), Synth and Deployment + self._feature_infra_class_name = self.__class__.__name__ + self._feature_infra_module_path = self.feature_path / "infrastructure" + self._feature_infra_file = self.feature_path / "infrastructure.py" + self._handlers_dir = self.feature_path / "handlers" + self._cdk_out_dir: Path = CDK_OUT_PATH / self.feature_name + self._stack_outputs_file = f'{self._cdk_out_dir / "stack_outputs.json"}' + + if not self._feature_infra_file.exists(): + raise FileNotFoundError( + "You must have your infrastructure defined in 'tests/e2e/<feature>/infrastructure.py'." + ) + + def create_lambda_functions( + self, function_props: Optional[Dict] = None, architecture: Architecture = Architecture.X86_64 + ) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir It creates CloudFormation Outputs for every function found in PascalCase. 
For example, @@ -78,6 +78,14 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None): function_props: Optional[Dict] Dictionary representing CDK Lambda FunctionProps to override defaults + architecture: Architecture + Used to create Lambda Layer and functions in a different architecture. Defaults to x86_64. + + Returns + ------- + output: Dict[str, Function] + A dict with PascalCased function names and the corresponding CDK Function object + Examples -------- @@ -95,17 +103,32 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None): self.create_lambda_functions(function_props={"runtime": Runtime.PYTHON_3_7) ``` """ - handlers = list(self.handlers_dir.rglob("*.py")) - source = Code.from_asset(f"{self.handlers_dir}") + if not self._handlers_dir.exists(): + raise RuntimeError(f"Handlers dir '{self._handlers_dir}' must exist for functions to be created.") + + layer_build = LocalLambdaPowertoolsLayer(architecture=architecture).build() + layer = LayerVersion( + self.stack, + "aws-lambda-powertools-e2e-test", + layer_version_name="aws-lambda-powertools-e2e-test", + compatible_runtimes=[ + Runtime.PYTHON_3_7, + Runtime.PYTHON_3_8, + Runtime.PYTHON_3_9, + ], + compatible_architectures=[architecture], + code=Code.from_asset(path=layer_build), + ) + + # NOTE: Agree on a convention if we need to support multi-file handlers + # as we're simply taking any file under `handlers/` to be a Lambda function. + handlers = list(self._handlers_dir.rglob("*.py")) + source = Code.from_asset(f"{self._handlers_dir}") logger.debug(f"Creating functions for handlers: {handlers}") - if not self.layer_arn: - raise ValueError( - """Lambda Layer ARN cannot be empty when creating Lambda functions. - Make sure to inject `lambda_layer_arn` fixture and pass at the constructor level""" - ) - layer = LayerVersion.from_layer_version_arn(self.stack, "layer-arn", layer_version_arn=self.layer_arn) function_settings_override = function_props or {} + output: Dict[str, Function] = {} + for fn in handlers: fn_name = fn.stem fn_name_pascal_case = fn_name.title().replace("_", "") # basic_handler -> BasicHandler @@ -117,6 +140,7 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None): "tracing": Tracing.ACTIVE, "runtime": Runtime.PYTHON_3_9, "layers": [layer], + "architecture": architecture, **function_settings_override, } @@ -133,26 +157,92 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None): # CFN Outputs only support hyphen hence pascal case self.add_cfn_output(name=fn_name_pascal_case, value=function.function_name, arn=function.function_arn) + output[fn_name_pascal_case] = function + + return output + def deploy(self) -> Dict[str, str]: - """Creates CloudFormation Stack and return stack outputs as dict + """Synthesize and deploy a CDK app, and return its stack outputs + + NOTE: It auto-generates a temporary CDK app to benefit from CDK CLI lookup features Returns ------- Dict[str, str] CloudFormation Stack Outputs with output key and value """ - template, asset_manifest_file = self._synthesize() - assets = Assets(asset_manifest=asset_manifest_file, account_id=self.account_id, region=self.region) - assets.upload() - self.stack_outputs = self._deploy_stack(self.stack_name, template) - return self.stack_outputs + stack_file = self._create_temp_cdk_app() + synth_command = f"npx cdk synth --app 'python {stack_file}' -o {self._cdk_out_dir}" + deploy_command = ( + f"npx cdk deploy --app '{self._cdk_out_dir}' -O {self._stack_outputs_file} " + 
"--require-approval=never --method=direct" + ) + + # CDK launches a background task, so we must wait + subprocess.check_output(synth_command, shell=True) + subprocess.check_output(deploy_command, shell=True) + return self._read_stack_output() def delete(self) -> None: """Delete CloudFormation Stack""" logger.debug(f"Deleting stack: {self.stack_name}") self.cfn.delete_stack(StackName=self.stack_name) - @abstractmethod + def _sync_stack_name(self, stack_output: Dict): + """Synchronize initial stack name with CDK final stack name + + When using `cdk synth` with context methods (`from_lookup`), + CDK can initialize the Stack multiple times until it resolves + the context. + + Parameters + ---------- + stack_output : Dict + CDK CloudFormation Outputs, where the key is the stack name + """ + self.stack_name = list(stack_output.keys())[0] + + def _read_stack_output(self): + content = Path(self._stack_outputs_file).read_text() + outputs: Dict = json.loads(content) + self._sync_stack_name(stack_output=outputs) + + # discard stack_name and get outputs as dict + self.stack_outputs = list(outputs.values())[0] + return self.stack_outputs + + def _create_temp_cdk_app(self): + """Autogenerate a CDK App with our Stack so that CDK CLI can deploy it + + This allows us to keep our BaseInfrastructure while supporting context lookups. + """ + # cdk.out/tracer/cdk_app_v39.py + temp_file = self._cdk_out_dir / f"cdk_app_{PYTHON_RUNTIME_VERSION}.py" + + if temp_file.exists(): + # no need to regenerate CDK app since it's just boilerplate + return temp_file + + # Convert from POSIX path to Python module: tests.e2e.tracer.infrastructure + infra_module = str(self._feature_infra_module_path.relative_to(SOURCE_CODE_ROOT_PATH)).replace(os.sep, ".") + + code = f""" + from {infra_module} import {self._feature_infra_class_name} + stack = {self._feature_infra_class_name}() + stack.create_resources() + stack.app.synth() + """ + + if not self._cdk_out_dir.is_dir(): + self._cdk_out_dir.mkdir(parents=True, exist_ok=True) + + with temp_file.open("w") as fd: + fd.write(textwrap.dedent(code)) + + # allow CDK to read/execute file for stack deployment + temp_file.chmod(0o755) + return temp_file + def create_resources(self) -> None: """Create any necessary CDK resources. It'll be called before deploy @@ -176,34 +266,7 @@ def created_resources(self): self.create_lambda_functions() ``` """ - ... 
- - def _synthesize(self) -> Tuple[Dict, Path]: - logger.debug("Creating CDK Stack resources") - self.create_resources() - logger.debug("Synthesizing CDK Stack into raw CloudFormation template") - cloud_assembly = self.app.synth() - cf_template: Dict = cloud_assembly.get_stack_by_name(self.stack_name).template - cloud_assembly_assets_manifest_path: str = ( - cloud_assembly.get_stack_by_name(self.stack_name).dependencies[0].file # type: ignore[attr-defined] - ) - return cf_template, Path(cloud_assembly_assets_manifest_path) - - def _deploy_stack(self, stack_name: str, template: Dict) -> Dict[str, str]: - logger.debug(f"Creating CloudFormation Stack: {stack_name}") - self.cfn.create_stack( - StackName=stack_name, - TemplateBody=yaml.dump(template), - TimeoutInMinutes=10, - OnFailure="ROLLBACK", - Capabilities=["CAPABILITY_IAM"], - ) - waiter = self.cfn.get_waiter("stack_create_complete") - waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 10, "MaxAttempts": 50}) - - stack_details = self.cfn.describe_stacks(StackName=stack_name) - stack_outputs = stack_details["Stacks"][0]["Outputs"] - return {output["OutputKey"]: output["OutputValue"] for output in stack_outputs if output["OutputKey"]} + raise NotImplementedError() def add_cfn_output(self, name: str, value: str, arn: str = ""): """Create {Name} and optionally {Name}Arn CloudFormation Outputs. @@ -222,88 +285,50 @@ def add_cfn_output(self, name: str, value: str, arn: str = ""): CfnOutput(self.stack, f"{name}Arn", value=arn) -def deploy_once( - stack: Type[BaseInfrastructure], - request: pytest.FixtureRequest, +def call_once( + task: Callable, tmp_path_factory: pytest.TempPathFactory, worker_id: str, - layer_arn: str, -) -> Generator[Dict[str, str], None, None]: - """Deploys provided stack once whether CPU parallelization is enabled or not + callback: Optional[Callable] = None, +) -> Generator[object, None, None]: + """Call function and serialize results once whether CPU parallelization is enabled or not Parameters ---------- - stack : Type[BaseInfrastructure] - stack class to instantiate and deploy, for example MetricStack. - Not to be confused with class instance (MetricStack()). - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed + task : Callable + Function to call once and JSON serialize result whether parallel test is enabled or not. tmp_path_factory : pytest.TempPathFactory pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up worker_id : str pytest-xdist worker identification to detect whether parallelization is enabled + callback : Callable + Function to call when job is complete. 
Yields ------ - Generator[Dict[str, str], None, None] - stack CloudFormation outputs + Generator[object, None, None] + Callable output when called """ - handlers_dir = f"{request.node.path.parent}/handlers" - stack = stack(handlers_dir=Path(handlers_dir), layer_arn=layer_arn) try: if worker_id == "master": - # no parallelization, deploy stack and let fixture be cached - yield stack.deploy() + # no parallelization, call and return + yield task() else: # tmp dir shared by all workers root_tmp_dir = tmp_path_factory.getbasetemp().parent cache = root_tmp_dir / f"{PYTHON_RUNTIME_VERSION}_cache.json" with FileLock(f"{cache}.lock"): - # If cache exists, return stack outputs back + # If cache exists, return task outputs back # otherwise it's the first run by the main worker - # deploy and return stack outputs so subsequent workers can reuse + # run and return task outputs for subsequent workers reuse if cache.is_file(): - stack_outputs = json.loads(cache.read_text()) + callable_result = json.loads(cache.read_text()) else: - stack_outputs: Dict = stack.deploy() - cache.write_text(json.dumps(stack_outputs)) - yield stack_outputs + callable_result: Dict = task() + cache.write_text(json.dumps(callable_result)) + yield callable_result finally: - stack.delete() - - -class LambdaLayerStack(BaseInfrastructure): - FEATURE_NAME = "lambda-layer" - - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) - - def create_resources(self): - layer = self._create_layer() - CfnOutput(self.stack, "LayerArn", value=layer) - - def _create_layer(self) -> str: - logger.debug("Creating Lambda Layer with latest source code available") - output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python") - input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools") - - build_commands = [f"pip install .[pydantic] -t {output_dir}", f"cp -R {input_dir} {output_dir}"] - layer = LayerVersion( - self.stack, - "aws-lambda-powertools-e2e-test", - layer_version_name="aws-lambda-powertools-e2e-test", - compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], - code=Code.from_asset( - path=".", - bundling=BundlingOptions( - image=DockerImage.from_build( - str(Path(__file__).parent), - build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]}, - ), - command=["bash", "-c", " && ".join(build_commands)], - ), - ), - ) - return layer.layer_version_arn + if callback is not None: + callback() diff --git a/tests/e2e/utils/lambda_layer/__init__.py b/tests/e2e/utils/lambda_layer/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/utils/lambda_layer/base.py b/tests/e2e/utils/lambda_layer/base.py new file mode 100644 index 00000000000..280fe19d4f8 --- /dev/null +++ b/tests/e2e/utils/lambda_layer/base.py @@ -0,0 +1,32 @@ +from abc import ABC, abstractmethod +from pathlib import Path + + +class BaseLocalLambdaLayer(ABC): + def __init__(self, output_dir: Path): + self.output_dir = output_dir / "layer_build" + self.target_dir = f"{self.output_dir}/python" + + @abstractmethod + def build(self) -> str: + """Builds a Lambda Layer locally + + Returns + ------- + build_path : str + Path where newly built Lambda Layer is + """ + raise NotImplementedError() + + def before_build(self): + """Any step to run before build process begins. + + By default, it creates output dir and its parents if it doesn't exist. 
+ """ + if not self.output_dir.exists(): + # Create missing parent directories if missing + self.output_dir.mkdir(parents=True, exist_ok=True) + + def after_build(self): + """Any step after a build succeed""" + ... diff --git a/tests/e2e/utils/lambda_layer/powertools_layer.py b/tests/e2e/utils/lambda_layer/powertools_layer.py new file mode 100644 index 00000000000..23eae521696 --- /dev/null +++ b/tests/e2e/utils/lambda_layer/powertools_layer.py @@ -0,0 +1,77 @@ +import logging +import subprocess +from pathlib import Path + +from aws_cdk.aws_lambda import Architecture +from checksumdir import dirhash + +from aws_lambda_powertools import PACKAGE_PATH +from tests.e2e.utils.constants import CDK_OUT_PATH, SOURCE_CODE_ROOT_PATH +from tests.e2e.utils.lambda_layer.base import BaseLocalLambdaLayer + +logger = logging.getLogger(__name__) + + +class LocalLambdaPowertoolsLayer(BaseLocalLambdaLayer): + IGNORE_EXTENSIONS = ["pyc"] + + def __init__(self, output_dir: Path = CDK_OUT_PATH, architecture: Architecture = Architecture.X86_64): + super().__init__(output_dir) + self.package = f"{SOURCE_CODE_ROOT_PATH}[all]" + + platform_name = self._resolve_platform(architecture) + self.build_args = f"--platform {platform_name} --only-binary=:all: --upgrade" + self.build_command = f"python -m pip install {self.package} {self.build_args} --target {self.target_dir}" + self.cleanup_command = ( + f"rm -rf {self.target_dir}/boto* {self.target_dir}/s3transfer* && " + f"rm -rf {self.target_dir}/*dateutil* {self.target_dir}/urllib3* {self.target_dir}/six* && " + f"rm -rf {self.target_dir}/jmespath* && " + f"find {self.target_dir} -name '*.so' -type f -exec strip '{{}}' \\; && " + f"find {self.target_dir} -wholename '*/tests/*' -type f -delete && " + f"find {self.target_dir} -regex '^.*\\(__pycache__\\|\\.py[co]\\)$' -delete" + ) + self.source_diff_file: Path = CDK_OUT_PATH / "layer_build.diff" + + def build(self) -> str: + self.before_build() + + if self._has_source_changed(): + subprocess.run(self.build_command, shell=True) + + self.after_build() + + return str(self.output_dir) + + def after_build(self): + subprocess.run(self.cleanup_command, shell=True) + + def _has_source_changed(self) -> bool: + """Hashes source code and + + Returns + ------- + change : bool + Whether source code hash has changed + """ + diff = self.source_diff_file.read_text() if self.source_diff_file.exists() else "" + new_diff = dirhash(dirname=PACKAGE_PATH, excluded_extensions=self.IGNORE_EXTENSIONS) + if new_diff != diff or not self.output_dir.exists(): + self.source_diff_file.write_text(new_diff) + return True + + return False + + def _resolve_platform(self, architecture: Architecture) -> str: + """Returns the correct plaform name for the manylinux project (see PEP 599) + + Returns + ------- + platform_name : str + The platform tag + """ + if architecture.name == Architecture.X86_64.name: + return "manylinux1_x86_64" + elif architecture.name == Architecture.ARM_64.name: + return "manylinux2014_aarch64" + else: + raise ValueError(f"unknown architecture {architecture.name}") diff --git a/tests/events/albEventPathTrailingSlash.json b/tests/events/albEventPathTrailingSlash.json new file mode 100644 index 00000000000..c517a3f6b04 --- /dev/null +++ b/tests/events/albEventPathTrailingSlash.json @@ -0,0 +1,28 @@ +{ + "requestContext": { + "elb": { + "targetGroupArn": "arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a" + } + }, + "httpMethod": "GET", + "path": "/lambda/", + 
"queryStringParameters": { + "query": "1234ABCD" + }, + "headers": { + "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", + "accept-encoding": "gzip", + "accept-language": "en-US,en;q=0.9", + "connection": "keep-alive", + "host": "lambda-alb-123578498.us-east-2.elb.amazonaws.com", + "upgrade-insecure-requests": "1", + "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36", + "x-amzn-trace-id": "Root=1-5c536348-3d683b8b04734faae651f476", + "x-forwarded-for": "72.12.164.125", + "x-forwarded-port": "80", + "x-forwarded-proto": "http", + "x-imforwards": "20" + }, + "body": "Test", + "isBase64Encoded": false + } \ No newline at end of file diff --git a/tests/events/albMultiValueHeadersEvent.json b/tests/events/albMultiValueHeadersEvent.json new file mode 100644 index 00000000000..6b34709605c --- /dev/null +++ b/tests/events/albMultiValueHeadersEvent.json @@ -0,0 +1,35 @@ +{ + "requestContext": { + "elb": { + "targetGroupArn": "arn:aws:elasticloadbalancing:eu-central-1:1234567890:targetgroup/alb-c-Targe-11GDXTPQ7663S/804a67588bfdc10f" + } + }, + "httpMethod": "GET", + "path": "/todos", + "multiValueQueryStringParameters": {}, + "multiValueHeaders": { + "accept": [ + "*/*" + ], + "host": [ + "alb-c-LoadB-14POFKYCLBNSF-1815800096.eu-central-1.elb.amazonaws.com" + ], + "user-agent": [ + "curl/7.79.1" + ], + "x-amzn-trace-id": [ + "Root=1-62fa9327-21cdd4da4c6db451490a5fb7" + ], + "x-forwarded-for": [ + "123.123.123.123" + ], + "x-forwarded-port": [ + "80" + ], + "x-forwarded-proto": [ + "http" + ] + }, + "body": "", + "isBase64Encoded": false +} diff --git a/tests/events/apiGatewayProxyEventPathTrailingSlash.json b/tests/events/apiGatewayProxyEventPathTrailingSlash.json new file mode 100644 index 00000000000..8a321d96c8c --- /dev/null +++ b/tests/events/apiGatewayProxyEventPathTrailingSlash.json @@ -0,0 +1,80 @@ +{ + "version": "1.0", + "resource": "/my/path", + "path": "/my/path/", + "httpMethod": "GET", + "headers": { + "Header1": "value1", + "Header2": "value2" + }, + "multiValueHeaders": { + "Header1": [ + "value1" + ], + "Header2": [ + "value1", + "value2" + ] + }, + "queryStringParameters": { + "parameter1": "value1", + "parameter2": "value" + }, + "multiValueQueryStringParameters": { + "parameter1": [ + "value1", + "value2" + ], + "parameter2": [ + "value" + ] + }, + "requestContext": { + "accountId": "123456789012", + "apiId": "id", + "authorizer": { + "claims": null, + "scopes": null + }, + "domainName": "id.execute-api.us-east-1.amazonaws.com", + "domainPrefix": "id", + "extendedRequestId": "request-id", + "httpMethod": "GET", + "identity": { + "accessKey": null, + "accountId": null, + "caller": null, + "cognitoAuthenticationProvider": null, + "cognitoAuthenticationType": null, + "cognitoIdentityId": null, + "cognitoIdentityPoolId": null, + "principalOrgId": null, + "sourceIp": "192.168.0.1/32", + "user": null, + "userAgent": "user-agent", + "userArn": null, + "clientCert": { + "clientCertPem": "CERT_CONTENT", + "subjectDN": "www.example.com", + "issuerDN": "Example issuer", + "serialNumber": "a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1", + "validity": { + "notBefore": "May 28 12:30:02 2019 GMT", + "notAfter": "Aug 5 09:36:04 2021 GMT" + } + } + }, + "path": "/my/path", + "protocol": "HTTP/1.1", + "requestId": "id=", + "requestTime": "04/Mar/2020:19:15:17 +0000", + "requestTimeEpoch": 1583349317135, + "resourceId": null, + "resourcePath": "/my/path", + "stage": 
"$default" + }, + "pathParameters": null, + "stageVariables": null, + "body": "Hello from Lambda!", + "isBase64Encoded": true + } \ No newline at end of file diff --git a/tests/events/apiGatewayProxyV2EventPathTrailingSlash.json b/tests/events/apiGatewayProxyV2EventPathTrailingSlash.json new file mode 100644 index 00000000000..dfb0d98f2e1 --- /dev/null +++ b/tests/events/apiGatewayProxyV2EventPathTrailingSlash.json @@ -0,0 +1,69 @@ +{ + "version": "2.0", + "routeKey": "$default", + "rawPath": "/my/path/", + "rawQueryString": "parameter1=value1¶meter1=value2¶meter2=value", + "cookies": [ + "cookie1", + "cookie2" + ], + "headers": { + "Header1": "value1", + "Header2": "value1,value2" + }, + "queryStringParameters": { + "parameter1": "value1,value2", + "parameter2": "value" + }, + "requestContext": { + "accountId": "123456789012", + "apiId": "api-id", + "authentication": { + "clientCert": { + "clientCertPem": "CERT_CONTENT", + "subjectDN": "www.example.com", + "issuerDN": "Example issuer", + "serialNumber": "a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1", + "validity": { + "notBefore": "May 28 12:30:02 2019 GMT", + "notAfter": "Aug 5 09:36:04 2021 GMT" + } + } + }, + "authorizer": { + "jwt": { + "claims": { + "claim1": "value1", + "claim2": "value2" + }, + "scopes": [ + "scope1", + "scope2" + ] + } + }, + "domainName": "id.execute-api.us-east-1.amazonaws.com", + "domainPrefix": "id", + "http": { + "method": "POST", + "path": "/my/path", + "protocol": "HTTP/1.1", + "sourceIp": "192.168.0.1/32", + "userAgent": "agent" + }, + "requestId": "id", + "routeKey": "$default", + "stage": "$default", + "time": "12/Mar/2020:19:03:58 +0000", + "timeEpoch": 1583348638390 + }, + "body": "{\"message\": \"hello world\", \"username\": \"tom\"}", + "pathParameters": { + "parameter1": "value1" + }, + "isBase64Encoded": false, + "stageVariables": { + "stageVariable1": "value1", + "stageVariable2": "value2" + } + } \ No newline at end of file diff --git a/tests/events/lambdaFunctionUrlEventPathTrailingSlash.json b/tests/events/lambdaFunctionUrlEventPathTrailingSlash.json new file mode 100644 index 00000000000..b1f82265187 --- /dev/null +++ b/tests/events/lambdaFunctionUrlEventPathTrailingSlash.json @@ -0,0 +1,52 @@ +{ + "version": "2.0", + "routeKey": "$default", + "rawPath": "/my/path/", + "rawQueryString": "parameter1=value1¶meter1=value2¶meter2=value", + "cookies": [ + "cookie1", + "cookie2" + ], + "headers": { + "header1": "value1", + "header2": "value1,value2" + }, + "queryStringParameters": { + "parameter1": "value1,value2", + "parameter2": "value" + }, + "requestContext": { + "accountId": "123456789012", + "apiId": "", + "authentication": null, + "authorizer": { + "iam": { + "accessKey": "AKIA...", + "accountId": "111122223333", + "callerId": "AIDA...", + "cognitoIdentity": null, + "principalOrgId": null, + "userArn": "arn:aws:iam::111122223333:user/example-user", + "userId": "AIDA..." 
+ } + }, + "domainName": ".lambda-url.us-west-2.on.aws", + "domainPrefix": "", + "http": { + "method": "POST", + "path": "/my/path", + "protocol": "HTTP/1.1", + "sourceIp": "123.123.123.123", + "userAgent": "agent" + }, + "requestId": "id", + "routeKey": "$default", + "stage": "$default", + "time": "12/Mar/2020:19:03:58 +0000", + "timeEpoch": 1583348638390 + }, + "body": "Hello from client!", + "pathParameters": null, + "isBase64Encoded": false, + "stageVariables": null + } \ No newline at end of file diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index ae2c3eee43e..6b343dd1f0f 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -30,6 +30,7 @@ UnauthorizedError, ) from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.cookies import Cookie from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.data_classes import ( ALBEvent, @@ -52,6 +53,7 @@ def read_media(file_name: str) -> bytes: LOAD_GW_EVENT = load_event("apiGatewayProxyEvent.json") +LOAD_GW_EVENT_TRAILING_SLASH = load_event("apiGatewayProxyEventPathTrailingSlash.json") def test_alb_event(): @@ -75,6 +77,27 @@ def foo(): assert result["body"] == "foo" +def test_alb_event_path_trailing_slash(json_dump): + # GIVEN an Application Load Balancer proxy type event + app = ALBResolver() + + @app.get("/lambda") + def foo(): + assert isinstance(app.current_event, ALBEvent) + assert app.lambda_context == {} + assert app.current_event.request_context.elb_target_group_arn is not None + return Response(200, content_types.TEXT_HTML, "foo") + + # WHEN calling the event handler using path with trailing "/" + result = app(load_event("albEventPathTrailingSlash.json"), {}) + + # THEN + assert result["statusCode"] == 404 + assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + expected = {"statusCode": 404, "message": "Not found"} + assert result["body"] == json_dump(expected) + + def test_api_gateway_v1(): # GIVEN a Http API V1 proxy type event app = APIGatewayRestResolver() @@ -92,7 +115,43 @@ def get_lambda() -> Response: # THEN process event correctly # AND set the current_event type as APIGatewayProxyEvent assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] + + +def test_api_gateway_v1_path_trailing_slash(): + # GIVEN a Http API V1 proxy type event + app = APIGatewayRestResolver() + + @app.get("/my/path") + def get_lambda() -> Response: + return Response(200, content_types.APPLICATION_JSON, json.dumps({"foo": "value"})) + + # WHEN calling the event handler + result = app(LOAD_GW_EVENT_TRAILING_SLASH, {}) + + # THEN process event correctly + # AND set the current_event type as APIGatewayProxyEvent + assert result["statusCode"] == 200 + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] + + +def test_api_gateway_v1_cookies(): + # GIVEN a Http API V1 proxy type event + app = APIGatewayRestResolver() + cookie = Cookie(name="CookieMonster", value="MonsterCookie") + + @app.get("/my/path") + def get_lambda() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEvent) + return Response(200, content_types.TEXT_PLAIN, "Hello world", cookies=[cookie]) + + # WHEN calling the event handler + result = app(LOAD_GW_EVENT, {}) + + # THEN 
process event correctly + # AND set the current_event type as APIGatewayProxyEvent + assert result["statusCode"] == 200 + assert result["multiValueHeaders"]["Set-Cookie"] == ["CookieMonster=MonsterCookie; Secure"] def test_api_gateway(): @@ -110,10 +169,28 @@ def get_lambda() -> Response: # THEN process event correctly # AND set the current_event type as APIGatewayProxyEvent assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_HTML] assert result["body"] == "foo" +def test_api_gateway_event_path_trailing_slash(json_dump): + # GIVEN a Rest API Gateway proxy type event + app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent) + + @app.get("/my/path") + def get_lambda() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEvent) + return Response(200, content_types.TEXT_HTML, "foo") + + # WHEN calling the event handler + result = app(LOAD_GW_EVENT_TRAILING_SLASH, {}) + # THEN + assert result["statusCode"] == 404 + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] + expected = {"statusCode": 404, "message": "Not found"} + assert result["body"] == json_dump(expected) + + def test_api_gateway_v2(): # GIVEN a Http API V2 proxy type event app = APIGatewayHttpResolver() @@ -132,9 +209,49 @@ def my_path() -> Response: # AND set the current_event type as APIGatewayProxyEventV2 assert result["statusCode"] == 200 assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN + assert "Cookies" not in result["headers"] assert result["body"] == "tom" +def test_api_gateway_v2_http_path_trailing_slash(json_dump): + # GIVEN a Http API V2 proxy type event + app = APIGatewayHttpResolver() + + @app.post("/my/path") + def my_path() -> Response: + post_data = app.current_event.json_body + return Response(200, content_types.TEXT_PLAIN, post_data["username"]) + + # WHEN calling the event handler + result = app(load_event("apiGatewayProxyV2EventPathTrailingSlash.json"), {}) + + # THEN expect a 404 response + assert result["statusCode"] == 404 + assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + expected = {"statusCode": 404, "message": "Not found"} + assert result["body"] == json_dump(expected) + + +def test_api_gateway_v2_cookies(): + # GIVEN a Http API V2 proxy type event + app = APIGatewayHttpResolver() + cookie = Cookie(name="CookieMonster", value="MonsterCookie") + + @app.post("/my/path") + def my_path() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEventV2) + return Response(200, content_types.TEXT_PLAIN, "Hello world", cookies=[cookie]) + + # WHEN calling the event handler + result = app(load_event("apiGatewayProxyV2Event.json"), {}) + + # THEN process event correctly + # AND set the current_event type as APIGatewayProxyEventV2 + assert result["statusCode"] == 200 + assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN + assert result["cookies"] == ["CookieMonster=MonsterCookie; Secure"] + + def test_include_rule_matching(): # GIVEN app = ApiGatewayResolver() @@ -149,7 +266,7 @@ def get_lambda(my_id: str, name: str) -> Response: # THEN assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_HTML] assert result["body"] == "path" @@ -200,7 +317,7 @@ def handler(event, context): result = handler(LOAD_GW_EVENT, None) assert 
result["statusCode"] == 404 # AND cors headers are not returned - assert "Access-Control-Allow-Origin" not in result["headers"] + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] def test_cors(): @@ -223,17 +340,17 @@ def handler(event, context): result = handler(LOAD_GW_EVENT, None) # THEN the headers should include cors headers - assert "headers" in result - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert headers["Access-Control-Allow-Origin"] == "*" + assert "multiValueHeaders" in result + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert headers["Access-Control-Allow-Origin"] == ["*"] assert "Access-Control-Allow-Credentials" not in headers - assert headers["Access-Control-Allow-Headers"] == ",".join(sorted(CORSConfig._REQUIRED_HEADERS)) + assert headers["Access-Control-Allow-Headers"] == [",".join(sorted(CORSConfig._REQUIRED_HEADERS))] # THEN for routes without cors flag return no cors headers mock_event = {"path": "/my/request", "httpMethod": "GET"} result = handler(mock_event, None) - assert "Access-Control-Allow-Origin" not in result["headers"] + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] def test_cors_preflight_body_is_empty_not_null(): @@ -272,8 +389,8 @@ def handler(event, context): assert isinstance(body, str) decompress = zlib.decompress(base64.b64decode(body), wbits=zlib.MAX_WBITS | 16).decode("UTF-8") assert decompress == expected_value - headers = result["headers"] - assert headers["Content-Encoding"] == "gzip" + headers = result["multiValueHeaders"] + assert headers["Content-Encoding"] == ["gzip"] def test_base64_encode(): @@ -292,8 +409,8 @@ def read_image() -> Response: assert result["isBase64Encoded"] is True body = result["body"] assert isinstance(body, str) - headers = result["headers"] - assert headers["Content-Encoding"] == "gzip" + headers = result["multiValueHeaders"] + assert headers["Content-Encoding"] == ["gzip"] def test_compress_no_accept_encoding(): @@ -348,9 +465,9 @@ def handler(event, context): result = handler({"path": "/success", "httpMethod": "GET"}, None) # THEN return the set Cache-Control - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert headers["Cache-Control"] == "max-age=600" + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert headers["Cache-Control"] == ["max-age=600"] def test_cache_control_non_200(): @@ -369,9 +486,9 @@ def handler(event, context): result = handler({"path": "/fails", "httpMethod": "DELETE"}, None) # THEN return a Cache-Control of "no-cache" - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert headers["Cache-Control"] == "no-cache" + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert headers["Cache-Control"] == ["no-cache"] def test_rest_api(): @@ -388,7 +505,7 @@ def rest_func() -> Dict: # THEN automatically process this as a json rest api response assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected_str = json.dumps(expected_dict, separators=(",", ":"), indent=None, cls=Encoder) assert result["body"] == expected_str @@ -403,7 +520,7 @@ def rest_func() -> Response: status_code=404, 
content_type="used-if-not-set-in-header", body="Not found", - headers={"Content-Type": "header-content-type-wins", "custom": "value"}, + headers={"Content-Type": ["header-content-type-wins"], "custom": ["value"]}, ) # WHEN calling the event handler @@ -411,8 +528,8 @@ def rest_func() -> Response: # THEN the result can include some additional field control like overriding http headers assert result["statusCode"] == 404 - assert result["headers"]["Content-Type"] == "header-content-type-wins" - assert result["headers"]["custom"] == "value" + assert result["multiValueHeaders"]["Content-Type"] == ["header-content-type-wins"] + assert result["multiValueHeaders"]["custom"] == ["value"] assert result["body"] == "Not found" @@ -441,16 +558,16 @@ def another_one(): result = app(event, None) # THEN routes by default return the custom cors headers - assert "headers" in result - headers = result["headers"] - assert headers["Content-Type"] == content_types.APPLICATION_JSON - assert headers["Access-Control-Allow-Origin"] == cors_config.allow_origin - expected_allows_headers = ",".join(sorted(set(allow_header + cors_config._REQUIRED_HEADERS))) + assert "multiValueHeaders" in result + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.APPLICATION_JSON] + assert headers["Access-Control-Allow-Origin"] == [cors_config.allow_origin] + expected_allows_headers = [",".join(sorted(set(allow_header + cors_config._REQUIRED_HEADERS)))] assert headers["Access-Control-Allow-Headers"] == expected_allows_headers - assert headers["Access-Control-Expose-Headers"] == ",".join(cors_config.expose_headers) - assert headers["Access-Control-Max-Age"] == str(cors_config.max_age) + assert headers["Access-Control-Expose-Headers"] == [",".join(cors_config.expose_headers)] + assert headers["Access-Control-Max-Age"] == [str(cors_config.max_age)] assert "Access-Control-Allow-Credentials" in headers - assert headers["Access-Control-Allow-Credentials"] == "true" + assert headers["Access-Control-Allow-Credentials"] == ["true"] # AND custom cors was set on the app assert isinstance(app._cors, CORSConfig) @@ -459,7 +576,7 @@ def another_one(): # AND routes without cors don't include "Access-Control" headers event = {"path": "/another-one", "httpMethod": "GET"} result = app(event, None) - headers = result["headers"] + headers = result["multiValueHeaders"] assert "Access-Control-Allow-Origin" not in headers @@ -474,7 +591,7 @@ def test_no_content_response(): # THEN return an None body and no Content-Type header assert result["statusCode"] == response.status_code assert result["body"] is None - headers = result["headers"] + headers = result["multiValueHeaders"] assert "Content-Type" not in headers @@ -489,7 +606,7 @@ def test_no_matches_with_cors(): # THEN return a 404 # AND cors headers are returned assert result["statusCode"] == 404 - assert "Access-Control-Allow-Origin" in result["headers"] + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] assert "Not found" in result["body"] @@ -517,10 +634,10 @@ def post_no_cors(): # AND include Access-Control-Allow-Methods of the cors methods used assert result["statusCode"] == 204 assert result["body"] == "" - headers = result["headers"] + headers = result["multiValueHeaders"] assert "Content-Type" not in headers - assert "Access-Control-Allow-Origin" in result["headers"] - assert headers["Access-Control-Allow-Methods"] == "DELETE,GET,OPTIONS" + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] + assert 
headers["Access-Control-Allow-Methods"] == [",".join(sorted(["DELETE", "GET", "OPTIONS"]))] def test_custom_preflight_response(): @@ -535,7 +652,7 @@ def custom_preflight(): status_code=200, content_type=content_types.TEXT_HTML, body="Foo", - headers={"Access-Control-Allow-Methods": "CUSTOM"}, + headers={"Access-Control-Allow-Methods": ["CUSTOM"]}, ) @app.route(method="CUSTOM", rule="/some-call", cors=True) @@ -548,10 +665,10 @@ def custom_method(): # THEN return the custom preflight response assert result["statusCode"] == 200 assert result["body"] == "Foo" - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert "Access-Control-Allow-Origin" in result["headers"] - assert headers["Access-Control-Allow-Methods"] == "CUSTOM" + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] + assert headers["Access-Control-Allow-Methods"] == ["CUSTOM"] def test_service_error_responses(json_dump): @@ -569,7 +686,7 @@ def bad_request_error(): # THEN return the bad request error response # AND status code equals 400 assert result["statusCode"] == 400 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 400, "message": "Missing required parameter"} assert result["body"] == json_dump(expected) @@ -584,7 +701,7 @@ def unauthorized_error(): # THEN return the unauthorized error response # AND status code equals 401 assert result["statusCode"] == 401 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 401, "message": "Unauthorized"} assert result["body"] == json_dump(expected) @@ -599,7 +716,7 @@ def not_found_error(): # THEN return the not found error response # AND status code equals 404 assert result["statusCode"] == 404 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 404, "message": "Not found"} assert result["body"] == json_dump(expected) @@ -614,7 +731,7 @@ def internal_server_error(): # THEN return the internal server error response # AND status code equals 500 assert result["statusCode"] == 500 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 500, "message": "Internal server error"} assert result["body"] == json_dump(expected) @@ -629,8 +746,8 @@ def service_error(): # THEN return the service error response # AND status code equals 502 assert result["statusCode"] == 502 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON - assert "Access-Control-Allow-Origin" in result["headers"] + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] expected = {"statusCode": 502, "message": "Something went wrong!"} assert result["body"] == json_dump(expected) @@ -653,8 +770,8 @@ def raises_error(): # AND include the exception traceback in the response assert result["statusCode"] == 500 assert "Traceback (most recent call last)" in result["body"] - headers = result["headers"] - 
assert headers["Content-Type"] == content_types.TEXT_PLAIN + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_PLAIN] def test_debug_unhandled_exceptions_debug_off(): @@ -676,16 +793,6 @@ def raises_error(): assert e.value.args == ("Foo",) -def test_debug_mode_environment_variable(monkeypatch): - # GIVEN a debug mode environment variable is set - monkeypatch.setenv(constants.EVENT_HANDLER_DEBUG_ENV, "true") - app = ApiGatewayResolver() - - # WHEN calling app._debug - # THEN the debug mode is enabled - assert app._debug - - def test_powertools_dev_sets_debug_mode(monkeypatch): # GIVEN a debug mode environment variable is set monkeypatch.setenv(constants.POWERTOOLS_DEV_ENV, "true") @@ -951,7 +1058,7 @@ def base(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router(): @@ -969,7 +1076,7 @@ def foo(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_params(): @@ -995,7 +1102,7 @@ def foo(account_id): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_prefix(): @@ -1014,7 +1121,7 @@ def foo(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_prefix_equals_path(): @@ -1034,7 +1141,7 @@ def foo(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_different_methods(): @@ -1084,7 +1191,7 @@ def patch_func(): result = app(LOAD_GW_EVENT, None) assert result["statusCode"] == 404 # AND cors headers are not returned - assert "Access-Control-Allow-Origin" not in result["headers"] + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] def test_duplicate_routes(): @@ -1143,11 +1250,11 @@ def foo(account_id): # THEN events are processed correctly assert get_result["statusCode"] == 200 - assert get_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert get_result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] assert post_result["statusCode"] == 200 - assert post_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert post_result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] assert put_result["statusCode"] == 404 - assert put_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert put_result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_access_to_resolver(): @@ -1166,7 +1273,7 @@ def foo(): result = app(LOAD_GW_EVENT, {}) assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == 
content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_exception_handler(): @@ -1192,7 +1299,7 @@ def get_lambda() -> Response: # THEN call the exception_handler assert result["statusCode"] == 418 - assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_HTML] assert result["body"] == "Foo!" @@ -1219,7 +1326,7 @@ def get_lambda() -> Response: # THEN call the exception_handler assert result["statusCode"] == 500 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] assert result["body"] == "CUSTOM ERROR FORMAT" @@ -1238,7 +1345,7 @@ def handle_not_found(exc: NotFoundError) -> Response: # THEN call the exception_handler assert result["statusCode"] == 404 - assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_PLAIN] assert result["body"] == "I am a teapot!" @@ -1276,7 +1383,7 @@ def get_lambda() -> Response: # THEN call the exception_handler assert result["statusCode"] == 400 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 400, "message": "Bad request"} assert result["body"] == json_dump(expected) diff --git a/tests/functional/event_handler/test_lambda_function_url.py b/tests/functional/event_handler/test_lambda_function_url.py index 4d4d5c39f35..41baed68a7c 100644 --- a/tests/functional/event_handler/test_lambda_function_url.py +++ b/tests/functional/event_handler/test_lambda_function_url.py @@ -3,6 +3,7 @@ Response, content_types, ) +from aws_lambda_powertools.shared.cookies import Cookie from aws_lambda_powertools.utilities.data_classes import LambdaFunctionUrlEvent from tests.functional.utils import load_event @@ -25,9 +26,46 @@ def foo(): # AND set the current_event type as LambdaFunctionUrlEvent assert result["statusCode"] == 200 assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert "Cookies" not in result["headers"] assert result["body"] == "foo" +def test_lambda_function_url_event_path_trailing_slash(): + # GIVEN a Lambda Function Url type event + app = LambdaFunctionUrlResolver() + + @app.post("/my/path") + def foo(): + return Response(200, content_types.TEXT_HTML, "foo") + + # WHEN calling the event handler with an event with a trailing slash + result = app(load_event("lambdaFunctionUrlEventPathTrailingSlash.json"), {}) + + # THEN return a 404 error + assert result["statusCode"] == 404 + assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + + +def test_lambda_function_url_event_with_cookies(): + # GIVEN a Lambda Function Url type event + app = LambdaFunctionUrlResolver() + cookie = Cookie(name="CookieMonster", value="MonsterCookie") + + @app.get("/") + def foo(): + assert isinstance(app.current_event, LambdaFunctionUrlEvent) + assert app.lambda_context == {} + return Response(200, content_types.TEXT_PLAIN, "foo", cookies=[cookie]) + + # WHEN calling the event handler + result = app(load_event("lambdaFunctionUrlEvent.json"), {}) + + # THEN process event correctly + # AND set the current_event type as LambdaFunctionUrlEvent + assert result["statusCode"] == 200 + assert result["cookies"] == ["CookieMonster=MonsterCookie; Secure"] + + def 
test_lambda_function_url_no_matches(): # GIVEN a Lambda Function Url type event app = LambdaFunctionUrlResolver() diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py index b5cf79727b1..657a4b6bd13 100644 --- a/tests/functional/idempotency/conftest.py +++ b/tests/functional/idempotency/conftest.py @@ -172,18 +172,24 @@ def expected_params_put_item_with_validation(hashed_idempotency_key, hashed_vali @pytest.fixture -def hashed_idempotency_key(lambda_apigw_event, default_jmespath, lambda_context): +def hashed_idempotency_key(request, lambda_apigw_event, default_jmespath, lambda_context): compiled_jmespath = jmespath.compile(default_jmespath) data = compiled_jmespath.search(lambda_apigw_event) - return "test-func.lambda_handler#" + hash_idempotency_key(data) + return ( + f"test-func.{request.function.__module__}.{request.function.__qualname__}..lambda_handler#" + + hash_idempotency_key(data) + ) @pytest.fixture -def hashed_idempotency_key_with_envelope(lambda_apigw_event): +def hashed_idempotency_key_with_envelope(request, lambda_apigw_event): event = extract_data_from_envelope( data=lambda_apigw_event, envelope=envelopes.API_GATEWAY_HTTP, jmespath_options={} ) - return "test-func.lambda_handler#" + hash_idempotency_key(event) + return ( + f"test-func.{request.function.__module__}.{request.function.__qualname__}..lambda_handler#" + + hash_idempotency_key(event) + ) @pytest.fixture diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index f63d7347b1c..e5c5c777971 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -48,6 +48,7 @@ from tests.functional.utils import json_serialize, load_event TABLE_NAME = "TEST_TABLE" +TESTS_MODULE_PREFIX = "test-func.functional.idempotency.test_idempotency" def get_dataclasses_lib(): @@ -786,7 +787,7 @@ def lambda_handler(event, context): def test_idempotent_lambda_expires_in_progress_unavailable_remaining_time(): mock_event = {"data": "value"} - idempotency_key = "test-func.function#" + hash_idempotency_key(mock_event) + idempotency_key = f"{TESTS_MODULE_PREFIX}.test_idempotent_lambda_expires_in_progress_unavailable_remaining_time..function#{hash_idempotency_key(mock_event)}" # noqa E501 persistence_layer = MockPersistenceLayer(expected_idempotency_key=idempotency_key) expected_result = {"message": "Foo"} @@ -1125,7 +1126,8 @@ def _delete_record(self, data_record: DataRecord) -> None: def test_idempotent_lambda_event_source(lambda_context): # Scenario to validate that we can use the event_source decorator before or after the idempotent decorator mock_event = load_event("apiGatewayProxyV2Event.json") - persistence_layer = MockPersistenceLayer("test-func.lambda_handler#" + hash_idempotency_key(mock_event)) + idempotency_key = f"{TESTS_MODULE_PREFIX}.test_idempotent_lambda_event_source..lambda_handler#{hash_idempotency_key(mock_event)}" # noqa E501 + persistence_layer = MockPersistenceLayer(idempotency_key) expected_result = {"message": "Foo"} # GIVEN an event_source decorator @@ -1145,7 +1147,9 @@ def lambda_handler(event, _): def test_idempotent_function(): # Scenario to validate we can use idempotent_function with any function mock_event = {"data": "value"} - idempotency_key = "test-func.record_handler#" + hash_idempotency_key(mock_event) + idempotency_key = ( + f"{TESTS_MODULE_PREFIX}.test_idempotent_function..record_handler#{hash_idempotency_key(mock_event)}" + ) persistence_layer = 
MockPersistenceLayer(expected_idempotency_key=idempotency_key) expected_result = {"message": "Foo"} @@ -1163,7 +1167,7 @@ def test_idempotent_function_arbitrary_args_kwargs(): # Scenario to validate we can use idempotent_function with a function # with an arbitrary number of args and kwargs mock_event = {"data": "value"} - idempotency_key = "test-func.record_handler#" + hash_idempotency_key(mock_event) + idempotency_key = f"{TESTS_MODULE_PREFIX}.test_idempotent_function_arbitrary_args_kwargs..record_handler#{hash_idempotency_key(mock_event)}" # noqa E501 persistence_layer = MockPersistenceLayer(expected_idempotency_key=idempotency_key) expected_result = {"message": "Foo"} @@ -1179,7 +1183,7 @@ def record_handler(arg_one, arg_two, record, is_record): def test_idempotent_function_invalid_data_kwarg(): mock_event = {"data": "value"} - idempotency_key = "test-func.record_handler#" + hash_idempotency_key(mock_event) + idempotency_key = f"{TESTS_MODULE_PREFIX}.test_idempotent_function_invalid_data_kwarg..record_handler#{hash_idempotency_key(mock_event)}" # noqa E501 persistence_layer = MockPersistenceLayer(expected_idempotency_key=idempotency_key) expected_result = {"message": "Foo"} keyword_argument = "payload" @@ -1216,7 +1220,7 @@ def record_handler(record): def test_idempotent_function_and_lambda_handler(lambda_context): # Scenario to validate we can use both idempotent_function and idempotent decorators mock_event = {"data": "value"} - idempotency_key = "test-func.record_handler#" + hash_idempotency_key(mock_event) + idempotency_key = f"{TESTS_MODULE_PREFIX}.test_idempotent_function_and_lambda_handler..record_handler#{hash_idempotency_key(mock_event)}" # noqa E501 persistence_layer = MockPersistenceLayer(expected_idempotency_key=idempotency_key) expected_result = {"message": "Foo"} @@ -1224,7 +1228,9 @@ def test_idempotent_function_and_lambda_handler(lambda_context): def record_handler(record): return expected_result - persistence_layer = MockPersistenceLayer("test-func.lambda_handler#" + hash_idempotency_key(mock_event)) + persistence_layer = MockPersistenceLayer( + f"{TESTS_MODULE_PREFIX}.test_idempotent_function_and_lambda_handler..lambda_handler#{hash_idempotency_key(mock_event)}" # noqa E501 + ) @idempotent(persistence_store=persistence_layer) def lambda_handler(event, _): @@ -1245,7 +1251,9 @@ def test_idempotent_data_sorting(): # Scenario to validate same data in different order hashes to the same idempotency key data_one = {"data": "test message 1", "more_data": "more data 1"} data_two = {"more_data": "more data 1", "data": "test message 1"} - idempotency_key = "test-func.dummy#" + hash_idempotency_key(data_one) + idempotency_key = ( + f"{TESTS_MODULE_PREFIX}.test_idempotent_data_sorting..dummy#{hash_idempotency_key(data_one)}" + ) # Assertion will happen in MockPersistenceLayer persistence_layer = MockPersistenceLayer(expected_idempotency_key=idempotency_key) @@ -1353,7 +1361,7 @@ def test_idempotent_function_dataclass_with_jmespath(): dataclasses = get_dataclasses_lib() config = IdempotencyConfig(event_key_jmespath="transaction_id", use_local_cache=True) mock_event = {"customer_id": "fake", "transaction_id": "fake-id"} - idempotency_key = "test-func.collect_payment#" + hash_idempotency_key(mock_event["transaction_id"]) + idempotency_key = f"{TESTS_MODULE_PREFIX}.test_idempotent_function_dataclass_with_jmespath..collect_payment#{hash_idempotency_key(mock_event['transaction_id'])}" # noqa E501 persistence_layer = MockPersistenceLayer(expected_idempotency_key=idempotency_key) 
@dataclasses.dataclass @@ -1378,7 +1386,7 @@ def test_idempotent_function_pydantic_with_jmespath(): # GIVEN config = IdempotencyConfig(event_key_jmespath="transaction_id", use_local_cache=True) mock_event = {"customer_id": "fake", "transaction_id": "fake-id"} - idempotency_key = "test-func.collect_payment#" + hash_idempotency_key(mock_event["transaction_id"]) + idempotency_key = f"{TESTS_MODULE_PREFIX}.test_idempotent_function_pydantic_with_jmespath..collect_payment#{hash_idempotency_key(mock_event['transaction_id'])}" # noqa E501 persistence_layer = MockPersistenceLayer(expected_idempotency_key=idempotency_key) class Payment(BaseModel): diff --git a/tests/functional/idempotency/utils.py b/tests/functional/idempotency/utils.py index 797b696aba4..f9cdaf05d0a 100644 --- a/tests/functional/idempotency/utils.py +++ b/tests/functional/idempotency/utils.py @@ -14,9 +14,13 @@ def hash_idempotency_key(data: Any): def build_idempotency_put_item_stub( data: Dict, function_name: str = "test-func", + function_qualified_name: str = "test_idempotent_lambda_first_execution_event_mutation.", + module_name: str = "functional.idempotency.test_idempotency", handler_name: str = "lambda_handler", ) -> Dict: - idempotency_key_hash = f"{function_name}.{handler_name}#{hash_idempotency_key(data)}" + idempotency_key_hash = ( + f"{function_name}.{module_name}.{function_qualified_name}.{handler_name}#{hash_idempotency_key(data)}" + ) return { "ConditionExpression": ( "attribute_not_exists(#id) OR #expiry < :now OR " @@ -43,9 +47,13 @@ def build_idempotency_update_item_stub( data: Dict, handler_response: Dict, function_name: str = "test-func", + function_qualified_name: str = "test_idempotent_lambda_first_execution_event_mutation.", + module_name: str = "functional.idempotency.test_idempotency", handler_name: str = "lambda_handler", ) -> Dict: - idempotency_key_hash = f"{function_name}.{handler_name}#{hash_idempotency_key(data)}" + idempotency_key_hash = ( + f"{function_name}.{module_name}.{function_qualified_name}.{handler_name}#{hash_idempotency_key(data)}" + ) serialized_lambda_response = json_serialize(handler_response) return { "ExpressionAttributeNames": { diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index 235a3f8f8da..a0113b62486 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -2,6 +2,7 @@ import datetime import json import zipfile +from decimal import Clamped, Context, Inexact, Overflow, Rounded, Underflow from secrets import compare_digest from urllib.parse import quote_plus @@ -76,8 +77,6 @@ ConnectContactFlowInitiationMethod, ) from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( - AttributeValue, - AttributeValueType, DynamoDBRecordEventName, DynamoDBStreamEvent, StreamRecord, @@ -491,7 +490,13 @@ def test_connect_contact_flow_event_all(): assert event.parameters == {"ParameterOne": "One", "ParameterTwo": "Two"} -def test_dynamo_db_stream_trigger_event(): +def test_dynamodb_stream_trigger_event(): + decimal_context = Context( + Emin=-128, + Emax=126, + prec=38, + traps=[Clamped, Overflow, Inexact, Rounded, Underflow], + ) event = DynamoDBStreamEvent(load_event("dynamoStreamEvent.json")) records = list(event.records) @@ -503,20 +508,8 @@ def test_dynamo_db_stream_trigger_event(): assert dynamodb.approximate_creation_date_time is None keys = dynamodb.keys assert keys is not None - id_key = keys["Id"] - assert id_key.b_value is None - assert id_key.bs_value is None - assert 
id_key.bool_value is None - assert id_key.list_value is None - assert id_key.map_value is None - assert id_key.n_value == "101" - assert id_key.ns_value is None - assert id_key.null_value is None - assert id_key.s_value is None - assert id_key.ss_value is None - message_key = dynamodb.new_image["Message"] - assert message_key is not None - assert message_key.s_value == "New item!" + assert keys["Id"] == decimal_context.create_decimal(101) + assert dynamodb.new_image["Message"] == "New item!" assert dynamodb.old_image is None assert dynamodb.sequence_number == "111" assert dynamodb.size_bytes == 26 @@ -529,129 +522,61 @@ def test_dynamo_db_stream_trigger_event(): assert record.user_identity is None -def test_dynamo_attribute_value_b_value(): - example_attribute_value = {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.Binary - assert attribute_value.b_value == attribute_value.get_value - - -def test_dynamo_attribute_value_bs_value(): - example_attribute_value = {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.BinarySet - assert attribute_value.bs_value == attribute_value.get_value - - -def test_dynamo_attribute_value_bool_value(): - example_attribute_value = {"BOOL": True} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.Boolean - assert attribute_value.bool_value == attribute_value.get_value - - -def test_dynamo_attribute_value_list_value(): - example_attribute_value = {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]} - attribute_value = AttributeValue(example_attribute_value) - list_value = attribute_value.list_value - assert list_value is not None - item = list_value[0] - assert item.s_value == "Cookies" - assert attribute_value.get_type == AttributeValueType.List - assert attribute_value.l_value == attribute_value.list_value - assert attribute_value.list_value == attribute_value.get_value - - -def test_dynamo_attribute_value_map_value(): - example_attribute_value = {"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}} - - attribute_value = AttributeValue(example_attribute_value) - - map_value = attribute_value.map_value - assert map_value is not None - item = map_value["Name"] - assert item.s_value == "Joe" - assert attribute_value.get_type == AttributeValueType.Map - assert attribute_value.m_value == attribute_value.map_value - assert attribute_value.map_value == attribute_value.get_value - - -def test_dynamo_attribute_value_n_value(): - example_attribute_value = {"N": "123.45"} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.Number - assert attribute_value.n_value == attribute_value.get_value - - -def test_dynamo_attribute_value_ns_value(): - example_attribute_value = {"NS": ["42.2", "-19", "7.5", "3.14"]} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.NumberSet - assert attribute_value.ns_value == attribute_value.get_value - - -def test_dynamo_attribute_value_null_value(): - example_attribute_value = {"NULL": True} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.Null - assert attribute_value.null_value is None - assert attribute_value.null_value == 
attribute_value.get_value - - -def test_dynamo_attribute_value_s_value(): - example_attribute_value = {"S": "Hello"} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.String - assert attribute_value.s_value == attribute_value.get_value - - -def test_dynamo_attribute_value_ss_value(): - example_attribute_value = {"SS": ["Giraffe", "Hippo", "Zebra"]} - - attribute_value = AttributeValue(example_attribute_value) - - assert attribute_value.get_type == AttributeValueType.StringSet - assert attribute_value.ss_value == attribute_value.get_value - - -def test_dynamo_attribute_value_type_error(): - example_attribute_value = {"UNSUPPORTED": "'value' should raise a type error"} - - attribute_value = AttributeValue(example_attribute_value) - - with pytest.raises(TypeError): - print(attribute_value.get_value) - with pytest.raises(ValueError): - print(attribute_value.get_type) - - -def test_stream_record_keys_with_valid_keys(): - attribute_value = {"Foo": "Bar"} - record = StreamRecord({"Keys": {"Key1": attribute_value}}) - assert record.keys == {"Key1": AttributeValue(attribute_value)} +def test_dynamodb_stream_record_deserialization(): + byte_list = [s.encode("utf-8") for s in ["item1", "item2"]] + decimal_context = Context( + Emin=-128, + Emax=126, + prec=38, + traps=[Clamped, Overflow, Inexact, Rounded, Underflow], + ) + data = { + "Keys": {"key1": {"attr1": "value1"}}, + "NewImage": { + "Name": {"S": "Joe"}, + "Age": {"N": "35"}, + "TypesMap": { + "M": { + "string": {"S": "value"}, + "number": {"N": "100"}, + "bool": {"BOOL": True}, + "dict": {"M": {"key": {"S": "value"}}}, + "stringSet": {"SS": ["item1", "item2"]}, + "numberSet": {"NS": ["100", "200", "300"]}, + "binary": {"B": b"\x00"}, + "byteSet": {"BS": byte_list}, + "list": {"L": [{"S": "item1"}, {"N": "3.14159"}, {"BOOL": False}]}, + "null": {"NULL": True}, + }, + }, + }, + } + record = StreamRecord(data) + assert record.new_image == { + "Name": "Joe", + "Age": decimal_context.create_decimal("35"), + "TypesMap": { + "string": "value", + "number": decimal_context.create_decimal("100"), + "bool": True, + "dict": {"key": "value"}, + "stringSet": {"item1", "item2"}, + "numberSet": {decimal_context.create_decimal(n) for n in ["100", "200", "300"]}, + "binary": b"\x00", + "byteSet": set(byte_list), + "list": ["item1", decimal_context.create_decimal("3.14159"), False], + "null": None, + }, + } -def test_stream_record_keys_with_no_keys(): +def test_dynamodb_stream_record_keys_with_no_keys(): record = StreamRecord({}) assert record.keys is None -def test_stream_record_keys_overrides_dict_wrapper_keys(): - data = {"Keys": {"key1": {"attr1": "value1"}}} +def test_dynamodb_stream_record_keys_overrides_dict_wrapper_keys(): + data = {"Keys": {"key1": {"N": "101"}}} record = StreamRecord(data) assert record.keys != data.keys() diff --git a/tests/functional/test_headers_serializer.py b/tests/functional/test_headers_serializer.py new file mode 100644 index 00000000000..8a27ce8baa8 --- /dev/null +++ b/tests/functional/test_headers_serializer.py @@ -0,0 +1,147 @@ +from collections import defaultdict + +import pytest + +from aws_lambda_powertools.shared.headers_serializer import ( + HttpApiHeadersSerializer, + MultiValueHeadersSerializer, + SingleValueHeadersSerializer, +) + + +def test_http_api_headers_serializer(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + header_values = ["bar", "zbr"] + headers = {"Foo": header_values} + + serializer = HttpApiHeadersSerializer() + payload = 
serializer.serialize(headers=headers, cookies=cookies) + + assert payload["cookies"] == cookies + assert payload["headers"]["Foo"] == ", ".join(header_values) + + +def test_http_api_headers_serializer_with_empty_values(): + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[]) + assert payload == {"headers": {}, "cookies": []} + + +def test_http_api_headers_serializer_with_headers_only(): + content_type = "text/html" + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": [content_type]}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_http_api_headers_serializer_with_single_headers_only(): + content_type = "text/html" + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": content_type}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_http_api_headers_serializer_with_cookies_only(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=cookies) + assert payload["cookies"] == cookies + + +def test_multi_value_headers_serializer(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + header_values = ["bar", "zbr"] + headers = {"Foo": header_values} + + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers=headers, cookies=cookies) + + assert payload["multiValueHeaders"]["Set-Cookie"] == cookies + assert payload["multiValueHeaders"]["Foo"] == header_values + + +def test_multi_value_headers_serializer_with_headers_only(): + content_type = "text/html" + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": [content_type]}, cookies=[]) + assert payload["multiValueHeaders"]["Content-Type"] == [content_type] + + +def test_multi_value_headers_serializer_with_single_headers_only(): + content_type = "text/html" + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": content_type}, cookies=[]) + assert payload["multiValueHeaders"]["Content-Type"] == [content_type] + + +def test_multi_value_headers_serializer_with_cookies_only(): + cookie = "UUID=12345" + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[cookie]) + assert payload["multiValueHeaders"]["Set-Cookie"] == [cookie] + + +def test_multi_value_headers_serializer_with_empty_values(): + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[]) + assert payload["multiValueHeaders"] == defaultdict(list) + + +def test_single_value_headers_serializer(): + cookie = "UUID=12345" + content_type = "text/html" + headers = {"Content-Type": [content_type]} + + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers=headers, cookies=[cookie]) + assert payload["headers"]["Content-Type"] == content_type + assert payload["headers"]["Set-Cookie"] == cookie + + +def test_single_value_headers_serializer_with_headers_only(): + content_type = "text/html" + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": [content_type]}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_single_value_headers_serializer_with_single_headers_only(): + content_type = "text/html" + serializer = SingleValueHeadersSerializer() + payload = 
serializer.serialize(headers={"Content-Type": content_type}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_single_value_headers_serializer_with_cookies_only(): + cookie = "UUID=12345" + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[cookie]) + assert payload["headers"] == {"Set-Cookie": cookie} + + +def test_single_value_headers_serializer_with_empty_values(): + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[]) + assert payload["headers"] == {} + + +def test_single_value_headers_with_multiple_cookies_warning(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + warning_message = "Can't encode more than one cookie in the response. Sending the last cookie only." + serializer = SingleValueHeadersSerializer() + + with pytest.warns(match=warning_message): + payload = serializer.serialize(cookies=cookies, headers={}) + + assert payload["headers"]["Set-Cookie"] == cookies[-1] + + +def test_single_value_headers_with_multiple_header_values_warning(): + headers = {"Foo": ["bar", "zbr"]} + warning_message = "Can't encode more than one header value for the same key." + serializer = SingleValueHeadersSerializer() + + with pytest.warns(match=warning_message): + payload = serializer.serialize(cookies=[], headers=headers) + + assert payload["headers"]["Foo"] == headers["Foo"][-1] diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py index b5489fb7c62..1d50de9e85e 100644 --- a/tests/functional/test_utilities_batch.py +++ b/tests/functional/test_utilities_batch.py @@ -1,25 +1,16 @@ import json -import math from random import randint from typing import Callable, Dict, Optional -from unittest.mock import patch -from uuid import uuid4 import pytest from botocore.config import Config -from botocore.stub import Stubber from aws_lambda_powertools.utilities.batch import ( BatchProcessor, EventType, - PartialSQSProcessor, batch_processor, - sqs_batch_processor, -) -from aws_lambda_powertools.utilities.batch.exceptions import ( - BatchProcessingError, - SQSBatchProcessingError, ) +from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( DynamoDBRecord, ) @@ -40,7 +31,6 @@ SqsRecordModel, ) from aws_lambda_powertools.utilities.parser.types import Literal -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import b64_to_str, str_to_b64 @@ -139,7 +129,7 @@ def handler(record: KinesisStreamRecord): @pytest.fixture(scope="module") def dynamodb_record_handler() -> Callable: def handler(record: DynamoDBRecord): - body = record.dynamodb.new_image.get("Message").get_value + body = record.dynamodb.new_image.get("Message") if "fail" in body: raise Exception("Failed to process record.") return body @@ -152,30 +142,6 @@ def config() -> Config: return Config(region_name="us-east-1") -@pytest.fixture(scope="function") -def partial_processor(config) -> PartialSQSProcessor: - return PartialSQSProcessor(config=config) - - -@pytest.fixture(scope="function") -def partial_processor_suppressed(config) -> PartialSQSProcessor: - return PartialSQSProcessor(config=config, suppress_exception=True) - - -@pytest.fixture(scope="function") -def stubbed_partial_processor(config) -> PartialSQSProcessor: - processor = PartialSQSProcessor(config=config) - with Stubber(processor.client) as stubber: - yield stubber, processor - 
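Note: the stubbed PartialSQSProcessor fixtures removed in this hunk exercised an API that this PR deletes; the BatchProcessor, EventType and batch_processor imports kept at the top of this file cover the same partial-failure use case. A minimal sketch of that replacement pattern, for orientation only and not part of the diff:

from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor

# Failed records are reported back to Lambda through the batchItemFailures
# response contract instead of being deleted from the queue by hand.
processor = BatchProcessor(event_type=EventType.SQS)

def record_handler(record):
    # record is an SQSRecord data class; raising here marks only this message as failed
    return record.body

@batch_processor(record_handler=record_handler, processor=processor)
def lambda_handler(event, context):
    return processor.response()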
- -@pytest.fixture(scope="function") -def stubbed_partial_processor_suppressed(config) -> PartialSQSProcessor: - processor = PartialSQSProcessor(config=config, suppress_exception=True) - with Stubber(processor.client) as stubber: - yield stubber, processor - - @pytest.fixture(scope="module") def order_event_factory() -> Callable: def factory(item: Dict) -> str: @@ -184,270 +150,6 @@ def factory(item: Dict) -> str: return factory -@pytest.fixture(scope="module") -def lambda_context() -> LambdaContext: - class DummyLambdaContext: - def __init__(self): - self.function_name = "test-func" - self.memory_limit_in_mb = 128 - self.invoked_function_arn = "arn:aws:lambda:eu-west-1:809313241234:function:test-func" - self.aws_request_id = f"{uuid4()}" - - return DummyLambdaContext - - -@pytest.mark.parametrize( - "success_messages_count", - ([1, 18, 34]), -) -def test_partial_sqs_processor_context_with_failure( - success_messages_count, sqs_event_factory, record_handler, partial_processor -): - """ - Test processor with one failing record and multiple processed records - """ - fail_record = sqs_event_factory("fail") - success_records = [sqs_event_factory("success") for i in range(0, success_messages_count)] - - records = [fail_record, *success_records] - - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor.client) as stubber: - for _ in range(0, math.ceil((success_messages_count / partial_processor.max_message_batch))): - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - with partial_processor(records, record_handler) as ctx: - ctx.process() - - assert len(error.value.child_exceptions) == 1 - stubber.assert_no_pending_responses() - - -def test_partial_sqs_processor_context_with_failure_exception(sqs_event_factory, record_handler, partial_processor): - """ - Test processor with one failing record - """ - fail_record = sqs_event_factory("fail") - success_record = sqs_event_factory("success") - - records = [fail_record, success_record] - - with Stubber(partial_processor.client) as stubber: - stubber.add_client_error( - method="delete_message_batch", service_error_code="ServiceUnavailable", http_status_code=503 - ) - with pytest.raises(Exception) as error: - with partial_processor(records, record_handler) as ctx: - ctx.process() - - assert "ServiceUnavailable" in str(error.value) - stubber.assert_no_pending_responses() - - -def test_partial_sqs_processor_context_only_success(sqs_event_factory, record_handler, partial_processor): - """ - Test processor without failure - """ - first_record = sqs_event_factory("success") - second_record = sqs_event_factory("success") - - records = [first_record, second_record] - - with partial_processor(records, record_handler) as ctx: - result = ctx.process() - - assert result == [ - ("success", first_record["body"], first_record), - ("success", second_record["body"], second_record), - ] - - -def test_partial_sqs_processor_context_multiple_calls(sqs_event_factory, record_handler, partial_processor): - """ - Test processor without failure - """ - first_record = sqs_event_factory("success") - second_record = sqs_event_factory("success") - - records = [first_record, second_record] - - with partial_processor(records, record_handler) as ctx: - ctx.process() - - with partial_processor([first_record], record_handler) as ctx: - ctx.process() - - assert partial_processor.success_messages == [first_record] - - -def 
test_batch_processor_middleware_with_partial_sqs_processor(sqs_event_factory, record_handler, partial_processor): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @batch_processor(record_handler=record_handler, processor=partial_processor) - def lambda_handler(event, context): - return True - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor.client) as stubber: - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - lambda_handler(event, {}) - - assert len(error.value.child_exceptions) == 2 - stubber.assert_no_pending_responses() - - -@patch("aws_lambda_powertools.utilities.batch.sqs.PartialSQSProcessor") -def test_sqs_batch_processor_middleware( - patched_sqs_processor, sqs_event_factory, record_handler, stubbed_partial_processor -): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @sqs_batch_processor(record_handler=record_handler) - def lambda_handler(event, context): - return True - - stubber, processor = stubbed_partial_processor - patched_sqs_processor.return_value = processor - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - lambda_handler(event, {}) - - assert len(error.value.child_exceptions) == 1 - stubber.assert_no_pending_responses() - - -def test_batch_processor_middleware_with_custom_processor(capsys, sqs_event_factory, record_handler, config): - """ - Test middlewares' integration with custom batch processor - """ - - class CustomProcessor(PartialSQSProcessor): - def failure_handler(self, record, exception): - print("Oh no ! It's a failure.") - return super().failure_handler(record, exception) - - processor = CustomProcessor(config=config) - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - return True - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(processor.client) as stubber: - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - lambda_handler(event, {}) - - stubber.assert_no_pending_responses() - - assert len(error.value.child_exceptions) == 1 - assert capsys.readouterr().out == "Oh no ! 
It's a failure.\n" - - -def test_batch_processor_middleware_suppressed_exceptions( - sqs_event_factory, record_handler, partial_processor_suppressed -): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @batch_processor(record_handler=record_handler, processor=partial_processor_suppressed) - def lambda_handler(event, context): - return True - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor_suppressed.client) as stubber: - stubber.add_response("delete_message_batch", response) - result = lambda_handler(event, {}) - - stubber.assert_no_pending_responses() - assert result is True - - -def test_partial_sqs_processor_suppressed_exceptions(sqs_event_factory, record_handler, partial_processor_suppressed): - """ - Test processor without failure - """ - - first_record = sqs_event_factory("success") - second_record = sqs_event_factory("fail") - records = [first_record, second_record] - - fail_record = sqs_event_factory("fail") - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor_suppressed.client) as stubber: - stubber.add_response("delete_message_batch", response) - with partial_processor_suppressed(records, record_handler) as ctx: - ctx.process() - - assert partial_processor_suppressed.success_messages == [first_record] - - -@patch("aws_lambda_powertools.utilities.batch.sqs.PartialSQSProcessor") -def test_sqs_batch_processor_middleware_suppressed_exception( - patched_sqs_processor, sqs_event_factory, record_handler, stubbed_partial_processor_suppressed -): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @sqs_batch_processor(record_handler=record_handler) - def lambda_handler(event, context): - return True - - stubber, processor = stubbed_partial_processor_suppressed - patched_sqs_processor.return_value = processor - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - stubber.add_response("delete_message_batch", response) - result = lambda_handler(event, {}) - - stubber.assert_no_pending_responses() - assert result is True - - -def test_partial_sqs_processor_context_only_failure(sqs_event_factory, record_handler, partial_processor): - """ - Test processor with only failures - """ - first_record = sqs_event_factory("fail") - second_record = sqs_event_factory("fail") - - records = [first_record, second_record] - with pytest.raises(SQSBatchProcessingError) as error: - with partial_processor(records, record_handler) as ctx: - ctx.process() - - assert len(error.value.child_exceptions) == 2 - - def test_batch_processor_middleware_success_only(sqs_event_factory, record_handler): # GIVEN first_record = SQSRecord(sqs_event_factory("success")) @@ -937,41 +639,3 @@ def lambda_handler(event, context): # THEN raise BatchProcessingError assert "All records failed processing. 
" in str(e.value) - - -def test_batch_processor_handler_receives_lambda_context(sqs_event_factory, lambda_context: LambdaContext): - # GIVEN - def record_handler(record, lambda_context: LambdaContext = None): - return lambda_context.function_name == "test-func" - - first_record = SQSRecord(sqs_event_factory("success")) - event = {"Records": [first_record.raw_event]} - - processor = BatchProcessor(event_type=EventType.SQS) - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - return processor.response() - - # WHEN/THEN - lambda_handler(event, lambda_context()) - - -def test_batch_processor_context_manager_handler_receives_lambda_context( - sqs_event_factory, lambda_context: LambdaContext -): - # GIVEN - def record_handler(record, lambda_context: LambdaContext = None): - return lambda_context.function_name == "test-func" - - first_record = SQSRecord(sqs_event_factory("success")) - event = {"Records": [first_record.raw_event]} - - processor = BatchProcessor(event_type=EventType.SQS) - - def lambda_handler(event, context): - with processor(records=event["Records"], handler=record_handler, lambda_context=context) as batch: - batch.process() - - # WHEN/THEN - lambda_handler(event, lambda_context()) diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py index 2b8291db47b..123c2fdbcc2 100644 --- a/tests/functional/test_utilities_parameters.py +++ b/tests/functional/test_utilities_parameters.py @@ -1639,14 +1639,22 @@ def test_appconf_provider_get_configuration_json_content_type(mock_name, config) encoded_message = json.dumps(mock_body_json).encode("utf-8") mock_value = StreamingBody(BytesIO(encoded_message), len(encoded_message)) - # Stub the boto3 client stubber = stub.Stubber(provider.client) - response = {"Content": mock_value, "ConfigurationVersion": "1", "ContentType": "application/json"} - stubber.add_response("get_configuration", response) + response_start_config_session = {"InitialConfigurationToken": "initial_token"} + stubber.add_response("start_configuration_session", response_start_config_session) + + response_get_latest_config = { + "Configuration": mock_value, + "NextPollConfigurationToken": "initial_token", + "ContentType": "application/json", + } + stubber.add_response("get_latest_configuration", response_get_latest_config) stubber.activate() try: - value = provider.get(mock_name, transform="json", ClientConfigurationVersion="2") + value = provider.get( + mock_name, transform="json", ApplicationIdentifier=application, EnvironmentIdentifier=environment + ) assert value == mock_body_json stubber.assert_no_pending_responses() @@ -1659,7 +1667,7 @@ def test_appconf_provider_get_configuration_json_content_type_with_custom_client Test get_configuration.get with default values """ - client = boto3.client("appconfig", config=config) + client = boto3.client("appconfigdata", config=config) # Create a new provider environment = "dev" @@ -1670,14 +1678,22 @@ def test_appconf_provider_get_configuration_json_content_type_with_custom_client encoded_message = json.dumps(mock_body_json).encode("utf-8") mock_value = StreamingBody(BytesIO(encoded_message), len(encoded_message)) - # Stub the boto3 client stubber = stub.Stubber(provider.client) - response = {"Content": mock_value, "ConfigurationVersion": "1", "ContentType": "application/json"} - stubber.add_response("get_configuration", response) + response_start_config_session = {"InitialConfigurationToken": "initial_token"} + 
stubber.add_response("start_configuration_session", response_start_config_session) + + response_get_latest_config = { + "Configuration": mock_value, + "NextPollConfigurationToken": "initial_token", + "ContentType": "application/json", + } + stubber.add_response("get_latest_configuration", response_get_latest_config) stubber.activate() try: - value = provider.get(mock_name, transform="json", ClientConfigurationVersion="2") + value = provider.get( + mock_name, transform="json", ApplicationIdentifier=application, EnvironmentIdentifier=environment + ) assert value == mock_body_json stubber.assert_no_pending_responses() @@ -1699,10 +1715,16 @@ def test_appconf_provider_get_configuration_no_transform(mock_name, config): encoded_message = json.dumps(mock_body_json).encode("utf-8") mock_value = StreamingBody(BytesIO(encoded_message), len(encoded_message)) - # Stub the boto3 client stubber = stub.Stubber(provider.client) - response = {"Content": mock_value, "ConfigurationVersion": "1", "ContentType": "application/json"} - stubber.add_response("get_configuration", response) + response_start_config_session = {"InitialConfigurationToken": "initial_token"} + stubber.add_response("start_configuration_session", response_start_config_session) + + response_get_latest_config = { + "Configuration": mock_value, + "NextPollConfigurationToken": "initial_token", + "ContentType": "application/json", + } + stubber.add_response("get_latest_configuration", response_get_latest_config) stubber.activate() try: diff --git a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py index d9c5b91214a..a40301a44c2 100644 --- a/tests/unit/test_tracing.py +++ b/tests/unit/test_tracing.py @@ -10,6 +10,8 @@ # Maintenance: This should move to Functional tests and use Fake over mocks. +MODULE_PREFIX = "unit.test_tracing" + @pytest.fixture def dummy_response(): @@ -125,9 +127,13 @@ def greeting(name, message): # and add its response as trace metadata # and use service name as a metadata namespace assert in_subsegment_mock.in_subsegment.call_count == 1 - assert in_subsegment_mock.in_subsegment.call_args == mocker.call(name="## greeting") + assert in_subsegment_mock.in_subsegment.call_args == mocker.call( + name=f"## {MODULE_PREFIX}.test_tracer_method..greeting" + ) assert in_subsegment_mock.put_metadata.call_args == mocker.call( - key="greeting response", value=dummy_response, namespace="booking" + key=f"{MODULE_PREFIX}.test_tracer_method..greeting response", + value=dummy_response, + namespace="booking", ) @@ -253,7 +259,10 @@ def greeting(name, message): # THEN we should add the exception using method name as key plus error # and their service name as the namespace put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1] - assert put_metadata_mock_args["key"] == "greeting error" + assert ( + put_metadata_mock_args["key"] + == f"{MODULE_PREFIX}.test_tracer_method_exception_metadata..greeting error" + ) assert put_metadata_mock_args["namespace"] == "booking" @@ -305,15 +314,23 @@ async def greeting(name, message): # THEN we should add metadata for each response like we would for a sync decorated method assert in_subsegment_mock.in_subsegment.call_count == 2 - assert in_subsegment_greeting_call_args == mocker.call(name="## greeting") - assert in_subsegment_greeting2_call_args == mocker.call(name="## greeting_2") + assert in_subsegment_greeting_call_args == mocker.call( + name=f"## {MODULE_PREFIX}.test_tracer_method_nested_async..greeting" + ) + assert in_subsegment_greeting2_call_args == mocker.call( + name=f"## 
{MODULE_PREFIX}.test_tracer_method_nested_async..greeting_2" + ) assert in_subsegment_mock.put_metadata.call_count == 2 assert put_metadata_greeting2_call_args == mocker.call( - key="greeting_2 response", value=dummy_response, namespace="booking" + key=f"{MODULE_PREFIX}.test_tracer_method_nested_async..greeting_2 response", + value=dummy_response, + namespace="booking", ) assert put_metadata_greeting_call_args == mocker.call( - key="greeting response", value=dummy_response, namespace="booking" + key=f"{MODULE_PREFIX}.test_tracer_method_nested_async..greeting response", + value=dummy_response, + namespace="booking", ) @@ -355,7 +372,10 @@ async def greeting(name, message): # THEN we should add the exception using method name as key plus error # and their service name as the namespace put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1] - assert put_metadata_mock_args["key"] == "greeting error" + assert ( + put_metadata_mock_args["key"] + == f"{MODULE_PREFIX}.test_tracer_method_exception_metadata_async..greeting error" + ) assert put_metadata_mock_args["namespace"] == "booking" @@ -387,7 +407,9 @@ def handler(event, context): assert "test result" in in_subsegment_mock.put_metadata.call_args[1]["value"] assert in_subsegment_mock.in_subsegment.call_count == 2 assert handler_trace == mocker.call(name="## handler") - assert yield_function_trace == mocker.call(name="## yield_with_capture") + assert yield_function_trace == mocker.call( + name=f"## {MODULE_PREFIX}.test_tracer_yield_from_context_manager..yield_with_capture" + ) assert "test result" in result @@ -411,7 +433,10 @@ def yield_with_capture(): # THEN we should add the exception using method name as key plus error # and their service name as the namespace put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1] - assert put_metadata_mock_args["key"] == "yield_with_capture error" + assert ( + put_metadata_mock_args["key"] + == f"{MODULE_PREFIX}.test_tracer_yield_from_context_manager_exception_metadata..yield_with_capture error" # noqa E501 + ) assert isinstance(put_metadata_mock_args["value"], ValueError) assert put_metadata_mock_args["namespace"] == "booking" @@ -453,7 +478,9 @@ def handler(event, context): assert "test result" in in_subsegment_mock.put_metadata.call_args[1]["value"] assert in_subsegment_mock.in_subsegment.call_count == 2 assert handler_trace == mocker.call(name="## handler") - assert yield_function_trace == mocker.call(name="## yield_with_capture") + assert yield_function_trace == mocker.call( + name=f"## {MODULE_PREFIX}.test_tracer_yield_from_nested_context_manager..yield_with_capture" + ) assert "test result" in result @@ -483,7 +510,9 @@ def handler(event, context): assert "test result" in in_subsegment_mock.put_metadata.call_args[1]["value"] assert in_subsegment_mock.in_subsegment.call_count == 2 assert handler_trace == mocker.call(name="## handler") - assert generator_fn_trace == mocker.call(name="## generator_fn") + assert generator_fn_trace == mocker.call( + name=f"## {MODULE_PREFIX}.test_tracer_yield_from_generator..generator_fn" + ) assert "test result" in result @@ -506,7 +535,10 @@ def generator_fn(): # THEN we should add the exception using method name as key plus error # and their service name as the namespace put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1] - assert put_metadata_mock_args["key"] == "generator_fn error" + assert ( + put_metadata_mock_args["key"] + == f"{MODULE_PREFIX}.test_tracer_yield_from_generator_exception_metadata..generator_fn error" + 
) assert put_metadata_mock_args["namespace"] == "booking" assert isinstance(put_metadata_mock_args["value"], ValueError) assert str(put_metadata_mock_args["value"]) == "test" diff --git a/tests/unit/test_utilities_batch.py b/tests/unit/test_utilities_batch.py deleted file mode 100644 index 8cc4f0b0225..00000000000 --- a/tests/unit/test_utilities_batch.py +++ /dev/null @@ -1,141 +0,0 @@ -import pytest -from botocore.config import Config - -from aws_lambda_powertools.utilities.batch import PartialSQSProcessor -from aws_lambda_powertools.utilities.batch.exceptions import SQSBatchProcessingError - -# Maintenance: This will be deleted as part of legacy Batch deprecation - - -@pytest.fixture(scope="function") -def sqs_event(): - return { - "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", - "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", - "body": "", - "attributes": {}, - "messageAttributes": {}, - "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", - "eventSource": "aws:sqs", - "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue", - "awsRegion": "us-east-1", - } - - -@pytest.fixture(scope="module") -def config() -> Config: - return Config(region_name="us-east-1") - - -@pytest.fixture(scope="function") -def partial_sqs_processor(config) -> PartialSQSProcessor: - return PartialSQSProcessor(config=config) - - -def test_partial_sqs_get_queue_url_with_records(mocker, sqs_event, partial_sqs_processor): - expected_url = "https://queue.amazonaws.com/123456789012/my-queue" - - records_mock = mocker.patch.object(PartialSQSProcessor, "records", create=True, new_callable=mocker.PropertyMock) - records_mock.return_value = [sqs_event] - - result = partial_sqs_processor._get_queue_url() - assert result == expected_url - - -def test_partial_sqs_get_queue_url_without_records(partial_sqs_processor): - assert partial_sqs_processor._get_queue_url() is None - - -def test_partial_sqs_get_entries_to_clean_with_success(mocker, sqs_event, partial_sqs_processor): - expected_entries = [{"Id": sqs_event["messageId"], "ReceiptHandle": sqs_event["receiptHandle"]}] - - success_messages_mock = mocker.patch.object( - PartialSQSProcessor, "success_messages", create=True, new_callable=mocker.PropertyMock - ) - success_messages_mock.return_value = [sqs_event] - - result = partial_sqs_processor._get_entries_to_clean() - - assert result == expected_entries - - -def test_partial_sqs_get_entries_to_clean_without_success(mocker, partial_sqs_processor): - expected_entries = [] - - success_messages_mock = mocker.patch.object( - PartialSQSProcessor, "success_messages", create=True, new_callable=mocker.PropertyMock - ) - success_messages_mock.return_value = [] - - result = partial_sqs_processor._get_entries_to_clean() - - assert result == expected_entries - - -def test_partial_sqs_process_record_success(mocker, partial_sqs_processor): - expected_value = mocker.sentinel.expected_value - - success_result = mocker.sentinel.success_result - record = mocker.sentinel.record - - handler_mock = mocker.patch.object(PartialSQSProcessor, "handler", create=True, return_value=success_result) - success_handler_mock = mocker.patch.object(PartialSQSProcessor, "success_handler", return_value=expected_value) - - result = partial_sqs_processor._process_record(record) - - handler_mock.assert_called_once_with(record=record) - success_handler_mock.assert_called_once_with(record=record, result=success_result) - - assert result == expected_value - - -def test_partial_sqs_process_record_failure(mocker, partial_sqs_processor): - expected_value = 
mocker.sentinel.expected_value - - failure_result = Exception() - record = mocker.sentinel.record - - handler_mock = mocker.patch.object(PartialSQSProcessor, "handler", create=True, side_effect=failure_result) - failure_handler_mock = mocker.patch.object(PartialSQSProcessor, "failure_handler", return_value=expected_value) - - result = partial_sqs_processor._process_record(record) - - handler_mock.assert_called_once_with(record=record) - - _, failure_handler_called_with_args = failure_handler_mock.call_args - failure_handler_mock.assert_called_once() - assert (failure_handler_called_with_args["record"]) == record - assert isinstance(failure_handler_called_with_args["exception"], tuple) - assert failure_handler_called_with_args["exception"][1] == failure_result - assert result == expected_value - - -def test_partial_sqs_prepare(mocker, partial_sqs_processor): - success_messages_mock = mocker.patch.object(partial_sqs_processor, "success_messages", spec=list) - failed_messages_mock = mocker.patch.object(partial_sqs_processor, "fail_messages", spec=list) - - partial_sqs_processor._prepare() - - success_messages_mock.clear.assert_called_once() - failed_messages_mock.clear.assert_called_once() - - -def test_partial_sqs_clean(monkeypatch, mocker, partial_sqs_processor): - records = [mocker.sentinel.record] - - monkeypatch.setattr(partial_sqs_processor, "fail_messages", records) - monkeypatch.setattr(partial_sqs_processor, "success_messages", records) - - queue_url_mock = mocker.patch.object(PartialSQSProcessor, "_get_queue_url") - entries_to_clean_mock = mocker.patch.object(PartialSQSProcessor, "_get_entries_to_clean") - - queue_url_mock.return_value = mocker.sentinel.queue_url - entries_to_clean_mock.return_value = [mocker.sentinel.entries_to_clean] - - client_mock = mocker.patch.object(partial_sqs_processor, "client", autospec=True) - with pytest.raises(SQSBatchProcessingError): - partial_sqs_processor._clean() - - client_mock.delete_message_batch.assert_called_once_with( - QueueUrl=mocker.sentinel.queue_url, Entries=[mocker.sentinel.entries_to_clean] - ) diff --git a/tox.ini b/tox.ini index 286b1c10ab0..20eef002f9d 100644 --- a/tox.ini +++ b/tox.ini @@ -6,10 +6,9 @@ deps = filelock pytest-xdist pydantic - email-validator commands = python parallel_run_e2e.py ; If you ever encounter another parallel lock across interpreters ; pip install tox tox-poetry -; tox -p --parallel-live \ No newline at end of file +; tox -p --parallel-live
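# Illustrative sketch (not part of the diff): the AppConfig provider tests above now
# stub the two-call AppConfigData flow (start_configuration_session followed by
# get_latest_configuration) instead of the legacy appconfig get_configuration call.
# The sketch below shows that flow with boto3; application, environment, and
# configuration profile identifiers are placeholders, not values from the diff.
import boto3

appconfigdata = boto3.client("appconfigdata")

# 1) Open a configuration session to obtain the initial token
session = appconfigdata.start_configuration_session(
    ApplicationIdentifier="my-app",
    EnvironmentIdentifier="dev",
    ConfigurationProfileIdentifier="my-config-profile",
)

# 2) Poll with the token; the payload arrives as a StreamingBody, and the response
#    carries the token to use on the next poll
result = appconfigdata.get_latest_configuration(
    ConfigurationToken=session["InitialConfigurationToken"]
)
config_bytes = result["Configuration"].read()
next_token = result["NextPollConfigurationToken"]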
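# Illustrative sketch (not part of the diff): the test_tracing.py changes above
# expect subsegment names and metadata keys to carry the decorated function's
# module and qualified name (hence the MODULE_PREFIX constant) rather than the
# bare function name. The helper below is an assumption used only to show how such
# a fully qualified name can be derived; it is not the library's implementation.
def subsegment_name(func) -> str:
    # For a function defined inside a test, __qualname__ includes the enclosing
    # scope, e.g. "test_tracer_method.<locals>.greeting"
    return f"## {func.__module__}.{func.__qualname__}"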