Skip to content

refactor(e2e): encapsulate assets logic; add sample stack #296

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 14 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
564 changes: 270 additions & 294 deletions poetry.lock

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ coverage = {extras = ["toml"], version = "^6.2"}
pytest = "^7.0.1"
black = "^21.12b0"
flake8 = "^4.0.1"
flake8-black = "^0.2.3"
flake8-builtins = "^1.5.3"
flake8-comprehensions = "^3.7.0"
flake8-debugger = "^4.0.0"
Expand Down Expand Up @@ -66,6 +65,8 @@ pytest-benchmark = "^3.4.1"
mypy-boto3-cloudwatch = "^1.24.35"
mypy-boto3-lambda = "^1.24.0"
mypy-boto3-xray = "^1.24.0"
mypy-boto3-s3 = { version = "^1.24.0", python = ">=3.7" }
mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" }
types-requests = "^2.28.7"
typing-extensions = { version = "^4.3.0", python = ">=3.7" }

Expand Down
52 changes: 52 additions & 0 deletions tests/e2e/metrics/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import json
from pathlib import Path
from typing import Dict

import pytest
from _pytest import fixtures
from filelock import FileLock

from tests.e2e.metrics.infrastructure import MetricsStack


@pytest.fixture(autouse=True, scope="module")
def infrastructure(request: fixtures.SubRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str) -> Dict:
    """Setup and teardown logic for E2E test infrastructure.

    Deploys the Metrics stack once and shares its CloudFormation outputs with all
    pytest-xdist workers through a FileLock-guarded JSON cache, so parallel workers
    reuse a single deployment instead of deploying one stack each.

    Parameters
    ----------
    request : fixtures.SubRequest
        test fixture containing metadata about test execution; its path locates
        the Lambda handlers directory next to this conftest
    tmp_path_factory : pytest.TempPathFactory
        factory whose base temp dir parent is shared by all xdist workers
    worker_id : str
        pytest-xdist worker id; "master" when tests run without parallelization

    Yields
    ------
    Dict
        Deployed stack outputs (e.g. function names/ARNs) consumed by the tests.
        NOTE: despite the stack object being created here, what is yielded is the
        deploy() output dict, not the MetricsStack instance itself.
    """
    stack = MetricsStack(handlers_dir=Path(f"{request.fspath.dirname}/handlers"))
    try:
        if worker_id == "master":
            # no parallelization, deploy stack and let fixture be cached
            yield stack.deploy()
        else:
            # tmp dir shared by all workers
            root_tmp_dir = tmp_path_factory.getbasetemp().parent

            cache = root_tmp_dir / "cache.json"
            with FileLock(f"{cache}.lock"):
                # If cache exists, return stack outputs back
                # otherwise it's the first run by the main worker
                # deploy and return stack outputs so subsequent workers can reuse
                if cache.is_file():
                    stack_outputs = json.loads(cache.read_text())
                else:
                    stack_outputs: Dict = stack.deploy()
                    cache.write_text(json.dumps(stack_outputs))
            yield stack_outputs
    finally:
        # NOTE(review): with xdist, every worker runs this finalizer, so delete()
        # is attempted once per worker — presumably delete() tolerates repeated
        # or concurrent calls; confirm against BaseInfrastructureV2.
        stack.delete()
19 changes: 11 additions & 8 deletions tests/e2e/metrics/handlers/basic_handler.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
import os

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

METRIC_NAME = os.environ["METRIC_NAME"]

metrics = Metrics()
my_metrics = Metrics()


@metrics.log_metrics
@my_metrics.log_metrics
def lambda_handler(event, context):
metrics.add_metric(name=METRIC_NAME, unit=MetricUnit.Count, value=1)
metrics, namespace, service = event.get("metrics"), event.get("namespace"), event.get("service")

# Maintenance: create a public method to set these explicitly
my_metrics.namespace = namespace
my_metrics.service = service

for metric in metrics:
my_metrics.add_metric(**metric)

return "success"
12 changes: 12 additions & 0 deletions tests/e2e/metrics/handlers/cold_start.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
from aws_lambda_powertools import Metrics

# Module-scope instance so cold-start state is shared across warm invocations.
my_metrics = Metrics()


@my_metrics.log_metrics(capture_cold_start_metric=True)
def lambda_handler(event, context):
    """Emit only the ColdStart metric; namespace/service are taken from the test event."""
    namespace = event.get("namespace")
    service = event.get("service")

    # Maintenance: create a public method to set these explicitly
    my_metrics.namespace = namespace
    my_metrics.service = service

    return "success"
11 changes: 11 additions & 0 deletions tests/e2e/metrics/infrastructure.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from pathlib import Path

from tests.e2e.utils.infrastructure import BaseInfrastructureV2


class MetricsStack(BaseInfrastructureV2):
    """Infrastructure stack for the Metrics E2E feature.

    Thin wrapper over BaseInfrastructureV2 that pins the feature name and
    creates one Lambda function per handler found under ``handlers_dir``.
    """

    def __init__(self, handlers_dir: Path, feature_name: str = "metrics") -> None:
        # Base class expects (feature_name, handlers_dir) positionally.
        super().__init__(feature_name, handlers_dir)

    def create_resources(self):
        """Create all CDK resources for this feature — currently only Lambda functions."""
        self.create_lambda_functions()
81 changes: 54 additions & 27 deletions tests/e2e/metrics/test_metrics.py
Original file line number Diff line number Diff line change
@@ -1,40 +1,67 @@
import datetime
import uuid
import json

import boto3
import pytest
from e2e import conftest
from e2e.utils import helpers

from tests.e2e.utils import helpers

@pytest.fixture(scope="module")
def config() -> conftest.LambdaConfig:
return {
"parameters": {},
"environment_variables": {
"POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric",
"POWERTOOLS_SERVICE_NAME": "test-powertools-service",
"METRIC_NAME": f"business-metric-{str(uuid.uuid4()).replace('-','_')}",
},
}

@pytest.fixture
def basic_handler_fn(infrastructure: dict) -> str:
    """Deployed BasicHandler function name from stack outputs ("" when absent)."""
    function_name: str = infrastructure.get("BasicHandler", "")
    return function_name

def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):

@pytest.fixture
def basic_handler_fn_arn(infrastructure: dict) -> str:
    """Deployed BasicHandler function ARN from stack outputs ("" when absent)."""
    function_arn: str = infrastructure.get("BasicHandlerArn", "")
    return function_arn


@pytest.fixture
def cold_start_fn(infrastructure: dict) -> str:
    """Deployed ColdStart function name from stack outputs ("" when absent)."""
    function_name: str = infrastructure.get("ColdStart", "")
    return function_name


@pytest.fixture
def cold_start_fn_arn(infrastructure: dict) -> str:
    """Deployed ColdStart function ARN from stack outputs ("" when absent)."""
    function_arn: str = infrastructure.get("ColdStartArn", "")
    return function_arn


METRIC_NAMESPACE = "powertools-e2e-metric"


def test_basic_lambda_metric_is_visible(basic_handler_fn: str, basic_handler_fn_arn: str):
    """Three add_metric inputs of value 1 in one invocation aggregate into a single datapoint of 3."""
    # GIVEN a unique metric/service pair and three identical metric inputs
    metric_name = helpers.build_metric_name()
    service = helpers.build_service_name()
    dimensions = helpers.build_add_dimensions_input(service=service)
    add_metric_inputs = helpers.build_multiple_add_metric_input(metric_name=metric_name, value=1, quantity=3)

    # WHEN the handler is invoked with those metrics in the payload
    payload = json.dumps({"metrics": add_metric_inputs, "service": service, "namespace": METRIC_NAMESPACE})
    _, execution_time = helpers.trigger_lambda(lambda_arn=basic_handler_fn_arn, payload=payload)

    response = helpers.get_metrics(
        namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
    )

    # THEN CloudWatch reports exactly one aggregated value of 3
    metric_data = response.get("Values", [])
    assert metric_data and metric_data[0] == 3.0


def test_cold_start_metric(cold_start_fn_arn: str, cold_start_fn: str):
# GIVEN
start_date = execute_lambda.get_lambda_execution_time()
end_date = start_date + datetime.timedelta(minutes=5)
metric_name = "ColdStart"
service = helpers.build_service_name()
dimensions = helpers.build_add_dimensions_input(function_name=cold_start_fn, service=service)

# WHEN
event = json.dumps({"service": service, "namespace": METRIC_NAMESPACE})
_, execution_time = helpers.trigger_lambda(lambda_arn=cold_start_fn_arn, payload=event)

metrics = helpers.get_metrics(
start_date=start_date,
end_date=end_date,
namespace=config["environment_variables"]["POWERTOOLS_METRICS_NAMESPACE"],
metric_name=config["environment_variables"]["METRIC_NAME"],
service_name=config["environment_variables"]["POWERTOOLS_SERVICE_NAME"],
cw_client=boto3.client(service_name="cloudwatch"),
namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
)

# THEN
assert metrics.get("Timestamps") and len(metrics.get("Timestamps")) == 1
assert metrics.get("Values") and len(metrics.get("Values")) == 1
assert metrics.get("Values") and metrics.get("Values")[0] == 1
metric_data = metrics.get("Values", [])
assert metric_data and metric_data[0] == 1.0
120 changes: 120 additions & 0 deletions tests/e2e/utils/asset.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import io
import json
import zipfile
from pathlib import Path
from typing import List, Optional

import boto3
import botocore.exceptions
from mypy_boto3_s3 import S3Client

from aws_lambda_powertools import Logger
from tests.e2e.utils.models import AssetTemplateConfig, TemplateAssembly

logger = Logger(service="e2e-utils")


class Asset:
    def __init__(
        self, config: AssetTemplateConfig, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """CDK Asset logic to verify existence and resolve deeply nested configuration

        Parameters
        ----------
        config : AssetTemplateConfig
            CDK Asset configuration found in synthesized template
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional["S3Client"], optional
            S3 client instance for asset operations, by default None
        """
        self.config = config
        self.account_id = account_id
        self.region = region
        self.s3 = boto3_client or boto3.client("s3")
        # Flatten the pieces of the nested asset config we actually use.
        self.asset_path = config.source.path
        self.asset_packaging = config.source.packaging
        self.object_key = config.destinations.current_account_current_region.object_key
        self._bucket = config.destinations.current_account_current_region.bucket_name
        # Must run after _bucket/account_id/region are set.
        self.bucket_name = self._resolve_bucket_name()

    @property
    def is_zip(self):
        """Whether the asset is declared as zip packaging in the template."""
        return self.asset_packaging == "zip"

    def exists_in_s3(self, key: str) -> bool:
        """Return True when `key` already exists in the resolved asset bucket."""
        try:
            response = self.s3.head_object(Bucket=self.bucket_name, Key=key)
        except botocore.exceptions.ClientError:
            # head_object raises for missing keys (404) as well as access errors;
            # both are treated as "not present" so the caller re-uploads.
            return False
        return response is not None

    def _resolve_bucket_name(self) -> str:
        """Substitute CDK pseudo-parameters embedded in the synthesized bucket name."""
        placeholders = {"${AWS::AccountId}": self.account_id, "${AWS::Region}": self.region}
        bucket = self._bucket
        for token, value in placeholders.items():
            bucket = bucket.replace(token, value)
        return bucket


class Assets:
    def __init__(
        self, cfn_template: Path, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """CDK Assets logic to find each asset, compress, and upload

        Parameters
        ----------
        cfn_template : Path
            CloudFormation template synthesized (self.__synthesize)
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional[S3Client], optional
            S3 client instance for asset operations, by default None
        """
        self.template = cfn_template
        self.account_id = account_id
        self.region = region
        self.s3 = boto3_client or boto3.client("s3")
        self.assets = self._find_assets_from_template()
        self.assets_location = str(self.template.parent)

    def upload(self):
        """Drop-in replacement for cdk-assets package s3 upload part.
        https://www.npmjs.com/package/cdk-assets.
        We use custom solution to avoid dependencies from nodejs ecosystem.
        We follow the same design cdk-assets:
        https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md.
        """
        for asset in self.assets:
            # Only zip-packaged assets are uploaded; other packaging types are skipped.
            if not asset.is_zip:
                logger.debug(f"Asset '{asset.object_key}' is not zip. Skipping upload.")
                continue

            # Assets are content-addressed, so an existing key means identical content.
            if asset.exists_in_s3(key=asset.object_key):
                logger.debug(f"Asset '{asset.object_key}' already exists in S3. Skipping upload.")
                continue

            archive = self._compress_assets(asset)
            logger.debug("Uploading archive to S3")
            self.s3.upload_fileobj(Fileobj=archive, Bucket=asset.bucket_name, Key=asset.object_key)
            logger.debug("Successfully uploaded")

    def _find_assets_from_template(self) -> List[Asset]:
        """Parse the synthesized template and build one Asset per file entry."""
        data = json.loads(self.template.read_text())
        template = TemplateAssembly(**data)
        # Fix: propagate the injected S3 client so each Asset reuses it instead of
        # silently creating its own boto3 client (which defeated dependency injection
        # for exists_in_s3 calls).
        return [
            Asset(config=asset_config, account_id=self.account_id, region=self.region, boto3_client=self.s3)
            for asset_config in template.files.values()
        ]

    def _compress_assets(self, asset: Asset) -> io.BytesIO:
        """Zip the asset directory into an in-memory buffer, rewound and ready to upload."""
        buf = io.BytesIO()
        asset_dir = f"{self.assets_location}/{asset.asset_path}"
        # NOTE(review): iterdir() is not recursive — assumes handler asset dirs are
        # flat; nested directories would be added as empty entries. Confirm if
        # handlers ever gain subdirectories.
        asset_files = list(Path(asset_dir).iterdir())
        with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as archive:
            for asset_file in asset_files:
                logger.debug(f"Adding file '{asset_file}' to the archive.")
                archive.write(asset_file, arcname=asset_file.relative_to(asset_dir))
        buf.seek(0)
        return buf
Loading