From 8daf90c9c1f994c8827a9f5fcfe3147158d00a6c Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Sat, 8 Jul 2023 17:00:32 +0100 Subject: [PATCH 01/21] pydantic v2: initial tests --- .../quality_check_temp_pydanticv2.yml | 82 ++++++++++++++++++ aws_lambda_powertools/shared/user_agent.py | 6 +- .../utilities/parser/models/apigw.py | 83 ++++++++++--------- .../utilities/parser/models/apigwv2.py | 41 +++++---- .../utilities/parser/models/cloudwatch.py | 7 +- .../utilities/parser/models/dynamodb.py | 8 +- .../utilities/parser/models/event_bridge.py | 4 +- .../utilities/parser/models/kafka.py | 9 +- .../parser/models/kinesis_firehose.py | 4 +- .../parser/models/kinesis_firehose_sqs.py | 4 +- .../utilities/parser/models/s3.py | 28 +++---- .../parser/models/s3_object_event.py | 6 +- .../utilities/parser/models/ses.py | 10 +-- .../utilities/parser/models/sns.py | 10 +-- .../utilities/parser/models/sqs.py | 23 +++-- tests/functional/batch/sample_models.py | 4 +- tests/functional/test_utilities_batch.py | 8 +- tests/unit/parser/schemas.py | 4 +- tests/unit/parser/test_apigw.py | 2 +- tests/unit/parser/test_cloudwatch.py | 2 +- 20 files changed, 226 insertions(+), 119 deletions(-) create mode 100644 .github/workflows/quality_check_temp_pydanticv2.yml diff --git a/.github/workflows/quality_check_temp_pydanticv2.yml b/.github/workflows/quality_check_temp_pydanticv2.yml new file mode 100644 index 00000000000..4c8e03c23c7 --- /dev/null +++ b/.github/workflows/quality_check_temp_pydanticv2.yml @@ -0,0 +1,82 @@ +name: Code quality temp - Pydanticv2 + +# PROCESS +# +# 1. Install all dependencies and spin off containers for all supported Python versions +# 2. Run code formatters and linters (various checks) for code standard +# 3. Run static typing checker for potential bugs +# 4. Run entire test suite for regressions except end-to-end (unit, functional, performance) +# 5. Run static analysis (in addition to CodeQL) for common insecure code practices +# 6. Run complexity baseline to avoid error-prone bugs and keep maintenance lower +# 7. Collect and report on test coverage + +# USAGE +# +# Always triggered on new PRs, PR changes and PR merge. 
+ + +on: + pull_request: + paths: + - "aws_lambda_powertools/**" + - "tests/**" + - "pyproject.toml" + - "poetry.lock" + - "mypy.ini" + branches: + - poc/pydanticv2 + push: + paths: + - "aws_lambda_powertools/**" + - "tests/**" + - "pyproject.toml" + - "poetry.lock" + - "mypy.ini" + branches: + - poc/pydanticv2 + +permissions: + contents: read + +jobs: + quality_check: + runs-on: ubuntu-latest + strategy: + max-parallel: 4 + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10"] + env: + PYTHON: "${{ matrix.python-version }}" + permissions: + contents: read # checkout code only + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - name: Install poetry + run: pipx install poetry + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 + with: + python-version: ${{ matrix.python-version }} + cache: "poetry" + - name: Removing cfn-lint + run: poetry remove cfn-lint + - name: Replacing Pydantic v1 with v2 + run: poetry add "pydantic>=2.0" + - name: Install dependencies + run: make dev + - name: Formatting and Linting + run: make lint + - name: Static type checking + run: make mypy + - name: Test with pytest + run: make test + - name: Security baseline + run: make security-baseline + - name: Complexity baseline + run: make complexity-baseline + - name: Upload coverage to Codecov + uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # 3.1.4 + with: + file: ./coverage.xml + env_vars: PYTHON + name: aws-lambda-powertools-python-codecov diff --git a/aws_lambda_powertools/shared/user_agent.py b/aws_lambda_powertools/shared/user_agent.py index 098be7a503a..c682c24b34f 100644 --- a/aws_lambda_powertools/shared/user_agent.py +++ b/aws_lambda_powertools/shared/user_agent.py @@ -112,7 +112,7 @@ def register_feature_to_session(session, feature): def register_feature_to_botocore_session(botocore_session, feature): """ Register the given feature string to the event system of the provided botocore session - + Please notice this function is for patching botocore session and is different from previous one which is for patching boto3 session @@ -127,7 +127,7 @@ def register_feature_to_botocore_session(botocore_session, feature): ------ AttributeError If the provided session does not have an event system. 
- + Examples -------- **register data-masking user-agent to botocore session** @@ -139,7 +139,7 @@ def register_feature_to_botocore_session(botocore_session, feature): >>> session = botocore.session.Session() >>> register_feature_to_botocore_session(botocore_session=session, feature="data-masking") >>> key_provider = StrictAwsKmsMasterKeyProvider(key_ids=self.keys, botocore_session=session) - + """ try: botocore_session.register(TARGET_SDK_EVENT, _create_feature_function(feature)) diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py index 82a3a6188d2..fc7f5b61d3f 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigw.py +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -1,7 +1,7 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Dict, List, Optional, Type, Union -from pydantic import BaseModel, root_validator +from pydantic import BaseModel, root_validator, validator from pydantic.networks import IPvAnyNetwork from aws_lambda_powertools.utilities.parser.types import Literal @@ -21,74 +21,79 @@ class ApiGatewayUserCert(BaseModel): class APIGatewayEventIdentity(BaseModel): - accessKey: Optional[str] - accountId: Optional[str] - apiKey: Optional[str] - apiKeyId: Optional[str] - caller: Optional[str] - cognitoAuthenticationProvider: Optional[str] - cognitoAuthenticationType: Optional[str] - cognitoIdentityId: Optional[str] - cognitoIdentityPoolId: Optional[str] - principalOrgId: Optional[str] + accessKey: Optional[str] = None + accountId: Optional[str] = None + apiKey: Optional[str] = None + apiKeyId: Optional[str] = None + caller: Optional[str] = None + cognitoAuthenticationProvider: Optional[str] = None + cognitoAuthenticationType: Optional[str] = None + cognitoIdentityId: Optional[str] = None + cognitoIdentityPoolId: Optional[str] = None + principalOrgId: Optional[str] = None # see #1562, temp workaround until API Gateway fixes it the Test button payload # removing it will not be considered a regression in the future sourceIp: Union[IPvAnyNetwork, Literal["test-invoke-source-ip"]] - user: Optional[str] - userAgent: Optional[str] - userArn: Optional[str] - clientCert: Optional[ApiGatewayUserCert] + user: Optional[str] = None + userAgent: Optional[str] = None + userArn: Optional[str] = None + clientCert: Optional[ApiGatewayUserCert] = None class APIGatewayEventAuthorizer(BaseModel): - claims: Optional[Dict[str, Any]] - scopes: Optional[List[str]] + claims: Optional[Dict[str, Any]] = None + scopes: Optional[List[str]] = None class APIGatewayEventRequestContext(BaseModel): accountId: str apiId: str - authorizer: Optional[APIGatewayEventAuthorizer] + authorizer: Optional[APIGatewayEventAuthorizer] = None stage: str protocol: str identity: APIGatewayEventIdentity requestId: str requestTime: str requestTimeEpoch: datetime - resourceId: Optional[str] + resourceId: Optional[str] = None resourcePath: str - domainName: Optional[str] - domainPrefix: Optional[str] - extendedRequestId: Optional[str] + domainName: Optional[str] = None + domainPrefix: Optional[str] = None + extendedRequestId: Optional[str] = None httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] path: str - connectedAt: Optional[datetime] - connectionId: Optional[str] - eventType: Optional[Literal["CONNECT", "MESSAGE", "DISCONNECT"]] - messageDirection: Optional[str] - messageId: Optional[str] - routeKey: Optional[str] - operationName: Optional[str] - - 
@root_validator(allow_reuse=True) + connectedAt: Optional[datetime] = None + connectionId: Optional[str] = None + eventType: Optional[Literal["CONNECT", "MESSAGE", "DISCONNECT"]] = None + messageDirection: Optional[str] = None + messageId: Optional[str] = None + routeKey: Optional[str] = None + operationName: Optional[str] = None + + @root_validator(allow_reuse=True, skip_on_failure=True) def check_message_id(cls, values): message_id, event_type = values.get("messageId"), values.get("eventType") if message_id is not None and event_type != "MESSAGE": - raise TypeError("messageId is available only when the `eventType` is `MESSAGE`") + raise ValueError("messageId is available only when the `eventType` is `MESSAGE`") return values + @validator("requestTimeEpoch", pre=True) + def normalize_timestamp(cls, value): + date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) + return date_utc + class APIGatewayProxyEventModel(BaseModel): - version: Optional[str] + version: Optional[str] = None resource: str path: str httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] headers: Dict[str, str] multiValueHeaders: Dict[str, List[str]] - queryStringParameters: Optional[Dict[str, str]] - multiValueQueryStringParameters: Optional[Dict[str, List[str]]] + queryStringParameters: Optional[Dict[str, str]] = None + multiValueQueryStringParameters: Optional[Dict[str, List[str]]] = None requestContext: APIGatewayEventRequestContext - pathParameters: Optional[Dict[str, str]] - stageVariables: Optional[Dict[str, str]] + pathParameters: Optional[Dict[str, str]] = None + stageVariables: Optional[Dict[str, str]] = None isBase64Encoded: bool - body: Optional[Union[str, Type[BaseModel]]] + body: Optional[Union[str, Type[BaseModel]]] = None diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index cb1f830bb47..83fd465ade6 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -1,7 +1,7 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Dict, List, Optional, Type, Union -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, validator from pydantic.networks import IPvAnyNetwork from aws_lambda_powertools.utilities.parser.types import Literal @@ -14,13 +14,13 @@ class RequestContextV2AuthorizerIamCognito(BaseModel): class RequestContextV2AuthorizerIam(BaseModel): - accessKey: Optional[str] - accountId: Optional[str] - callerId: Optional[str] - principalOrgId: Optional[str] - userArn: Optional[str] - userId: Optional[str] - cognitoIdentity: Optional[RequestContextV2AuthorizerIamCognito] + accessKey: Optional[str] = None + accountId: Optional[str] = None + callerId: Optional[str] = None + principalOrgId: Optional[str] = None + userArn: Optional[str] = None + userId: Optional[str] = None + cognitoIdentity: Optional[RequestContextV2AuthorizerIamCognito] = None class RequestContextV2AuthorizerJwt(BaseModel): @@ -29,9 +29,9 @@ class RequestContextV2AuthorizerJwt(BaseModel): class RequestContextV2Authorizer(BaseModel): - jwt: Optional[RequestContextV2AuthorizerJwt] - iam: Optional[RequestContextV2AuthorizerIam] - lambda_value: Optional[Dict[str, Any]] = Field(None, alias="lambda") + jwt: Optional[RequestContextV2AuthorizerJwt] = None + iam: Optional[RequestContextV2AuthorizerIam] = None + lambda_value: Union[Dict[str, Any], None] = Field(None, alias="lambda") class 
RequestContextV2Http(BaseModel): @@ -45,7 +45,7 @@ class RequestContextV2Http(BaseModel): class RequestContextV2(BaseModel): accountId: str apiId: str - authorizer: Optional[RequestContextV2Authorizer] + authorizer: Optional[RequestContextV2Authorizer] = None domainName: str domainPrefix: str requestId: str @@ -55,17 +55,22 @@ class RequestContextV2(BaseModel): timeEpoch: datetime http: RequestContextV2Http + @validator("timeEpoch", pre=True) + def normalize_timestamp(cls, value): + date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) + return date_utc + class APIGatewayProxyEventV2Model(BaseModel): version: str routeKey: str rawPath: str rawQueryString: str - cookies: Optional[List[str]] + cookies: Optional[List[str]] = None headers: Dict[str, str] - queryStringParameters: Optional[Dict[str, str]] - pathParameters: Optional[Dict[str, str]] - stageVariables: Optional[Dict[str, str]] + queryStringParameters: Optional[Dict[str, str]] = None + pathParameters: Optional[Dict[str, str]] = None + stageVariables: Optional[Dict[str, str]] = None requestContext: RequestContextV2 - body: Optional[Union[str, Type[BaseModel]]] + body: Optional[Union[str, Type[BaseModel]]] = None isBase64Encoded: bool diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py index 71e560276a4..39ae20a0a2f 100644 --- a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py +++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py @@ -2,7 +2,7 @@ import json import logging import zlib -from datetime import datetime +from datetime import datetime, timezone from typing import List, Type, Union from pydantic import BaseModel, Field, validator @@ -15,6 +15,11 @@ class CloudWatchLogsLogEvent(BaseModel): timestamp: datetime message: Union[str, Type[BaseModel]] + @validator("timestamp", pre=True) + def coerc_timestamp(cls, value): + date_utc = datetime.fromtimestamp(value / 1000, tz=timezone.utc) + return date_utc + class CloudWatchLogsDecode(BaseModel): messageType: str diff --git a/aws_lambda_powertools/utilities/parser/models/dynamodb.py b/aws_lambda_powertools/utilities/parser/models/dynamodb.py index 7a12bf195d3..679952a7181 100644 --- a/aws_lambda_powertools/utilities/parser/models/dynamodb.py +++ b/aws_lambda_powertools/utilities/parser/models/dynamodb.py @@ -7,10 +7,10 @@ class DynamoDBStreamChangedRecordModel(BaseModel): - ApproximateCreationDateTime: Optional[date] + ApproximateCreationDateTime: Optional[date] = None Keys: Dict[str, Dict[str, Any]] - NewImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] - OldImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] + NewImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] = None + OldImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] = None SequenceNumber: str SizeBytes: int StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"] @@ -40,7 +40,7 @@ class DynamoDBStreamRecordModel(BaseModel): awsRegion: str eventSourceARN: str dynamodb: DynamoDBStreamChangedRecordModel - userIdentity: Optional[UserIdentity] + userIdentity: Optional[UserIdentity] = None class DynamoDBStreamModel(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/event_bridge.py b/aws_lambda_powertools/utilities/parser/models/event_bridge.py index eab6c54d12d..68edc546e2f 100644 --- a/aws_lambda_powertools/utilities/parser/models/event_bridge.py +++ 
b/aws_lambda_powertools/utilities/parser/models/event_bridge.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Optional +from typing import List, Union from pydantic import BaseModel, Field @@ -16,4 +16,4 @@ class EventBridgeModel(BaseModel): resources: List[str] detail_type: str = Field(None, alias="detail-type") detail: RawDictOrModel - replay_name: Optional[str] = Field(None, alias="replay-name") + replay_name: Union[str, None] = Field(None, alias="replay-name") diff --git a/aws_lambda_powertools/utilities/parser/models/kafka.py b/aws_lambda_powertools/utilities/parser/models/kafka.py index d4c36bf70f1..dba911bf4d1 100644 --- a/aws_lambda_powertools/utilities/parser/models/kafka.py +++ b/aws_lambda_powertools/utilities/parser/models/kafka.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import Dict, List, Type, Union from pydantic import BaseModel, validator @@ -20,7 +20,7 @@ class KafkaRecordModel(BaseModel): headers: List[Dict[str, bytes]] # validators - _decode_key = validator("key", allow_reuse=True)(base64_decode) + _decode_key = validator("key", allow_reuse=True)(base64_decode) # type: ignore[type-var] @validator("value", pre=True, allow_reuse=True) def data_base64_decode(cls, value): @@ -34,6 +34,11 @@ def decode_headers_list(cls, value): header[key] = bytes(values) return value + @validator("timestamp", pre=True) + def normalize_timestamp(cls, value): + date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) + return date_utc + class KafkaBaseEventModel(BaseModel): bootstrapServers: List[str] diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis_firehose.py b/aws_lambda_powertools/utilities/parser/models/kinesis_firehose.py index c59d8c680e5..7edc0ba4ebf 100644 --- a/aws_lambda_powertools/utilities/parser/models/kinesis_firehose.py +++ b/aws_lambda_powertools/utilities/parser/models/kinesis_firehose.py @@ -17,7 +17,7 @@ class KinesisFirehoseRecord(BaseModel): data: Union[bytes, Type[BaseModel]] # base64 encoded str is parsed into bytes recordId: str approximateArrivalTimestamp: PositiveInt - kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata] + kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata] = None @validator("data", pre=True, allow_reuse=True) def data_base64_decode(cls, value): @@ -28,5 +28,5 @@ class KinesisFirehoseModel(BaseModel): invocationId: str deliveryStreamArn: str region: str - sourceKinesisStreamArn: Optional[str] + sourceKinesisStreamArn: Optional[str] = None records: List[KinesisFirehoseRecord] diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py b/aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py index b649828853b..58a23e5006c 100644 --- a/aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py @@ -13,7 +13,7 @@ class KinesisFirehoseSqsRecord(BaseModel): data: SqsRecordModel recordId: str approximateArrivalTimestamp: PositiveInt - kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata] + kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata] = None @validator("data", pre=True, allow_reuse=True) def data_base64_decode(cls, value): @@ -25,5 +25,5 @@ class KinesisFirehoseSqsModel(BaseModel): invocationId: str deliveryStreamArn: str region: str - sourceKinesisStreamArn: Optional[str] + sourceKinesisStreamArn: Optional[str] = None records: List[KinesisFirehoseSqsRecord] diff --git 
a/aws_lambda_powertools/utilities/parser/models/s3.py b/aws_lambda_powertools/utilities/parser/models/s3.py index 01573b6d751..29a90c1a63e 100644 --- a/aws_lambda_powertools/utilities/parser/models/s3.py +++ b/aws_lambda_powertools/utilities/parser/models/s3.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Optional +from typing import List, Optional, Union from pydantic import BaseModel, root_validator from pydantic.fields import Field @@ -45,10 +45,10 @@ class S3Bucket(BaseModel): class S3Object(BaseModel): key: str - size: Optional[NonNegativeFloat] - eTag: Optional[str] + size: Optional[NonNegativeFloat] = None + eTag: Optional[str] = None sequencer: str - versionId: Optional[str] + versionId: Optional[str] = None class S3Message(BaseModel): @@ -60,10 +60,10 @@ class S3Message(BaseModel): class S3EventNotificationObjectModel(BaseModel): key: str - size: Optional[NonNegativeFloat] + size: Optional[NonNegativeFloat] = None etag: str version_id: str = Field(None, alias="version-id") - sequencer: Optional[str] + sequencer: Optional[str] = None class S3EventNotificationEventBridgeBucketModel(BaseModel): @@ -77,12 +77,12 @@ class S3EventNotificationEventBridgeDetailModel(BaseModel): request_id: str = Field(None, alias="request-id") requester: str source_ip_address: str = Field(None, alias="source-ip-address") - reason: Optional[str] - deletion_type: Optional[str] = Field(None, alias="deletion-type") - restore_expiry_time: Optional[str] = Field(None, alias="restore-expiry-time") - source_storage_class: Optional[str] = Field(None, alias="source-storage-class") - destination_storage_class: Optional[str] = Field(None, alias="destination-storage-class") - destination_access_tier: Optional[str] = Field(None, alias="destination-access-tier") + reason: Optional[str] = None + deletion_type: Union[str, None] = Field(None, alias="deletion-type") + restore_expiry_time: Union[str, None] = Field(None, alias="restore-expiry-time") + source_storage_class: Union[str, None] = Field(None, alias="source-storage-class") + destination_storage_class: Union[str, None] = Field(None, alias="destination-storage-class") + destination_access_tier: Union[str, None] = Field(None, alias="destination-access-tier") class S3EventNotificationEventBridgeModel(EventBridgeModel): @@ -99,9 +99,9 @@ class S3RecordModel(BaseModel): requestParameters: S3RequestParameters responseElements: S3ResponseElements s3: S3Message - glacierEventData: Optional[S3EventRecordGlacierEventData] + glacierEventData: Optional[S3EventRecordGlacierEventData] = None - @root_validator + @root_validator(allow_reuse=True, skip_on_failure=True) def validate_s3_object(cls, values): event_name = values.get("eventName") s3_object = values.get("s3").object diff --git a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py index ef59e9c2f98..7ef98fe4bb2 100644 --- a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py +++ b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py @@ -22,7 +22,7 @@ class S3ObjectUserRequest(BaseModel): class S3ObjectSessionIssuer(BaseModel): type: str # noqa: A003, VNE003 - userName: Optional[str] + userName: Optional[str] = None principalId: str arn: str accountId: str @@ -42,10 +42,10 @@ class S3ObjectUserIdentity(BaseModel): type: str # noqa003 accountId: str accessKeyId: str - userName: Optional[str] + userName: Optional[str] = None principalId: str arn: str - sessionContext: 
Optional[S3ObjectSessionContext] + sessionContext: Optional[S3ObjectSessionContext] = None class S3ObjectLambdaEvent(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/ses.py b/aws_lambda_powertools/utilities/parser/models/ses.py index 77b23431099..20a2e7b558e 100644 --- a/aws_lambda_powertools/utilities/parser/models/ses.py +++ b/aws_lambda_powertools/utilities/parser/models/ses.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Optional +from typing import List, Optional, Union from pydantic import BaseModel, Field from pydantic.types import PositiveInt @@ -36,10 +36,10 @@ class SesMailHeaders(BaseModel): class SesMailCommonHeaders(BaseModel): header_from: List[str] = Field(None, alias="from") to: List[str] - cc: Optional[List[str]] - bcc: Optional[List[str]] - sender: Optional[List[str]] - reply_to: Optional[List[str]] = Field(None, alias="reply-to") + cc: Optional[List[str]] = None + bcc: Optional[List[str]] = None + sender: Optional[List[str]] = None + reply_to: Union[List[str], None] = Field(None, alias="reply-to") returnPath: str messageId: str date: str diff --git a/aws_lambda_powertools/utilities/parser/models/sns.py b/aws_lambda_powertools/utilities/parser/models/sns.py index 6cd2fcec006..8f388f2974c 100644 --- a/aws_lambda_powertools/utilities/parser/models/sns.py +++ b/aws_lambda_powertools/utilities/parser/models/sns.py @@ -14,17 +14,17 @@ class SnsMsgAttributeModel(BaseModel): class SnsNotificationModel(BaseModel): - Subject: Optional[str] + Subject: Optional[str] = None TopicArn: str UnsubscribeUrl: HttpUrl Type: Literal["Notification"] - MessageAttributes: Optional[Dict[str, SnsMsgAttributeModel]] + MessageAttributes: Optional[Dict[str, SnsMsgAttributeModel]] = None Message: Union[str, TypingType[BaseModel]] MessageId: str - SigningCertUrl: Optional[HttpUrl] # NOTE: FIFO opt-in removes attribute - Signature: Optional[str] # NOTE: FIFO opt-in removes attribute + SigningCertUrl: Optional[HttpUrl] = None # NOTE: FIFO opt-in removes attribute + Signature: Optional[str] = None # NOTE: FIFO opt-in removes attribute Timestamp: datetime - SignatureVersion: Optional[str] # NOTE: FIFO opt-in removes attribute + SignatureVersion: Optional[str] = None # NOTE: FIFO opt-in removes attribute @root_validator(pre=True, allow_reuse=True) def check_sqs_protocol(cls, values): diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py index 168707530f3..5ee0da16f19 100644 --- a/aws_lambda_powertools/utilities/parser/models/sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -1,7 +1,7 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import Dict, List, Optional, Sequence, Type, Union -from pydantic import BaseModel +from pydantic import BaseModel, validator from aws_lambda_powertools.utilities.parser.types import Literal @@ -9,17 +9,22 @@ class SqsAttributesModel(BaseModel): ApproximateReceiveCount: str ApproximateFirstReceiveTimestamp: datetime - MessageDeduplicationId: Optional[str] - MessageGroupId: Optional[str] + MessageDeduplicationId: Optional[str] = None + MessageGroupId: Optional[str] = None SenderId: str SentTimestamp: datetime - SequenceNumber: Optional[str] - AWSTraceHeader: Optional[str] + SequenceNumber: Optional[str] = None + AWSTraceHeader: Optional[str] = None + + @validator("ApproximateFirstReceiveTimestamp", "SentTimestamp", pre=True) + def normalize_timestamp(cls, value): + date_utc = datetime.fromtimestamp(int(value) 
/ 1000, tz=timezone.utc) + return date_utc class SqsMsgAttributeModel(BaseModel): - stringValue: Optional[str] - binaryValue: Optional[str] + stringValue: Optional[str] = None + binaryValue: Optional[str] = None stringListValues: List[str] = [] binaryListValues: List[str] = [] dataType: str @@ -56,7 +61,7 @@ class SqsRecordModel(BaseModel): attributes: SqsAttributesModel messageAttributes: Dict[str, SqsMsgAttributeModel] md5OfBody: str - md5OfMessageAttributes: Optional[str] + md5OfMessageAttributes: Optional[str] = None eventSource: Literal["aws:sqs"] eventSourceARN: str awsRegion: str diff --git a/tests/functional/batch/sample_models.py b/tests/functional/batch/sample_models.py index 556ff0ebf8a..996a2a408fe 100644 --- a/tests/functional/batch/sample_models.py +++ b/tests/functional/batch/sample_models.py @@ -39,8 +39,8 @@ def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): - NewImage: Optional[OrderDynamoDB] - OldImage: Optional[OrderDynamoDB] + NewImage: Optional[OrderDynamoDB] = None + OldImage: Optional[OrderDynamoDB] = None class OrderDynamoDBRecord(DynamoDBStreamRecordModel): diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py index 1831ef973d9..e146d65744f 100644 --- a/tests/functional/test_utilities_batch.py +++ b/tests/functional/test_utilities_batch.py @@ -501,8 +501,8 @@ def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): return json.loads(value["S"]) class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): - NewImage: Optional[OrderDynamoDB] - OldImage: Optional[OrderDynamoDB] + NewImage: Optional[OrderDynamoDB] = None + OldImage: Optional[OrderDynamoDB] = None class OrderDynamoDBRecord(DynamoDBStreamRecordModel): dynamodb: OrderDynamoDBChangeRecord @@ -545,8 +545,8 @@ def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): return json.loads(value["S"]) class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): - NewImage: Optional[OrderDynamoDB] - OldImage: Optional[OrderDynamoDB] + NewImage: Optional[OrderDynamoDB] = None + OldImage: Optional[OrderDynamoDB] = None class OrderDynamoDBRecord(DynamoDBStreamRecordModel): dynamodb: OrderDynamoDBChangeRecord diff --git a/tests/unit/parser/schemas.py b/tests/unit/parser/schemas.py index 1da0213ff45..fd2f29697dc 100644 --- a/tests/unit/parser/schemas.py +++ b/tests/unit/parser/schemas.py @@ -22,8 +22,8 @@ class MyDynamoBusiness(BaseModel): class MyDynamoScheme(DynamoDBStreamChangedRecordModel): - NewImage: Optional[MyDynamoBusiness] - OldImage: Optional[MyDynamoBusiness] + NewImage: Optional[MyDynamoBusiness] = None + OldImage: Optional[MyDynamoBusiness] = None class MyDynamoDBStreamRecordModel(DynamoDBStreamRecordModel): diff --git a/tests/unit/parser/test_apigw.py b/tests/unit/parser/test_apigw.py index a65d181cc54..8a90c4e978c 100644 --- a/tests/unit/parser/test_apigw.py +++ b/tests/unit/parser/test_apigw.py @@ -138,7 +138,7 @@ def test_apigw_event_with_invalid_websocket_request(): errors = err.value.errors() assert len(errors) == 1 expected_msg = "messageId is available only when the `eventType` is `MESSAGE`" - assert errors[0]["msg"] == expected_msg + assert expected_msg in errors[0]["msg"] assert expected_msg in str(err.value) diff --git a/tests/unit/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py index bc8bf0776f9..a451fd889e7 100644 --- a/tests/unit/parser/test_cloudwatch.py +++ b/tests/unit/parser/test_cloudwatch.py @@ -86,7 +86,7 @@ 
def test_handle_invalid_cloudwatch_trigger_event_no_envelope(): with pytest.raises(ValidationError) as context: CloudWatchLogsModel(**raw_event) - assert context.value.errors()[0]["msg"] == "unable to decompress data" + assert "unable to decompress data" in context.value.errors()[0]["msg"] def test_handle_invalid_event_with_envelope(): From de53dd6ac2bb64b64a9f4eacd36a37db07e81897 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Sat, 8 Jul 2023 17:09:21 +0100 Subject: [PATCH 02/21] pydantic v2: comment --- aws_lambda_powertools/utilities/parser/models/apigwv2.py | 1 + 1 file changed, 1 insertion(+) diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index 83fd465ade6..d0cb4c7a161 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -55,6 +55,7 @@ class RequestContextV2(BaseModel): timeEpoch: datetime http: RequestContextV2Http + # validator to normalize timestamp @validator("timeEpoch", pre=True) def normalize_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) From 6f8c52b308c7b7dc2e189290fefa2be49a4f5467 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Sat, 8 Jul 2023 17:12:11 +0100 Subject: [PATCH 03/21] pydantic v2: new workflow --- .../quality_check_temp_pydanticv1.yml | 78 +++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 .github/workflows/quality_check_temp_pydanticv1.yml diff --git a/.github/workflows/quality_check_temp_pydanticv1.yml b/.github/workflows/quality_check_temp_pydanticv1.yml new file mode 100644 index 00000000000..6c2eeca36ce --- /dev/null +++ b/.github/workflows/quality_check_temp_pydanticv1.yml @@ -0,0 +1,78 @@ +name: Code quality temp - Pydanticv1 + +# PROCESS +# +# 1. Install all dependencies and spin off containers for all supported Python versions +# 2. Run code formatters and linters (various checks) for code standard +# 3. Run static typing checker for potential bugs +# 4. Run entire test suite for regressions except end-to-end (unit, functional, performance) +# 5. Run static analysis (in addition to CodeQL) for common insecure code practices +# 6. Run complexity baseline to avoid error-prone bugs and keep maintenance lower +# 7. Collect and report on test coverage + +# USAGE +# +# Always triggered on new PRs, PR changes and PR merge. 
+
+
+on:
+  pull_request:
+    paths:
+      - "aws_lambda_powertools/**"
+      - "tests/**"
+      - "pyproject.toml"
+      - "poetry.lock"
+      - "mypy.ini"
+    branches:
+      - poc/pydanticv2
+  push:
+    paths:
+      - "aws_lambda_powertools/**"
+      - "tests/**"
+      - "pyproject.toml"
+      - "poetry.lock"
+      - "mypy.ini"
+    branches:
+      - poc/pydanticv2
+
+permissions:
+  contents: read
+
+jobs:
+  quality_check:
+    runs-on: ubuntu-latest
+    strategy:
+      max-parallel: 4
+      matrix:
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
+    env:
+      PYTHON: "${{ matrix.python-version }}"
+    permissions:
+      contents: read  # checkout code only
+    steps:
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3
+      - name: Install poetry
+        run: pipx install poetry
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1
+        with:
+          python-version: ${{ matrix.python-version }}
+          cache: "poetry"
+      - name: Install dependencies
+        run: make dev
+      - name: Formatting and Linting
+        run: make lint
+      - name: Static type checking
+        run: make mypy
+      - name: Test with pytest
+        run: make test
+      - name: Security baseline
+        run: make security-baseline
+      - name: Complexity baseline
+        run: make complexity-baseline
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # 3.1.4
+        with:
+          file: ./coverage.xml
+          env_vars: PYTHON
+          name: aws-lambda-powertools-python-codecov

From 82b166c080ddd330047b697dd07cafcf437bf4dd Mon Sep 17 00:00:00 2001
From: Leandro Damascena
Date: Sat, 8 Jul 2023 17:16:19 +0100
Subject: [PATCH 04/21] pydantic v2: comment

---
 aws_lambda_powertools/utilities/parser/models/apigw.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py
index fc7f5b61d3f..74735ef4919 100644
--- a/aws_lambda_powertools/utilities/parser/models/apigw.py
+++ b/aws_lambda_powertools/utilities/parser/models/apigw.py
@@ -77,6 +77,7 @@ def check_message_id(cls, values):
             raise ValueError("messageId is available only when the `eventType` is `MESSAGE`")
         return values
 
+    # validator to normalize requestTimeEpoch
     @validator("requestTimeEpoch", pre=True)
     def normalize_timestamp(cls, value):
         date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc)

From 729579813b7f3a5e518fab1b230a02e22f58ac91 Mon Sep 17 00:00:00 2001
From: Leandro Damascena
Date: Sat, 8 Jul 2023 19:51:48 +0100
Subject: [PATCH 05/21] pydantic v2: mypy fix

---
 aws_lambda_powertools/utilities/parser/models/kafka.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/aws_lambda_powertools/utilities/parser/models/kafka.py b/aws_lambda_powertools/utilities/parser/models/kafka.py
index dba911bf4d1..0f3573008a1 100644
--- a/aws_lambda_powertools/utilities/parser/models/kafka.py
+++ b/aws_lambda_powertools/utilities/parser/models/kafka.py
@@ -20,7 +20,7 @@ class KafkaRecordModel(BaseModel):
     headers: List[Dict[str, bytes]]
 
     # validators
-    _decode_key = validator("key", allow_reuse=True)(base64_decode)  # type: ignore[type-var]
+    _decode_key = validator("key", allow_reuse=True)(base64_decode)  # type: ignore[type-var, unused-ignore]
 
     @validator("value", pre=True, allow_reuse=True)
     def data_base64_decode(cls, value):
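The widened ignore above is presumably needed because mypy only raises the `type-var` error against one Pydantic major's type hints; on the other major, warn-unused-ignores would flag the bare ignore itself, and listing `unused-ignore` covers both cases. A rough sketch of the reused-validator pattern the annotation sits on, where the `Record` model and the inline `base64_decode` helper are illustrative stand-ins rather than the library code:

import base64

from pydantic import BaseModel, validator


def base64_decode(value: bytes) -> bytes:
    # stand-in for the shared base64_decode helper that KafkaRecordModel imports
    return base64.b64decode(value)


class Record(BaseModel):  # illustrative stand-in for KafkaRecordModel
    key: bytes

    # reusing a plain function as a field validator is what trips mypy's
    # type-var check and forces the `type: ignore` shown in the diff above
    _decode_key = validator("key", allow_reuse=True)(base64_decode)


print(Record(key="aGVsbG8=").key)  # b'hello'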
From 201d877f0102f8a7e8a54ae31db711a2a94656ff Mon Sep 17 00:00:00 2001
From: Leandro Damascena
Date: Sun, 9 Jul 2023 00:55:10 +0100
Subject: [PATCH 06/21] pydantic v2: fix v2 compatibility

---
 aws_lambda_powertools/utilities/batch/base.py | 16 ++++++++++++----
 tests/functional/batch/sample_models.py       |  5 ++++-
 2 files changed, 16 insertions(+), 5 deletions(-)

diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py
index 4ab2c1a2b0b..5bcd8b655d1 100644
--- a/aws_lambda_powertools/utilities/batch/base.py
+++ b/aws_lambda_powertools/utilities/batch/base.py
@@ -500,8 +500,12 @@ def _process_record(self, record: dict) -> Union[SuccessResponse, FailureRespons
         # we need to handle that exception differently.
         # We check for a public attr in validation errors coming from Pydantic exceptions (subclass or not)
         # and we compare if it's coming from the same model that triggered the exception in the first place
-        model = getattr(exc, "model", None)
-        if model == self.model:
+
+        # Pydantic v1 raises a ValidationError with ErrorWrappers and stores the model instance in a class variable.
+        # Pydantic v2 simplifies this by adding a title variable to store the model name directly.
+        model = getattr(exc, "model", None) or getattr(exc, "title", None)
+
+        if model == self.model or model == getattr(self.model, "__name__", None):
             return self._register_model_validation_error_record(record)
 
         return self.failure_handler(record=data, exception=sys.exc_info())
@@ -644,8 +648,12 @@ async def _async_process_record(self, record: dict) -> Union[SuccessResponse, Fa
         # we need to handle that exception differently.
         # We check for a public attr in validation errors coming from Pydantic exceptions (subclass or not)
         # and we compare if it's coming from the same model that triggered the exception in the first place
-        model = getattr(exc, "model", None)
-        if model == self.model:
+
+        # Pydantic v1 raises a ValidationError with ErrorWrappers and stores the model instance in a class variable.
+        # Pydantic v2 simplifies this by adding a title variable to store the model name directly.
+        model = getattr(exc, "model", None) or getattr(exc, "title", None)
+
+        if model == self.model or model == getattr(self.model, "__name__", None):
             return self._register_model_validation_error_record(record)
 
         return self.failure_handler(record=data, exception=sys.exc_info())
diff --git a/tests/functional/batch/sample_models.py b/tests/functional/batch/sample_models.py
index 996a2a408fe..72029e154d5 100644
--- a/tests/functional/batch/sample_models.py
+++ b/tests/functional/batch/sample_models.py
@@ -35,7 +35,10 @@ class OrderDynamoDB(BaseModel):
     # so Pydantic can auto-initialize nested Order model
     @validator("Message", pre=True)
     def transform_message_to_dict(cls, value: Dict[Literal["S"], str]):
-        return json.loads(value["S"])
+        try:
+            return json.loads(value["S"])
+        except TypeError:
+            raise ValueError
 
 
 class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel):
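The dual `getattr` lookup added above leans on a real difference between the two majors: Pydantic v1's ValidationError carries the failing model class in `exc.model`, while Pydantic v2 only exposes the model's name through its `exc.title` property. A minimal sketch of that behavior, with an illustrative `Order` model rather than library code:

from pydantic import BaseModel, ValidationError


class Order(BaseModel):  # illustrative model, not library code
    id: int


try:
    Order.parse_obj({"id": "not-a-number"})
except ValidationError as exc:
    # Pydantic v1: exc.model is the Order class, exc.title does not exist.
    # Pydantic v2: exc.title is the string "Order", exc.model does not exist.
    model = getattr(exc, "model", None) or getattr(exc, "title", None)
    print(model == Order or model == Order.__name__)  # True on either major

This is also why the batch processor now compares against both the model and its `__name__` before deciding the error came from the configured model.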
From bf6b31aa757ceaffe179e50b89a170f9da59f6ca Mon Sep 17 00:00:00 2001
From: Leandro Damascena
Date: Mon, 10 Jul 2023 13:14:44 +0100
Subject: [PATCH 07/21] pydantic v2: fix last things

---
 aws_lambda_powertools/shared/functions.py     | 30 +++++++++++++
 aws_lambda_powertools/utilities/batch/base.py | 11 ++++-
 .../utilities/parser/models/__init__.py       | 42 ++++++++++---------
 3 files changed, 62 insertions(+), 21 deletions(-)

diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py
index 82ea7dad8d8..b56c04702e9 100644
--- a/aws_lambda_powertools/shared/functions.py
+++ b/aws_lambda_powertools/shared/functions.py
@@ -173,3 +173,33 @@ def extract_event_from_common_models(data: Any) -> Dict | Any:
     # Is it a Dataclass? If not return as is
     return dataclasses.asdict(data) if dataclasses.is_dataclass(data) else data
+
+
+def disable_pydantic_v2_warning():
+    """
+    Disables the Pydantic version 2 warning by filtering out the related warnings.
+
+    This function checks the version of Pydantic currently installed and if it is version 2,
+    it filters out the PydanticDeprecationWarning and PydanticDeprecatedSince20 warnings
+    to suppress them.
+
+    Note: This function assumes that Pydantic is already imported.
+
+    Usage:
+        disable_pydantic_v2_warning()
+    """
+    try:
+        from pydantic import __version__
+
+        version = __version__.split(".")
+
+        if int(version[0]) == 2:
+            import warnings
+
+            from pydantic import PydanticDeprecatedSince20, PydanticDeprecationWarning
+
+            warnings.filterwarnings("ignore", category=PydanticDeprecationWarning)
+            warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20)
+
+    except ImportError:
+        pass
diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py
index 5bcd8b655d1..f089855cb1f 100644
--- a/aws_lambda_powertools/utilities/batch/base.py
+++ b/aws_lambda_powertools/utilities/batch/base.py
@@ -348,6 +348,11 @@ def _to_batch_type(self, record: dict, event_type: EventType) -> EventSourceData
     def _to_batch_type(self, record: dict, event_type: EventType, model: Optional["BatchTypeModels"] = None):
         if model is not None:
+            # If a model is provided, we assume Pydantic is installed and we need to disable v2 warnings
+            from aws_lambda_powertools.shared.functions import disable_pydantic_v2_warning
+
+            disable_pydantic_v2_warning()
+
             return model.parse_obj(record)
         return self._DATA_CLASS_MAPPING[event_type](record)
 
@@ -504,8 +509,9 @@ def _process_record(self, record: dict) -> Union[SuccessResponse, FailureRespons
         # Pydantic v1 raises a ValidationError with ErrorWrappers and stores the model instance in a class variable.
         # Pydantic v2 simplifies this by adding a title variable to store the model name directly.
         model = getattr(exc, "model", None) or getattr(exc, "title", None)
+        model_name = getattr(self.model, "__name__", None)
 
-        if model == self.model or model == getattr(self.model, "__name__", None):
+        if model == self.model or model == model_name:
             return self._register_model_validation_error_record(record)
 
         return self.failure_handler(record=data, exception=sys.exc_info())
@@ -652,8 +658,9 @@ async def _async_process_record(self, record: dict) -> Union[SuccessResponse, Fa
         # Pydantic v1 raises a ValidationError with ErrorWrappers and stores the model instance in a class variable.
         # Pydantic v2 simplifies this by adding a title variable to store the model name directly.
model = getattr(exc, "model", None) or getattr(exc, "title", None) + model_name = getattr(self.model, "__name__", None) - if model == self.model or model == getattr(self.model, "__name__", None): + if model == self.model or model == model_name: return self._register_model_validation_error_record(record) return self.failure_handler(record=data, exception=sys.exc_info()) diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index ddc76dc7819..a01429511b7 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -1,11 +1,15 @@ -from .alb import AlbModel, AlbRequestContext, AlbRequestContextData -from .apigw import ( +from aws_lambda_powertools.shared.functions import disable_pydantic_v2_warning + +disable_pydantic_v2_warning() + +from .alb import AlbModel, AlbRequestContext, AlbRequestContextData # noqa: E402 +from .apigw import ( # noqa: E402 APIGatewayEventAuthorizer, APIGatewayEventIdentity, APIGatewayEventRequestContext, APIGatewayProxyEventModel, ) -from .apigwv2 import ( +from .apigwv2 import ( # noqa: E402 APIGatewayProxyEventV2Model, RequestContextV2, RequestContextV2Authorizer, @@ -14,54 +18,54 @@ RequestContextV2AuthorizerJwt, RequestContextV2Http, ) -from .cloudformation_custom_resource import ( +from .cloudformation_custom_resource import ( # noqa: E402 CloudFormationCustomResourceBaseModel, CloudFormationCustomResourceCreateModel, CloudFormationCustomResourceDeleteModel, CloudFormationCustomResourceUpdateModel, ) -from .cloudwatch import ( +from .cloudwatch import ( # noqa: E402 CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel, ) -from .dynamodb import ( +from .dynamodb import ( # noqa: E402 DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel, ) -from .event_bridge import EventBridgeModel -from .kafka import ( +from .event_bridge import EventBridgeModel # noqa: E402 +from .kafka import ( # noqa: E402 KafkaBaseEventModel, KafkaMskEventModel, KafkaRecordModel, KafkaSelfManagedEventModel, ) -from .kinesis import ( +from .kinesis import ( # noqa: E402 KinesisDataStreamModel, KinesisDataStreamRecord, KinesisDataStreamRecordPayload, ) -from .kinesis_firehose import ( +from .kinesis_firehose import ( # noqa: E402 KinesisFirehoseModel, KinesisFirehoseRecord, KinesisFirehoseRecordMetadata, ) -from .kinesis_firehose_sqs import KinesisFirehoseSqsModel, KinesisFirehoseSqsRecord -from .lambda_function_url import LambdaFunctionUrlModel -from .s3 import ( +from .kinesis_firehose_sqs import KinesisFirehoseSqsModel, KinesisFirehoseSqsRecord # noqa: E402 +from .lambda_function_url import LambdaFunctionUrlModel # noqa: E402 +from .s3 import ( # noqa: E402 S3EventNotificationEventBridgeDetailModel, S3EventNotificationEventBridgeModel, S3EventNotificationObjectModel, S3Model, S3RecordModel, ) -from .s3_event_notification import ( +from .s3_event_notification import ( # noqa: E402 S3SqsEventNotificationModel, S3SqsEventNotificationRecordModel, ) -from .s3_object_event import ( +from .s3_object_event import ( # noqa: E402 S3ObjectConfiguration, S3ObjectContext, S3ObjectLambdaEvent, @@ -71,7 +75,7 @@ S3ObjectUserIdentity, S3ObjectUserRequest, ) -from .ses import ( +from .ses import ( # noqa: E402 SesMail, SesMailCommonHeaders, SesMailHeaders, @@ -82,9 +86,9 @@ SesReceiptVerdict, SesRecordModel, ) -from .sns import SnsModel, SnsNotificationModel, SnsRecordModel -from .sqs import 
SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel -from .vpc_lattice import VpcLatticeModel +from .sns import SnsModel, SnsNotificationModel, SnsRecordModel # noqa: E402 +from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel # noqa: E402 +from .vpc_lattice import VpcLatticeModel # noqa: E402 __all__ = [ "APIGatewayProxyEventV2Model", From ef98e888f4d173aed763a3493500c4bc4f2386a6 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 10 Jul 2023 14:45:59 +0100 Subject: [PATCH 08/21] pydantic v2: improving comments --- aws_lambda_powertools/utilities/parser/models/apigw.py | 4 +++- aws_lambda_powertools/utilities/parser/models/apigwv2.py | 4 +++- aws_lambda_powertools/utilities/parser/models/cloudwatch.py | 3 +++ aws_lambda_powertools/utilities/parser/models/kafka.py | 5 ++++- aws_lambda_powertools/utilities/parser/models/sqs.py | 3 +++ 5 files changed, 16 insertions(+), 3 deletions(-) diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py index 74735ef4919..762c24e10ef 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigw.py +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -77,7 +77,9 @@ def check_message_id(cls, values): raise ValueError("messageId is available only when the `eventType` is `MESSAGE`") return values - # validator to normalize requestTimeEpoch + # Validator to normalize the requestTimeEpoch field + # Converts the provided timestamp value to a UTC datetime object + # See: https://github.com/pydantic/pydantic/issues/6518 @validator("requestTimeEpoch", pre=True) def normalize_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index d0cb4c7a161..d7a03143a32 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -55,7 +55,9 @@ class RequestContextV2(BaseModel): timeEpoch: datetime http: RequestContextV2Http - # validator to normalize timestamp + # Validator to normalize the timeEpoch field + # Converts the provided timestamp value to a UTC datetime object + # See: https://github.com/pydantic/pydantic/issues/6518 @validator("timeEpoch", pre=True) def normalize_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py index 39ae20a0a2f..2cbcf067605 100644 --- a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py +++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py @@ -15,6 +15,9 @@ class CloudWatchLogsLogEvent(BaseModel): timestamp: datetime message: Union[str, Type[BaseModel]] + # Validator to normalize the timestamp field + # Converts the provided timestamp value to a UTC datetime object + # See: https://github.com/pydantic/pydantic/issues/6518 @validator("timestamp", pre=True) def coerc_timestamp(cls, value): date_utc = datetime.fromtimestamp(value / 1000, tz=timezone.utc) diff --git a/aws_lambda_powertools/utilities/parser/models/kafka.py b/aws_lambda_powertools/utilities/parser/models/kafka.py index 0f3573008a1..46098ee28ab 100644 --- a/aws_lambda_powertools/utilities/parser/models/kafka.py +++ b/aws_lambda_powertools/utilities/parser/models/kafka.py @@ -19,7 +19,7 @@ class 
KafkaRecordModel(BaseModel): value: Union[str, Type[BaseModel]] headers: List[Dict[str, bytes]] - # validators + # Added type ignore to keep compatibility between Pydantic v1 and v2 _decode_key = validator("key", allow_reuse=True)(base64_decode) # type: ignore[type-var, unused-ignore] @validator("value", pre=True, allow_reuse=True) @@ -34,6 +34,9 @@ def decode_headers_list(cls, value): header[key] = bytes(values) return value + # Validator to normalize the timestamp field + # Converts the provided timestamp value to a UTC datetime object + # See: https://github.com/pydantic/pydantic/issues/6518 @validator("timestamp", pre=True) def normalize_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py index 5ee0da16f19..b299866ee97 100644 --- a/aws_lambda_powertools/utilities/parser/models/sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -16,6 +16,9 @@ class SqsAttributesModel(BaseModel): SequenceNumber: Optional[str] = None AWSTraceHeader: Optional[str] = None + # Validator to normalize the ApproximateFirstReceiveTimestamp and SentTimestamp fields + # Converts the provided timestamp value to a UTC datetime object + # See: https://github.com/pydantic/pydantic/issues/6518 @validator("ApproximateFirstReceiveTimestamp", "SentTimestamp", pre=True) def normalize_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) From f39ea895a3d936d1e05dc71711a6a34631ab021c Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 10 Jul 2023 16:44:52 +0100 Subject: [PATCH 09/21] pydantic v2: addressing Heitor's feedback --- aws_lambda_powertools/shared/functions.py | 30 -------------- aws_lambda_powertools/utilities/batch/base.py | 2 +- .../utilities/parser/compat.py | 28 +++++++++++++ .../utilities/parser/models/__init__.py | 40 +++++++++---------- .../utilities/parser/models/apigw.py | 2 +- .../utilities/parser/models/apigwv2.py | 2 +- .../utilities/parser/models/cloudwatch.py | 2 +- .../utilities/parser/models/kafka.py | 2 +- .../utilities/parser/models/sqs.py | 2 +- ruff.toml | 1 + tests/unit/parser/test_apigw.py | 2 + tests/unit/parser/test_cloudwatch.py | 2 + 12 files changed, 59 insertions(+), 56 deletions(-) create mode 100644 aws_lambda_powertools/utilities/parser/compat.py diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index b56c04702e9..82ea7dad8d8 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -173,33 +173,3 @@ def extract_event_from_common_models(data: Any) -> Dict | Any: # Is it a Dataclass? If not return as is return dataclasses.asdict(data) if dataclasses.is_dataclass(data) else data - - -def disable_pydantic_v2_warning(): - """ - Disables the Pydantic version 2 warning by filtering out the related warnings. - - This function checks the version of Pydantic currently installed and if it is version 2, - it filters out the PydanticDeprecationWarning and PydanticDeprecatedSince20 warnings - to suppress them. - - Note: This function assumes that Pydantic is already imported. 
- - Usage: - disable_pydantic_v2_warning() - """ - try: - from pydantic import __version__ - - version = __version__.split(".") - - if int(version[0]) == 2: - import warnings - - from pydantic import PydanticDeprecatedSince20, PydanticDeprecationWarning - - warnings.filterwarnings("ignore", category=PydanticDeprecationWarning) - warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20) - - except ImportError: - pass diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py index f089855cb1f..b00b31449f2 100644 --- a/aws_lambda_powertools/utilities/batch/base.py +++ b/aws_lambda_powertools/utilities/batch/base.py @@ -349,7 +349,7 @@ def _to_batch_type(self, record: dict, event_type: EventType) -> EventSourceData def _to_batch_type(self, record: dict, event_type: EventType, model: Optional["BatchTypeModels"] = None): if model is not None: # If a model is provided, we assume Pydantic is installed and we need to disable v2 warnings - from aws_lambda_powertools.shared.functions import disable_pydantic_v2_warning + from aws_lambda_powertools.utilities.parser.compat import disable_pydantic_v2_warning disable_pydantic_v2_warning() diff --git a/aws_lambda_powertools/utilities/parser/compat.py b/aws_lambda_powertools/utilities/parser/compat.py new file mode 100644 index 00000000000..a791d4042d0 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/compat.py @@ -0,0 +1,28 @@ +def disable_pydantic_v2_warning(): + """ + Disables the Pydantic version 2 warning by filtering out the related warnings. + + This function checks the version of Pydantic currently installed and if it is version 2, + it filters out the PydanticDeprecationWarning and PydanticDeprecatedSince20 warnings + to suppress them. + + Note: This function assumes that Pydantic is already imported. 
+ + Usage: + disable_pydantic_v2_warning() + """ + try: + from pydantic import __version__ + + version = __version__.split(".") + + if int(version[0]) == 2: + import warnings + + from pydantic import PydanticDeprecatedSince20, PydanticDeprecationWarning + + warnings.filterwarnings("ignore", category=PydanticDeprecationWarning) + warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20) + + except ImportError: + pass diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index a01429511b7..f1b2d30d9cf 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -1,15 +1,15 @@ -from aws_lambda_powertools.shared.functions import disable_pydantic_v2_warning +from aws_lambda_powertools.utilities.parser.compat import disable_pydantic_v2_warning disable_pydantic_v2_warning() -from .alb import AlbModel, AlbRequestContext, AlbRequestContextData # noqa: E402 -from .apigw import ( # noqa: E402 +from .alb import AlbModel, AlbRequestContext, AlbRequestContextData +from .apigw import ( APIGatewayEventAuthorizer, APIGatewayEventIdentity, APIGatewayEventRequestContext, APIGatewayProxyEventModel, ) -from .apigwv2 import ( # noqa: E402 +from .apigwv2 import ( APIGatewayProxyEventV2Model, RequestContextV2, RequestContextV2Authorizer, @@ -18,54 +18,54 @@ RequestContextV2AuthorizerJwt, RequestContextV2Http, ) -from .cloudformation_custom_resource import ( # noqa: E402 +from .cloudformation_custom_resource import ( CloudFormationCustomResourceBaseModel, CloudFormationCustomResourceCreateModel, CloudFormationCustomResourceDeleteModel, CloudFormationCustomResourceUpdateModel, ) -from .cloudwatch import ( # noqa: E402 +from .cloudwatch import ( CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel, ) -from .dynamodb import ( # noqa: E402 +from .dynamodb import ( DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel, ) -from .event_bridge import EventBridgeModel # noqa: E402 -from .kafka import ( # noqa: E402 +from .event_bridge import EventBridgeModel +from .kafka import ( KafkaBaseEventModel, KafkaMskEventModel, KafkaRecordModel, KafkaSelfManagedEventModel, ) -from .kinesis import ( # noqa: E402 +from .kinesis import ( KinesisDataStreamModel, KinesisDataStreamRecord, KinesisDataStreamRecordPayload, ) -from .kinesis_firehose import ( # noqa: E402 +from .kinesis_firehose import ( KinesisFirehoseModel, KinesisFirehoseRecord, KinesisFirehoseRecordMetadata, ) -from .kinesis_firehose_sqs import KinesisFirehoseSqsModel, KinesisFirehoseSqsRecord # noqa: E402 -from .lambda_function_url import LambdaFunctionUrlModel # noqa: E402 -from .s3 import ( # noqa: E402 +from .kinesis_firehose_sqs import KinesisFirehoseSqsModel, KinesisFirehoseSqsRecord +from .lambda_function_url import LambdaFunctionUrlModel +from .s3 import ( S3EventNotificationEventBridgeDetailModel, S3EventNotificationEventBridgeModel, S3EventNotificationObjectModel, S3Model, S3RecordModel, ) -from .s3_event_notification import ( # noqa: E402 +from .s3_event_notification import ( S3SqsEventNotificationModel, S3SqsEventNotificationRecordModel, ) -from .s3_object_event import ( # noqa: E402 +from .s3_object_event import ( S3ObjectConfiguration, S3ObjectContext, S3ObjectLambdaEvent, @@ -75,7 +75,7 @@ S3ObjectUserIdentity, S3ObjectUserRequest, ) -from .ses import ( # noqa: E402 +from .ses import ( SesMail, SesMailCommonHeaders, SesMailHeaders, @@ 
-86,9 +86,9 @@ SesReceiptVerdict, SesRecordModel, ) -from .sns import SnsModel, SnsNotificationModel, SnsRecordModel # noqa: E402 -from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel # noqa: E402 -from .vpc_lattice import VpcLatticeModel # noqa: E402 +from .sns import SnsModel, SnsNotificationModel, SnsRecordModel +from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel +from .vpc_lattice import VpcLatticeModel __all__ = [ "APIGatewayProxyEventV2Model", diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py index 762c24e10ef..4246b45efbc 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigw.py +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -81,7 +81,7 @@ def check_message_id(cls, values): # Converts the provided timestamp value to a UTC datetime object # See: https://github.com/pydantic/pydantic/issues/6518 @validator("requestTimeEpoch", pre=True) - def normalize_timestamp(cls, value): + def coerce_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) return date_utc diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index d7a03143a32..529fb51035e 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -59,7 +59,7 @@ class RequestContextV2(BaseModel): # Converts the provided timestamp value to a UTC datetime object # See: https://github.com/pydantic/pydantic/issues/6518 @validator("timeEpoch", pre=True) - def normalize_timestamp(cls, value): + def coerce_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) return date_utc diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py index 2cbcf067605..fb6e24ab34e 100644 --- a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py +++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py @@ -19,7 +19,7 @@ class CloudWatchLogsLogEvent(BaseModel): # Converts the provided timestamp value to a UTC datetime object # See: https://github.com/pydantic/pydantic/issues/6518 @validator("timestamp", pre=True) - def coerc_timestamp(cls, value): + def coerce_timestamp(cls, value): date_utc = datetime.fromtimestamp(value / 1000, tz=timezone.utc) return date_utc diff --git a/aws_lambda_powertools/utilities/parser/models/kafka.py b/aws_lambda_powertools/utilities/parser/models/kafka.py index 46098ee28ab..2bfe30ff5d7 100644 --- a/aws_lambda_powertools/utilities/parser/models/kafka.py +++ b/aws_lambda_powertools/utilities/parser/models/kafka.py @@ -38,7 +38,7 @@ def decode_headers_list(cls, value): # Converts the provided timestamp value to a UTC datetime object # See: https://github.com/pydantic/pydantic/issues/6518 @validator("timestamp", pre=True) - def normalize_timestamp(cls, value): + def coerce_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) return date_utc diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py index b299866ee97..e1e025d34f1 100644 --- a/aws_lambda_powertools/utilities/parser/models/sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -20,7 +20,7 @@ class SqsAttributesModel(BaseModel): # Converts the provided timestamp value to a UTC 
datetime object # See: https://github.com/pydantic/pydantic/issues/6518 @validator("ApproximateFirstReceiveTimestamp", "SentTimestamp", pre=True) - def normalize_timestamp(cls, value): + def coerce_timestamp(cls, value): date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) return date_utc diff --git a/ruff.toml b/ruff.toml index f3a50abc720..424040ede1f 100644 --- a/ruff.toml +++ b/ruff.toml @@ -68,3 +68,4 @@ split-on-trailing-comma = true "tests/e2e/utils/data_builder/__init__.py" = ["F401"] "tests/e2e/utils/data_fetcher/__init__.py" = ["F401"] "aws_lambda_powertools/utilities/data_classes/s3_event.py" = ["A003"] +"aws_lambda_powertools/utilities/parser/models/__init__.py" = ["E402"] diff --git a/tests/unit/parser/test_apigw.py b/tests/unit/parser/test_apigw.py index 8a90c4e978c..b2ed294ff7a 100644 --- a/tests/unit/parser/test_apigw.py +++ b/tests/unit/parser/test_apigw.py @@ -138,6 +138,8 @@ def test_apigw_event_with_invalid_websocket_request(): errors = err.value.errors() assert len(errors) == 1 expected_msg = "messageId is available only when the `eventType` is `MESSAGE`" + # Pydantic v2 adds "Value error," to the error string. + # So to maintain compatibility with v1 and v2, we've changed the way we test this. assert expected_msg in errors[0]["msg"] assert expected_msg in str(err.value) diff --git a/tests/unit/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py index a451fd889e7..48d296c40ef 100644 --- a/tests/unit/parser/test_cloudwatch.py +++ b/tests/unit/parser/test_cloudwatch.py @@ -86,6 +86,8 @@ def test_handle_invalid_cloudwatch_trigger_event_no_envelope(): with pytest.raises(ValidationError) as context: CloudWatchLogsModel(**raw_event) + # Pydantic v2 adds "Value error," to the error string. + # So to maintain compatibility with v1 and v2, we've changed the way we test this. 
assert "unable to decompress data" in context.value.errors()[0]["msg"] From 15fab061cca6f053dec67660576af2e694cdf3b6 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 10 Jul 2023 17:21:18 +0100 Subject: [PATCH 10/21] pydantic v2: creating pydantic v2 specific test --- .../quality_check_temp_pydanticv2.yml | 2 +- tests/functional/parser/test_parser.py | 28 +++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/.github/workflows/quality_check_temp_pydanticv2.yml b/.github/workflows/quality_check_temp_pydanticv2.yml index 4c8e03c23c7..e8ff1e7d074 100644 --- a/.github/workflows/quality_check_temp_pydanticv2.yml +++ b/.github/workflows/quality_check_temp_pydanticv2.yml @@ -58,7 +58,7 @@ jobs: with: python-version: ${{ matrix.python-version }} cache: "poetry" - - name: Removing cfn-lint + - name: Removing dev dependencies locked to Pydantic v1 run: poetry remove cfn-lint - name: Replacing Pydantic v1 with v2 run: poetry add "pydantic>=2.0" diff --git a/tests/functional/parser/test_parser.py b/tests/functional/parser/test_parser.py index c439134071c..2c7ebf53da3 100644 --- a/tests/functional/parser/test_parser.py +++ b/tests/functional/parser/test_parser.py @@ -1,6 +1,7 @@ import json from typing import Dict, Union +import pydantic import pytest from aws_lambda_powertools.utilities.parser import ( @@ -53,6 +54,33 @@ def handle_no_envelope(event: Dict, _: LambdaContext): handle_no_envelope(dummy_event["payload"], LambdaContext()) +def test_pydanticv2_validation(): + from pydantic import __version__ + + version = __version__.split(".") + + # GIVEN pydantic v2 version + if int(version[0]) == 2: + + class FakeModel(pydantic.BaseModel): + region: str + event_name: str + version: int + + # WHEN using the validator for v2 + @pydantic.field_validator("version", mode="before") + def validate_field(cls, value): + return int(value) + + event_raw = {"region": "us-east-1", "event_name": "aws-powertools", "version": "10"} + event_parsed = FakeModel(**event_raw) + + # THEN parse the event as expected + assert event_parsed.region == event_raw["region"] + assert event_parsed.event_name == event_raw["event_name"] + assert event_parsed.version == int(event_raw["version"]) + + @pytest.mark.parametrize("invalid_schema", [None, str, bool(), [], (), object]) def test_parser_with_invalid_schema_type(dummy_event, invalid_schema): @event_parser(model=invalid_schema) From 6f30f08184f069774c366def1e4b1cb80bc46397 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 11 Jul 2023 11:36:31 +0100 Subject: [PATCH 11/21] pydantic v2: using fixture to clean the code --- tests/functional/parser/conftest.py | 9 ++++++ tests/functional/parser/test_parser.py | 42 +++++++++++--------------- 2 files changed, 27 insertions(+), 24 deletions(-) diff --git a/tests/functional/parser/conftest.py b/tests/functional/parser/conftest.py index 34199a322b2..41347bc5fa9 100644 --- a/tests/functional/parser/conftest.py +++ b/tests/functional/parser/conftest.py @@ -6,6 +6,15 @@ from aws_lambda_powertools.utilities.parser import BaseEnvelope +@pytest.fixture +def pydanticv2_only(): + from pydantic import __version__ + + version = __version__.split(".") + if version[0] != "2": + pytest.skip("pydanticv2 test only") + + @pytest.fixture def dummy_event(): return {"payload": {"message": "hello world"}} diff --git a/tests/functional/parser/test_parser.py b/tests/functional/parser/test_parser.py index 2c7ebf53da3..1f948655917 100644 --- a/tests/functional/parser/test_parser.py +++ 
b/tests/functional/parser/test_parser.py @@ -54,31 +54,25 @@ def handle_no_envelope(event: Dict, _: LambdaContext): handle_no_envelope(dummy_event["payload"], LambdaContext()) +@pytest.mark.usefixtures("pydanticv2_only") def test_pydanticv2_validation(): - from pydantic import __version__ - - version = __version__.split(".") - - # GIVEN pydantic v2 version - if int(version[0]) == 2: - - class FakeModel(pydantic.BaseModel): - region: str - event_name: str - version: int - - # WHEN using the validator for v2 - @pydantic.field_validator("version", mode="before") - def validate_field(cls, value): - return int(value) - - event_raw = {"region": "us-east-1", "event_name": "aws-powertools", "version": "10"} - event_parsed = FakeModel(**event_raw) - - # THEN parse the event as expected - assert event_parsed.region == event_raw["region"] - assert event_parsed.event_name == event_raw["event_name"] - assert event_parsed.version == int(event_raw["version"]) + class FakeModel(pydantic.BaseModel): + region: str + event_name: str + version: int + + # WHEN using the validator for v2 + @pydantic.field_validator("version", mode="before") + def validate_field(cls, value): + return int(value) + + event_raw = {"region": "us-east-1", "event_name": "aws-powertools", "version": "10"} + event_parsed = FakeModel(**event_raw) + + # THEN parse the event as expected + assert event_parsed.region == event_raw["region"] + assert event_parsed.event_name == event_raw["event_name"] + assert event_parsed.version == int(event_raw["version"]) @pytest.mark.parametrize("invalid_schema", [None, str, bool(), [], (), object]) From 3d5c9b2399e83a747a063e05ccd1ca612f4db1fa Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 12 Jul 2023 10:33:19 +0100 Subject: [PATCH 12/21] pydanticv2: reverting Optional fields --- .../utilities/parser/models/apigwv2.py | 2 +- .../utilities/parser/models/event_bridge.py | 4 ++-- aws_lambda_powertools/utilities/parser/models/s3.py | 12 ++++++------ aws_lambda_powertools/utilities/parser/models/ses.py | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index 529fb51035e..d08ac86fda3 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -31,7 +31,7 @@ class RequestContextV2AuthorizerJwt(BaseModel): class RequestContextV2Authorizer(BaseModel): jwt: Optional[RequestContextV2AuthorizerJwt] = None iam: Optional[RequestContextV2AuthorizerIam] = None - lambda_value: Union[Dict[str, Any], None] = Field(None, alias="lambda") + lambda_value: Optional[Dict[str, Any]] = Field(None, alias="lambda") class RequestContextV2Http(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/event_bridge.py b/aws_lambda_powertools/utilities/parser/models/event_bridge.py index 68edc546e2f..eab6c54d12d 100644 --- a/aws_lambda_powertools/utilities/parser/models/event_bridge.py +++ b/aws_lambda_powertools/utilities/parser/models/event_bridge.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Union +from typing import List, Optional from pydantic import BaseModel, Field @@ -16,4 +16,4 @@ class EventBridgeModel(BaseModel): resources: List[str] detail_type: str = Field(None, alias="detail-type") detail: RawDictOrModel - replay_name: Union[str, None] = Field(None, alias="replay-name") + replay_name: Optional[str] = Field(None, alias="replay-name") diff --git 
a/aws_lambda_powertools/utilities/parser/models/s3.py b/aws_lambda_powertools/utilities/parser/models/s3.py index 29a90c1a63e..db6c41d30f3 100644 --- a/aws_lambda_powertools/utilities/parser/models/s3.py +++ b/aws_lambda_powertools/utilities/parser/models/s3.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Optional, Union +from typing import List, Optional from pydantic import BaseModel, root_validator from pydantic.fields import Field @@ -78,11 +78,11 @@ class S3EventNotificationEventBridgeDetailModel(BaseModel): requester: str source_ip_address: str = Field(None, alias="source-ip-address") reason: Optional[str] = None - deletion_type: Union[str, None] = Field(None, alias="deletion-type") - restore_expiry_time: Union[str, None] = Field(None, alias="restore-expiry-time") - source_storage_class: Union[str, None] = Field(None, alias="source-storage-class") - destination_storage_class: Union[str, None] = Field(None, alias="destination-storage-class") - destination_access_tier: Union[str, None] = Field(None, alias="destination-access-tier") + deletion_type: Optional[str] = Field(None, alias="deletion-type") + restore_expiry_time: Optional[str] = Field(None, alias="restore-expiry-time") + source_storage_class: Optional[str] = Field(None, alias="source-storage-class") + destination_storage_class: Optional[str] = Field(None, alias="destination-storage-class") + destination_access_tier: Optional[str] = Field(None, alias="destination-access-tier") class S3EventNotificationEventBridgeModel(EventBridgeModel): diff --git a/aws_lambda_powertools/utilities/parser/models/ses.py b/aws_lambda_powertools/utilities/parser/models/ses.py index 20a2e7b558e..2e9e93f368e 100644 --- a/aws_lambda_powertools/utilities/parser/models/ses.py +++ b/aws_lambda_powertools/utilities/parser/models/ses.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Optional, Union +from typing import List, Optional from pydantic import BaseModel, Field from pydantic.types import PositiveInt @@ -39,7 +39,7 @@ class SesMailCommonHeaders(BaseModel): cc: Optional[List[str]] = None bcc: Optional[List[str]] = None sender: Optional[List[str]] = None - reply_to: Union[List[str], None] = Field(None, alias="reply-to") + reply_to: Optional[List[str]] = Field(None, alias="reply-to") returnPath: str messageId: str date: str From ce15df0e88089e411632f9f8864745a76766e645 Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: Mon, 17 Jul 2023 10:37:20 +0200 Subject: [PATCH 13/21] Removing the validators. 
Pydantic bug was fixed Signed-off-by: Cavalcante Damascena --- .github/workflows/quality_check_temp_pydanticv2.yml | 4 ++-- .../utilities/parser/models/apigw.py | 12 ++---------- .../utilities/parser/models/apigwv2.py | 12 ++---------- .../utilities/parser/models/cloudwatch.py | 10 +--------- .../utilities/parser/models/kafka.py | 10 +--------- aws_lambda_powertools/utilities/parser/models/sqs.py | 12 ++---------- 6 files changed, 10 insertions(+), 50 deletions(-) diff --git a/.github/workflows/quality_check_temp_pydanticv2.yml b/.github/workflows/quality_check_temp_pydanticv2.yml index e8ff1e7d074..3b6cbfde302 100644 --- a/.github/workflows/quality_check_temp_pydanticv2.yml +++ b/.github/workflows/quality_check_temp_pydanticv2.yml @@ -60,8 +60,8 @@ jobs: cache: "poetry" - name: Removing dev dependencies locked to Pydantic v1 run: poetry remove cfn-lint - - name: Replacing Pydantic v1 with v2 - run: poetry add "pydantic>=2.0" + - name: Replacing Pydantic v1 with v2 > 2.0.3 + run: poetry add "pydantic=^2.0.3" - name: Install dependencies run: make dev - name: Formatting and Linting diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py index 4246b45efbc..c17b094d0c0 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigw.py +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -1,7 +1,7 @@ -from datetime import datetime, timezone +from datetime import datetime from typing import Any, Dict, List, Optional, Type, Union -from pydantic import BaseModel, root_validator, validator +from pydantic import BaseModel, root_validator from pydantic.networks import IPvAnyNetwork from aws_lambda_powertools.utilities.parser.types import Literal @@ -77,14 +77,6 @@ def check_message_id(cls, values): raise ValueError("messageId is available only when the `eventType` is `MESSAGE`") return values - # Validator to normalize the requestTimeEpoch field - # Converts the provided timestamp value to a UTC datetime object - # See: https://github.com/pydantic/pydantic/issues/6518 - @validator("requestTimeEpoch", pre=True) - def coerce_timestamp(cls, value): - date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) - return date_utc - class APIGatewayProxyEventModel(BaseModel): version: Optional[str] = None diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index d08ac86fda3..3be793dd951 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -1,7 +1,7 @@ -from datetime import datetime, timezone +from datetime import datetime from typing import Any, Dict, List, Optional, Type, Union -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, Field from pydantic.networks import IPvAnyNetwork from aws_lambda_powertools.utilities.parser.types import Literal @@ -55,14 +55,6 @@ class RequestContextV2(BaseModel): timeEpoch: datetime http: RequestContextV2Http - # Validator to normalize the timeEpoch field - # Converts the provided timestamp value to a UTC datetime object - # See: https://github.com/pydantic/pydantic/issues/6518 - @validator("timeEpoch", pre=True) - def coerce_timestamp(cls, value): - date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) - return date_utc - class APIGatewayProxyEventV2Model(BaseModel): version: str diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py 
b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py index fb6e24ab34e..71e560276a4 100644 --- a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py +++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py @@ -2,7 +2,7 @@ import json import logging import zlib -from datetime import datetime, timezone +from datetime import datetime from typing import List, Type, Union from pydantic import BaseModel, Field, validator @@ -15,14 +15,6 @@ class CloudWatchLogsLogEvent(BaseModel): timestamp: datetime message: Union[str, Type[BaseModel]] - # Validator to normalize the timestamp field - # Converts the provided timestamp value to a UTC datetime object - # See: https://github.com/pydantic/pydantic/issues/6518 - @validator("timestamp", pre=True) - def coerce_timestamp(cls, value): - date_utc = datetime.fromtimestamp(value / 1000, tz=timezone.utc) - return date_utc - class CloudWatchLogsDecode(BaseModel): messageType: str diff --git a/aws_lambda_powertools/utilities/parser/models/kafka.py b/aws_lambda_powertools/utilities/parser/models/kafka.py index 2bfe30ff5d7..1d9d8114e65 100644 --- a/aws_lambda_powertools/utilities/parser/models/kafka.py +++ b/aws_lambda_powertools/utilities/parser/models/kafka.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import datetime from typing import Dict, List, Type, Union from pydantic import BaseModel, validator @@ -34,14 +34,6 @@ def decode_headers_list(cls, value): header[key] = bytes(values) return value - # Validator to normalize the timestamp field - # Converts the provided timestamp value to a UTC datetime object - # See: https://github.com/pydantic/pydantic/issues/6518 - @validator("timestamp", pre=True) - def coerce_timestamp(cls, value): - date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) - return date_utc - class KafkaBaseEventModel(BaseModel): bootstrapServers: List[str] diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py index e1e025d34f1..63ea4b76e0e 100644 --- a/aws_lambda_powertools/utilities/parser/models/sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -1,7 +1,7 @@ -from datetime import datetime, timezone +from datetime import datetime from typing import Dict, List, Optional, Sequence, Type, Union -from pydantic import BaseModel, validator +from pydantic import BaseModel from aws_lambda_powertools.utilities.parser.types import Literal @@ -16,14 +16,6 @@ class SqsAttributesModel(BaseModel): SequenceNumber: Optional[str] = None AWSTraceHeader: Optional[str] = None - # Validator to normalize the ApproximateFirstReceiveTimestamp and SentTimestamp fields - # Converts the provided timestamp value to a UTC datetime object - # See: https://github.com/pydantic/pydantic/issues/6518 - @validator("ApproximateFirstReceiveTimestamp", "SentTimestamp", pre=True) - def coerce_timestamp(cls, value): - date_utc = datetime.fromtimestamp(int(value) / 1000, tz=timezone.utc) - return date_utc - class SqsMsgAttributeModel(BaseModel): stringValue: Optional[str] = None From f73a2221fc33a5ed95853c3ff5b6517f237d0d13 Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: Mon, 17 Jul 2023 16:04:56 +0200 Subject: [PATCH 14/21] Adding pytest ignore messages for Pydantic v2 Signed-off-by: Cavalcante Damascena --- pyproject.toml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 82af9d1915a..5bc3abd502c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -161,6 +161,15 @@ markers 
= [ "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", ] +# Disable Pydantic v2 warnings +filterwarnings=[ + "ignore:.*The `parse_obj` method is deprecated*:DeprecationWarning", + "ignore:.*The `parse_raw` method is deprecated*:DeprecationWarning", + "ignore:.*load_str_bytes is deprecated*:DeprecationWarning", + "ignore:.*The `dict` method is deprecated; use `model_dump` instead*:DeprecationWarning", + "ignore:.*Pydantic V1 style `@validator` validators are deprecated*:DeprecationWarning" +] + [build-system] requires = ["poetry-core>=1.3.2"] build-backend = "poetry.core.masonry.api" From 1774a1c09b8c1443e91d28905ec96b955869865f Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: Mon, 17 Jul 2023 16:48:55 +0200 Subject: [PATCH 15/21] Adding pytest ignore messages for Pydantic v2 Signed-off-by: Cavalcante Damascena --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5bc3abd502c..82cd52d5a04 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -161,7 +161,7 @@ markers = [ "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", ] -# Disable Pydantic v2 warnings +# MAINTENANCE: Remove these lines when drop support to Pydantic v1 filterwarnings=[ "ignore:.*The `parse_obj` method is deprecated*:DeprecationWarning", "ignore:.*The `parse_raw` method is deprecated*:DeprecationWarning", From 3a5d26f695463b7846ae86f3426b87e6f551a487 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Thu, 20 Jul 2023 22:47:45 +0100 Subject: [PATCH 16/21] pydanticv2: removing duplicated workflow + disabling warning --- ...ticv2.yml => quality_check_pydanticv2.yml} | 2 +- .../quality_check_temp_pydanticv1.yml | 78 ------------------- .../utilities/parser/envelopes/base.py | 3 + .../utilities/parser/parser.py | 2 + 4 files changed, 6 insertions(+), 79 deletions(-) rename .github/workflows/{quality_check_temp_pydanticv2.yml => quality_check_pydanticv2.yml} (98%) delete mode 100644 .github/workflows/quality_check_temp_pydanticv1.yml diff --git a/.github/workflows/quality_check_temp_pydanticv2.yml b/.github/workflows/quality_check_pydanticv2.yml similarity index 98% rename from .github/workflows/quality_check_temp_pydanticv2.yml rename to .github/workflows/quality_check_pydanticv2.yml index 3b6cbfde302..b0e651518c5 100644 --- a/.github/workflows/quality_check_temp_pydanticv2.yml +++ b/.github/workflows/quality_check_pydanticv2.yml @@ -1,4 +1,4 @@ -name: Code quality temp - Pydanticv2 +name: Code quality - Pydanticv2 # PROCESS # diff --git a/.github/workflows/quality_check_temp_pydanticv1.yml b/.github/workflows/quality_check_temp_pydanticv1.yml deleted file mode 100644 index 6c2eeca36ce..00000000000 --- a/.github/workflows/quality_check_temp_pydanticv1.yml +++ /dev/null @@ -1,78 +0,0 @@ -name: Code quality temp - Pydanticv1 - -# PROCESS -# -# 1. Install all dependencies and spin off containers for all supported Python versions -# 2. Run code formatters and linters (various checks) for code standard -# 3. Run static typing checker for potential bugs -# 4. Run entire test suite for regressions except end-to-end (unit, functional, performance) -# 5. Run static analysis (in addition to CodeQL) for common insecure code practices -# 6. Run complexity baseline to avoid error-prone bugs and keep maintenance lower -# 7. Collect and report on test coverage - -# USAGE -# -# Always triggered on new PRs, PR changes and PR merge. 
- - -on: - pull_request: - paths: - - "aws_lambda_powertools/**" - - "tests/**" - - "pyproject.toml" - - "poetry.lock" - - "mypy.ini" - branches: - - poc/pydanticv2 - push: - paths: - - "aws_lambda_powertools/**" - - "tests/**" - - "pyproject.toml" - - "poetry.lock" - - "mypy.ini" - branches: - - poc/pydanticv2 - -permissions: - contents: read - -jobs: - quality_check: - runs-on: ubuntu-latest - strategy: - max-parallel: 4 - matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] - env: - PYTHON: "${{ matrix.python-version }}" - permissions: - contents: read # checkout code only - steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 - - name: Install poetry - run: pipx install poetry - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 - with: - python-version: ${{ matrix.python-version }} - cache: "poetry" - - name: Install dependencies - run: make dev - - name: Formatting and Linting - run: make lint - - name: Static type checking - run: make mypy - - name: Test with pytest - run: make test - - name: Security baseline - run: make security-baseline - - name: Complexity baseline - run: make complexity-baseline - - name: Upload coverage to Codecov - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # 3.1.4 - with: - file: ./coverage.xml - env_vars: PYTHON - name: aws-lambda-powertools-python-codecov diff --git a/aws_lambda_powertools/utilities/parser/envelopes/base.py b/aws_lambda_powertools/utilities/parser/envelopes/base.py index 85486fdd876..101e157ef69 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/base.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/base.py @@ -2,6 +2,7 @@ from abc import ABC, abstractmethod from typing import Any, Dict, Optional, Type, TypeVar, Union +from aws_lambda_powertools.utilities.parser.compat import disable_pydantic_v2_warning from aws_lambda_powertools.utilities.parser.types import Model logger = logging.getLogger(__name__) @@ -26,6 +27,8 @@ def _parse(data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Un Any Parsed data """ + disable_pydantic_v2_warning() + if data is None: logger.debug("Skipping parsing as event is None") return data diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py index eeaa5612fff..7e2d69e429c 100644 --- a/aws_lambda_powertools/utilities/parser/parser.py +++ b/aws_lambda_powertools/utilities/parser/parser.py @@ -1,6 +1,7 @@ import logging from typing import Any, Callable, Dict, Optional, Type, overload +from aws_lambda_powertools.utilities.parser.compat import disable_pydantic_v2_warning from aws_lambda_powertools.utilities.parser.types import EventParserReturnType, Model from ...middleware_factory import lambda_handler_decorator @@ -156,6 +157,7 @@ def handler(event: Order, context: LambdaContext): raise InvalidEnvelopeError(f"Envelope must implement BaseEnvelope, envelope={envelope}") try: + disable_pydantic_v2_warning() logger.debug("Parsing and validating event model; no envelope used") if isinstance(event, str): return model.parse_raw(event) From 53e4e980ab926e6fee531b223352d951ec59d0dd Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 21 Jul 2023 09:00:45 +0100 Subject: [PATCH 17/21] pydanticv2: adding documentation --- docs/utilities/parser.md | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 
f482dcb0410..8074811731d 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -11,11 +11,16 @@ This utility provides data parsing and deep validation using [Pydantic](https:// * Defines data in pure Python classes, then parse, validate and extract only what you want * Built-in envelopes to unwrap, extend, and validate popular event sources payloads * Enforces type hints at runtime with user-friendly errors +* Support for Pydantic v1 and v2 ## Getting started ### Install +PowerTools for AWS Lambda (Python) supports Pydantic v1 and v2. See how to use each version in following sections. + +#### Using Pydantic v1 + !!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will ensure you have the required dependencies before using Parser. @@ -28,6 +33,17 @@ Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g Pip example: `SKIP_CYTHON=1 pip install --no-binary pydantic aws-lambda-powertools[parser]` +#### Using Pydantic v2 + +???+ info + Pydantic v2.0.3 or later is required due to regression. + + Please note that an early version of Pydantic v2 experienced a regression issue with `datetime` fields. To avoid any problems, it is crucial to use Pydantic v2 version 2.0.3 or a more recent release. + +To use Powertools for AWS Lambda (Python) with Pydantic v2, you need to bring Pydantic v2 as an external dependency. + +Add `aws-lambda-powertools` and `pydantic>=2.0.3` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will ensure you have the required dependencies before using Parser. + ### Defining models You can define models to parse incoming events by inheriting from `BaseModel`. @@ -45,7 +61,7 @@ class Order(BaseModel): id: int description: str items: List[OrderItem] # nesting models are supported - optional_field: Optional[str] # this field may or may not be available when parsing + optional_field: Optional[str] = None # this field may or may not be available when parsing ``` These are simply Python classes that inherit from BaseModel. **Parser** enforces type hints declared in your model at runtime. 
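These `optional_field` hunks are more than cosmetic: under Pydantic v2, `Optional[X]` only widens the type and no longer implies a default of `None`, so a field declared `Optional[str]` becomes *required*. A minimal sketch of the difference, assuming Pydantic v2 semantics (the model names here are illustrative, not from the patch):

```python
from typing import Optional

from pydantic import BaseModel, ValidationError


class WithoutDefault(BaseModel):
    # Required under Pydantic v2: the value may be None, but the key must be present.
    optional_field: Optional[str]


class WithDefault(BaseModel):
    # Truly optional under both v1 and v2, thanks to the explicit default.
    optional_field: Optional[str] = None


try:
    WithoutDefault()
except ValidationError as exc:
    # v2 reports the field as missing; v1 would have accepted this and defaulted to None.
    print(exc.errors()[0]["type"])  # "missing"

print(WithDefault().optional_field)  # None
```

Adding the explicit `= None` keeps the documented models behaving identically on both majors, which is why every snippet in this file gains it, as the remaining hunks below show.
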
@@ -79,7 +95,7 @@ class Order(BaseModel): id: int description: str items: List[OrderItem] # nesting models are supported - optional_field: Optional[str] # this field may or may not be available when parsing + optional_field: Optional[str] = None # this field may or may not be available when parsing @event_parser(model=Order) @@ -124,7 +140,7 @@ class Order(BaseModel): id: int description: str items: List[OrderItem] # nesting models are supported - optional_field: Optional[str] # this field may or may not be available when parsing + optional_field: Optional[str] = None # this field may or may not be available when parsing payload = { From 49561b22f0fcd138219c0eed8acb0e8226f38166 Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: Fri, 21 Jul 2023 12:34:58 +0100 Subject: [PATCH 18/21] Adding cache to disable pydantic warnings Signed-off-by: Cavalcante Damascena --- aws_lambda_powertools/utilities/parser/compat.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/utilities/parser/compat.py b/aws_lambda_powertools/utilities/parser/compat.py index a791d4042d0..c73098421b1 100644 --- a/aws_lambda_powertools/utilities/parser/compat.py +++ b/aws_lambda_powertools/utilities/parser/compat.py @@ -1,3 +1,7 @@ +import functools + + +@functools.lru_cache(maxsize=None) def disable_pydantic_v2_warning(): """ Disables the Pydantic version 2 warning by filtering out the related warnings. @@ -6,7 +10,9 @@ def disable_pydantic_v2_warning(): it filters out the PydanticDeprecationWarning and PydanticDeprecatedSince20 warnings to suppress them. - Note: This function assumes that Pydantic is already imported. + Since we only need to run the code once, we are using lru_cache to improve performance. + + Note: This function assumes that Pydantic is installed. Usage: disable_pydantic_v2_warning() From eef0dc1e2d90271195f21081bfe66c36beedeb19 Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: Fri, 21 Jul 2023 12:45:48 +0100 Subject: [PATCH 19/21] Adjusting workflow Signed-off-by: Cavalcante Damascena --- .github/workflows/quality_check_pydanticv2.yml | 4 ++-- docs/utilities/parser.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/quality_check_pydanticv2.yml b/.github/workflows/quality_check_pydanticv2.yml index b0e651518c5..a0cae217de3 100644 --- a/.github/workflows/quality_check_pydanticv2.yml +++ b/.github/workflows/quality_check_pydanticv2.yml @@ -24,7 +24,7 @@ on: - "poetry.lock" - "mypy.ini" branches: - - poc/pydanticv2 + - develop push: paths: - "aws_lambda_powertools/**" @@ -33,7 +33,7 @@ on: - "poetry.lock" - "mypy.ini" branches: - - poc/pydanticv2 + - develop permissions: contents: read diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 8074811731d..0a5780d8ecd 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -17,7 +17,7 @@ This utility provides data parsing and deep validation using [Pydantic](https:// ### Install -PowerTools for AWS Lambda (Python) supports Pydantic v1 and v2. See how to use each version in following sections. +PowerTools for AWS Lambda (Python) supports Pydantic v1 and v2. See how to use each version in the following sections. 
#### Using Pydantic v1 From f8470f5e1682a02272fcc854e8c873fa7a9ba817 Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: Fri, 21 Jul 2023 13:04:51 +0100 Subject: [PATCH 20/21] Addressing Heitor's feedback Signed-off-by: Cavalcante Damascena --- docs/utilities/parser.md | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 0a5780d8ecd..d98835a8381 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -17,13 +17,13 @@ This utility provides data parsing and deep validation using [Pydantic](https:// ### Install -PowerTools for AWS Lambda (Python) supports Pydantic v1 and v2. See how to use each version in the following sections. +Powertools for AWS Lambda (Python) supports Pydantic v1 and v2. Each Pydantic version requires different dependencies before you can use Parser. #### Using Pydantic v1 !!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" -Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will ensure you have the required dependencies before using Parser. +Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. ???+ warning This will increase the compressed package size by >10MB due to the Pydantic dependency. @@ -35,14 +35,9 @@ Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g #### Using Pydantic v2 -???+ info - Pydantic v2.0.3 or later is required due to regression. - - Please note that an early version of Pydantic v2 experienced a regression issue with `datetime` fields. To avoid any problems, it is crucial to use Pydantic v2 version 2.0.3 or a more recent release. - -To use Powertools for AWS Lambda (Python) with Pydantic v2, you need to bring Pydantic v2 as an external dependency. +You need to bring Pydantic v2.0.3 or later as an external dependency. Note that [we suppress Pydantic v2 deprecation warnings](https://github.com/aws-powertools/powertools-lambda-python/issues/2672){target="_blank"} to reduce noise and optimize log costs. -Add `aws-lambda-powertools` and `pydantic>=2.0.3` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will ensure you have the required dependencies before using Parser. +Add `aws-lambda-powertools` and `pydantic>=2.0.3` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. ### Defining models From fa298d203fa056f084826689ea80d619b93b893c Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: Fri, 21 Jul 2023 13:07:01 +0100 Subject: [PATCH 21/21] Removed codecov upload Signed-off-by: Cavalcante Damascena --- .github/workflows/quality_check_pydanticv2.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/quality_check_pydanticv2.yml b/.github/workflows/quality_check_pydanticv2.yml index a0cae217de3..435ee5df868 100644 --- a/.github/workflows/quality_check_pydanticv2.yml +++ b/.github/workflows/quality_check_pydanticv2.yml @@ -74,9 +74,3 @@ jobs: run: make security-baseline - name: Complexity baseline run: make complexity-baseline - - name: Upload coverage to Codecov - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # 3.1.4 - with: - file: ./coverage.xml - env_vars: PYTHON - name: aws-lambda-powertools-python-codecov
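
Taken together, the series leaves Powertools importable under either Pydantic major: deprecation warnings are suppressed once via the cached `disable_pydantic_v2_warning()`, the v1-only pre-validators are gone, and the docs spell out the v2 dependency floor. For readers who need their *own* models to parse under both majors, here is a minimal, hedged sketch of a version-gated variant of the validator usage the test suite exercises — `FakeOrder` and its fields are illustrative names, and only `pydantic` itself is assumed to be installed:

```python
from pydantic import BaseModel, __version__

PYDANTIC_V2 = __version__.startswith("2")

if PYDANTIC_V2:
    from pydantic import field_validator
else:
    from pydantic import validator


class FakeOrder(BaseModel):
    order_id: int
    total: float

    if PYDANTIC_V2:
        # v2 spelling: field_validator(..., mode="before") replaces v1's validator(..., pre=True)
        @field_validator("total", mode="before")
        def coerce_total(cls, value):
            return float(value)
    else:
        @validator("total", pre=True)
        def coerce_total(cls, value):
            return float(value)


parsed = FakeOrder(order_id=7, total="19.99")
assert parsed.total == 19.99  # string coerced before validation on both majors
```

Powertools itself sidesteps this branching: once the upstream `datetime` regression was fixed in Pydantic 2.0.3, PATCH 13 simply deleted the timestamp pre-validators instead of porting them, keeping the models decorator-free.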