Skip to content
This repository was archived by the owner on Mar 23, 2026. It is now read-only.

Commit 4be950c

Browse files
authored
ASF: handle error serialization for Query-compatible services (#13172)
1 parent 26647b8 commit 4be950c

File tree

8 files changed

+424
-92
lines changed

8 files changed

+424
-92
lines changed

localstack-core/localstack/aws/protocol/serializer.py

Lines changed: 47 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -614,6 +614,15 @@ def _add_md5_header(self, response: Response):
614614
def _get_error_message(self, error: Exception) -> str | None:
615615
return str(error) if error is not None and str(error) != "None" else None
616616

617+
def _add_query_compatible_error_header(self, response: Response, error: ServiceException):
618+
"""
619+
Add an `x-amzn-query-error` header for clients to translate error codes from former `query` services
620+
into other protocols.
621+
"""
622+
623+
sender_fault = "Sender" if error.sender_fault else "Receiver"
624+
response.headers["x-amzn-query-error"] = f"{error.code};{sender_fault}"
625+
617626

618627
class BaseXMLResponseSerializer(ResponseSerializer):
619628
"""
@@ -1254,8 +1263,20 @@ def _serialize_error(
12541263
# TODO implement different service-specific serializer configurations
12551264
# - currently we set both, the `__type` member as well as the `X-Amzn-Errortype` header
12561265
# - the specification defines that it's either the __type field OR the header
1257-
response.headers["X-Amzn-Errortype"] = error.code
1258-
body["__type"] = error.code
1266+
# this depends on the JSON protocol version as well. If json-1.0 the Error should be the full shape ID, like
1267+
# com.amazon.coral.service#ExceptionName
1268+
# if json-1.1, it should only be the name
1269+
1270+
# if the operation is query compatible, we need to use the shape name
1271+
# when we create `CommonServiceException` and they don't exist in the spec, we already give the error name
1272+
# as the exception code.
1273+
if shape and operation_model.service_model.is_query_compatible:
1274+
code = shape.name
1275+
else:
1276+
code = error.code
1277+
1278+
response.headers["X-Amzn-Errortype"] = code
1279+
body["__type"] = code
12591280

12601281
if shape:
12611282
remaining_params = {}
@@ -1280,6 +1301,9 @@ def _serialize_error(
12801301
else:
12811302
response.set_json(body)
12821303

1304+
if operation_model.service_model.is_query_compatible:
1305+
self._add_query_compatible_error_header(response, error)
1306+
12831307
def _serialize_response(
12841308
self,
12851309
parameters: dict,
@@ -1858,10 +1882,17 @@ def _serialize_error(
18581882
) -> None:
18591883
body = bytearray()
18601884
response.content_type = mime_type # can only be 'application/cbor'
1861-
# TODO: the Botocore parser is able to look at the `x-amzn-query-error` header for the RpcV2 CBOR protocol
1862-
# we'll need to investigate which services need it
1885+
18631886
# Responses for the rpcv2Cbor protocol SHOULD NOT contain the X-Amzn-ErrorType header.
1864-
# Type information is always serialized in the payload. This is different than `json` protocol
1887+
# Type information is always serialized in the payload. This is different from the `json` protocol
1888+
1889+
# if the operation is query compatible, we need to use the shape name
1890+
# when we create `CommonServiceException` and they don't exist in the spec, we already give the error name
1891+
# as the exception code.
1892+
if shape and operation_model.service_model.is_query_compatible:
1893+
code = shape.name
1894+
else:
1895+
code = error.code
18651896

18661897
if shape:
18671898
# FIXME: we need to manually add the `__type` field to the shape as it is not part of the specs
@@ -1872,7 +1903,7 @@ def _serialize_error(
18721903
shape_copy.members["__type"] = StringShape(
18731904
shape_name="__type", shape_model={"type": "string"}
18741905
)
1875-
remaining_params = {"__type": error.code}
1906+
remaining_params = {"__type": code}
18761907

18771908
for member_name in shape_copy.members:
18781909
if hasattr(error, member_name):
@@ -1887,6 +1918,9 @@ def _serialize_error(
18871918

18881919
response.set_response(bytes(body))
18891920

1921+
if operation_model.service_model.is_query_compatible:
1922+
self._add_query_compatible_error_header(response, error)
1923+
18901924
def _prepare_additional_traits_in_response(
18911925
self, response: Response, operation_model: OperationModel, request_id: str
18921926
):
@@ -2191,33 +2225,20 @@ class SqsJsonResponseSerializer(JSONResponseSerializer):
21912225
"QueueNameExists": "QueueAlreadyExists",
21922226
}
21932227

2194-
def _serialize_error(
2195-
self,
2196-
error: ServiceException,
2197-
response: Response,
2198-
shape: StructureShape,
2199-
operation_model: OperationModel,
2200-
mime_type: str,
2201-
request_id: str,
2202-
) -> None:
2203-
"""
2204-
Overrides _serialize_error as SQS has a special header for query API legacy reason: 'x-amzn-query-error',
2205-
which contained the exception code as well as a Sender field.
2206-
Ex: 'x-amzn-query-error': 'InvalidParameterValue;Sender'
2207-
"""
2208-
# TODO: for body["__type"] = error.code, it seems AWS differs from what we send for SQS
2209-
# AWS: "com.amazon.coral.service#InvalidParameterValueException"
2210-
# or AWS: "com.amazonaws.sqs#BatchRequestTooLong"
2211-
# LocalStack: "InvalidParameterValue"
2212-
super()._serialize_error(error, response, shape, operation_model, mime_type, request_id)
2213-
# We need to add a prefix to certain errors, as they have been deleted in the specs. These will not change
2228+
# TODO: on body error serialization (body["__type"]), it seems AWS differs from what we send for SQS
2229+
# AWS: "com.amazon.coral.service#InvalidParameterValueException"
2230+
# or AWS: "com.amazonaws.sqs#BatchRequestTooLong"
2231+
# LocalStack: "InvalidParameterValue"
2232+
2233+
def _add_query_compatible_error_header(self, response: Response, error: ServiceException):
22142234
if error.code in self.JSON_TO_QUERY_ERROR_CODES:
22152235
code = self.JSON_TO_QUERY_ERROR_CODES[error.code]
22162236
elif error.code in self.QUERY_PREFIXED_ERRORS:
22172237
code = f"AWS.SimpleQueueService.{error.code}"
22182238
else:
22192239
code = error.code
22202240

2241+
# SQS exceptions all have sender fault set to False, so we hardcode it to `Sender`
22212242
response.headers["x-amzn-query-error"] = f"{code};Sender"
22222243

22232244

localstack-core/localstack/services/cloudwatch/provider_v2.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -738,7 +738,7 @@ def get_metric_statistics(
738738
for i, timestamp in enumerate(timestamps):
739739
stat_datapoints.setdefault(selected_unit, {})
740740
stat_datapoints[selected_unit].setdefault(timestamp, {})
741-
stat_datapoints[selected_unit][timestamp][stat] = values[i]
741+
stat_datapoints[selected_unit][timestamp][stat] = float(values[i])
742742
stat_datapoints[selected_unit][timestamp]["Unit"] = selected_unit
743743

744744
datapoints: list[Datapoint] = []
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
import os
2+
from typing import TYPE_CHECKING
3+
4+
import pytest
5+
from botocore.parsers import create_parser
6+
from botocore.serialize import create_serializer
7+
8+
from localstack.aws.spec import load_service
9+
from localstack.testing.aws.util import is_aws_cloud
10+
11+
if TYPE_CHECKING:
12+
from mypy_boto3_cloudwatch import CloudWatchClient
13+
14+
15+
def is_old_provider():
16+
return os.environ.get("PROVIDER_OVERRIDE_CLOUDWATCH") == "v1" and not is_aws_cloud()
17+
18+
19+
@pytest.fixture(params=["query", "json", "smithy-rpc-v2-cbor"])
20+
def aws_cloudwatch_client(aws_client, monkeypatch, request) -> "CloudWatchClient":
21+
protocol = request.param
22+
if is_old_provider() and protocol in ("json", "smithy-rpc-v2-cbor"):
23+
pytest.skip(f"Protocol '{protocol}' not supported in Moto")
24+
"""
25+
Currently, there is no way to select which protocol to use when creating a Boto3 client for a service that supports
26+
multiple protocols, like CloudWatch.
27+
To avoid mutating clients by patching the client initialization logic, we can hardcode the parser and serializer
28+
used by the client instead.
29+
"""
30+
# TODO: remove once Botocore contains the new CloudWatch spec
31+
# for now, we need to also patch the botocore client to be sure it contains the updated service model via the
32+
# json patch
33+
service_model = load_service("cloudwatch")
34+
35+
# instantiate a client via our ExternalAwsClientFactory exposed via `aws_client` fixture
36+
cloudwatch_client_wrapper = aws_client.cloudwatch
37+
# this instance above is the `MetadataRequestInjector`, which wraps the actual client
38+
cloudwatch_client = cloudwatch_client_wrapper._client
39+
40+
# the default client behavior is to include validation
41+
protocol_serializer = create_serializer(protocol)
42+
protocol_parser = create_parser(protocol)
43+
44+
monkeypatch.setattr(cloudwatch_client.meta, "_service_model", service_model)
45+
monkeypatch.setattr(cloudwatch_client, "_serializer", protocol_serializer)
46+
monkeypatch.setattr(cloudwatch_client, "_response_parser", protocol_parser)
47+
monkeypatch.setattr(cloudwatch_client.meta.service_model, "resolved_protocol", protocol)
48+
49+
# this is useful to know from the test itself which protocol is currently used
50+
monkeypatch.setattr(cloudwatch_client, "test_client_protocol", protocol, raising=False)
51+
52+
yield cloudwatch_client

tests/aws/services/cloudwatch/test_cloudwatch.py

Lines changed: 38 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22
import gzip
33
import json
44
import logging
5-
import os
65
import threading
76
import time
87
from datetime import UTC, datetime, timedelta, timezone
@@ -24,30 +23,28 @@
2423
from localstack.utils.common import retry, short_uid, to_str
2524
from localstack.utils.sync import poll_condition, wait_until
2625

26+
from .conftest import is_old_provider
2727
from .utils import get_cloudwatch_client
2828

2929
if TYPE_CHECKING:
3030
from mypy_boto3_logs import CloudWatchLogsClient
31+
3132
PUBLICATION_RETRIES = 5
3233

3334
LOG = logging.getLogger(__name__)
3435

3536

36-
def is_old_provider():
37-
return os.environ.get("PROVIDER_OVERRIDE_CLOUDWATCH") == "v1" and not is_aws_cloud()
38-
39-
4037
class TestCloudwatch:
4138
@markers.aws.validated
42-
def test_put_metric_data_values_list(self, snapshot, aws_client):
39+
def test_put_metric_data_values_list(self, snapshot, aws_cloudwatch_client):
4340
metric_name = "test-metric"
4441
namespace = f"ns-{short_uid()}"
45-
utc_now = datetime.utcnow().replace(tzinfo=UTC)
42+
utc_now = datetime.now(tz=UTC)
4643
snapshot.add_transformer(
4744
snapshot.transform.key_value("Timestamp", reference_replacement=False)
4845
)
4946

50-
aws_client.cloudwatch.put_metric_data(
47+
aws_cloudwatch_client.put_metric_data(
5148
Namespace=namespace,
5249
MetricData=[
5350
{
@@ -60,9 +57,11 @@ def test_put_metric_data_values_list(self, snapshot, aws_client):
6057
],
6158
)
6259

60+
stats = {}
61+
6362
def get_stats() -> int:
64-
global stats
65-
stats = aws_client.cloudwatch.get_metric_statistics(
63+
nonlocal stats
64+
stats = aws_cloudwatch_client.get_metric_statistics(
6665
Namespace=namespace,
6766
MetricName=metric_name,
6867
StartTime=utc_now - timedelta(seconds=60),
@@ -77,7 +76,7 @@ def get_stats() -> int:
7776
snapshot.match("get_metric_statistics", stats)
7877

7978
@markers.aws.only_localstack
80-
def test_put_metric_data_gzip(self, aws_client, region_name):
79+
def test_put_metric_data_gzip_with_query_protocol(self, aws_client, region_name):
8180
metric_name = "test-metric"
8281
namespace = "namespace"
8382
data = (
@@ -117,13 +116,13 @@ def test_put_metric_data_gzip(self, aws_client, region_name):
117116

118117
@markers.aws.validated
119118
@pytest.mark.skipif(is_old_provider(), reason="not supported by the old provider")
120-
def test_put_metric_data_validation(self, aws_client):
119+
def test_put_metric_data_validation(self, aws_cloudwatch_client, snapshot):
121120
namespace = f"ns-{short_uid()}"
122-
utc_now = datetime.utcnow().replace(tzinfo=UTC)
121+
utc_now = datetime.now(tz=UTC)
123122

124123
# test invalid due to having both Values and Value
125-
with pytest.raises(Exception) as ex:
126-
aws_client.cloudwatch.put_metric_data(
124+
with pytest.raises(ClientError) as ex:
125+
aws_cloudwatch_client.put_metric_data(
127126
Namespace=namespace,
128127
MetricData=[
129128
{
@@ -135,16 +134,11 @@ def test_put_metric_data_validation(self, aws_client):
135134
}
136135
],
137136
)
138-
err = ex.value.response["Error"]
139-
assert err["Code"] == "InvalidParameterCombination"
140-
assert (
141-
err["Message"]
142-
== "The parameters MetricData.member.1.Value and MetricData.member.1.Values are mutually exclusive and you have specified both."
143-
)
137+
snapshot.match("invalid-param-combination", ex.value.response)
144138

145139
# test invalid due to data can not have and values mismatched_counts
146-
with pytest.raises(Exception) as ex:
147-
aws_client.cloudwatch.put_metric_data(
140+
with pytest.raises(ClientError) as ex:
141+
aws_cloudwatch_client.put_metric_data(
148142
Namespace=namespace,
149143
MetricData=[
150144
{
@@ -156,16 +150,11 @@ def test_put_metric_data_validation(self, aws_client):
156150
}
157151
],
158152
)
159-
err = ex.value.response["Error"]
160-
assert err["Code"] == "InvalidParameterValue"
161-
assert (
162-
err["Message"]
163-
== "The parameters MetricData.member.1.Values and MetricData.member.1.Counts must be of the same size."
164-
)
153+
snapshot.match("invalid-param-value", ex.value.response)
165154

166155
# test invalid due to inserting both value and statistic values
167-
with pytest.raises(Exception) as ex:
168-
aws_client.cloudwatch.put_metric_data(
156+
with pytest.raises(ClientError) as ex:
157+
aws_cloudwatch_client.put_metric_data(
169158
Namespace=namespace,
170159
MetricData=[
171160
{
@@ -182,15 +171,10 @@ def test_put_metric_data_validation(self, aws_client):
182171
}
183172
],
184173
)
185-
err = ex.value.response["Error"]
186-
assert err["Code"] == "InvalidParameterCombination"
187-
assert (
188-
err["Message"]
189-
== "The parameters MetricData.member.1.Value and MetricData.member.1.StatisticValues are mutually exclusive and you have specified both."
190-
)
174+
snapshot.match("invalid-param-combination-2", ex.value.response)
191175

192176
# For some strange reason the AWS implementation allows this
193-
aws_client.cloudwatch.put_metric_data(
177+
aws_cloudwatch_client.put_metric_data(
194178
Namespace=namespace,
195179
MetricData=[
196180
{
@@ -2937,8 +2921,6 @@ def test_invalid_amount_of_datapoints(self, aws_client, snapshot):
29372921

29382922

29392923
class TestCloudWatchMultiProtocol:
2940-
# TODO: run the whole test suite with all available protocols
2941-
29422924
@pytest.fixture
29432925
def cloudwatch_http_client(self, region_name, aws_http_client_factory):
29442926
def _create_client(protocol: str):
@@ -2948,6 +2930,22 @@ def _create_client(protocol: str):
29482930

29492931
return _create_client
29502932

2933+
@markers.aws.validated
2934+
def test_multi_protocol_client_fixture(self, aws_cloudwatch_client):
2935+
"""
2936+
Smoke test to validate that the client is indeed using the right protocol
2937+
"""
2938+
response = aws_cloudwatch_client.describe_alarms()
2939+
response_headers = response["ResponseMetadata"]["HTTPHeaders"]
2940+
content_type = response_headers["content-type"]
2941+
if aws_cloudwatch_client.test_client_protocol == "query":
2942+
assert content_type in ("text/xml", "application/xml")
2943+
elif aws_cloudwatch_client.test_client_protocol == "json":
2944+
assert content_type == "application/x-amz-json-1.0"
2945+
elif aws_cloudwatch_client.test_client_protocol == "smithy-rpc-v2-cbor":
2946+
assert content_type == "application/cbor"
2947+
assert response_headers["smithy-protocol"] == "rpc-v2-cbor"
2948+
29512949
@markers.aws.validated
29522950
@pytest.mark.parametrize("protocol", ["json", "smithy-rpc-v2-cbor", "query"])
29532951
@markers.snapshot.skip_snapshot_verify(

0 commit comments

Comments
 (0)