Skip to content

Commit 3635195

Browse files
chore(tests): sync client verification tests (#1046)
1 parent 5e861cc commit 3635195

File tree

9 files changed

+319
-15
lines changed

9 files changed

+319
-15
lines changed

packages/google-cloud-bigtable/.cross_sync/README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,8 @@ Generation can be initiated using `nox -s generate_sync`
6666
from the root of the project. This will find all classes with the `__CROSS_SYNC_OUTPUT__ = "path/to/output"`
6767
annotation, and generate a sync version of classes marked with `@CrossSync.convert_sync` at the output path.
6868

69+
There is a unit test at `tests/unit/data/test_sync_up_to_date.py` that verifies that the generated code is up to date
70+
6971
## Architecture
7072

7173
CrossSync is made up of two parts:

packages/google-cloud-bigtable/.github/workflows/conformance.yaml

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,15 @@ jobs:
2626
matrix:
2727
test-version: [ "v0.0.2" ]
2828
py-version: [ 3.8 ]
29-
client-type: [ "async", "legacy" ]
29+
client-type: [ "async", "sync", "legacy" ]
30+
include:
31+
- client-type: "sync"
32+
# sync client does not support concurrent streams
33+
test_args: "-skip _Generic_MultiStream"
34+
- client-type: "legacy"
35+
# legacy client is synchronous and does not support concurrent streams
36+
# legacy client does not expose mutate_row. Disable those tests
37+
test_args: "-skip _Generic_MultiStream -skip TestMutateRow_"
3038
fail-fast: false
3139
name: "${{ matrix.client-type }} client / python ${{ matrix.py-version }} / test tag ${{ matrix.test-version }}"
3240
steps:
@@ -53,4 +61,6 @@ jobs:
5361
env:
5462
CLIENT_TYPE: ${{ matrix.client-type }}
5563
PYTHONUNBUFFERED: 1
64+
TEST_ARGS: ${{ matrix.test_args }}
65+
PROXY_PORT: 9999
5666

packages/google-cloud-bigtable/.kokoro/conformance.sh

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,16 +19,7 @@ set -eo pipefail
1919
## cd to the parent directory, i.e. the root of the git repo
2020
cd $(dirname $0)/..
2121

22-
PROXY_ARGS=""
23-
TEST_ARGS=""
24-
if [[ "${CLIENT_TYPE^^}" == "LEGACY" ]]; then
25-
echo "Using legacy client"
26-
# legacy client does not expose mutate_row. Disable those tests
27-
TEST_ARGS="-skip TestMutateRow_"
28-
fi
29-
3022
# Build and start the proxy in a separate process
31-
PROXY_PORT=9999
3223
pushd test_proxy
3324
nohup python test_proxy.py --port $PROXY_PORT --client_type=$CLIENT_TYPE &
3425
proxyPID=$!
@@ -42,6 +33,7 @@ function cleanup() {
4233
trap cleanup EXIT
4334

4435
# Run the conformance test
36+
echo "running tests with args: $TEST_ARGS"
4537
pushd cloud-bigtable-clients-test/tests
4638
eval "go test -v -proxy_addr=:$PROXY_PORT $TEST_ARGS"
4739
RETURN_CODE=$?

packages/google-cloud-bigtable/noxfile.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -298,7 +298,7 @@ def system_emulated(session):
298298

299299

300300
@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
301-
@nox.parametrize("client_type", ["async"])
301+
@nox.parametrize("client_type", ["async", "sync", "legacy"])
302302
def conformance(session, client_type):
303303
# install dependencies
304304
constraints_path = str(

packages/google-cloud-bigtable/test_proxy/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ python test_proxy.py --port 8080
3131
```
3232

3333
By default, the test_proxy targets the async client. You can change this by passing in the `--client_type` flag.
34-
Valid options are `async` and `legacy`.
34+
Valid options are `async`, `sync`, and `legacy`.
3535

3636
```
3737
python test_proxy.py --client_type=legacy
Lines changed: 185 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,185 @@
1+
# Copyright 2024 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
# This file is automatically generated by CrossSync. Do not edit manually.
16+
17+
"""
18+
This module contains the client handler process for proxy_server.py.
19+
"""
20+
import os
21+
from google.cloud.environment_vars import BIGTABLE_EMULATOR
22+
from google.cloud.bigtable.data._cross_sync import CrossSync
23+
from client_handler_data_async import error_safe
24+
25+
26+
class TestProxyClientHandler:
    """
    Implements the same methods as the grpc server, but handles the client
    library side of the request.

    Requests received in TestProxyGrpcServer are converted to a dictionary,
    and supplied to the TestProxyClientHandler methods as kwargs.
    The client response is then returned back to the TestProxyGrpcServer
    """

    # NOTE(review): this file is generated by CrossSync from the async handler
    # (see the header comment); behavioral changes belong in the async source,
    # not here, or the sync-up-to-date unit test will fail.

    def __init__(
        self,
        data_target=None,
        project_id=None,
        instance_id=None,
        app_profile_id=None,
        per_operation_timeout=None,
        **kwargs
    ):
        # Tracks whether close() has been called; nothing in this class
        # enforces it — callers are expected to check it.
        self.closed = False
        # Route the client at the proxy's data_target by setting the
        # emulator env var before constructing the client.
        os.environ[BIGTABLE_EMULATOR] = data_target
        self.client = CrossSync._Sync_Impl.DataClient(project=project_id)
        self.instance_id = instance_id
        self.app_profile_id = app_profile_id
        # Fallback operation_timeout applied to every request that does not
        # carry its own; a final default of 20s is applied per-method below.
        self.per_operation_timeout = per_operation_timeout

    def close(self):
        # Only records the closed state; the underlying client is not shut
        # down here.
        self.closed = True

    @error_safe
    async def ReadRows(self, request, **kwargs):
        """Run a ReadRows call and return the rows as plain dicts."""
        # table_name arrives as a full resource path; keep only the table id.
        # pop() so the remaining request dict can be forwarded as the query.
        table_id = request.pop("table_name").split("/")[-1]
        app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
        table = self.client.get_table(self.instance_id, table_id, app_profile_id)
        # Default to 20s when neither the request nor the handler supplies a
        # timeout (`or` also treats 0/None as unset).
        kwargs["operation_timeout"] = (
            kwargs.get("operation_timeout", self.per_operation_timeout) or 20
        )
        result_list = table.read_rows(request, **kwargs)
        # Serialize each row to a plain dict for the grpc layer.
        serialized_response = [row._to_dict() for row in result_list]
        return serialized_response

    @error_safe
    async def ReadRow(self, row_key, **kwargs):
        """Run a point read; returns the row dict, or the string "None" if absent."""
        # Unlike the other methods, table_name comes in through kwargs here
        # and must be popped before kwargs is forwarded to read_row.
        table_id = kwargs.pop("table_name").split("/")[-1]
        app_profile_id = self.app_profile_id or kwargs.get("app_profile_id", None)
        table = self.client.get_table(self.instance_id, table_id, app_profile_id)
        kwargs["operation_timeout"] = (
            kwargs.get("operation_timeout", self.per_operation_timeout) or 20
        )
        result_row = table.read_row(row_key, **kwargs)
        if result_row:
            return result_row._to_dict()
        else:
            # The literal string "None" (not the None object) signals a miss
            # to the proxy layer.
            return "None"

    @error_safe
    async def MutateRow(self, request, **kwargs):
        """Apply a single row mutation; returns "OK" on success."""
        from google.cloud.bigtable.data.mutations import Mutation

        table_id = request["table_name"].split("/")[-1]
        app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
        table = self.client.get_table(self.instance_id, table_id, app_profile_id)
        kwargs["operation_timeout"] = (
            kwargs.get("operation_timeout", self.per_operation_timeout) or 20
        )
        row_key = request["row_key"]
        mutations = [Mutation._from_dict(d) for d in request["mutations"]]
        table.mutate_row(row_key, mutations, **kwargs)
        return "OK"

    @error_safe
    async def BulkMutateRows(self, request, **kwargs):
        """Apply a batch of row mutation entries; returns "OK" on success."""
        from google.cloud.bigtable.data.mutations import RowMutationEntry

        table_id = request["table_name"].split("/")[-1]
        app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
        table = self.client.get_table(self.instance_id, table_id, app_profile_id)
        kwargs["operation_timeout"] = (
            kwargs.get("operation_timeout", self.per_operation_timeout) or 20
        )
        entry_list = [
            RowMutationEntry._from_dict(entry) for entry in request["entries"]
        ]
        table.bulk_mutate_rows(entry_list, **kwargs)
        return "OK"

    @error_safe
    async def CheckAndMutateRow(self, request, **kwargs):
        """Run a conditional mutation and return the client's result object."""
        from google.cloud.bigtable.data.mutations import Mutation, SetCell

        table_id = request["table_name"].split("/")[-1]
        app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
        table = self.client.get_table(self.instance_id, table_id, app_profile_id)
        kwargs["operation_timeout"] = (
            kwargs.get("operation_timeout", self.per_operation_timeout) or 20
        )
        row_key = request["row_key"]
        true_mutations = []
        for mut_dict in request.get("true_mutations", []):
            try:
                true_mutations.append(Mutation._from_dict(mut_dict))
            except ValueError:
                # Unparsable mutation dicts are replaced with a placeholder
                # SetCell — presumably so the server, not the client, rejects
                # the invalid request; confirm against the async handler.
                mutation = SetCell("", "", "", 0)
                true_mutations.append(mutation)
        false_mutations = []
        for mut_dict in request.get("false_mutations", []):
            try:
                false_mutations.append(Mutation._from_dict(mut_dict))
            except ValueError:
                # Same placeholder fallback as for true_mutations above.
                false_mutations.append(SetCell("", "", "", 0))
        predicate_filter = request.get("predicate_filter", None)
        result = table.check_and_mutate_row(
            row_key,
            predicate_filter,
            true_case_mutations=true_mutations,
            false_case_mutations=false_mutations,
            **kwargs
        )
        return result

    @error_safe
    async def ReadModifyWriteRow(self, request, **kwargs):
        """Apply append/increment rules; returns the row dict or the string "None"."""
        from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule
        from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule

        table_id = request["table_name"].split("/")[-1]
        app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
        table = self.client.get_table(self.instance_id, table_id, app_profile_id)
        kwargs["operation_timeout"] = (
            kwargs.get("operation_timeout", self.per_operation_timeout) or 20
        )
        row_key = request["row_key"]
        rules = []
        for rule_dict in request.get("rules", []):
            qualifier = rule_dict["column_qualifier"]
            # Rule kind is discriminated by which value key is present:
            # "append_value" -> AppendValueRule, otherwise IncrementRule.
            if "append_value" in rule_dict:
                new_rule = AppendValueRule(
                    rule_dict["family_name"], qualifier, rule_dict["append_value"]
                )
            else:
                new_rule = IncrementRule(
                    rule_dict["family_name"], qualifier, rule_dict["increment_amount"]
                )
            rules.append(new_rule)
        result = table.read_modify_write_row(row_key, rules, **kwargs)
        if result:
            return result._to_dict()
        else:
            # String sentinel, mirroring ReadRow's miss convention.
            return "None"

    @error_safe
    async def SampleRowKeys(self, request, **kwargs):
        """Return the client's sample_row_keys result for the target table."""
        table_id = request["table_name"].split("/")[-1]
        app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
        table = self.client.get_table(self.instance_id, table_id, app_profile_id)
        kwargs["operation_timeout"] = (
            kwargs.get("operation_timeout", self.per_operation_timeout) or 20
        )
        result = table.sample_row_keys(**kwargs)
        return result

packages/google-cloud-bigtable/test_proxy/run_tests.sh

Lines changed: 15 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ fi
2727
SCRIPT_DIR=$(realpath $(dirname "$0"))
2828
cd $SCRIPT_DIR
2929

30-
export PROXY_SERVER_PORT=50055
30+
export PROXY_SERVER_PORT=$(shuf -i 50000-60000 -n 1)
3131

3232
# download test suite
3333
if [ ! -d "cloud-bigtable-clients-test" ]; then
@@ -43,6 +43,19 @@ function finish {
4343
}
4444
trap finish EXIT
4545

46+
if [[ $CLIENT_TYPE == "legacy" ]]; then
47+
echo "Using legacy client"
48+
# legacy client does not expose mutate_row. Disable those tests
49+
TEST_ARGS="-skip TestMutateRow_"
50+
fi
51+
52+
if [[ $CLIENT_TYPE != "async" ]]; then
53+
echo "Using ${CLIENT_TYPE} client"
54+
# sync and legacy client do not support concurrent streams
55+
TEST_ARGS="$TEST_ARGS -skip _Generic_MultiStream "
56+
fi
57+
4658
# run tests
4759
pushd cloud-bigtable-clients-test/tests
48-
go test -v -proxy_addr=:$PROXY_SERVER_PORT
60+
echo "Running with $TEST_ARGS"
61+
go test -v -proxy_addr=:$PROXY_SERVER_PORT $TEST_ARGS

packages/google-cloud-bigtable/test_proxy/test_proxy.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,9 @@ def format_dict(input_obj):
114114
if client_type == "legacy":
115115
import client_handler_legacy
116116
client = client_handler_legacy.LegacyTestProxyClientHandler(**json_data)
117+
elif client_type == "sync":
118+
import client_handler_data_sync_autogen
119+
client = client_handler_data_sync_autogen.TestProxyClientHandler(**json_data)
117120
else:
118121
client = client_handler_data_async.TestProxyClientHandlerAsync(**json_data)
119122
client_map[client_id] = client
@@ -150,7 +153,7 @@ def client_handler_process(request_q, queue_pool, client_type="async"):
150153

151154
p = argparse.ArgumentParser()
152155
p.add_argument("--port", dest='port', default="50055")
153-
p.add_argument("--client_type", dest='client_type', default="async", choices=["async", "legacy"])
156+
p.add_argument("--client_type", dest='client_type', default="async", choices=["async", "sync", "legacy"])
154157

155158
if __name__ == "__main__":
156159
port = p.parse_args().port

0 commit comments

Comments
 (0)