Skip to content

Commit 479d842

Browse files
authored
fix(tests/bigquery): implement uuid-based isolation and reliable resource cleanup (#2547)
## Description ### **Description of Changes:** - **Centralized Cleanup**: Added `CleanupBigQueryDatasets` to `tests/common.go` which uses `DeleteWithContents` to recursively wipe test datasets. - **UUID Isolation**: Refactored BigQuery integration tests to generate a single `uniqueID` per run, used as a suffix for all datasets and tables. - **Reliable Teardown**: Replaced defer calls with `t.Cleanup` to guarantee resource removal even after test timeouts or fatal errors. - **Logging**: Added `t.Logf` statements to track the `uniqueID` throughout the test lifecycle for easier debugging. - **Deadlock Resolution (Log Draining)**: Added a background goroutine to drain `cmd.Out` using `io.Copy(io.Discard, cmd.Out)` immediately after the server starts. ### **Why it was required:** - **Prevent Resource Leaks**: Previous tests often leaked "Disallowed" datasets if table deletion failed, cluttering the `toolbox-testing-438616` project. - **Enable Concurrency**: Using a single shared project for CI/CD requires strict isolation so concurrent builds do not delete or overwrite each other's data. - **Timeout Resilience**: Standard `defer` blocks could fail if the test context was cancelled; using `context.Background()` in t.Cleanup ensures teardown logic has a fresh context to complete. - **Timeout & Deadlock Fix**: I observed a deadlock where the Toolbox server would block while attempting to write logs to a full `64KB` OS pipe buffer. Since the test runner was waiting for an HTTP response from the blocked server, the entire suite would time out at 10 minutes. ## PR Checklist > Thank you for opening a Pull Request! Before submitting your PR, there are a > few things you can do to make sure it goes smoothly: - [ ] Make sure you reviewed [CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md) - [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [ ] Ensure the tests and linter pass - [ ] Code coverage does not decrease (if any source code was changed) - [ ] Appropriate docs were updated (if necessary) - [ ] Make sure to add `!` if this involves a breaking change 🛠️ Fixes #<issue_number_goes_here>
1 parent ef8bfe5 commit 479d842

2 files changed

Lines changed: 80 additions & 40 deletions

File tree

tests/bigquery/bigquery_integration_test.go

Lines changed: 49 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -75,6 +75,9 @@ func initBigQueryConnection(project string) (*bigqueryapi.Client, error) {
7575

7676
func TestBigQueryToolEndpoints(t *testing.T) {
7777
sourceConfig := getBigQueryVars(t)
78+
uniqueID := strings.ReplaceAll(uuid.New().String(), "-", "")
79+
t.Logf("Starting test with uniqueID: %s", uniqueID)
80+
7881
ctx, cancel := context.WithTimeout(context.Background(), 20*time.Minute)
7982
defer cancel()
8083

@@ -86,8 +89,8 @@ func TestBigQueryToolEndpoints(t *testing.T) {
8689
}
8790

8891
// create table name with UUID
89-
datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
90-
tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
92+
datasetName := fmt.Sprintf("temp_toolbox_test_%s", uniqueID)
93+
tableName := fmt.Sprintf("param_table_%s", uniqueID)
9194
tableNameParam := fmt.Sprintf("`%s.%s.%s`",
9295
BigqueryProject,
9396
datasetName,
@@ -96,54 +99,54 @@ func TestBigQueryToolEndpoints(t *testing.T) {
9699
tableNameAuth := fmt.Sprintf("`%s.%s.auth_table_%s`",
97100
BigqueryProject,
98101
datasetName,
99-
strings.ReplaceAll(uuid.New().String(), "-", ""),
102+
uniqueID,
100103
)
101104
tableNameTemplateParam := fmt.Sprintf("`%s.%s.template_param_table_%s`",
102105
BigqueryProject,
103106
datasetName,
104-
strings.ReplaceAll(uuid.New().String(), "-", ""),
107+
uniqueID,
105108
)
106109
tableNameDataType := fmt.Sprintf("`%s.%s.datatype_table_%s`",
107110
BigqueryProject,
108111
datasetName,
109-
strings.ReplaceAll(uuid.New().String(), "-", ""),
112+
uniqueID,
110113
)
111114
tableNameForecast := fmt.Sprintf("`%s.%s.forecast_table_%s`",
112115
BigqueryProject,
113116
datasetName,
114-
strings.ReplaceAll(uuid.New().String(), "-", ""),
117+
uniqueID,
115118
)
116119

117120
tableNameAnalyzeContribution := fmt.Sprintf("`%s.%s.analyze_contribution_table_%s`",
118121
BigqueryProject,
119122
datasetName,
120-
strings.ReplaceAll(uuid.New().String(), "-", ""),
123+
uniqueID,
121124
)
122125

126+
// global cleanup for this test run
127+
t.Cleanup(func() {
128+
tests.CleanupBigQueryDatasets(t, context.Background(), client, []string{datasetName})
129+
})
130+
123131
// set up data for param tool
124132
createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
125-
teardownTable1 := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
126-
defer teardownTable1(t)
133+
setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
127134

128135
// set up data for auth tool
129136
createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getBigQueryAuthToolInfo(tableNameAuth)
130-
teardownTable2 := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
131-
defer teardownTable2(t)
137+
setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
132138

133139
// set up data for data type test tool
134140
createDataTypeTableStmt, insertDataTypeTableStmt, dataTypeToolStmt, arrayDataTypeToolStmt, dataTypeTestParams := getBigQueryDataTypeTestInfo(tableNameDataType)
135-
teardownTable3 := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
136-
defer teardownTable3(t)
141+
setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
137142

138143
// set up data for forecast tool
139144
createForecastTableStmt, insertForecastTableStmt, forecastTestParams := getBigQueryForecastToolInfo(tableNameForecast)
140-
teardownTable4 := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
141-
defer teardownTable4(t)
145+
setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
142146

143147
// set up data for analyze contribution tool
144148
createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, analyzeContributionTestParams := getBigQueryAnalyzeContributionToolInfo(tableNameAnalyzeContribution)
145-
teardownTable5 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
146-
defer teardownTable5(t)
149+
setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
147150

148151
// Write config into a file and pass it to command
149152
toolsFile := tests.GetToolsConfig(sourceConfig, BigqueryToolType, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
@@ -205,6 +208,8 @@ func TestBigQueryToolEndpoints(t *testing.T) {
205208
}
206209

207210
func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
211+
uniqueID := strings.ReplaceAll(uuid.New().String(), "-", "")
212+
t.Logf("Starting restriction test with uniqueID: %s", uniqueID)
208213
ctx, cancel := context.WithTimeout(context.Background(), 4*time.Minute)
209214
defer cancel()
210215

@@ -213,11 +218,9 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
213218
t.Fatalf("unable to create BigQuery client: %s", err)
214219
}
215220

216-
// Create two datasets, one allowed, one not.
217-
baseName := strings.ReplaceAll(uuid.New().String(), "-", "")
218-
allowedDatasetName1 := fmt.Sprintf("allowed_dataset_1_%s", baseName)
219-
allowedDatasetName2 := fmt.Sprintf("allowed_dataset_2_%s", baseName)
220-
disallowedDatasetName := fmt.Sprintf("disallowed_dataset_%s", baseName)
221+
allowedDatasetName1 := fmt.Sprintf("allowed_dataset_1_%s", uniqueID)
222+
allowedDatasetName2 := fmt.Sprintf("allowed_dataset_2_%s", uniqueID)
223+
disallowedDatasetName := fmt.Sprintf("disallowed_dataset_%s", uniqueID)
221224
allowedTableName1 := "allowed_table_1"
222225
allowedTableName2 := "allowed_table_2"
223226
disallowedTableName := "disallowed_table"
@@ -228,56 +231,53 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
228231
allowedAnalyzeContributionTableName1 := "allowed_analyze_contribution_table_1"
229232
allowedAnalyzeContributionTableName2 := "allowed_analyze_contribution_table_2"
230233
disallowedAnalyzeContributionTableName := "disallowed_analyze_contribution_table"
234+
235+
// global cleanup for this test run
236+
t.Cleanup(func() {
237+
tests.CleanupBigQueryDatasets(t, context.Background(), client, []string{allowedDatasetName1, allowedDatasetName2, disallowedDatasetName})
238+
})
239+
231240
// Setup allowed table
232241
allowedTableNameParam1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedTableName1)
233242
createAllowedTableStmt1 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam1)
234-
teardownAllowed1 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
235-
defer teardownAllowed1(t)
243+
setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
236244

237245
allowedTableNameParam2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedTableName2)
238246
createAllowedTableStmt2 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam2)
239-
teardownAllowed2 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
240-
defer teardownAllowed2(t)
247+
setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
241248

242249
// Setup allowed forecast table
243250
allowedForecastTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedForecastTableName1)
244251
createForecastStmt1, insertForecastStmt1, forecastParams1 := getBigQueryForecastToolInfo(allowedForecastTableFullName1)
245-
teardownAllowedForecast1 := setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
246-
defer teardownAllowedForecast1(t)
252+
setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
247253

248254
allowedForecastTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedForecastTableName2)
249255
createForecastStmt2, insertForecastStmt2, forecastParams2 := getBigQueryForecastToolInfo(allowedForecastTableFullName2)
250-
teardownAllowedForecast2 := setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
251-
defer teardownAllowedForecast2(t)
256+
setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
252257

253258
// Setup disallowed table
254259
disallowedTableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedTableName)
255260
createDisallowedTableStmt := fmt.Sprintf("CREATE TABLE %s (id INT64)", disallowedTableNameParam)
256-
teardownDisallowed := setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
257-
defer teardownDisallowed(t)
261+
setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
258262

259263
// Setup disallowed forecast table
260264
disallowedForecastTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedForecastTableName)
261265
createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedForecastParams := getBigQueryForecastToolInfo(disallowedForecastTableFullName)
262-
teardownDisallowedForecast := setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
263-
defer teardownDisallowedForecast(t)
266+
setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
264267

265268
// Setup allowed analyze contribution table
266269
allowedAnalyzeContributionTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedAnalyzeContributionTableName1)
267270
createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, analyzeContributionParams1 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName1)
268-
teardownAllowedAnalyzeContribution1 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1)
269-
defer teardownAllowedAnalyzeContribution1(t)
271+
setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1)
270272

271273
allowedAnalyzeContributionTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedAnalyzeContributionTableName2)
272274
createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, analyzeContributionParams2 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName2)
273-
teardownAllowedAnalyzeContribution2 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2)
274-
defer teardownAllowedAnalyzeContribution2(t)
275+
setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2)
275276

276277
// Setup disallowed analyze contribution table
277278
disallowedAnalyzeContributionTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedAnalyzeContributionTableName)
278279
createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedAnalyzeContributionParams := getBigQueryAnalyzeContributionToolInfo(disallowedAnalyzeContributionTableFullName)
279-
teardownDisallowedAnalyzeContribution := setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams)
280-
defer teardownDisallowedAnalyzeContribution(t)
280+
setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams)
281281

282282
// Configure source with dataset restriction.
283283
sourceConfig := getBigQueryVars(t)
@@ -341,6 +341,7 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
341341
t.Fatalf("command initialization returned an error: %s", err)
342342
}
343343
defer cleanup()
344+
defer cmd.Close()
344345

345346
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
346347
defer cancel()
@@ -350,6 +351,11 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
350351
t.Fatalf("toolbox didn't start successfully: %s", err)
351352
}
352353

354+
// FIX: Background goroutine to drain server logs and prevent pipe buffer deadlock.
355+
go func() {
356+
_, _ = io.Copy(io.Discard, cmd.Out)
357+
}()
358+
353359
// Run tests
354360
runListDatasetIdsWithRestriction(t, allowedDatasetName1, allowedDatasetName2)
355361
runListTableIdsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, allowedForecastTableName1, allowedAnalyzeContributionTableName1)
@@ -410,6 +416,7 @@ func TestBigQueryWriteModeAllowed(t *testing.T) {
410416
t.Fatalf("command initialization returned an error: %s", err)
411417
}
412418
defer cleanup()
419+
defer cmd.Close()
413420

414421
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
415422
defer cancel()
@@ -453,6 +460,7 @@ func TestBigQueryWriteModeBlocked(t *testing.T) {
453460
t.Fatalf("command initialization returned an error: %s", err)
454461
}
455462
defer cleanup()
463+
defer cmd.Close()
456464

457465
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
458466
defer cancel()
@@ -515,6 +523,7 @@ func TestBigQueryWriteModeProtected(t *testing.T) {
515523
t.Fatalf("command initialization returned an error: %s", err)
516524
}
517525
defer cleanup()
526+
defer cmd.Close()
518527

519528
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
520529
defer cancel()

tests/common.go

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,14 @@ import (
2424
"strings"
2525
"testing"
2626

27+
"cloud.google.com/go/bigquery"
2728
"github.com/google/go-cmp/cmp"
2829
"github.com/googleapis/genai-toolbox/internal/server"
2930
"github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
3031
"github.com/googleapis/genai-toolbox/internal/testutils"
3132
"github.com/googleapis/genai-toolbox/internal/util/parameters"
3233
"github.com/jackc/pgx/v5/pgxpool"
34+
"google.golang.org/api/iterator"
3335
)
3436

3537
// GetToolsConfig returns a mock tools config file
@@ -1071,3 +1073,32 @@ func CleanupMSSQLTables(t *testing.T, ctx context.Context, pool *sql.DB) {
10711073
}
10721074

10731075
}
1076+
1077+
func CleanupBigQueryDatasets(t *testing.T, ctx context.Context, client *bigquery.Client, datasetIDs []string) {
1078+
for _, id := range datasetIDs {
1079+
t.Logf("INTEGRATION CLEANUP: Purging dataset %s", id)
1080+
ds := client.Dataset(id)
1081+
1082+
//Delete tables first since Dataset.Delete fails if not empty
1083+
tableIt := ds.Tables(ctx)
1084+
for {
1085+
table, err := tableIt.Next()
1086+
if err == iterator.Done {
1087+
break
1088+
}
1089+
if err != nil {
1090+
t.Errorf("INTEGRATION CLEANUP: Failed to iterate tables in %s: %v", id, err)
1091+
break
1092+
}
1093+
if err := table.Delete(ctx); err != nil {
1094+
t.Errorf("INTEGRATION CLEANUP: Failed to delete table %s: %v", table.TableID, err)
1095+
}
1096+
}
1097+
//delete empty dataset
1098+
if err := ds.Delete(ctx); err != nil {
1099+
t.Errorf("INTEGRATION CLEANUP: Failed to delete dataset %s: %v", id, err)
1100+
} else {
1101+
t.Logf("INTEGRATION CLEANUP SUCCESS: Wiped dataset %s", id)
1102+
}
1103+
}
1104+
}

0 commit comments

Comments
 (0)