Skip to content

Commit b73d4a6

Browse files
authored
Fix bugs and add clarity to the samples (GoogleCloudPlatform#7855)
* Bug fixes; add clarity to the sample.
1 parent 327d0bc commit b73d4a6

2 files changed

Lines changed: 17 additions & 15 deletions

File tree

datacatalog/snippets/src/main/java/com/example/datacatalog/CreateCustomConnector.java

Lines changed: 15 additions & 14 deletions
Original file line number · Diff line number · Diff line change
@@ -23,7 +23,6 @@
2323
import com.google.cloud.datacatalog.v1.DataCatalogClient;
2424
import com.google.cloud.datacatalog.v1.DumpItem;
2525
import com.google.cloud.datacatalog.v1.Entry;
26-
import com.google.cloud.datacatalog.v1.EntryType;
2726
import com.google.cloud.datacatalog.v1.ImportEntriesMetadata;
2827
import com.google.cloud.datacatalog.v1.ImportEntriesRequest;
2928
import com.google.cloud.datacatalog.v1.ImportEntriesResponse;
@@ -75,8 +74,6 @@ public static void main(String[] args)
7574

7675
importEntriesViaCustomConnector(location, projectId, entryGroupId, storageProjectId,
7776
gcsBucketName);
78-
79-
8077
}
8178

8279
public static void importEntriesViaCustomConnector(String location, String projectId,
@@ -91,10 +88,10 @@ public static void importEntriesViaCustomConnector(String location, String proje
9188
DumpItem dumpItem = prepareDumpItem();
9289

9390
// Write metadata in Dataplex format to an existing Google Cloud Storage bucket.
94-
writeMetadataToGscBucket(dumpItem, storageProjectId, gcsBucketName);
91+
String pathToDump = writeMetadataToGscBucket(dumpItem, storageProjectId, gcsBucketName);
9592

9693
// Call DataplexCatalog ImportEntries() API to import the dump.
97-
importEntriesToCatalog(projectId, location, entryGroupId, gcsBucketName);
94+
importEntriesToCatalog(projectId, location, entryGroupId, pathToDump);
9895

9996
}
10097

@@ -142,8 +139,8 @@ private static DumpItem prepareDumpItem() {
142139
.build();
143140
Entry entry = Entry.newBuilder()
144141
.setFullyQualifiedName("my_system:my_db.my_table")
145-
.setUserSpecifiedSystem("My database system")
146-
.setType(EntryType.TABLE)
142+
.setUserSpecifiedSystem("My_system")
143+
.setUserSpecifiedType("special_table_type")
147144
// Do not set sourceSystemTimestamps if they are not readily available
148145
// from the source system.
149146
.setSourceSystemTimestamps(timestamps)
@@ -182,7 +179,7 @@ private static DumpItem prepareDumpItem() {
182179
.build();
183180
}
184181

185-
private static void writeMetadataToGscBucket(DumpItem dumpItem, String storageProjectId,
182+
private static String writeMetadataToGscBucket(DumpItem dumpItem, String storageProjectId,
186183
String gcsBucketName)
187184
throws IOException {
188185
// Use Google Cloud Storage API to write metadata dump.
@@ -208,19 +205,21 @@ private static void writeMetadataToGscBucket(DumpItem dumpItem, String storagePr
208205
byte[] protobufWireFormatBytes = baos.toByteArray();
209206
String base64EncodedStr = Base64.getMimeEncoder().encodeToString(protobufWireFormatBytes);
210207
*/
211-
String gcsPath = "gs://" + gcsBucketName + "/output/" + "output_0001.wire";
212-
BlobId blobId = BlobId.fromGsUtilUri(gcsPath);
208+
String gcsPath = "gs://" + gcsBucketName + "/output/";
209+
BlobId blobId = BlobId.fromGsUtilUri(gcsPath + "entries.wire");
213210
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
214211

215212
ByteArrayOutputStream encodedEntries = new ByteArrayOutputStream();
216213
// DumpItems must be delimited, so that when system reads the file, it can tell them apart.
217214
// For instance, in java you can use the writeDelimitedTo method.
218215
dumpItem.writeDelimitedTo(encodedEntries);
219216
storage.create(blobInfo, encodedEntries.toByteArray());
217+
218+
return gcsPath;
220219
}
221220

222221
private static void importEntriesToCatalog(String projectId, String location,
223-
String entryGroupName, String gcsBucketName)
222+
String entryGroupName, String pathToDump)
224223
throws ExecutionException, InterruptedException, IOException {
225224

226225
// Initialize client that will be used to send requests. This client only needs to be created
@@ -232,15 +231,17 @@ private static void importEntriesToCatalog(String projectId, String location,
232231
String parent = String.format(
233232
"projects/%s/locations/%s/entryGroups/%s", projectId, location, entryGroupName);
234233

235-
// Specify valid path to the dump stored in Google Cloud Storage
236-
String pathToDump = "gs://" + gcsBucketName + "/";
237-
238234
// Send ImportEntries request to the Dataplex Catalog.
239235
// ImportEntries is an async procedure,
240236
// and it returns a long-running operation that a client can query.
241237
OperationFuture<ImportEntriesResponse, ImportEntriesMetadata> importEntriesFuture =
242238
dataCatalogClient.importEntriesAsync(ImportEntriesRequest.newBuilder()
243239
.setParent(parent)
240+
/* Specify valid path to the dump stored in Google Cloud Storage.
241+
Path should point directly to the place with dump files.
242+
For example given a structure `bucket/a/b.wire`, "gcsBucketPath" should be set to
243+
`bucket/a/`
244+
*/
244245
.setGcsBucketPath(pathToDump)
245246
.build());
246247

datacatalog/snippets/src/main/java/com/example/datacatalog/WaitForImportEntries.java

Lines changed: 2 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -42,9 +42,10 @@ public class WaitForImportEntries {
4242

4343
public static void main(String[] args)
4444
throws IOException {
45-
// TODO(developer): Replace these variables before running the sample.
45+
// TODO(developer): Replace this variable before running the sample.
4646
String longRunningOperationName =
4747
"projects/my-project/locations/us-central1/operations/import_entries_abc";
48+
4849
// When ImportEntries() method of Dataplex Catalog is called,
4950
// it returns a name of a long-running operation.
5051
// This operation can be queried to find out the state of the import.

0 commit comments

Comments (0)