diff --git a/README.md b/README.md
index 4d22157ec9..44c44cd210 100644
--- a/README.md
+++ b/README.md
@@ -49,7 +49,7 @@ If you are using Maven without BOM, add this to your dependencies:
 If you are using Gradle 5.x or later, add this to your dependencies
 
 ```Groovy
-implementation platform('com.google.cloud:libraries-bom:24.1.2')
+implementation platform('com.google.cloud:libraries-bom:24.2.0')
 implementation 'com.google.cloud:google-cloud-bigquerystorage'
 ```
 
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
index da9be978cb..49c9e86fdb 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
@@ -21,6 +21,7 @@
 import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
 import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
 import com.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest;
+import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest;
 import com.google.cloud.bigquery.storage.v1.FlushRowsRequest;
 import com.google.cloud.bigquery.storage.v1.FlushRowsResponse;
 import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
@@ -78,7 +79,6 @@ public static void writeBufferedStream(String projectId, String datasetName, Str
           ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
           AppendRowsResponse response = future.get();
         }
-
         // Flush the buffer.
         FlushRowsRequest flushRowsRequest =
             FlushRowsRequest.newBuilder()
@@ -88,6 +88,10 @@ public static void writeBufferedStream(String projectId, String datasetName, Str
         FlushRowsResponse flushRowsResponse = client.flushRows(flushRowsRequest);
         // You can continue to write to the stream after flushing the buffer.
       }
+      // Finalize the stream after use.
+      FinalizeWriteStreamRequest finalizeWriteStreamRequest =
+          FinalizeWriteStreamRequest.newBuilder().setName(writeStream.getName()).build();
+      client.finalizeWriteStream(finalizeWriteStreamRequest);
       System.out.println("Appended and committed records successfully.");
     } catch (ExecutionException e) {
       // If the wrapped exception is a StatusRuntimeException, check the state of the operation.
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java
index 016bed8ddb..62b5f557f8 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java
@@ -21,6 +21,7 @@
 import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
 import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
 import com.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest;
+import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest;
 import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
 import com.google.cloud.bigquery.storage.v1.TableName;
 import com.google.cloud.bigquery.storage.v1.WriteStream;
@@ -64,7 +65,9 @@ public static void writeCommittedStream(String projectId, String datasetName, St
       try (JsonStreamWriter writer =
           JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema())
               .build()) {
-        // Write two batches to the stream, each with 10 JSON records.
+        // Write two batches to the stream, each with 10 JSON records. A writer should be
+        // used for as many writes as possible. Creating a writer for just one write is an
+        // antipattern.
         for (int i = 0; i < 2; i++) {
           // Create a JSON object that is compatible with the table schema.
           JSONArray jsonArr = new JSONArray();
@@ -79,6 +82,10 @@ public static void writeCommittedStream(String projectId, String datasetName, St
           ApiFuture<AppendRowsResponse> future = writer.append(jsonArr, /*offset=*/ i * 10);
           AppendRowsResponse response = future.get();
         }
+        // Finalize the stream after use.
+        FinalizeWriteStreamRequest finalizeWriteStreamRequest =
+            FinalizeWriteStreamRequest.newBuilder().setName(writeStream.getName()).build();
+        client.finalizeWriteStream(finalizeWriteStreamRequest);
       }
       System.out.println("Appended records successfully.");
     } catch (ExecutionException e) {
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
index 4142f9bc9b..0a4e5efbff 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
@@ -56,7 +56,8 @@ public static void writeToDefaultStream(String projectId, String datasetName, St
     // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
     try (JsonStreamWriter writer =
         JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build()) {
-      // Write two batches to the stream, each with 10 JSON records.
+      // Write two batches to the stream, each with 10 JSON records. A writer should be used for as
+      // many writes as possible. Creating a writer for just one write is an antipattern.
       for (int i = 0; i < 2; i++) {
         // Create a JSON object that is compatible with the table schema.
         JSONArray jsonArr = new JSONArray();