docs(samples): add finalize call to our samples (#1471)
* fix: Add finalize call to our samples

* .

* .

* .

* .

* .

* .

* .

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: Stephanie Wang <[email protected]>
3 people authored Jan 10, 2022
1 parent e038151 commit 47e3654
Showing 4 changed files with 16 additions and 4 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -49,7 +49,7 @@ If you are using Maven without BOM, add this to your dependencies:
If you are using Gradle 5.x or later, add this to your dependencies

```Groovy
implementation platform('com.google.cloud:libraries-bom:24.1.2')
implementation platform('com.google.cloud:libraries-bom:24.2.0')
implementation 'com.google.cloud:google-cloud-bigquerystorage'
```
@@ -21,6 +21,7 @@
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest;
import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest;
import com.google.cloud.bigquery.storage.v1.FlushRowsRequest;
import com.google.cloud.bigquery.storage.v1.FlushRowsResponse;
import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
@@ -78,7 +79,6 @@ public static void writeBufferedStream(String projectId, String datasetName, Str
ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
AppendRowsResponse response = future.get();
}

// Flush the buffer.
FlushRowsRequest flushRowsRequest =
FlushRowsRequest.newBuilder()
@@ -88,6 +88,10 @@
FlushRowsResponse flushRowsResponse = client.flushRows(flushRowsRequest);
// You can continue to write to the stream after flushing the buffer.
}
// Finalize the stream after use.
FinalizeWriteStreamRequest finalizeWriteStreamRequest =
FinalizeWriteStreamRequest.newBuilder().setName(writeStream.getName()).build();
client.finalizeWriteStream(finalizeWriteStreamRequest);
System.out.println("Appended and committed records successfully.");
} catch (ExecutionException e) {
// If the wrapped exception is a StatusRuntimeException, check the state of the operation.
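For context on the change above: in the buffered sample, FlushRows makes buffered rows visible in the table, and the newly added FinalizeWriteStream call then closes the stream. Below is a minimal, self-contained sketch of that flush-then-finalize order; the class name, method, and parameters are illustrative and not part of this commit.

```java
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest;
import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse;
import com.google.cloud.bigquery.storage.v1.FlushRowsRequest;
import com.google.cloud.bigquery.storage.v1.FlushRowsResponse;
import com.google.protobuf.Int64Value;
import java.io.IOException;

public class FlushAndFinalize {

  // Flush buffered rows up to lastOffset, then finalize the stream so it accepts no more appends.
  // streamName is the fully qualified name returned by CreateWriteStream.
  public static void flushAndFinalize(String streamName, long lastOffset) throws IOException {
    try (BigQueryWriteClient client = BigQueryWriteClient.create()) {
      // FlushRows makes rows up to (and including) the given offset visible in the table.
      FlushRowsRequest flushRequest =
          FlushRowsRequest.newBuilder()
              .setWriteStream(streamName)
              .setOffset(Int64Value.of(lastOffset))
              .build();
      FlushRowsResponse flushResponse = client.flushRows(flushRequest);
      System.out.println("Flushed up to offset: " + flushResponse.getOffset());

      // FinalizeWriteStream marks the stream complete; further appends are rejected.
      FinalizeWriteStreamRequest finalizeRequest =
          FinalizeWriteStreamRequest.newBuilder().setName(streamName).build();
      FinalizeWriteStreamResponse finalizeResponse = client.finalizeWriteStream(finalizeRequest);
      System.out.println("Rows in finalized stream: " + finalizeResponse.getRowCount());
    }
  }
}
```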
@@ -21,6 +21,7 @@
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest;
import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest;
import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1.TableName;
import com.google.cloud.bigquery.storage.v1.WriteStream;
@@ -64,7 +65,9 @@ public static void writeCommittedStream(String projectId, String datasetName, St
try (JsonStreamWriter writer =
JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema())
.build()) {
// Write two batches to the stream, each with 10 JSON records.
// Write two batches to the stream, each with 10 JSON records. A writer should be
// reused for as many writes as possible; creating a writer for just one write is an
// antipattern.
for (int i = 0; i < 2; i++) {
// Create a JSON object that is compatible with the table schema.
JSONArray jsonArr = new JSONArray();
@@ -79,6 +82,10 @@ public static void writeCommittedStream(String projectId, String datasetName, St
ApiFuture<AppendRowsResponse> future = writer.append(jsonArr, /*offset=*/ i * 10);
AppendRowsResponse response = future.get();
}
// Finalize the stream after use.
FinalizeWriteStreamRequest finalizeWriteStreamRequest =
FinalizeWriteStreamRequest.newBuilder().setName(writeStream.getName()).build();
client.finalizeWriteStream(finalizeWriteStreamRequest);
}
System.out.println("Appended records successfully.");
} catch (ExecutionException e) {
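The committed sample appends with explicit offsets before the new finalize call. As a rough illustration of how that offset bookkeeping can be cross-checked against the FinalizeWriteStreamResponse row count: the class, method, and column names below are hypothetical, and only the JsonStreamWriter and BigQueryWriteClient calls mirror the sample.

```java
import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse;
import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1.WriteStream;
import org.json.JSONArray;
import org.json.JSONObject;

public class CommittedStreamSketch {

  // Append a few offset-tracked batches with one writer, then finalize and compare row counts.
  public static void appendAndFinalize(BigQueryWriteClient client, WriteStream writeStream)
      throws Exception {
    long appended = 0;
    try (JsonStreamWriter writer =
        JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema()).build()) {
      for (int batch = 0; batch < 3; batch++) {
        JSONArray rows = new JSONArray();
        for (int i = 0; i < 10; i++) {
          rows.put(new JSONObject().put("col1", "batch-" + batch + "-row-" + i));
        }
        // The offset is the index of the first row in this batch; the server rejects
        // duplicate or out-of-range offsets, which is what gives committed streams
        // exactly-once append semantics.
        ApiFuture<AppendRowsResponse> future = writer.append(rows, /*offset=*/ appended);
        future.get(); // wait so any failure surfaces before the next batch
        appended += rows.length();
      }
    }
    // After finalizing, the reported row count should match what was appended.
    FinalizeWriteStreamResponse response = client.finalizeWriteStream(writeStream.getName());
    System.out.println(
        "Appended " + appended + " rows; finalized row count: " + response.getRowCount());
  }
}
```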
@@ -56,7 +56,8 @@ public static void writeToDefaultStream(String projectId, String datasetName, St
// https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
try (JsonStreamWriter writer =
JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build()) {
// Write two batches to the stream, each with 10 JSON records.
// Write two batches to the stream, each with 10 JSON records. A writer should be used for as
// many writes as possible; creating a writer for just one write is an antipattern.
for (int i = 0; i < 2; i++) {
// Create a JSON object that is compatible with the table schema.
JSONArray jsonArr = new JSONArray();
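The default-stream sample only gains the writer-reuse comment, since the default stream is not explicitly finalized in these samples. A small sketch of what that reuse looks like in practice; the class and method names are made up, while the builder and append calls mirror the sample.

```java
import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1.TableName;
import com.google.cloud.bigquery.storage.v1.TableSchema;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONArray;

public class DefaultStreamWriterReuse {

  // Create the writer once and reuse it for every batch of rows.
  public static void writeBatches(
      TableName parentTable, TableSchema tableSchema, List<JSONArray> batches) throws Exception {
    try (JsonStreamWriter writer =
        JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build()) {
      List<ApiFuture<AppendRowsResponse>> inflight = new ArrayList<>();
      for (JSONArray batch : batches) {
        // Each append reuses the same writer and connection; creating a writer per append
        // would pay the connection setup cost on every call.
        inflight.add(writer.append(batch));
      }
      // Wait for all appends to complete before the writer is closed.
      for (ApiFuture<AppendRowsResponse> future : inflight) {
        future.get();
      }
    }
  }
}
```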
