
Enable FirestoreV1IT to run against a non default host #27256

Merged · 8 commits · Jul 26, 2023
4 changes: 4 additions & 0 deletions sdks/java/io/google-cloud-platform/build.gradle
@@ -186,11 +186,13 @@ task integrationTest(type: Test, dependsOn: processTestResources) {
def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
def gcpTempRoot = project.findProperty('gcpTempRoot') ?: 'gs://temp-storage-for-end-to-end-tests'
def firestoreDb = project.findProperty('firestoreDb') ?: 'firestoredb'
def host = project.findProperty('host') ?: 'batch-firestore.googleapis.com:443'
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=DirectRunner",
"--project=${gcpProject}",
"--tempRoot=${gcpTempRoot}",
"--firestoreDb=${firestoreDb}",
"--host=${host}",
])

// Disable the Gradle cache: these ITs interact with a live service, so they should always be considered "out of date"
@@ -224,12 +226,14 @@ task integrationTestKms(type: Test) {
def gcpTempRoot = project.findProperty('gcpTempRootKms') ?: 'gs://temp-storage-for-end-to-end-tests-cmek'
def dataflowKmsKey = project.findProperty('dataflowKmsKey') ?: "projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test"
def firestoreDb = project.findProperty('firestoreDb') ?: 'firestoredb'
def host = project.findProperty('host') ?: 'batch-firestore.googleapis.com:443'
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=DirectRunner",
"--project=${gcpProject}",
"--tempRoot=${gcpTempRoot}",
"--dataflowKmsKey=${dataflowKmsKey}",
"--firestoreDb=${firestoreDb}",
"--host=${host}",
])

// Disable the Gradle cache: these ITs interact with a live service, so they should always be considered "out of date"
@@ -17,6 +17,7 @@
*/
package org.apache.beam.sdk.io.gcp.firestore;

import javax.annotation.Nonnull;
import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
@@ -57,4 +58,21 @@ public interface FirestoreOptions extends PipelineOptions {

/** Set the Firestore database ID to connect to. */
void setFirestoreDb(String firestoreDb);

/**
* A host and port pair that allows connecting to a Cloud Firestore instance other than the default live service.
*
* @return the string representation of a host and port pair to be used when constructing Cloud
* Firestore clients.
*/
@Nonnull
String getHost();

/**
* Define a host and port pair for connecting to a Cloud Firestore instance other than the
* default live service.
*
* @param host the host and port to connect to
*/
void setHost(String host);
}
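
For illustration, a minimal sketch (not part of this PR) of how the new option can be set programmatically alongside the existing firestoreDb option. The database id below is a placeholder; the host value is the batch endpoint this PR wires in as the default, and the Gradle integrationTest tasks above forward the -Phost project property into the same --host flag.

import org.apache.beam.sdk.io.gcp.firestore.FirestoreOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class FirestoreHostOptionExample {
  public static void main(String[] args) {
    // Build the options directly; both values are illustrative, not project defaults.
    FirestoreOptions options = PipelineOptionsFactory.as(FirestoreOptions.class);
    options.setFirestoreDb("my-database"); // hypothetical database id
    options.setHost("batch-firestore.googleapis.com:443"); // the default batch endpoint
    System.out.println(options.getHost()); // prints the configured endpoint
  }
}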
@@ -47,6 +47,10 @@
@Immutable
class FirestoreStatefulComponentFactory implements Serializable {

private static final String DEFAULT_FIRESTORE_HOST = "batch-firestore.googleapis.com:443";
private static final String FIRESTORE_HOST_ENV_VARIABLE = "FIRESTORE_HOST";
private static final String FIRESTORE_EMULATOR_HOST_ENV_VARIABLE = "FIRESTORE_EMULATOR_HOST";

static final FirestoreStatefulComponentFactory INSTANCE = new FirestoreStatefulComponentFactory();

private FirestoreStatefulComponentFactory() {}
@@ -86,6 +90,9 @@ FirestoreStub getFirestoreStub(PipelineOptions options) {

FirestoreOptions firestoreOptions = options.as(FirestoreOptions.class);
String emulatorHostPort = firestoreOptions.getEmulatorHost();
if (emulatorHostPort == null) {
emulatorHostPort = System.getenv(FIRESTORE_EMULATOR_HOST_ENV_VARIABLE);
}
if (emulatorHostPort != null) {
builder
.setCredentialsProvider(FixedCredentialsProvider.create(new EmulatorCredentials()))
@@ -97,9 +104,13 @@ FirestoreStub getFirestoreStub(PipelineOptions options) {
.build());
} else {
GcpOptions gcpOptions = options.as(GcpOptions.class);
String host = firestoreOptions.getHost();
if (host == null) {
host = System.getenv().getOrDefault(FIRESTORE_HOST_ENV_VARIABLE, DEFAULT_FIRESTORE_HOST);
}
builder
.setCredentialsProvider(FixedCredentialsProvider.create(gcpOptions.getGcpCredential()))
.setEndpoint("batch-firestore.googleapis.com:443");
.setEndpoint(host);
}

ClientContext clientContext = ClientContext.create(builder.build());
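
For readability, an illustrative sketch (a hypothetical helper, not code from this PR) of the endpoint selection order the factory now follows: an emulator host (from the option or FIRESTORE_EMULATOR_HOST) wins, then an explicit --host option, then the FIRESTORE_HOST environment variable, then the default batch endpoint.

import javax.annotation.Nullable;

final class FirestoreEndpointSketch {
  static final String DEFAULT_FIRESTORE_HOST = "batch-firestore.googleapis.com:443";

  /** Mirrors the selection order used when building the Firestore stub above. */
  static String resolveEndpoint(@Nullable String emulatorHostOption, @Nullable String hostOption) {
    String emulatorHost =
        emulatorHostOption != null ? emulatorHostOption : System.getenv("FIRESTORE_EMULATOR_HOST");
    if (emulatorHost != null) {
      return emulatorHost; // the emulator branch above also switches to EmulatorCredentials
    }
    if (hostOption != null) {
      return hostOption; // explicit --host pipeline option
    }
    return System.getenv().getOrDefault("FIRESTORE_HOST", DEFAULT_FIRESTORE_HOST);
  }

  private FirestoreEndpointSketch() {}
}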
@@ -92,12 +92,12 @@ abstract class BaseFirestoreIT {
.build();

protected static String project;
protected GcpOptions options;
protected GcpOptions gcpOptions;

@Before
public void setup() {
options = TestPipeline.testingPipelineOptions().as(GcpOptions.class);
project = options.getProject();
gcpOptions = TestPipeline.testingPipelineOptions().as(GcpOptions.class);
project = gcpOptions.getProject();
}

private static Instant toWriteTime(WriteResult result) {
@@ -166,7 +166,7 @@ public final void listCollections() throws Exception {
.build());

PAssert.that(actualCollectionIds).containsInAnyOrder(allCollectionIds);
testPipeline.run(options);
testPipeline.run(TestPipeline.testingPipelineOptions());

// Reading from readTime should only get collection IDs written in the batch before readTime.
PCollection<String> actualCollectionIdsAtReadTime =
@@ -181,7 +181,7 @@ public final void listCollections() throws Exception {
.withRpcQosOptions(RPC_QOS_OPTIONS)
.build());
PAssert.that(actualCollectionIdsAtReadTime).containsInAnyOrder(collectionIds);
testPipeline2.run(options);
testPipeline2.run(TestPipeline.testingPipelineOptions());
}

@Test
@@ -212,7 +212,7 @@ public final void listDocuments() throws Exception {
.apply(ParDo.of(new DocumentToName()));

PAssert.that(listDocumentPaths).containsInAnyOrder(allDocumentPaths);
testPipeline.run(options);
testPipeline.run(TestPipeline.testingPipelineOptions());

// Reading from readTime should only get the documents written before readTime.
PCollection<String> listDocumentPathsAtReadTime =
@@ -230,7 +230,7 @@

PAssert.that(listDocumentPathsAtReadTime)
.containsInAnyOrder(documentGenerator.expectedDocumentPaths());
testPipeline2.run(options);
testPipeline2.run(TestPipeline.testingPipelineOptions());
}

@Test
@@ -264,7 +264,7 @@ public final void runQuery() throws Exception {
.apply(ParDo.of(new DocumentToName()));

PAssert.that(listDocumentPaths).containsInAnyOrder(allDocumentPaths);
testPipeline.run(options);
testPipeline.run(TestPipeline.testingPipelineOptions());

// Reading from readTime should only get the documents written before readTime.
PCollection<String> listDocumentPathsAtReadTime =
@@ -283,7 +283,7 @@

PAssert.that(listDocumentPathsAtReadTime)
.containsInAnyOrder(documentGenerator.expectedDocumentPaths());
testPipeline2.run(options);
testPipeline2.run(TestPipeline.testingPipelineOptions());
}

@Test
@@ -323,7 +323,7 @@ public final void partitionQuery() throws Exception {
.apply(ParDo.of(new DocumentToName()));

PAssert.that(listDocumentPaths).containsInAnyOrder(allDocumentPaths);
testPipeline.run(options);
testPipeline.run(TestPipeline.testingPipelineOptions());

// Reading from readTime should only get the documents written before readTime.
PCollection<String> listDocumentPathsAtReadTime =
@@ -343,7 +343,7 @@

PAssert.that(listDocumentPathsAtReadTime)
.containsInAnyOrder(documentGenerator.expectedDocumentPaths());
testPipeline2.run(options);
testPipeline2.run(TestPipeline.testingPipelineOptions());
}

@Test
@@ -387,7 +387,7 @@ public final void batchGet() throws Exception {
.apply(ParDo.of(new DocumentToName()));

PAssert.that(listDocumentPaths).containsInAnyOrder(allDocumentPaths);
testPipeline.run(options);
testPipeline.run(TestPipeline.testingPipelineOptions());

// Reading from readTime should only get the documents written before readTime.
PCollection<String> listDocumentPathsAtReadTime =
@@ -407,7 +407,7 @@

PAssert.that(listDocumentPathsAtReadTime)
.containsInAnyOrder(documentGenerator.expectedDocumentPaths());
testPipeline2.run(options);
testPipeline2.run(TestPipeline.testingPipelineOptions());
}

@Test
@@ -445,7 +445,7 @@ protected final void runWriteTest(
.apply(createWrite)
.apply(FirestoreIO.v1().write().batchWrite().withRpcQosOptions(RPC_QOS_OPTIONS).build());

testPipeline.run(options);
testPipeline.run(TestPipeline.testingPipelineOptions());

List<String> actualDocumentIds =
helper
@@ -104,7 +104,7 @@ enum DataLayout {
DataLayout value() default DataLayout.Shallow;
}

private final GcpOptions options;
private final GcpOptions gcpOptions;
private final org.apache.beam.sdk.io.gcp.firestore.FirestoreOptions firestoreBeamOptions;
private final FirestoreOptions firestoreOptions;

@@ -125,17 +125,17 @@ enum DataLayout {
"initialization.fields.uninitialized") // testClass and testName are managed via #apply
public FirestoreTestingHelper(CleanupMode cleanupMode) {
this.cleanupMode = cleanupMode;
options = TestPipeline.testingPipelineOptions().as(GcpOptions.class);
gcpOptions = TestPipeline.testingPipelineOptions().as(GcpOptions.class);
firestoreBeamOptions =
TestPipeline.testingPipelineOptions()
.as(org.apache.beam.sdk.io.gcp.firestore.FirestoreOptions.class);
firestoreOptions =
FirestoreOptions.newBuilder()
.setCredentials(options.getGcpCredential())
.setProjectId(options.getProject())
.setCredentials(gcpOptions.getGcpCredential())
.setProjectId(gcpOptions.getProject())
.setDatabaseId(firestoreBeamOptions.getFirestoreDb())
.setHost(firestoreBeamOptions.getHost())
.build();

fs = firestoreOptions.getService();
rpc = (FirestoreRpc) firestoreOptions.getRpc();
}
@@ -55,6 +55,7 @@
import org.apache.beam.sdk.io.gcp.firestore.FirestoreV1.WriteFailure;
import org.apache.beam.sdk.io.gcp.firestore.RpcQosOptions;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
@@ -133,7 +134,7 @@ public void batchWrite_partialFailureOutputsToDeadLetterQueue()
assertFalse(iterator.hasNext());
return null;
});
testPipeline.run(this.options);
testPipeline.run(TestPipeline.testingPipelineOptions());

ApiFuture<QuerySnapshot> actualDocsQuery =
helper.getBaseDocument().collection(collectionId).orderBy("__name__").get();